Package com.facebook.hive.orc.OrcTestUtils

Examples of com.facebook.hive.orc.OrcTestUtils.DoubleRow

The excerpt below, taken from the ORC (hive-dwrf) test code, writes four consecutive blocks of 32768 DoubleRow values each: a block of zeros, a block of random doubles, another block of zeros (so it compresses to the same size as the first), and a final block of random doubles. It then closes the writer, sets HIVE_ORC_READ_COMPRESSION_STRIDES to 2, and disables HIVE_ORC_EAGER_HDFS_READ.

    // writer, rand, and conf are set up earlier in the test method and are not
    // shown in this excerpt. The array holds the four 32768-value blocks written
    // below (4 * 32768 = 131072 entries, which fits in the 131702-element array).
    double[] values = new double[131702];

    // The first compression block is all 0's
    for (int i = 0; i < 32768; i++) {
      values[i] = 0;
      writer.addRow(new DoubleRow(values[i]));
    }

    // The second compression block is random doubles
    for (int i = 0; i < 32768; i++) {
      values[i + 32768] = rand.nextDouble();
      writer.addRow(new DoubleRow(values[i + 32768]));
    }

    // The third compression block is all 0's
    // (important so it compresses to the same size as the first)
    for (int i = 0; i < 32768; i++) {
      values[i + 32768 + 32768] = 0;
      writer.addRow(new DoubleRow(values[i + 32768 + 32768]));
    }

    // The fourth compression block is random
    for (int i = 0; i < 32768; i++) {
      values[i + 32768 + 32768 + 32768] = rand.nextDouble();
      writer.addRow(new DoubleRow(values[i + 32768 + 32768 + 32768]));
    }

    writer.close();
    OrcConf.setIntVar(conf, OrcConf.ConfVars.HIVE_ORC_READ_COMPRESSION_STRIDES, 2);
    OrcConf.setBoolVar(conf, OrcConf.ConfVars.HIVE_ORC_EAGER_HDFS_READ, false);
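
For reference, here is a minimal sketch of the DoubleRow helper that the excerpt relies on. The real class is defined in com.facebook.hive.orc.OrcTestUtils in the hive-dwrf test sources; the field name and nesting shown here are assumptions made for illustration, not the confirmed definition.

    // Hypothetical sketch of OrcTestUtils.DoubleRow: a single-column row type
    // whose one double field becomes the only column of the ORC file written
    // above. The field name double1 is an assumption.
    public class OrcTestUtils {
      public static class DoubleRow {
        double double1;

        public DoubleRow(double double1) {
          this.double1 = double1;
        }
      }
    }

In the surrounding test, the writer would typically be created from an ObjectInspector obtained via ObjectInspectorFactory.getReflectionObjectInspector(DoubleRow.class, ObjectInspectorOptions.JAVA), so that each writer.addRow(new DoubleRow(...)) call appends one double value; this mirrors the classic Hive ORC test setup and is an assumption rather than a confirmed detail of OrcTestUtils.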

