Examples of BytesRefWritable
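BytesRefWritable wraps a reference to a slice of a byte array (backing buffer, start offset, length), and BytesRefArrayWritable holds one such cell per column; together they form the row type exchanged with RCFile readers and writers. Before the collected examples below, here is a minimal orientation sketch, with purely illustrative values, of constructing a cell and reading it back:

  // Illustrative only: wrap a byte slice in a BytesRefWritable and read it back.
  byte[] buffer = "hello".getBytes(StandardCharsets.UTF_8);   // hypothetical data
  BytesRefWritable cell = new BytesRefWritable(buffer, 0, buffer.length);

  BytesRefArrayWritable row = new BytesRefArrayWritable(1);   // one column
  row.set(0, cell);

  // getData() and getBytesCopy() declare IOException because cells read from a
  // compressed RCFile block are decompressed lazily.
  byte[] backing = row.get(0).getData();
  int start = row.get(0).getStart();
  int length = row.get(0).getLength();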


Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

  byte[][] columnRandom;

  BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
  columnRandom = new byte[columnNum][];
  // Install one reusable, initially empty BytesRefWritable per column.
  for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
  }

  // For every row, re-point the column cells at freshly generated data.
  for (int i = 0; i < rowCount; i++) {
      nextRandomRow(columnRandom, bytes, columnCount);
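The loop above only installs empty placeholders; nextRandomRow is where each column gets pointed at fresh data. Its body is not shown on this page, so the following is only a hedged sketch of what such a helper might look like, using BytesRefWritable.set(byte[], int, int) to re-point each existing cell without allocating new wrappers (the shared java.util.Random field is an assumption):

  private static final Random rand = new Random();   // assumed shared generator

  // Hypothetical sketch of nextRandomRow: refill every column with new random
  // bytes and re-point the existing BytesRefWritable cells at them.
  private static void nextRandomRow(byte[][] row, BytesRefArrayWritable bytes,
      int columnCount) {
    for (int i = 0; i < columnCount; i++) {
      row[i] = new byte[1 + rand.nextInt(64)];        // arbitrary column width
      rand.nextBytes(row[i]);
      bytes.get(i).set(row[i], 0, row[i].length);     // no new wrapper objects
    }
  }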

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    byte[][] byteArray = buildBytesArray();

    BytesRefArrayWritable bytesWritable = new BytesRefArrayWritable(byteArray.length);
    for (int i = 0; i < byteArray.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(byteArray[i], 0, byteArray[i].length);
      bytesWritable.set(i, cu);
    }

    // Convert the byte arrays to a HowlRecord using isd, convert the HowlRecord
    // back to byte arrays using osd, and compare the two arrays.
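The comparison step itself is not part of the excerpt. Assuming the round trip ends with the columns back in a BytesRefArrayWritable, a minimal sketch of checking it against the source byteArray could use getBytesCopy(), which materializes the referenced slice (and declares IOException for lazily decompressed cells):

    // Hypothetical check: every column that came back matches the source bytes.
    for (int i = 0; i < byteArray.length; i++) {
      byte[] actual = bytesWritable.get(i).getBytesCopy();   // copies the slice
      if (!Arrays.equals(byteArray[i], actual)) {
        throw new IllegalStateException("column " + i + " did not round-trip");
      }
    }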

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    // Declare the number of columns before opening the writer.
    RCFileOutputFormat.setColumnNumber(conf, 8);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());
    // Wrap each column of the first record and append it as one row.
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    BytesRefArrayWritable bytes2 = new BytesRefArrayWritable(record_2.length);
    for (int i = 0; i < record_2.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
          record_2[i].length);
      bytes2.set(i, cu);
    }
    writer.append(bytes2);
    writer.close();
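
For completeness, a hedged sketch of reading those rows back (this part is not in the excerpt; it reuses fs, file, and conf from above): RCFile.Reader advances row by row with next(LongWritable) and fills a reusable BytesRefArrayWritable via getCurrentRow().

    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
    LongWritable rowID = new LongWritable();
    BytesRefArrayWritable row = new BytesRefArrayWritable();
    long totalBytes = 0;
    while (reader.next(rowID)) {
      reader.getCurrentRow(row);                  // fills the reusable row buffer
      for (int j = 0; j < row.size(); j++) {
        // Each cell is a window [getStart(), getStart() + getLength())
        // into the byte[] returned by getData().
        totalBytes += row.get(j).getLength();
      }
    }
    reader.close();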

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

      BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
      for (int i = 0; i < bytesArray.length; i++) {
        BytesRefWritable cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
        bytes.set(i, cu);
      }
      for (int i = 0; i < writeCount; i++) {
        writer.append(bytes);
      }

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    bytes.clear(); // reuse the same row object for the second record
    for (int i = 0; i < record_2.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
          record_2[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    writer.close();
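record_1 and record_2 are byte[][] arrays with one encoded value per column; the actual values are not shown in this excerpt, so the following construction is purely hypothetical, just to illustrate the shape the writer expects:

    // Hypothetical test data: one UTF-8 byte array per column, an empty
    // byte[] for an empty field (values are illustrative only).
    byte[][] record_1 = new byte[][] {
        "1".getBytes(StandardCharsets.UTF_8),
        "hive".getBytes(StandardCharsets.UTF_8),
        new byte[0],
    };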

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(fieldsData.length);
    for (int i = 0; i < fieldsData.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(fieldsData[i], 0, fieldsData[i].length);
      bytes.set(i, cu);
    }

    for (int i = 0; i < count; i++) {
      writer.append(bytes);

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
    for (int i = 0; i < bytesArray.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
      bytes.set(i, cu);
    }
    for (int i = 0; i < writeCount; i++) {
      if (i == intervalRecordCount) {
        System.out.println("write position:" + writer.getLength());
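The intervalRecordCount check suggests the writer's row-group interval was configured ahead of time. A hedged sketch of how cloneConf might be prepared for that, assuming the standard RCFile configuration properties, is:

    // Assumption: cloneConf is a copy of the job conf with RCFile knobs applied.
    Configuration cloneConf = new Configuration(conf);
    // Bound the number of rows buffered per row group before it is flushed.
    cloneConf.setInt(RCFile.RECORD_INTERVAL_CONF_STR, intervalRecordCount);
    // Declare how many columns each appended row carries.
    RCFileOutputFormat.setColumnNumber(cloneConf, bytesArray.length);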

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    resetRandomGenerators();

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    columnRandom = new byte[columnNum][];
    for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
    }

    // A zero-length key is not allowed by the block-compress writer, so we use
    // a byte writable instead.