Package org.apache.hadoop.hive.serde2.columnar

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable


    for (int recIdx = 0; recIdx < recCount; recIdx++) {
      // fill each column with a random integer rendered as UTF-8 bytes
      for (int i = 0; i < record.length; i++) {
        record[i] = Integer.toString(rand.nextInt()).getBytes("UTF-8");
      }
      // wrap each column's bytes without copying and install it in the row
      for (int i = 0; i < record.length; i++) {
        BytesRefWritable cu = new BytesRefWritable(record[i], 0,
            record[i].length);
        bytes.set(i, cu);
      }
      writer.append(bytes);
      bytes.clear();
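Pulled together, the pattern above amounts to the following self-contained sketch. The file path, column count, and row count are illustrative assumptions; the writer reads the column count from the configuration, here set via RCFile.COLUMN_NUMBER_CONF_STR.

    import java.util.Random;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.io.RCFile;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class RCFileWriteSketch {
      public static void main(String[] args) throws Exception {
        int columnCount = 3;                              // assumed
        Configuration conf = new Configuration();
        // the writer needs the column count from the configuration
        conf.setInt(RCFile.COLUMN_NUMBER_CONF_STR, columnCount);

        FileSystem fs = FileSystem.getLocal(conf);
        Path file = new Path("/tmp/bytesref-example.rc"); // assumed
        RCFile.Writer writer =
            new RCFile.Writer(fs, conf, file, null, new DefaultCodec());

        Random rand = new Random();
        BytesRefArrayWritable row = new BytesRefArrayWritable(columnCount);
        for (int rec = 0; rec < 10; rec++) {
          for (int i = 0; i < columnCount; i++) {
            byte[] cell = Integer.toString(rand.nextInt()).getBytes("UTF-8");
            // BytesRefWritable wraps the array without copying it
            row.set(i, new BytesRefWritable(cell, 0, cell.length));
          }
          writer.append(row); // the row is serialized here, so it can be reused
        }
        writer.close();
      }
    }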


    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());

    // build one row up front, one BytesRefWritable per column
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(fieldsData.length);
    for (int i = 0; i < fieldsData.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(fieldsData[i], 0,
          fieldsData[i].length);
      bytes.set(i, cu);
    }

    // append the same row `count` times
    for (int i = 0; i < count; i++) {
      writer.append(bytes);
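Reading rows back is symmetrical: RCFile.Reader streams row IDs with next() and materializes the columns with getCurrentRow(). A sketch, assuming the cells hold UTF-8 text as in the examples above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.io.RCFile;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
    import org.apache.hadoop.io.LongWritable;

    static void dumpRows(FileSystem fs, Path file, Configuration conf)
        throws Exception {
      RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
      try {
        LongWritable rowID = new LongWritable();
        BytesRefArrayWritable cols = new BytesRefArrayWritable();
        while (reader.next(rowID)) {
          reader.getCurrentRow(cols);
          StringBuilder line = new StringBuilder();
          for (int i = 0; i < cols.size(); i++) {
            if (i > 0) {
              line.append('\t');
            }
            BytesRefWritable cell = cols.get(i);
            // getData() may trigger lazy decompression, hence the checked exception
            line.append(new String(cell.getData(), cell.getStart(),
                cell.getLength(), "UTF-8"));
          }
          System.out.println(line);
        }
      } finally {
        reader.close();
      }
    }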

    RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
    for (int i = 0; i < bytesArray.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(bytesArray[i], 0,
          bytesArray[i].length);
      bytes.set(i, cu);
    }
    for (int i = 0; i < writeCount; i++) {
      // remember the writer's byte position once intervalRecordCount rows are in
      if (i == intervalRecordCount) {
        System.out.println("write position: " + writer.getLength());
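The position printed above is what makes split-style reads possible: RCFile.Reader.sync() seeks to the first sync mark at or after a byte offset. A sketch of counting the rows stored past a captured position (the method name and counting logic are illustrative):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.io.RCFile;
    import org.apache.hadoop.io.LongWritable;

    // `pos` would be a value captured from writer.getLength() during the write
    static int countRowsAfter(FileSystem fs, Path file, Configuration conf,
        long pos) throws IOException {
      RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
      try {
        reader.sync(pos); // skip to the first sync mark at or after pos
        LongWritable rowID = new LongWritable();
        int rows = 0;
        while (reader.next(rowID)) {
          rows++;
        }
        return rows;
      } finally {
        reader.close();
      }
    }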

    resetRandomGenerators();

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    columnRandom = new byte[columnNum][];
    // pre-allocate one empty BytesRefWritable per column; each row refill
    // just repoints these at fresh data
    for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
    }

    // a zero-length key is not allowed by the block-compress writer, so we
    // use a byte writable instead

    byte[][] columnRandom;

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    columnRandom = new byte[columnNum][];
    // pre-allocate one reusable BytesRefWritable per column
    for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
    }

    // refill and append the same row object for every record
    for (int i = 0; i < rowCount; i++) {
      nextRandomRow(columnRandom, bytes);
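nextRandomRow itself is outside this excerpt. A minimal stand-in below shows why the empty BytesRefWritable objects can be pre-allocated once: set() just repoints each one at new backing bytes, so no per-row allocation is needed. The random-integer payload and the explicit Random parameter are assumptions to keep the sketch self-contained:

    import java.util.Random;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;

    static void nextRandomRow(byte[][] columnRandom, BytesRefArrayWritable bytes,
        Random rand) {
      for (int i = 0; i < columnRandom.length; i++) {
        columnRandom[i] = Integer.toString(rand.nextInt()).getBytes();
        // reuse the pre-allocated BytesRefWritable installed earlier
        bytes.get(i).set(columnRandom[i], 0, columnRandom[i].length);
      }
    }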

  /**
   * Prints one record's columns to {@code buf}, tab-separated and
   * newline-terminated.
   */
  private void printRecord(BytesRefArrayWritable value, StringBuilder buf)
      throws IOException {
    int n = value.size();
    if (n > 0) {
      BytesRefWritable v = value.unCheckedGet(0);
      ByteBuffer bb = ByteBuffer.wrap(v.getData(), v.getStart(), v.getLength());
      buf.append(decoder.decode(bb));
      for (int i = 1; i < n; i++) {
        // a TAB precedes every column after the first, so no trailing TAB
        buf.append(RCFileCat.TAB);

        v = value.unCheckedGet(i);
        bb = ByteBuffer.wrap(v.getData(), v.getStart(), v.getLength());
        buf.append(decoder.decode(bb));
      }
      buf.append(RCFileCat.NEWLINE);
    }
  }
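The same decode loop restated as a self-contained helper; RCFileCat's TAB and NEWLINE constants are replaced by literals, and UTF-8 is an assumption about the column encoding:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.charset.CharsetDecoder;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;

    static String recordToString(BytesRefArrayWritable value) throws IOException {
      CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
      StringBuilder buf = new StringBuilder();
      for (int i = 0; i < value.size(); i++) {
        if (i > 0) {
          buf.append('\t'); // separator between columns, none after the last
        }
        BytesRefWritable v = value.unCheckedGet(i);
        // wrap the backing array without copying; decode only the valid range
        buf.append(decoder.decode(
            ByteBuffer.wrap(v.getData(), v.getStart(), v.getLength())));
      }
      return buf.append('\n').toString();
    }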

    RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
    for (int i = 0; i < bytesArray.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(bytesArray[i], 0,
          bytesArray[i].length);
      bytes.set(i, cu);
    }
    // append the same row writeCount times
    for (int i = 0; i < writeCount; i++) {
      writer.append(bytes);
    }

      // Data: eight columns covering integer, double, string, and NULL values
      BytesRefArrayWritable braw = new BytesRefArrayWritable(8);
      String[] data = {"123", "456", "789", "1000", "5.3", "hive and hadoop", "1.", "NULL"};
      for (int i = 0; i < 8; i++) {
        braw.set(i, new BytesRefWritable(data[i].getBytes()));
      }
      // Test: round-trip the row through the ColumnarSerDe
      deserializeAndSerializeColumnar(serDe, braw, data);
      System.out.println("test: testColumnarSerde - OK");
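For context, the SerDe in that test must be initialized with matching column metadata before the round trip can run. A sketch, where the column names and types are assumptions chosen to fit the eight values above:

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;

    static Object deserializeSketch(BytesRefArrayWritable braw) throws Exception {
      ColumnarSerDe serDe = new ColumnarSerDe();
      Properties tbl = new Properties();
      // eight columns to match the eight values (names and types assumed)
      tbl.setProperty("columns",
          "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
      tbl.setProperty("columns.types",
          "tinyint:smallint:int:bigint:double:string:int:string");
      serDe.initialize(new Configuration(), tbl);
      // ColumnarSerDe turns a BytesRefArrayWritable into a row object
      return serDe.deserialize(braw);
    }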

  public HiveIndexResult(List<String> indexFiles, JobConf conf) throws IOException,
      HiveException {
    job = conf;

    // reusable buffers for the two fields of each index entry
    bytesRef[0] = new BytesRefWritable();
    bytesRef[1] = new BytesRefWritable();
    ignoreHdfsLoc = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_INDEX_IGNORE_HDFS_LOC);

    if (indexFiles != null && !indexFiles.isEmpty()) {
      List<Path> paths = new ArrayList<Path>();
      for (String indexFile : indexFiles) {
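Each index entry pairs a bucket file name with a comma-separated offset list, and the two pre-constructed BytesRefWritable objects are reused as cursors over those two fields while index lines are parsed. A heavily hedged sketch of that slicing pattern; the single-byte separator and the line layout are assumptions here, not the class's documented format:

    import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;

    // line: "<bucketFileName><sep><offset1,offset2,...>" as raw bytes (assumed)
    static void splitEntry(byte[] line, byte sep, BytesRefWritable[] bytesRef) {
      int pos = 0;
      while (pos < line.length && line[pos] != sep) {
        pos++;
      }
      if (pos >= line.length) {
        return; // no separator found: skip the malformed entry
      }
      // repoint the reusable refs instead of copying the bytes
      bytesRef[0].set(line, 0, pos);                          // bucket file
      bytesRef[1].set(line, pos + 1, line.length - pos - 1);  // offsets
    }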
