Package: org.apache.hadoop.hive.serde2.columnar

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable


    byte [][][] records = getRecords();
    RCFileOutputFormat.setColumnNumber(conf, 8);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null, new DefaultCodec());

    BytesRefArrayWritable bytes = writeBytesToFile(records[0], writer);
    BytesRefArrayWritable bytes2 = writeBytesToFile(records[1], writer);

    writer.close();
    return new BytesRefArrayWritable[]{bytes,bytes2};
  }
View Full Code Here


    writer.close();
    return new BytesRefArrayWritable[]{bytes,bytes2};
  }

  private BytesRefArrayWritable writeBytesToFile(byte[][] record, RCFile.Writer writer) throws IOException {
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record.length);
    for (int i = 0; i < record.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record[i], 0, record[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    return bytes;
  }
View Full Code Here

    RecordReader<?,?> rr = iF.createRecordReader(split,tac);
    rr.initialize(split, tac);
    HCatRecord[] tuples = getExpectedRecords();
    for(int j=0; j < 2; j++){
      Assert.assertTrue(rr.nextKeyValue());
      BytesRefArrayWritable w = (BytesRefArrayWritable)rr.getCurrentValue();
      Assert.assertEquals(bytesArr[j], w);
      HCatRecord t = sd.convertToHCatRecord(null,w);
      Assert.assertEquals(8, t.size());
      Assert.assertEquals(t,tuples[j]);
    }
View Full Code Here

    RecordReader<?,?> rr = iF.createRecordReader(split,tac);
    rr.initialize(split, tac);
    HCatRecord[] tuples = getPrunedRecords();
    for(int j=0; j < 2; j++){
      Assert.assertTrue(rr.nextKeyValue());
      BytesRefArrayWritable w = (BytesRefArrayWritable)rr.getCurrentValue();
      Assert.assertFalse(bytesArr[j].equals(w));
      Assert.assertEquals(w.size(), 8);
      HCatRecord t = sd.convertToHCatRecord(null,w);
      Assert.assertEquals(5, t.size());
      Assert.assertEquals(t,tuples[j]);
    }
    assertFalse(rr.nextKeyValue());
View Full Code Here

    RecordReader<?,?> rr = iF.createRecordReader(split,tac);
    rr.initialize(split, tac);
    HCatRecord[] tuples = getReorderedCols();
    for(int j=0; j < 2; j++){
      Assert.assertTrue(rr.nextKeyValue());
      BytesRefArrayWritable w = (BytesRefArrayWritable)rr.getCurrentValue();
      Assert.assertFalse(bytesArr[j].equals(w));
      Assert.assertEquals(w.size(), 8);
      HCatRecord t = sd.convertToHCatRecord(null,w);
      Assert.assertEquals(7, t.size());
      Assert.assertEquals(t,tuples[j]);
    }
    assertFalse(rr.nextKeyValue());
View Full Code Here

      Configuration conf = new Configuration();
      Properties tbl = createProperties();
      serDe.initialize(conf, tbl);

      // Data
      BytesRefArrayWritable braw = new BytesRefArrayWritable(8);
      String[] data = {"123", "456", "789", "1000", "5.3", "hive and hadoop", "1.", "NULL"};
      for (int i = 0; i < 8; i++) {
        braw.set(i, new BytesRefWritable(data[i].getBytes()));
      }
      // Test
      deserializeAndSerializeColumnar(serDe, braw, data);
      System.out.println("test: testColumnarSerde - OK");
View Full Code Here

    assertEquals("serialized size correct after deserialization", size, serDe.getSerDeStats()
        .getRawDataSize());
    assertNotSame(0, size);

    BytesRefArrayWritable serializedData = (BytesRefArrayWritable) serDe.serialize(row, oi);
    size = 0;
    for (int i = 0; i < serializedData.size(); i++) {
      size += serializedData.get(i).getLength();
    }

    assertEquals("serialized size correct after serialization", size, serDe.getSerDeStats()
        .getRawDataSize());
    assertNotSame(0, size);
View Full Code Here

      if (skippedColIDs[columnID]) {
        return null;
      }

      if (rest == null) {
        rest = new BytesRefArrayWritable();
      }

      rest.resetValid(recordsNumInValBuffer);

      if (!currentValue.inited) {
View Full Code Here

    this.start = in.getPosition();
    more = start < end;

    key = new LongWritable();
    value = new BytesRefArrayWritable();
  }
View Full Code Here

      if (!(val instanceof BytesRefArrayWritable)) {
        throw new UnsupportedOperationException(
            "Currently the writer can only accept BytesRefArrayWritable");
      }

      BytesRefArrayWritable columns = (BytesRefArrayWritable) val;
      int size = columns.size();
      for (int i = 0; i < size; i++) {
        BytesRefWritable cu = columns.get(i);
        int plainLen = cu.getLength();
        columnBufferSize += plainLen;
        columnValuePlainLength[i] += plainLen;
        columnBuffers[i].append(cu);
      }

      if (size < columnNumber) {
        for (int i = columns.size(); i < columnNumber; i++) {
          columnBuffers[i].append(BytesRefWritable.ZeroBytesRefWritable);
        }
      }

      bufferedRecords++;
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.