Class org.apache.avro.generic.GenericData

Examples of org.apache.avro.generic.GenericData.Record
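
Before the project excerpts below, a minimal self-contained sketch of constructing a GenericData.Record directly: parse a schema, create the record, and set and read fields by name or by position. The schema JSON and field names here are illustrative only and are not taken from the excerpted projects.

  // Minimal illustrative example; requires org.apache.avro.Schema and
  // org.apache.avro.generic.GenericData.
  Schema schema = new Schema.Parser().parse(
      "{\"type\": \"record\", \"name\": \"User\", \"fields\": ["
      + "{\"name\": \"name\", \"type\": \"string\"},"
      + "{\"name\": \"age\", \"type\": \"int\"}]}");

  GenericData.Record user = new GenericData.Record(schema);
  user.put("name", "alice");   // set by field name
  user.put(1, 42);             // or by field position
  Object name = user.get("name");
  Object age = user.get(1);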


  public static class CamusAvroWrapper extends CamusWrapper<Record> {

      public CamusAvroWrapper(Record record) {
          super(record);
          // Copy "server" and "service" out of the nested header record, if present.
          Record header = (Record) super.getRecord().get("header");
          if (header != null) {
              if (header.get("server") != null) {
                  put(new Text("server"), new Text(header.get("server").toString()));
              }
              if (header.get("service") != null) {
                  put(new Text("service"), new Text(header.get("service").toString()));
              }
          }
      }

      @Override
      public long getTimestamp() {
          // Prefer the header's "time" field, then a top-level "timestamp", then the current time.
          Record header = (Record) super.getRecord().get("header");
          if (header != null && header.get("time") != null) {
              return (Long) header.get("time");
          } else if (super.getRecord().get("timestamp") != null) {
              return (Long) super.getRecord().get("timestamp");
          } else {
              return System.currentTimeMillis();
          }
      }
  }
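
The wrapper above pulls values out of a nested record stored under the event's "header" field. As a rough sketch of what such a payload could look like when assembled with the generic API (the schemas and field values below are assumptions for illustration, not the actual Camus event schema):

  // Illustrative schemas only; the real event schema used with CamusAvroWrapper may differ.
  // Requires org.apache.avro.Schema, org.apache.avro.generic.GenericData and java.util.Arrays.
  Schema headerSchema = Schema.createRecord("Header", null, "example", false);
  headerSchema.setFields(Arrays.asList(
      new Schema.Field("server", Schema.create(Schema.Type.STRING), null, null),
      new Schema.Field("service", Schema.create(Schema.Type.STRING), null, null),
      new Schema.Field("time", Schema.create(Schema.Type.LONG), null, null)));

  Schema eventSchema = Schema.createRecord("Event", null, "example", false);
  eventSchema.setFields(Arrays.asList(
      new Schema.Field("header", headerSchema, null, null),
      new Schema.Field("timestamp", Schema.create(Schema.Type.LONG), null, null)));

  GenericData.Record header = new GenericData.Record(headerSchema);
  header.put("server", "host-01");
  header.put("service", "billing");
  header.put("time", System.currentTimeMillis());

  GenericData.Record event = new GenericData.Record(eventSchema);
  event.put("header", header);   // the nested Record that the constructor and getTimestamp() read back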

  @Test(expected=AvroRuntimeException.class)
  public void testRecordCreateEmptySchema() throws Exception {
    Schema s = Schema.createRecord("schemaName", "schemaDoc", "namespace", false);
    Record r = new GenericData.Record(s);
  }

  @Test(expected=AvroRuntimeException.class)
  public void testRecordPutInvalidField() throws Exception {
    Schema s = Schema.createRecord("schemaName", "schemaDoc", "namespace", false);
    List<Schema.Field> fields = new ArrayList<Schema.Field>();
    fields.add(new Schema.Field("someFieldName", s, "docs", null));
    s.setFields(fields);
    Record r = new GenericData.Record(s);
    r.put("invalidFieldName", "someValue");
  }
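
For contrast with the two failing cases above, a sketch of the calls that succeed once the schema has its fields set. Note that the original test types the field as the record schema itself; the sketch below uses a plain string field instead to keep it short.

  Schema s = Schema.createRecord("schemaName", "schemaDoc", "namespace", false);
  s.setFields(Arrays.asList(
      new Schema.Field("someFieldName", Schema.create(Schema.Type.STRING), "docs", null)));
  Record r = new GenericData.Record(s);   // succeeds: fields are set
  r.put("someFieldName", "someValue");    // known field name: no exception
  r.put(0, "someValue");                  // equivalent put by position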

    ByteArrayOutputStream b1 = new ByteArrayOutputStream(5);
    ByteArrayOutputStream b2 = new ByteArrayOutputStream(5);
    BinaryEncoder b1Enc = EncoderFactory.get().binaryEncoder(b1, null);
    BinaryEncoder b2Enc = EncoderFactory.get().binaryEncoder(b2, null);
    // Prepare two different datums
    Record testDatum1 = new Record(record);
    testDatum1.put(0, 1);
    Record testDatum2 = new Record(record);
    testDatum2.put(0, 2);
    GenericDatumWriter<Record> gWriter = new GenericDatumWriter<Record>(record);
    Integer start1 = 0, start2 = 0;
    try {
      // Write two datums in each stream
      // and get the offset length after the first write in each.
      // ... (the rest of this test is elided in this excerpt)
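
The excerpt above stops inside the try block. As a rough, self-contained sketch of the write/read round trip it builds on, assuming a record schema with a single int field (named schema below, where the excerpt calls its Schema variable record):

  // Hedged sketch, not the elided test body. Requires org.apache.avro.io.*,
  // org.apache.avro.generic.GenericDatumWriter/GenericDatumReader and java.util.Arrays.
  Schema schema = Schema.createRecord("Test", null, "example", false);
  schema.setFields(Arrays.asList(
      new Schema.Field("f", Schema.create(Schema.Type.INT), null, null)));

  Record datum = new GenericData.Record(schema);
  datum.put(0, 1);

  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
  GenericDatumWriter<Record> writer = new GenericDatumWriter<Record>(schema);
  writer.write(datum, enc);
  enc.flush();   // binaryEncoder() buffers internally, so flush before inspecting the stream

  BinaryDecoder dec = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
  GenericDatumReader<Record> reader = new GenericDatumReader<Record>(schema);
  Record roundTripped = reader.read(null, dec);   // equals datum field-for-field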

    return this;
  }
 
  @Override
  public Record build() {
    Record record;
    try {
      record = new GenericData.Record(schema);
    } catch (Exception e) {
      throw new AvroRuntimeException(e);
    }
   
    for (Field field : fields) {
      Object value;
      try {
        value = getWithDefault(field);
      } catch(IOException e) {
        throw new AvroRuntimeException(e);
      }
      if (value != null) {
        record.put(field.pos(), value);
      }
    }
   
    return record;
  }
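
The build() above looks like that of org.apache.avro.generic.GenericRecordBuilder (or a closely related builder). A hedged usage sketch of that builder with an illustrative schema; unset fields fall back to their schema defaults via getWithDefault():

  Schema schema = new Schema.Parser().parse(
      "{\"type\": \"record\", \"name\": \"User\", \"fields\": ["
      + "{\"name\": \"name\", \"type\": \"string\"},"
      + "{\"name\": \"age\", \"type\": \"int\", \"default\": 0}]}");

  Record built = new GenericRecordBuilder(schema)
      .set("name", "alice")   // explicitly set
      .build();               // "age" is filled in from its default, 0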

 
  /**
   * Moves data between a Tuple and an Avro Record
   */
  public Record toRecord(ITuple tuple, Record reuse) throws IOException {
    Record record = reuse;
    if (record == null){
      record = new Record(avroSchema);
    }
    for(int i = 0; i < pangoolSchema.getFields().size(); i++) {
      Object obj = tuple.get(i);
      Field field = pangoolSchema.getField(i);
      switch(field.getType()){
      case INT:
      case LONG:
      case FLOAT:
      case BOOLEAN:
      case DOUBLE:
      case BYTES:
        record.put(i, obj); //optimistic
        break;
      case OBJECT:
        Serializer customSer = customSerializers[i];
        DataOutputBuffer buffer = buffers[i];
        buffer.reset();
        if (customSer != null){
          customSer.open(buffer);
          customSer.serialize(obj);
          customSer.close(); //TODO is this safe ?
        } else {
          hadoopSer.ser(obj, buffer);
        }
        // TODO: these ByteBuffer instances should be cached and reused
        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer.getData(), 0, buffer.getLength());
        record.put(i, byteBuffer);
        break;
      case ENUM:
        record.put(i, obj.toString());
        break;
      case STRING:
        record.put(i, new Utf8(obj.toString())); // could be directly String?
        break;
      default:
        throw new IOException(
            "No correspondence from Pangool type " + field.getType() + " to an Avro type");
      }
    }
    return record;
  }
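
The switch above maps each Pangool type onto the value class the Avro generic API expects. A small self-contained sketch of the Avro side of that mapping (schema and values are illustrative):

  // Illustrative only: bytes fields take a java.nio.ByteBuffer, string fields
  // take org.apache.avro.util.Utf8 (or a String), enums are put as their symbol.
  Schema schema = new Schema.Parser().parse(
      "{\"type\": \"record\", \"name\": \"Row\", \"fields\": ["
      + "{\"name\": \"payload\", \"type\": \"bytes\"},"
      + "{\"name\": \"label\", \"type\": \"string\"}]}");

  Record row = new GenericData.Record(schema);
  row.put("payload", ByteBuffer.wrap(new byte[] {1, 2, 3}));
  row.put("label", new Utf8("ok"));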

    return this;
  }
 
  @Override
  public Record build() {
    Record record;
    try {
      record = new GenericData.Record(schema());
    } catch (Exception e) {
      throw new AvroRuntimeException(e);
    }
   
    for (Field field : fields()) {
      Object value;
      try {
        value = getWithDefault(field);
      } catch(IOException e) {
        throw new AvroRuntimeException(e);
      }
      if (value != null) {
        record.put(field.pos(), value);
      }
    }
   
    return record;
  }

