/**
 * Converts a Pangool {@code ITuple} into an Avro {@code Record}, copying each
 * field positionally according to the Pangool schema.
 *
 * @param tuple source tuple whose values are copied into the Avro record
 * @param reuse an existing Avro record to populate, or {@code null} to have a
 *        new one allocated from {@code avroSchema}
 * @return the populated record (either {@code reuse} or a fresh instance)
 * @throws IOException if a field's Pangool type has no Avro correspondence, or
 *         if custom/Hadoop serialization of an OBJECT field fails
 */
public Record toRecord(ITuple tuple, Record reuse) throws IOException {
// Reuse the caller-supplied record when possible to avoid per-call allocation.
Record record = reuse;
if (record == null){
record = new Record(avroSchema);
}
// Copy every field declared by the Pangool schema into the Avro record at
// the same positional index.
for(int i = 0; i < pangoolSchema.getFields().size(); i++) {
Object obj = tuple.get(i);
Field field = pangoolSchema.getField(i);
switch(field.getType()){
case INT:
case LONG:
case FLOAT:
case BOOLEAN:
case DOUBLE:
case BYTES:
// These types map 1:1 onto Avro; store the value as-is without conversion.
record.put(i, obj); //optimistic
break;
case OBJECT:
// Arbitrary objects are binary-serialized into a per-field reusable buffer,
// using a field-specific custom Serializer when one is registered, otherwise
// falling back to generic Hadoop serialization.
Serializer customSer = customSerializers[i];
DataOutputBuffer buffer = buffers[i];
buffer.reset();
if (customSer != null){
customSer.open(buffer);
customSer.serialize(obj);
customSer.close(); //TODO is this safe ?
} else {
hadoopSer.ser(obj, buffer);
}
//TODO this byteBuffer instances should be cached and reused
// Wrap only the valid region: getData() returns the backing array, which may
// be larger than getLength() bytes of actual payload.
ByteBuffer byteBuffer = ByteBuffer.wrap(buffer.getData(), 0,buffer.getLength());
record.put(i, byteBuffer);
break;
case ENUM:
// Enums are stored via their String form.
// NOTE(review): obj.toString() throws NPE if obj is null — confirm null enum
// values cannot reach this point.
record.put(i,obj.toString());
break;
case STRING:
record.put(i,new Utf8(obj.toString())); //could be directly String ?
break;
default:
// Unknown/unsupported Pangool type: fail loudly rather than write bad data.
throw
new IOException("Not correspondence to Avro type from Pangool type " + field.getType());
}