}
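// NULL datums emit no bytes; the column is left empty between delimiters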
case NULL:
break;
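// PROTOBUF datums are rendered as JSON text via protobufJsonFormat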
case PROTOBUF:
datum = tuple.get(i);
ProtobufDatum protobufDatum = (ProtobufDatum) datum;
protobufJsonFormat.print(protobufDatum.get(), outputStream);
break;
default:
outputStream.write(lTuple.getTextBytes(i)); // write the lazy tuple's raw text bytes directly, skipping a needless deserialize/serialize round trip
break;
}
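// append the field delimiter after every column except the last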
if (colNum - 1 > i) {
outputStream.write((byte) delimiter);
}
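// optionally accumulate per-column statistics for this field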
if (enabledStats) {
datum = tuple.get(i);
stats.analyzeField(i, datum);
}
}
} else {
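// non-lazy path: serialize each datum according to its declared schema type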
for (int i = 0; i < schema.getColumnNum(); i++) {
datum = tuple.get(i);
if (enabledStats) {
stats.analyzeField(i, datum);
}
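// null columns are written as the configured null character sequence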
if (datum instanceof NullDatum) {
outputStream.write(nullChars);
} else {
col = schema.getColumn(i);
switch (col.getDataType().getType()) {
case BOOLEAN:
outputStream.write(tuple.getBoolean(i).asBool() ? trueBytes : falseBytes); // textual booleans, kept compatible with Apache Hive
break;
case BIT:
outputStream.write(tuple.getByte(i).asTextBytes());
break;
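// BLOBs are Base64-encoded (non-chunked) so binary data stays on a single line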
case BLOB:
outputStream.write(Base64.encodeBase64(tuple.getBytes(i).asByteArray(), false));
break;
case CHAR:
CharDatum charDatum = tuple.getChar(i);
// right-pad to the declared CHAR length; guard against values longer than the declared length
byte[] pad = new byte[Math.max(0, col.getDataType().getLength() - datum.size())];
outputStream.write(charDatum.asTextBytes());
outputStream.write(pad);
break;
case TEXT:
outputStream.write(tuple.getText(i).asTextBytes());
break;
case INT2:
outputStream.write(tuple.getShort(i).asTextBytes());
break;
case INT4:
outputStream.write(tuple.getInt(i).asTextBytes());
break;
case INT8:
outputStream.write(tuple.getLong(i).asTextBytes());
break;
case FLOAT4:
outputStream.write(tuple.getFloat(i).asTextBytes());
break;
case FLOAT8:
outputStream.write(tuple.getDouble(i).asTextBytes());
break;
case INET4:
outputStream.write(tuple.getIPv4(i).asTextBytes());
break;
case INET6:
// use asTextBytes() like the sibling cases, avoiding String.getBytes()'s platform-default charset
outputStream.write(tuple.getIPv6(i).asTextBytes());
break;
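// PROTOBUF datums are printed as JSON, matching the lazy-tuple path above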
case PROTOBUF:
ProtobufDatum protobuf = (ProtobufDatum) datum;
ProtobufJsonFormat.getInstance().print(protobuf.get(), outputStream);
break;
default:
throw new UnsupportedOperationException("Cannot write field of type: "
    + tuple.get(i).type());
}