{
ArrayParserData data = new ArrayParserData(deserializer, columnIndexes, schema,
partitionValues);
int numColumns = schema.numColumns();
HiveTableDesc tableDesc = schema.getTableDesc();
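// Resolve each column's struct field, object inspector, and Hive type up front for the parsers.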
for (int i = 0; i < numColumns; ++i) {
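// Columns beyond the inspector's struct field refs fall back to NULL_STRUCT_FIELD.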
data.structFields[i] = i < data.inspector.getAllStructFieldRefs().size() ?
data.inspector.getAllStructFieldRefs().get(i) : NULL_STRUCT_FIELD;
ObjectInspector fieldInspector = data.structFields[i].getFieldObjectInspector();
data.hiveTypes[i] = HiveType.fromHiveObjectInspector(fieldInspector);
if (data.hiveTypes[i].isPrimitive()) {
data.primitiveInspectors[i] = (PrimitiveObjectInspector) fieldInspector;
}
}
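// Check whether any requested column is a collection type; if so, the byte-level BytesParser below is not used.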
boolean hasCollections = false;
for (int i = 0; i < columnIndexes.length; ++i) {
int columnId = columnIndexes[i];
if (data.hiveTypes[columnId].isCollection()) {
hasCollections = true;
break;
}
}
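// Use BytesParser only when records arrive as BytesRefArrayWritable and no requested column is a collection; otherwise use the general ArrayParser.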
RecordParser<Writable> parser;
if (!hasCollections && exampleValue instanceof BytesRefArrayWritable) {
parser = new BytesParser(partitionValues, data);
} else {
parser = new ArrayParser(partitionValues, data);
}
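// FORCE_PARSER lets the user override this choice via configuration; log whichever parser ends up being used.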
Class<? extends RecordParser> forcedParserClass = FORCE_PARSER.get(conf);
if (forcedParserClass == null) {
LOG.info("Using {} to parse hive records from table {}",
parser.getClass().getSimpleName(), tableDesc.dotString());
} else {
LOG.info("Using {} chosen by user instead of {} to parse hive records from table {}",
forcedParserClass.getSimpleName(), parser.getClass().getSimpleName(),
tableDesc.dotString());
parser = createForcedParser(deserializer, schema, partitionValues,
data, forcedParserClass);
}
return parser;