// NOTE(review): this is the interior of a larger method (appears to be Hive's
// MapOperator object-inspector initialization). Identifiers such as partSpec,
// hconf, tblProps, sdclass and MapOpCtx are declared outside this view, and the
// if-block opened below closes beyond it — only comments are added here.
String partName = String.valueOf(partSpec);
// HiveConf.setVar(hconf, HiveConf.ConfVars.HIVETABLENAME, tableName);
// HiveConf.setVar(hconf, HiveConf.ConfVars.HIVEPARTITIONNAME, partName);
// Instantiate the table's SerDe via reflection and initialize it with the job
// configuration and the table properties so it can decode raw serialized rows.
// NOTE(review): Class.newInstance() is deprecated since Java 9 — consider
// getDeclaredConstructor().newInstance() if this file targets a newer JDK; it
// also wraps checked exceptions instead of rethrowing them unchecked.
Deserializer deserializer = (Deserializer) sdclass.newInstance();
deserializer.initialize(hconf, tblProps);
// Object inspector describing the row as produced by the SerDe alone,
// i.e. without any virtual partition columns appended.
StructObjectInspector rawRowObjectInspector = (StructObjectInspector) deserializer
.getObjectInspector();
// Populated in the partitioned branch below; presumably the non-partitioned
// branch (outside this view) assigns it as well — TODO confirm.
MapOpCtx opCtx = null;
// Next check if this table has partitions and if so
// get the list of partition names as well as allocate
// the serdes for the partition columns
String pcols = tblProps
.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
// Log LOG = LogFactory.getLog(MapOperator.class.getName());
if (pcols != null && pcols.length() > 0) {
// Partition column names are stored as a single "/"-separated string in the
// metastore table properties; split into individual key names.
String[] partKeys = pcols.trim().split("/");
List<String> partNames = new ArrayList<String>(partKeys.length);
// One Text value per partition key, aligned by index with partNames.
Object[] partValues = new Object[partKeys.length];
List<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>(
partKeys.length);
for (int i = 0; i < partKeys.length; i++) {
String key = partKeys[i];
partNames.add(key);
// Partitions do not exist for this table
if (partSpec == null) {
// No concrete partition: use an empty Text placeholder for each key.
partValues[i] = new Text();
} else {
// NOTE(review): if partSpec lacks this key, partSpec.get(key) returns
// null and new Text(null) throws NPE — presumably the metastore
// guarantees every declared key is present; verify against callers.
partValues[i] = new Text(partSpec.get(key));
}
// All partition values are exposed as writable strings regardless of the
// column's declared type.
partObjectInspectors
.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
// Struct inspector covering only the partition columns.
StructObjectInspector partObjectInspector = ObjectInspectorFactory
.getStandardStructObjectInspector(partNames, partObjectInspectors);
// Two-slot row container: slot 1 holds the (constant) partition values;
// slot 0 is left null here — presumably filled with each deserialized row
// by the per-row processing code outside this view. TODO confirm.
Object[] rowWithPart = new Object[2];
rowWithPart[1] = partValues;
// Union inspector flattening the raw row fields followed by the partition
// fields, so downstream operators see partition columns as ordinary columns.
StructObjectInspector rowObjectInspector = ObjectInspectorFactory
.getUnionStructObjectInspector(Arrays
.asList(new StructObjectInspector[] {rawRowObjectInspector, partObjectInspector}));
// LOG.info("dump " + tableName + " " + partName + " " +
// rowObjectInspector.getTypeName());
opCtx = new MapOpCtx(true, rowObjectInspector, rawRowObjectInspector ,partObjectInspector,rowWithPart, deserializer);