// Make sure the task's work output directory is set up before any
// writers are created.
HadoopUtils.setWorkOutputDir(taskAttemptContext);
Configuration configuration = taskAttemptContext.getConfiguration();

// Read the output table's metadata for this profile.
OutputConf outputConf = new OutputConf(configuration, myProfileId);
OutputInfo outputInfo = outputConf.readOutputTableInfo();

// Record the column count in the configuration (consumed by RCFile-style
// output formats).
// NOTE(review): "setRCileNumColumns" looks like a typo of
// "setRCFileNumColumns" — confirm the actual name in HiveUtils before
// renaming, since the helper is declared elsewhere.
HiveUtils.setRCileNumColumns(configuration, outputInfo.getColumnInfo().size());

// Keys are unused; the serialized row travels in the value.
HadoopUtils.setOutputKeyWritableClass(configuration, NullWritable.class);
Serializer rowSerializer = outputInfo.createSerializer(configuration);
HadoopUtils.setOutputValueWritableClass(configuration,
    rowSerializer.getSerializedClass());

// Instantiate the underlying (mapred) output format reflectively and ask
// it for the base record writer we will wrap.
org.apache.hadoop.mapred.OutputFormat baseOutputFormat =
    ReflectionUtils.newInstance(outputInfo.getOutputFormatClass(), configuration);
// CHECKSTYLE: stop LineLength
org.apache.hadoop.mapred.RecordWriter<WritableComparable, Writable> baseWriter =
    getBaseRecordWriter(taskAttemptContext, baseOutputFormat);
// CHECKSTYLE: resume LineLength

// Wrap the base writer so rows are serialized with the table's
// StructObjectInspector before being handed to Hadoop.
StructObjectInspector rowInspector =
    Inspectors.createFor(outputInfo.getColumnInfo());
return new HiveApiRecordWriter(baseWriter, rowSerializer, rowInspector);