  @Override
  public RecordReader<WritableComparable, HiveReadableRecord>
  createRecordReader(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
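    // Bridge from the new mapreduce API to the old mapred API: wrap the
    // context's Configuration in a JobConf so mapred-based readers can use it.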
    Configuration conf = context.getConfiguration();
    JobConf jobConf = new JobConf(conf);
    HiveApiInputSplit apiInputSplit;
    if (inputSplit instanceof HiveApiInputSplit) {
      apiInputSplit = (HiveApiInputSplit) inputSplit;
    } else {
      throw new IllegalArgumentException("InputSplit not a HiveApiInputSplit");
    }
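    // Hand the split its configuration, then obtain the underlying mapred
    // RecordReader that actually reads raw rows from the split.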
    apiInputSplit.setConf(jobConf);
    // CHECKSTYLE: stop LineLength
    org.apache.hadoop.mapred.RecordReader<WritableComparable, Writable> baseRecordReader =
        apiInputSplit.getBaseRecordReader(jobConf, context);
    // CHECKSTYLE: resume LineLength
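    // Record which columns this job reads so storage formats that support
    // column projection can skip deserializing the rest.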
    HiveUtils.setReadColumnIds(conf, apiInputSplit.getColumnIds());
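    // When true (the default here), the reader reuses a single record
    // instance across next() calls instead of allocating one per row.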
    boolean reuseRecord = conf.getBoolean(REUSE_RECORD_KEY, true);
    HiveApiRecordReader reader = new HiveApiRecordReader(
        baseRecordReader,
        apiInputSplit.getDeserializer(),
        apiInputSplit.getPartitionValues(),
        apiInputSplit.getTableSchema().numColumns(),
        reuseRecord);
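    // Propagate the enclosing input format's observer to the new reader.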
    reader.setObserver(observer);
    return reader;
  }
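
  // Usage note: callers can opt out of record reuse before job submission
  // with conf.setBoolean(REUSE_RECORD_KEY, false); the default above is true.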