// 2. Constructing a conditional task consisting of a move task and a map reduce task
//
MapRedTask currTask = (MapRedTask) ctx.getCurrTask();
// Move work that relocates the merged output directory to its final location;
// it is attached to the conditional task below rather than executed directly.
MoveWork dummyMv = new MoveWork(null, null, null,
    new LoadFileDesc(fsInputDesc.getDirName(), finalName, true, null, null), false);
MapredWork cplan;

if (parseCtx.getConf().getBoolVar(HiveConf.ConfVars.HIVEMERGERCFILEBLOCKLEVEL) &&
    fsInputDesc.getTableInfo().getInputFileFormatClass().equals(RCFileInputFormat.class)) {
  // Block-level RCFile merge is enabled and the input really is RCFile.
  // Validate that the configured merge input format class is loadable before
  // committing to the block-level merge plan.
  String inputFormatClass = parseCtx.getConf().getVar(
      HiveConf.ConfVars.HIVEMERGEINPUTFORMATBLOCKLEVEL);
  try {
    // Existence check only — the loaded Class object is not needed, so avoid
    // the unused raw-typed local and unchecked cast of the original code.
    Class.forName(inputFormatClass);
    LOG.info("RCFile format- Using block level merge");
    cplan = createRCFileMergeTask(fsInputDesc, finalName,
        dpCtx != null && dpCtx.getNumDPCols() > 0);
  } catch (ClassNotFoundException e) {
    String msg = "Illegal input format class: " + inputFormatClass;
    // Chain the original exception so the root cause is not lost.
    throw new SemanticException(msg, e);
  }
} else {
  cplan = createMergeTask(ctx.getConf(), tsMerge, fsInputDesc);
  // use CombineHiveInputFormat for map-only merging
}
cplan.setInputformat("org.apache.hadoop.hive.ql.io.CombineHiveInputFormat");
// NOTE: we should gather stats in MR1 rather than MR2 at merge job since we don't
// know if merge MR2 will be triggered at execution time
ConditionalTask cndTsk = createCondTask(ctx.getConf(), ctx.getCurrTask(), dummyMv, cplan,
    fsInputDesc.getDirName());