* @param opProcCtx processing context for the map-reduce plan generation walk
*/
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
    Object... nodeOutputs) throws SemanticException {
UnionOperator union = (UnionOperator)nd;
GenMRProcContext ctx = (GenMRProcContext)opProcCtx;
ParseContext parseCtx = ctx.getParseCtx();
UnionProcContext uCtx = parseCtx.getUCtx();
// Map-only subqueries can be optimized in the future to not write to a file
Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
// The plan needs to be broken only if one of the sub-queries involves a map-reduce job
if (uCtx.isMapOnlySubq()) {
mapCurrCtx.put((Operator<? extends Serializable>) nd,
    new GenMapRedCtx(ctx.getCurrTask(), ctx.getCurrTopOp(), ctx.getCurrAliasId()));
return null;
}
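// At least one of the sub-queries involves a map-reduce job, so the plan is broken at
// the union: each branch writes its result to a temporary file that feeds the union task.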
ctx.setCurrUnionOp(union);
UnionParseContext uPrsCtx = uCtx.getUnionParseContext(union);
assert uPrsCtx != null;
Task<? extends Serializable> currTask = ctx.getCurrTask();
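// Determine which branch (parent position) of the union the walker arrived from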
int pos = UnionProcFactory.getPositionParent(union, stack);
// is the current task a root task
if (uPrsCtx.getRootTask(pos) && (!ctx.getRootTasks().contains(currTask)))
ctx.getRootTasks().add(currTask);
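// The GenMRUnionCtx collects the union task and the temporary files/descriptors
// produced by each branch of the union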
GenMRUnionCtx uCtxTask = ctx.getUnionTask(union);
Task<? extends Serializable> uTask = null;
Operator<? extends Serializable> parent = union.getParentOperators().get(pos);
mapredWork uPlan = null;
// union is encountered for the first time
if (uCtxTask == null) {
uCtxTask = new GenMRUnionCtx();
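// Create an empty map-reduce plan and wrap it in the task that will execute the union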
uPlan = GenMapRedUtils.getMapRedWork();
uTask = TaskFactory.get(uPlan, parseCtx.getConf());
uCtxTask.setUTask(uTask);
ctx.setUnionTask(union, uCtxTask);
}
else {
uTask = uCtxTask.getUTask();
uPlan = (mapredWork)uTask.getWork();
}
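// Describe the schema of the intermediate file written by this branch, derived from
// the parent operator's row schema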
tableDesc tt_desc = PlanUtils.getIntermediateFileTableDesc(
PlanUtils.getFieldSchemasFromRowSchema(parent.getSchema(), "temporarycol"));
// generate the temporary file
Context baseCtx = parseCtx.getContext();
String taskTmpDir = baseCtx.getMRTmpFileURI();
// Record the temporary file and its schema in the union context
uCtxTask.addTaskTmpDir(taskTmpDir);
uCtxTask.addTTDesc(tt_desc);
// The union task is empty. The files created for all the inputs are assembled in the
// union context and later used to initialize the union plan
// Create a file sink operator for this file name
Operator<? extends Serializable> fs_op = OperatorFactory.get(
    new fileSinkDesc(taskTmpDir, tt_desc,
        parseCtx.getConf().getBoolVar(HiveConf.ConfVars.COMPRESSINTERMEDIATE)),
    parent.getSchema());
assert parent.getChildOperators().size() == 1;
parent.getChildOperators().set(0, fs_op);
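// The file sink replaces the union as the parent's only child: this branch now writes
// to the temporary file, which is later used to initialize the union plan.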