*/
public ParseContext transform(ParseContext pCtx) throws SemanticException {
// create a walker which walks the tree in a DFS manner while maintaining
// the operator stack.
//
// Pass 1: register regex rules over operator-name paths. Each rule fires
// when the walked operator stack contains the given operator somewhere
// above a UnionOperator, and attaches the matching union processor.
Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
// R1: ReduceSinkOperator ... UnionOperator -> map-reduce-side union handling.
opRules.put(new RuleRegExp("R1",
ReduceSinkOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"),
UnionProcFactory.getMapRedUnion());
// R2: UnionOperator ... UnionOperator (union above union) -> "unknown" union.
opRules.put(new RuleRegExp("R2",
UnionOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"),
UnionProcFactory.getUnknownUnion());
// R3: TableScanOperator ... UnionOperator -> map-side union.
opRules.put(new RuleRegExp("R3",
TableScanOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"),
UnionProcFactory.getMapUnion());
// R4: MapJoinOperator ... UnionOperator -> map-join union.
opRules.put(new RuleRegExp("R4",
MapJoinOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"),
UnionProcFactory.getMapJoinUnion());
// The dispatcher fires the processor for the matching rule and passes the
// context along; nodes that match no rule fall back to getNoUnion().
UnionProcContext uCtx = new UnionProcContext();
uCtx.setParseContext(pCtx);
Dispatcher disp = new DefaultRuleDispatcher(UnionProcFactory.getNoUnion(),
opRules, uCtx);
GraphWalker ogw = new PreOrderWalker(disp);
// Create a list of topop nodes: the roots of the operator DAG, taken from
// the parse context's top-level operators, used as walk starting points.
ArrayList<Node> topNodes = new ArrayList<Node>();
topNodes.addAll(pCtx.getTopOps().values());
ogw.startWalking(topNodes, null);
// Record the union-processing results on the parse context so later
// compilation phases can consult them.
pCtx.setUCtx(uCtx);
// Walk the tree again to see if the union can be removed completely
// (pass 2, gated on the HIVE_OPTIMIZE_UNION_REMOVE config flag).
HiveConf conf = pCtx.getConf();
opRules.clear();
if (conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE)) {
// Union removal is only legal when the Hadoop version supports
// sub-directory outputs; otherwise fail compilation with an explicit
// error rather than silently producing a wrong plan.
// NOTE(review): presumably each union branch writes to its own
// sub-directory when the union is removed — confirm against
// UnionProcFactory.getUnionNoProcessFile().
if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_HADOOP_SUPPORTS_SUBDIRECTORIES)) {
throw new
SemanticException(ErrorMsg.HIVE_UNION_REMOVE_OPTIMIZATION_NEEDS_SUBDIRECTORIES.getMsg());
}
// R5: UnionOperator ... FileSinkOperator -> candidate for skipping the
// union's own processing of the file sink.
opRules.put(new RuleRegExp("R5", UnionOperator.getOperatorName() + "%" +
".*" + FileSinkOperator.getOperatorName() + "%"),
UnionProcFactory.getUnionNoProcessFile());
// Re-use the same union context for the second walk; default processor
// is again getNoUnion() for non-matching nodes.
disp = new DefaultRuleDispatcher(UnionProcFactory.getNoUnion(), opRules, uCtx);
ogw = new PreOrderWalker(disp);