Object... nodeOutputs) throws SemanticException {
Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>) nd;
LOG.info("MapWorkVectorizationNodeProcessor processing Operator: " + op.getName() + "...");
VectorizationContext vContext = null;
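// Table scan operators are the roots of the map-side operator tree, so a
// fresh vectorization context is created for them from the physical context.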
if (op instanceof TableScanOperator) {
vContext = getVectorizationContext(op, physicalContext);
for (String onefile : mWork.getPathToAliases().keySet()) {
List<String> aliases = mWork.getPathToAliases().get(onefile);
for (String alias : aliases) {
Operator<? extends OperatorDesc> opRoot = mWork.getAliasToWork().get(alias);
if (op == opRoot) {
// The same vectorization context is entered into the MapWork scratch
// column map under every partition path read by this table scan.
//
vContext.setFileKey(onefile);
scratchColumnContext.put(onefile, vContext);
if (LOG.isDebugEnabled()) {
LOG.debug("Vectorized MapWork operator " + op.getName() + " vectorization context " + vContext.toString());
}
break;
}
}
}
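// Remember the context for this operator so descendants visited later in
// the walk can look it up from the operator stack.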
vContextsByOp.put(op, vContext);
} else {
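// Non-root operators inherit the vectorization context from the nearest
// ancestor on the current walk stack.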
vContext = walkStackToFindVectorizationContext(stack, op);
if (vContext == null) {
throw new SemanticException(
String.format("Did not find vectorization context for operator %s in operator stack",
op.getName()));
}
}
assert vContext != null;
// When the vectorized GROUPBY outputs rows instead of vectorized row batches,
// we don't vectorize the operators below it.
if (nonVectorizableChildOfGroupBy(op)) {
// No need to vectorize
if (!opsDone.contains(op)) {
opsDone.add(op);
}
return null;
}
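// Vectorize the operator itself, using the vectorization context resolved above.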
Operator<? extends OperatorDesc> vectorOp = doVectorize(op, vContext);
if (LOG.isDebugEnabled()) {
LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " vectorization context " + vContext.toString());
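// Operators that define their own vectorization context region also expose
// the output context their children will use; log it as well.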
if (vectorOp instanceof VectorizationContextRegion) {
VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOp;
VectorizationContext vOutContext = vcRegion.getOuputVectorizationContext();
LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " added vectorization context " + vOutContext.toString());
}
}
return null;