*
*/
private static class ReplaceTableScanOpProc implements NodeProcessor {
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
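// nd is the TableScanOperator that currently scans the base table;
// it is rewritten in place below to scan the index table instead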
TableScanOperator scanOperator = (TableScanOperator) nd;
rewriteQueryCtx = (RewriteQueryUsingAggregateIndexCtx) ctx;
String baseTableName = rewriteQueryCtx.getBaseTableName();
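// the base table name may be qualified with a query alias ("alias:tableName")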
String alias = null;
if (baseTableName.contains(":")) {
alias = baseTableName.split(":")[0];
}
//Need to remove the original TableScanOperator from these data structures
// and register the new one in its place
Map<TableScanOperator, Table> topToTable =
rewriteQueryCtx.getParseContext().getTopToTable();
Map<String, Operator<? extends Serializable>> topOps =
rewriteQueryCtx.getParseContext().getTopOps();
Map<Operator<? extends Serializable>, OpParseContext> opParseContext =
rewriteQueryCtx.getParseContext().getOpParseCtx();
//keep this OpParseContext; the RowResolver for the new scanOperator is set on it below
OpParseContext operatorContext = opParseContext.get(scanOperator);
//remove original TableScanOperator
topToTable.remove(scanOperator);
topOps.remove(baseTableName);
opParseContext.remove(scanOperator);
//construct a new descriptor for the index table scan
TableScanDesc indexTableScanDesc = new TableScanDesc();
indexTableScanDesc.setGatherStats(false);
String indexTableName = rewriteQueryCtx.getIndexName();
Table indexTableHandle = null;
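// fetch the index table's metadata from the metastore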
try {
indexTableHandle = rewriteQueryCtx.getHiveDb().getTable(indexTableName);
} catch (HiveException e) {
LOG.error("Error while getting the table handle for index table.");
LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
throw new SemanticException(e.getMessage(), e);
}
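// statistics published for this scan are aggregated under the index table's key prefix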
String statsAggPrefix = indexTableName + Path.SEPARATOR;
indexTableScanDesc.setStatsAggPrefix(statsAggPrefix);
scanOperator.setConf(indexTableScanDesc);
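// the operator object itself is reused; only its descriptor now describes the index table scan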
//Construct the new RowResolver for the new TableScanOperator
RowResolver rr = new RowResolver();
try {
StructObjectInspector rowObjectInspector =
(StructObjectInspector) indexTableHandle.getDeserializer().getObjectInspector();
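// mirror every column of the index table into the resolver, keyed by the index table name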
List<? extends StructField> fields = rowObjectInspector
.getAllStructFieldRefs();
for (int i = 0; i < fields.size(); i++) {
rr.put(indexTableName, fields.get(i).getFieldName(), new ColumnInfo(fields
.get(i).getFieldName(), TypeInfoUtils
.getTypeInfoFromObjectInspector(fields.get(i)
.getFieldObjectInspector()), indexTableName, false));
}
} catch (SerDeException e) {
LOG.error("Error while creating the RowResolver for new TableScanOperator.");
LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
throw new SemanticException(e.getMessage(), e);
}
//Set row resolver for new table
operatorContext.setRowResolver(rr);
String tabNameWithAlias = (alias == null) ? indexTableName : alias + ":" + indexTableName;
//Scan operator now points at the index table instead of the base table
topToTable.put(scanOperator, indexTableHandle);
scanOperator.getConf().setAlias(tabNameWithAlias);
scanOperator.setAlias(indexTableName);
topOps.put(tabNameWithAlias, scanOperator);
opParseContext.put(scanOperator, operatorContext);
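// write the updated mappings back so downstream optimizer phases see the index scan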
rewriteQueryCtx.getParseContext().setTopToTable(
(HashMap<TableScanOperator, Table>) topToTable);
rewriteQueryCtx.getParseContext().setTopOps(