// Accumulates one PartitionDesc per partition path for the map work being built.
ArrayList<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
// For unpartitioned tables: the single table directory / descriptor (filled in later).
Path tblDir = null;
TableDesc tblDesc = null;
// Use the caller-supplied pruned partition list when available; otherwise resolve it below.
PrunedPartitionList partsList = pList;
plan.setNameToSplitSample(parseCtx.getNameToSplitSample());
if (partsList == null) {
try {
// First consult the per-TableScanOperator cache of already-pruned partition lists.
partsList = parseCtx.getOpToPartList().get((TableScanOperator)topOp);
if (partsList == null) {
// Cache miss: run the partition pruner for this table scan and memoize the result
// so subsequent lookups for the same operator reuse it.
partsList = PartitionPruner.prune(parseCtx.getTopToTable().get(topOp),
parseCtx.getOpToPartPruner().get(topOp), opProcCtx.getConf(),
alias_id, parseCtx.getPrunedPartitions());
parseCtx.getOpToPartList().put((TableScanOperator)topOp, partsList);
}
} catch (SemanticException e) {
// SemanticExceptions already carry the right context; propagate unchanged.
throw e;
} catch (HiveException e) {
// Log the full stack trace, then surface the failure as a SemanticException
// (the cause is preserved for callers that inspect it).
LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
throw new SemanticException(e.getMessage(), e);
}
}
// Generate the map work for this alias_id
Set<Partition> parts = null;
// pass both confirmed and unknown partitions through the map-reduce
// framework
parts = partsList.getConfirmedPartns();
// NOTE(review): this addAll mutates the set returned by getConfirmedPartns() in place —
// assumes PrunedPartitionList hands out a mutable set it owns; verify no other caller
// relies on the confirmed set staying unmodified.
parts.addAll(partsList.getUnknownPartns());
// Descriptor for the alias as a whole; derived from an arbitrary ("first") partition
// when at least one partition survived pruning.
PartitionDesc aliasPartnDesc = null;
try {
if (!parts.isEmpty()) {
aliasPartnDesc = Utilities.getPartitionDesc(parts.iterator().next());
}