  // Cache lookup: the key is the table name plus the pruning expression (if any).
  String key = tab.getTableName() + ";";
  if (prunerExpr != null) {
    key = key + prunerExpr.getExprString();
  }
  PrunedPartitionList ret = prunedPartitionsMap.get(key);
  if (ret != null) {
    return ret;
  }
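  // Partitions are collected into three sets: those known to satisfy the
  // predicate, those for which the predicate cannot be decided, and a
  // representative of those that are pruned out.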
  LinkedHashSet<Partition> true_parts = new LinkedHashSet<Partition>();
  LinkedHashSet<Partition> unkn_parts = new LinkedHashSet<Partition>();
  LinkedHashSet<Partition> denied_parts = new LinkedHashSet<Partition>();
  try {
    StructObjectInspector rowObjectInspector = (StructObjectInspector) tab
        .getDeserializer().getObjectInspector();
    Object[] rowWithPart = new Object[2];
    if (tab.isPartitioned()) {
      LOG.debug("tabname = " + tab.getTableName() + " is partitioned");
      for (String partName : Hive.get().getPartitionNames(tab.getDbName(),
          tab.getTableName(), (short) -1)) {
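        // partName encodes the partition spec (e.g. "ds=2008-04-08/hr=11");
        // it is parsed back into a column -> value map below.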
// If the "strict" mode is on, we have to provide partition pruner for
// each table.
if ("strict".equalsIgnoreCase(HiveConf.getVar(conf,
HiveConf.ConfVars.HIVEMAPREDMODE))) {
if (!hasColumnExpr(prunerExpr)) {
throw new SemanticException(ErrorMsg.NO_PARTITION_PREDICATE
.getMsg("for Alias \"" + alias + "\" Table \""
+ tab.getTableName() + "\""));
}
}
// Set all the variables here
LinkedHashMap<String, String> partSpec = Warehouse
.makeSpecFromName(partName);
LOG.trace("about to process partition " + partSpec + " for pruning ");
// evaluate the expression tree
if (prunerExpr != null) {
Boolean r = (Boolean) PartExprEvalUtils.evalExprWithPart(prunerExpr, partSpec,
rowObjectInspector);
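          // The result can be TRUE, FALSE, or null; null means the predicate
          // could not be decided for this partition, so it is kept as "unknown".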
LOG.trace("prune result for partition " + partSpec + ": " + r);
if (Boolean.FALSE.equals(r)) {
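            // Keep only the first pruned-out partition as a representative of
            // the denied set.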
            if (denied_parts.isEmpty()) {
              Partition part = Hive.get().getPartition(tab, partSpec,
                  Boolean.FALSE);
              denied_parts.add(part);
            }
            LOG.trace("pruned partition: " + partSpec);
          } else {
            Partition part = Hive.get().getPartition(tab, partSpec,
                Boolean.FALSE);
            String state = "retained";
            if (Boolean.TRUE.equals(r)) {
              true_parts.add(part);
            } else {
              unkn_parts.add(part);
              state = "unknown";
            }
            if (LOG.isDebugEnabled()) {
              LOG.debug(state + " partition: " + partSpec);
            }
          }
        } else {
          // If there is no pruning expression, all partitions are needed.
          true_parts.add(Hive.get()
              .getPartition(tab, partSpec, Boolean.FALSE));
        }
      }
    } else {
      // Unpartitioned table: include everything.
      true_parts.addAll(Hive.get().getPartitions(tab));
    }
  } catch (HiveException e) {
    throw e;
  } catch (Exception e) {
    throw new HiveException(e);
  }
  // Cache the result and return the three partition sets.
  ret = new PrunedPartitionList(true_parts, unkn_parts, denied_parts);
  prunedPartitionsMap.put(key, ret);
  return ret;
}