// Accumulator mapping a partition-spec index to its filter expression(s).
// NOTE(review): nothing in this chunk adds to `result`; presumably it is
// populated from `expr` after the inner loop, later in the enclosing method —
// confirm against the full method body.
Map<Integer, List<ExprNodeGenericFuncDesc>> result =
new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
// Walk the AST children, starting at 1 (child 0 is presumably the table
// reference rather than a partition spec — TODO confirm against the grammar).
for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
Tree partSpecTree = ast.getChild(childIndex);
// Only TOK_PARTSPEC nodes describe partition specs; skip everything else.
if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) continue;
// AND-combined predicate built up from every key/op/value triple in this spec.
ExprNodeGenericFuncDesc expr = null;
// Partition column names seen in this spec (consumed after this chunk's view).
HashSet<String> names = new HashSet<String>(partSpecTree.getChildCount());
for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
// Grammar invariant: each child of TOK_PARTSPEC is a TOK_PARTVAL triple.
assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
// Lowercase the column name so the colTypes lookup below matches; this
// implies colTypes is keyed by lowercased names — verify at the call site.
String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
String operator = partSpecSingleKey.getChild(1).getText();
// The literal arrives quoted from the parser; strip the quotes for the constant.
String val = stripQuotes(partSpecSingleKey.getChild(2).getText());
String type = colTypes.get(key);
// Reject references to columns that are not known partition columns.
if (type == null) {
throw new SemanticException("Column " + key + " not found");
}
// Create the corresponding hive expression to filter on partition columns.
ExprNodeColumnDesc column = new ExprNodeColumnDesc(
TypeInfoFactory.getPrimitiveTypeInfo(type), key, null, true);
// Build `<column> <operator> <constant>` as a single binary predicate.
ExprNodeGenericFuncDesc op = makeBinaryPredicate(
operator, column, new ExprNodeConstantDesc(val));
// If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
names.add(key);
}