HBaseSerDe hbaseSerde = (HBaseSerDe) deserializer;
int keyColPos = hbaseSerde.getKeyColumnOffset();
String keyColType = jobConf.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES)
.split(",")[keyColPos];
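// Build an analyzer restricted to the HBase row key column, using its name,
// type, and storage format, so only comparisons on the key are considered.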
IndexPredicateAnalyzer analyzer =
HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(columnNames.get(keyColPos), keyColType,
hbaseSerde.getStorageFormatOfCol(keyColPos).get(0));
List<IndexSearchCondition> searchConditions =
new ArrayList<IndexSearchCondition>();
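// analyzePredicate collects the key-column comparisons it can handle into
// searchConditions and returns whatever remains as the residual expression.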
ExprNodeDesc residualPredicate =
analyzer.analyzePredicate(predicate, searchConditions);
int scSize = searchConditions.size();
if (scSize < 1 || scSize > 2) {
// Either there was nothing which could be pushed down (size = 0),
// or there were complex predicates which we don't support yet (size > 2).
// Currently supported predicates take one of these forms:
// 1. key < 20 (size = 1)
// 2. key = 20 (size = 1)
// 3. key < 20 and key > 10 (size = 2)
return null;
}
if (scSize == 2 &&
(searchConditions.get(0).getComparisonOp()
.equals("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual") ||
searchConditions.get(1).getComparisonOp()
.equals("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual"))) {
// If one of the two predicates is an equality (=), combining it with any
// other predicate is not supported, so nothing is pushed down.
return null;
}
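// The conditions accepted by the analyzer become the pushed predicate;
// anything it could not convert stays in the residual predicate, which
// Hive evaluates on the rows returned by the scan.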
DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
decomposedPredicate.pushedPredicate = analyzer.translateSearchConditions(
searchConditions);
decomposedPredicate.residualPredicate = residualPredicate;
return decomposedPredicate;
}