Package org.apache.hadoop.hive.ql.index

Examples of org.apache.hadoop.hive.ql.index.IndexSearchCondition

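These snippets come from the HBase and Accumulo storage handlers (and their tests), where an IndexSearchCondition describes one pushed-down comparison: a column (getColumnDesc), a comparison UDF name (getComparisonOp), and a constant (getConstantDesc). As orientation only, the list of conditions consumed below is normally produced by IndexPredicateAnalyzer; the following is a minimal sketch under assumed names (the predicate variable, the registered equality UDF, and the column "key" are illustrative, not taken from the snippets):

    // Hedged sketch: split a filter into pushable conditions plus a residual predicate.
    IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
    analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual");
    analyzer.allowColumnName("key");  // hypothetical key column

    List<IndexSearchCondition> searchConditions = new ArrayList<IndexSearchCondition>();
    ExprNodeDesc residual = analyzer.analyzePredicate(predicate, searchConditions);
    // residual (possibly null) is whatever Hive must still evaluate after the scan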

    if (searchConditions.size() != 1) {
      throw new RuntimeException(
        "Exactly one search condition expected in push down");
    }

    // Convert the single search condition into a restriction on the HBase scan
    IndexSearchCondition sc = searchConditions.get(0);
    ExprNodeConstantEvaluator eval =
      new ExprNodeConstantEvaluator(sc.getConstantDesc());
    byte[] startRow;
    try {
      // Evaluate the pushed-down constant and prepare to serialize it into row-key bytes
      ObjectInspector objInspector = eval.initialize(null);
      Object writable = eval.evaluate(null);
      ByteStream.Output serializeStream = new ByteStream.Output();
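      // --- Hedged continuation (sketch, not the handler's verbatim code) ----------
      // Assumption: the constant has been written into serializeStream via its
      // primitive object inspector; its bytes then bound the HBase scan.
      startRow = Arrays.copyOf(serializeStream.getData(), serializeStream.getCount());
    } catch (Exception e) {
      throw new RuntimeException(e);
    }

    Scan scan = new Scan();
    scan.setStartRow(startRow);  // restrict the scan to rows at/after the pushed-down key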

    String filterExpr = Utilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);

    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    assertEquals(sConditions.size(), 1);
    IndexSearchCondition sc = sConditions.get(0);
    // Build the Accumulo pushdown tuple from the condition's type and comparison operator
    PushdownTuple tuple = new PushdownTuple(sc,
        handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc),
        handler.getCompareOp(sc.getComparisonOp(), sc));
    byte[] expectedVal = new byte[4];
    ByteBuffer.wrap(expectedVal).putInt(5);
    assertArrayEquals(tuple.getConstVal(), expectedVal);
    assertEquals(tuple.getcOpt().getClass(), Equal.class);
    assertEquals(tuple.getpCompare().getClass(), IntCompare.class);
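The filter expression serialized into the conf in tests like the one above usually encodes a single comparison. A hypothetical construction of such a predicate (the column name field1, its int type, and the constant 5 simply mirror the asserted values; they are assumptions, not the test's actual setup code):

    // Sketch: build "field1 = 5" as an ExprNodeGenericFuncDesc for push down.
    ExprNodeColumnDesc column = new ExprNodeColumnDesc(
        TypeInfoFactory.intTypeInfo, "field1", null, false);
    ExprNodeConstantDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),
        Arrays.<ExprNodeDesc>asList(column, constant));

    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(node));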

    assertNotNull(node);
    String filterExpr = Utilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
    assertEquals(sConditions.size(), 1);
    IndexSearchCondition sc = sConditions.get(0);

    // Resolve the primitive comparison implementation for the condition's column type
    handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc);
  }
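Beyond the handler-specific helpers, the decomposed conditions can be inspected directly through IndexSearchCondition's own getters; a minimal illustrative loop (variable names assumed):

    // Sketch: log what was pushed down for each condition.
    for (IndexSearchCondition cond : sConditions) {
      String column = cond.getColumnDesc().getColumn();     // e.g. "field1"
      String compareUdf = cond.getComparisonOp();           // comparison UDF class name
      Object constant = cond.getConstantDesc().getValue();  // pushed-down literal
      System.out.println(column + " " + compareUdf + " " + constant);
    }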

    try {
      assertNotNull(node);
      String filterExpr = Utilities.serializeExpression(node);
      conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
      List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
      assertEquals(sConditions.size(), 1);
      IndexSearchCondition sc = sConditions.get(0);
      new PushdownTuple(sc, handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc),
          handler.getCompareOp(sc.getComparisonOp(), sc));
      fail("Should fail: compare op not registered for index analyzer. Should leave undesirable residual predicate");
    } catch (RuntimeException e) {
      assertTrue(e.getMessage().contains("Unexpected residual predicate: field1 is not null"));
    } catch (Exception e) {
      fail(StringUtils.stringifyException(e));
    }
  }
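The failure asserted above comes from a comparison the analyzer was not configured to recognize, so it is handed back as a residual predicate rather than a search condition. At the IndexPredicateAnalyzer level the pattern looks roughly like this (analyzer and predicate are assumed to be set up as in the first sketch):

    // Sketch: anything analyzePredicate cannot translate is returned as a residual.
    List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
    ExprNodeDesc residual = analyzer.analyzePredicate(predicate, conditions);
    if (residual != null) {
      // A handler that cannot evaluate residuals server-side treats this as an error.
      throw new RuntimeException("Unexpected residual predicate: " + residual.getExprString());
    }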
