Examples of VectorExpression


Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

    // Sin(double)
    GenericUDFBridge gudfBridge = new GenericUDFBridge("sin", false, UDFSin.class.getName());
    mathFuncExpr.setGenericUDF(gudfBridge);
    mathFuncExpr.setChildren(children2);
    VectorExpression ve = vc.getVectorExpression(mathFuncExpr, VectorExpressionDescriptor.Mode.PROJECTION);
    Assert.assertEquals(FuncSinDoubleToDouble.class, ve.getClass());

    // Round without digits
    GenericUDFRound udfRound = new GenericUDFRound();
    mathFuncExpr.setGenericUDF(udfRound);
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(FuncRoundDoubleToDouble.class, ve.getClass());

    // Round with digits
    mathFuncExpr.setGenericUDF(udfRound);
    children2.add(new ExprNodeConstantDesc(4));
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(RoundWithNumDigitsDoubleToDouble.class, ve.getClass());
    Assert.assertEquals(4, ((RoundWithNumDigitsDoubleToDouble) ve).getDecimalPlaces().get());

    // Log with int base
    gudfBridge = new GenericUDFBridge("log", false, UDFLog.class.getName());
    mathFuncExpr.setGenericUDF(gudfBridge);
    children2.clear();
    children2.add(new ExprNodeConstantDesc(4.0));
    children2.add(colDesc2);
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class, ve.getClass());
    Assert.assertTrue(4 == ((FuncLogWithBaseDoubleToDouble) ve).getBase());

    // Log with default base
    children2.clear();
    children2.add(colDesc2);
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(FuncLnDoubleToDouble.class, ve.getClass());

    //Log with double base
    children2.clear();
    children2.add(new ExprNodeConstantDesc(4.5));
    children2.add(colDesc2);
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class, ve.getClass());
    Assert.assertTrue(4.5 == ((FuncLogWithBaseDoubleToDouble) ve).getBase());

    //Log with int input and double base
    children2.clear();
    children2.add(new ExprNodeConstantDesc(4.5));
    children2.add(colDesc1);
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(FuncLogWithBaseLongToDouble.class, ve.getClass());
    Assert.assertTrue(4.5 == ((FuncLogWithBaseLongToDouble) ve).getBase());

    //Power with double power
    children2.clear();
    children2.add(colDesc2);
    children2.add(new ExprNodeConstantDesc(4.5));
    mathFuncExpr.setGenericUDF(new GenericUDFPower());
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(FuncPowerDoubleToDouble.class, ve.getClass());
    Assert.assertTrue(4.5 == ((FuncPowerDoubleToDouble) ve).getPower());

    //Round with default decimal places
    mathFuncExpr.setGenericUDF(udfRound);
    children2.clear();
    children2.add(colDesc2);
    mathFuncExpr.setChildren(children2);
    ve = vc.getVectorExpression(mathFuncExpr);
    Assert.assertEquals(FuncRoundDoubleToDouble.class, ve.getClass());
  }
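
The snippet above assumes that vc, mathFuncExpr, children2, colDesc1 and colDesc2 were created earlier in the test. A minimal sketch of that setup, reusing only constructors that appear elsewhere in these examples (column names, map indices and type info are assumptions, not the original test code):

    // Hypothetical setup for the math-function snippet above.
    Map<String, Integer> columnMap = new HashMap<String, Integer>();
    columnMap.put("col1", 1);   // long input column
    columnMap.put("col2", 2);   // double input column
    VectorizationContext vc = new VectorizationContext(columnMap, 2);

    ExprNodeColumnDesc colDesc1 = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
    ExprNodeColumnDesc colDesc2 = new ExprNodeColumnDesc(Double.class, "col2", "table", false);

    ExprNodeGenericFuncDesc mathFuncExpr = new ExprNodeGenericFuncDesc();
    mathFuncExpr.setTypeInfo(TypeInfoFactory.doubleTypeInfo);

    List<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
    children2.add(colDesc2);    // sin(col2), round(col2), ... operate on the double column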

Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

    //UDFYear
    GenericUDFBridge gudfBridge = new GenericUDFBridge("year", false, UDFYear.class.getName());
    tsFuncExpr.setGenericUDF(gudfBridge);
    tsFuncExpr.setChildren(children);
    VectorExpression ve = vc.getVectorExpression(tsFuncExpr);
    Assert.assertEquals(VectorUDFYearLong.class, ve.getClass());

    //GenericUDFToUnixTimeStamp
    GenericUDFToUnixTimeStamp gudf = new GenericUDFToUnixTimeStamp();
    tsFuncExpr.setGenericUDF(gudf);
    tsFuncExpr.setTypeInfo(TypeInfoFactory.longTypeInfo);
    ve = vc.getVectorExpression(tsFuncExpr);
    Assert.assertEquals(VectorUDFUnixTimeStampLong.class, ve.getClass());
  }
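
Here too, tsFuncExpr, children and vc are built before the snippet. A minimal sketch of that setup (column name and map indices are assumptions):

    // Hypothetical setup for the timestamp snippet above.
    Map<String, Integer> columnMap = new HashMap<String, Integer>();
    columnMap.put("col1", 1);
    columnMap.put("col2", 2);
    VectorizationContext vc = new VectorizationContext(columnMap, 2);

    ExprNodeColumnDesc colDesc = new ExprNodeColumnDesc(Timestamp.class, "col1", "table", false);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(colDesc);

    ExprNodeGenericFuncDesc tsFuncExpr = new ExprNodeGenericFuncDesc();
    tsFuncExpr.setTypeInfo(TypeInfoFactory.longTypeInfo);
    tsFuncExpr.setChildren(children);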

Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

    Map<String, Integer> columnMap = new HashMap<String, Integer>();
    columnMap.put("col1", 1);
    columnMap.put("col2", 2);
    VectorizationContext vc = new VectorizationContext(columnMap, 2);
    VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertTrue(ve instanceof FilterStringColumnBetween);

    // string NOT BETWEEN
    children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertTrue(ve instanceof FilterStringColumnNotBetween);

    // long BETWEEN
    children1.set(0, new ExprNodeConstantDesc(new Boolean(false)));
    children1.set(1, new ExprNodeColumnDesc(Long.class, "col1", "table", false));
    children1.set(2, new ExprNodeConstantDesc(10));
    children1.set(3, new ExprNodeConstantDesc(20));
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertTrue(ve instanceof FilterLongColumnBetween);

    // long NOT BETWEEN
    children1.set(0, new ExprNodeConstantDesc(new Boolean(true)));
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertTrue(ve instanceof FilterLongColumnNotBetween);

    // double BETWEEN
    children1.set(0, new ExprNodeConstantDesc(new Boolean(false)));
    children1.set(1, new ExprNodeColumnDesc(Double.class, "col1", "table", false));
    children1.set(2, new ExprNodeConstantDesc(10.0d));
    children1.set(3, new ExprNodeConstantDesc(20.0d));
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertTrue(ve instanceof FilterDoubleColumnBetween);

    // double NOT BETWEEN
    children1.set(0, new ExprNodeConstantDesc(new Boolean(true)));
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertTrue(ve instanceof FilterDoubleColumnNotBetween);

    // timestamp BETWEEN
    children1.set(0, new ExprNodeConstantDesc(new Boolean(false)));
    children1.set(1, new ExprNodeColumnDesc(Timestamp.class, "col1", "table", false));
    children1.set(2, new ExprNodeConstantDesc("2013-11-05 00:00:00.000"));
    children1.set(3, new ExprNodeConstantDesc("2013-11-06 00:00:00.000"));
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertEquals(FilterStringColumnBetween.class, ve.getClass());

    // timestamp NOT BETWEEN
    children1.set(0, new ExprNodeConstantDesc(new Boolean(true)));
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertEquals(FilterStringColumnNotBetween.class, ve.getClass());
  }
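
The exprDesc and children1 used above are built before the snippet starts. One plausible construction, assuming GenericUDFBetween is the UDF behind BETWEEN and that the children are laid out as (NOT flag, column, lower bound, upper bound), which is what the comments above suggest:

    // Hypothetical construction of the string BETWEEN expression tested first above;
    // the column name and bound values are assumptions.
    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
    children1.add(new ExprNodeConstantDesc(new Boolean(false)));                  // NOT flag
    children1.add(new ExprNodeColumnDesc(String.class, "col2", "table", false));  // column
    children1.add(new ExprNodeConstantDesc("a"));                                 // lower bound
    children1.add(new ExprNodeConstantDesc("z"));                                 // upper bound

    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc();
    exprDesc.setGenericUDF(new GenericUDFBetween());
    exprDesc.setChildren(children1);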

Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

    Map<String, Integer> columnMap = new HashMap<String, Integer>();
    columnMap.put("col1", 1);
    columnMap.put("col2", 2);
    VectorizationContext vc = new VectorizationContext(columnMap, 2);
    VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
    assertTrue(ve instanceof FilterStringColumnInList);
    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
    assertTrue(ve instanceof StringColumnInList);

    // long IN
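
The snippet is cut off at the long IN case. A sketch of how the string IN expression tested above might be built, assuming GenericUDFIn and an arbitrary constant list (column name and values are assumptions):

    // Hypothetical construction of the string IN expression tested above.
    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
    children1.add(new ExprNodeColumnDesc(String.class, "col2", "table", false));
    children1.add(new ExprNodeConstantDesc("a"));
    children1.add(new ExprNodeConstantDesc("b"));

    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc();
    exprDesc.setGenericUDF(new GenericUDFIn());
    exprDesc.setChildren(children1);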

Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

    Map<String, Integer> columnMap = new HashMap<String, Integer>();
    columnMap.put("col1", 1);
    columnMap.put("col2", 2);
    columnMap.put("col3", 3);
    VectorizationContext vc = new VectorizationContext(columnMap, 3);
    VectorExpression ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongColumnLongColumn);

    // long column/scalar IF
    children1.set(2, new ExprNodeConstantDesc(1L));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongColumnLongScalar);

    // long scalar/scalar IF
    children1.set(1, new ExprNodeConstantDesc(1L));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongScalarLongScalar);

    // long scalar/column IF
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongScalarLongColumn);

    // test for double type
    col2Expr = new  ExprNodeColumnDesc(Double.class, "col2", "table", false);
    col3Expr = new  ExprNodeColumnDesc(Double.class, "col3", "table", false);

    // double column/column IF
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleColumnDoubleColumn);

    // double column/scalar IF
    children1.set(2, new ExprNodeConstantDesc(1D));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleColumnDoubleScalar);

    // double scalar/scalar IF
    children1.set(1, new ExprNodeConstantDesc(1D));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleScalarDoubleScalar);

    // double scalar/column IF
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleScalarDoubleColumn);

    // double scalar/long column IF
    children1.set(2, new  ExprNodeColumnDesc(Long.class, "col3", "table", false));
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprDoubleScalarLongColumn);

    // Additional combinations of (long,double)X(column,scalar) for each of the second
    // and third arguments are omitted. We have coverage of all the source templates
    // already.

    // test for timestamp type
    col2Expr = new  ExprNodeColumnDesc(Timestamp.class, "col2", "table", false);
    col3Expr = new  ExprNodeColumnDesc(Timestamp.class, "col3", "table", false);

    // timestamp column/column IF
    children1.set(1, col2Expr);
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(ve instanceof IfExprLongColumnLongColumn);

    // timestamp column/scalar IF where scalar is really a CAST of a constant to timestamp.
    ExprNodeGenericFuncDesc f = new ExprNodeGenericFuncDesc();
    f.setGenericUDF(new GenericUDFTimestamp());
    f.setTypeInfo(TypeInfoFactory.timestampTypeInfo);
    List<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
    f.setChildren(children2);
    children2.add(new ExprNodeConstantDesc("2013-11-05 00:00:00.000"));
    children1.set(2, f);
    ve = vc.getVectorExpression(exprDesc);

    // We check for two different classes below because initially the result
    // is IfExprLongColumnLongColumn but in the future if the system is enhanced
    // with constant folding then the result will be IfExprLongColumnLongScalar.
    assertTrue(IfExprLongColumnLongColumn.class == ve.getClass()
               || IfExprLongColumnLongScalar.class == ve.getClass());

    // timestamp scalar/scalar
    children1.set(1, f);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(IfExprLongColumnLongColumn.class == ve.getClass()
        || IfExprLongScalarLongScalar.class == ve.getClass());

    // timestamp scalar/column
    children1.set(2, col3Expr);
    ve = vc.getVectorExpression(exprDesc);
    assertTrue(IfExprLongColumnLongColumn.class == ve.getClass()
        || IfExprLongScalarLongColumn.class == ve.getClass());

    // test for boolean type
    col2Expr = new  ExprNodeColumnDesc(Boolean.class, "col2", "table", false);
    col3Expr = new  ExprNodeColumnDesc(Boolean.class, "col3", "table", false);
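
The IF expression itself is assembled before the snippet shown above. A plausible sketch, assuming GenericUDFIf with a boolean predicate column and long branch columns (all names are assumptions):

    // Hypothetical construction of IF(col1, col2, col3) reused throughout the test above.
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Boolean.class, "col1", "table", false);
    ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Long.class, "col2", "table", false);
    ExprNodeColumnDesc col3Expr = new ExprNodeColumnDesc(Long.class, "col3", "table", false);

    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
    children1.add(col1Expr);
    children1.add(col2Expr);
    children1.add(col3Expr);

    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc();
    exprDesc.setGenericUDF(new GenericUDFIf());
    exprDesc.setChildren(children1);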

Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

    // which are going to return values from the input batch vector expressions
    List<ExprNodeEvaluator> vectorNodeEvaluators = new ArrayList<ExprNodeEvaluator>(bigTableExpressions.size());

    for(int i=0; i<bigTableExpressions.size(); ++i) {
      ExprNodeDesc desc = bigTableExpressions.get(i);
      VectorExpression vectorExpr = bigTableValueExpressions[i];

      // This is a vectorized aware evaluator
      ExprNodeEvaluator eval = new ExprNodeEvaluator<ExprNodeDesc>(desc) {
        int columnIndex;
        int writerIndex;

        public ExprNodeEvaluator initVectorExpr(int columnIndex, int writerIndex) {
          this.columnIndex = columnIndex;
          this.writerIndex = writerIndex;
          return this;
        }

        @Override
        public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
          throw new HiveException("should never reach here");
        }

        @Override
        protected Object _evaluate(Object row, int version) throws HiveException {
          VectorizedRowBatch inBatch = (VectorizedRowBatch) row;
          int rowIndex = inBatch.selectedInUse ? inBatch.selected[batchIndex] : batchIndex;
          return valueWriters[writerIndex].writeValue(inBatch.cols[columnIndex], rowIndex);
        }
      }.initVectorExpr(vectorExpr.getOutputColumn(), i);
      vectorNodeEvaluators.add(eval);
    }
    // Now replace the old evaluators with our own
    joinValues[posBigTable] = vectorNodeEvaluators;
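
A hedged sketch of how these wrapper evaluators are then driven (not from the original source): batchIndex is assumed to be a field of the enclosing operator that tracks the current row within the batch.

    // Illustrative only: advance batchIndex through the current VectorizedRowBatch
    // and pull each big-table value through the vectorized-aware evaluators.
    for (batchIndex = 0; batchIndex < batch.size; ++batchIndex) {
      for (ExprNodeEvaluator eval : vectorNodeEvaluators) {
        Object value = eval.evaluate(batch);   // reads directly from the column vector
      }
    }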

Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

          }
        });   

    for(int i=0; i<bigTableExpressions.size(); ++i) {
      ExprNodeDesc desc = bigTableExpressions.get(i);
      VectorExpression vectorExpr = bigTableValueExpressions[i];

      // This is a vectorized aware evaluator
      ExprNodeEvaluator eval = new ExprNodeEvaluator<ExprNodeDesc>(desc) {
        int columnIndex;
        int writerIndex;

        public ExprNodeEvaluator initVectorExpr(int columnIndex, int writerIndex) {
          this.columnIndex = columnIndex;
          this.writerIndex = writerIndex;
          return this;
        }

        @Override
        public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
          throw new HiveException("should never reach here");
        }

        @Override
        protected Object _evaluate(Object row, int version) throws HiveException {
          VectorizedRowBatch inBatch = (VectorizedRowBatch) row;
          int rowIndex = inBatch.selectedInUse ? inBatch.selected[batchIndex] : batchIndex;
          return valueWriters[writerIndex].writeValue(inBatch.cols[columnIndex], rowIndex);
        }
      }.initVectorExpr(vectorExpr.getOutputColumn(), i);
      vectorNodeEvaluators.add(eval);
    }
    // Now replace the old evaluators with our own
    joinValues[posBigTable] = vectorNodeEvaluators;
   

Examples of org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression

    this.conf = (SelectDesc) conf;
    List<ExprNodeDesc> colList = this.conf.getColList();
    vExpressions = new VectorExpression[colList.size()];
    for (int i = 0; i < colList.size(); i++) {
      ExprNodeDesc expr = colList.get(i);
      VectorExpression ve = vContext.getVectorExpression(expr);
      vExpressions[i] = ve;
    }

    /**
     * Create a new vectorization context for the new projection; the same
     * output column manager must be inherited to track the scratch columns.
     */
    vOutContext = new VectorizationContext(vContext);

    // Set a fileKey, although this operator doesn't use it.
    vOutContext.setFileKey(vContext.getFileKey() + "/_SELECT_");

    vOutContext.resetProjectionColumns();
    for (int i=0; i < colList.size(); ++i) {
      String columnName = this.conf.getOutputColumnNames().get(i);
      VectorExpression ve = vExpressions[i];
      vOutContext.addProjectionColumn(columnName,
              ve.getOutputColumn());
    }
  }
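
For context, at run time each of these projection expressions is applied to the incoming batch; a minimal sketch of that step (not the original processOp code; batch is assumed to be the current VectorizedRowBatch):

    // Illustrative only: evaluate every projection expression against the batch.
    for (VectorExpression expr : vExpressions) {
      expr.evaluate(batch);
    }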