Examples of PTFPartition


Examples of org.apache.hadoop.hive.ql.exec.PTFPartition

  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException {
    ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
    PTFPartition iPart = pItr.getPartition();
    StructObjectInspector inputOI;
    inputOI = (StructObjectInspector) iPart.getOutputOI();

    WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
    Order order = wTFnDef.getOrder().getExpressions().get(0).getOrder();

    for(WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
      boolean processWindow = processWindow(wFn);
      pItr.reset();
      if ( !processWindow ) {
        GenericUDAFEvaluator fEval = wFn.getWFnEval();
        Object[] args = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()];
        AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
        while(pItr.hasNext()) {
          Object row = pItr.next();
          int i = 0;
          if ( wFn.getArgs() != null ) {
            for(PTFExpressionDef arg : wFn.getArgs()) {
              args[i++] = arg.getExprEvaluator().evaluate(row);
            }
          }
          fEval.aggregate(aggBuffer, args);
        }
        Object out = fEval.evaluate(aggBuffer);
        if ( !wFn.isPivotResult()) {
          out = new SameList(iPart.size(), out);
        }
        oColumns.add((List<?>)out);
      } else {
        oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart, order));
      }
    }

    /*
     * Output Columns in the following order
     * - the columns representing the output from Window Fns
     * - the input Rows columns
     */

    for(int i=0; i < iPart.size(); i++) {
      ArrayList oRow = new ArrayList();
      Object iRow = iPart.getAt(i);

      for(int j=0; j < oColumns.size(); j++) {
        oRow.add(oColumns.get(j).get(i));
      }

View Full Code Here
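The snippet above is cut off inside the row-assembly loop. Going by its own comment (window-function columns first, then the input row's columns) and by the otherwise unused inputOI and outP variables, a plausible completion copies the input row's struct fields and hands the combined row to the output partition. This is a hedged sketch, not the verbatim Hive source:

      // Hedged sketch of how the truncated loop plausibly continues.
      for (StructField f : inputOI.getAllStructFieldRefs()) {
        oRow.add(inputOI.getStructFieldData(iRow, f)); // input row's columns follow the window-fn columns
      }

      outP.append(oRow); // emit the combined row into the output PTFPartition
    }
  }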

Examples of org.apache.hadoop.hive.ql.exec.PTFPartition

 
  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException {
    ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
    PTFPartition iPart = pItr.getPartition();
    StructObjectInspector inputOI;
    inputOI = (StructObjectInspector) iPart.getOutputOI();

    WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
    Order order = wTFnDef.getOrder().getExpressions().get(0).getOrder();

    for(WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
      boolean processWindow = processWindow(wFn);
      pItr.reset();
      if ( !processWindow ) {
        Object out = evaluateWindowFunction(wFn, pItr);
        if ( !wFn.isPivotResult()) {
          out = new SameList(iPart.size(), out);
        }
        oColumns.add((List<?>)out);
      } else {
        oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart, order));
      }
    }

    /*
     * Output Columns in the following order
     * - the columns representing the output from Window Fns
     * - the input Rows columns
     */

    for(int i=0; i < iPart.size(); i++) {
      ArrayList oRow = new ArrayList();
      Object iRow = iPart.getAt(i);

      for(int j=0; j < oColumns.size(); j++) {
        oRow.add(oColumns.get(j).get(i));
      }

View Full Code Here
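This newer version of execute collapses the inline aggregation into a call to evaluateWindowFunction, whose body the page does not show. Judging from the inline loop in the first example above, the helper plausibly looks like the following sketch (the call is real, the modifiers and details here are assumptions):

  // Hedged sketch of evaluateWindowFunction, reconstructed from the inline
  // aggregation loop in the first example; not the verbatim Hive source.
  Object evaluateWindowFunction(WindowFunctionDef wFn, PTFPartitionIterator<Object> pItr)
      throws HiveException {
    GenericUDAFEvaluator fEval = wFn.getWFnEval();
    Object[] args = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()];
    AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
    while (pItr.hasNext()) {
      Object row = pItr.next();
      int i = 0;
      if (wFn.getArgs() != null) {
        for (PTFExpressionDef arg : wFn.getArgs()) {
          args[i++] = arg.getExprEvaluator().evaluate(row); // evaluate each argument on the current row
        }
      }
      fEval.aggregate(aggBuffer, args); // accumulate over the whole partition
    }
    return fEval.evaluate(aggBuffer); // one result per partition
  }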

Examples of org.apache.hadoop.hive.ql.exec.PTFPartition

  public Iterator<Object> iterator(PTFPartitionIterator<Object> pItr) throws HiveException {
    WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
    ArrayList<Object> output = new ArrayList<Object>();
    List<?>[] outputFromPivotFunctions = new List<?>[wTFnDef.getWindowFunctions().size()];
    ArrayList<Integer> wFnsWithWindows = new ArrayList<Integer>();
    PTFPartition iPart = pItr.getPartition();

    int i=0;
    for(WindowFunctionDef wFn : wTFnDef.getWindowFunctions()) {
      boolean processWindow = processWindow(wFn);
      pItr.reset();
      if ( !processWindow && !wFn.isPivotResult() ) {
        Object out = evaluateWindowFunction(wFn, pItr);
        output.add(out);
      } else if (wFn.isPivotResult()) {
        GenericUDAFEvaluator streamingEval = wFn.getWFnEval().getWindowingEvaluator(wFn.getWindowFrame());
        if ( streamingEval != null && streamingEval instanceof ISupportStreamingModeForWindowing ) {
          ISupportStreamingModeForWindowing strEval = (ISupportStreamingModeForWindowing) streamingEval;
          if ( strEval.getRowsRemainingAfterTerminate() == 0 ) {
            wFn.setWFnEval(streamingEval);
            if ( wFn.getOI() instanceof ListObjectInspector ) {
              ListObjectInspector listOI = (ListObjectInspector) wFn.getOI();
              wFn.setOI(listOI.getListElementObjectInspector());
            }
            output.add(null);
            wFnsWithWindows.add(i);
          } else {
            outputFromPivotFunctions[i] = (List) evaluateWindowFunction(wFn,
                pItr);
            output.add(null);
          }
        } else {
          outputFromPivotFunctions[i] = (List) evaluateWindowFunction(wFn, pItr);
          output.add(null);
        }
      } else {
        output.add(null);
        wFnsWithWindows.add(i);
      }
      i++;
    }

    i=0;
    for(i=0; i < iPart.getOutputOI().getAllStructFieldRefs().size(); i++) {
      output.add(null);
    }

    if ( wTFnDef.getRankLimit() != -1 ) {
      rnkLimitDef = new RankLimit(wTFnDef.getRankLimit(),
View Full Code Here
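The nested conditionals above decide, per window function, whether results can be produced in streaming mode instead of buffering the whole partition. The same test, factored into a small helper for readability (the helper name is made up for illustration):

  // Hedged sketch: the streaming-eligibility test from the iterator above,
  // factored out for readability; the helper name is hypothetical.
  static boolean canStreamWindowFunction(WindowFunctionDef wFn) throws HiveException {
    GenericUDAFEvaluator streamingEval =
        wFn.getWFnEval().getWindowingEvaluator(wFn.getWindowFrame());
    if (!(streamingEval instanceof ISupportStreamingModeForWindowing)) {
      return false; // evaluator cannot produce results incrementally
    }
    // Only functions that need no trailing rows after terminate can emit row by row.
    return ((ISupportStreamingModeForWindowing) streamingEval).getRowsRemainingAfterTerminate() == 0;
  }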

Examples of org.apache.hadoop.hive.ql.exec.PTFPartition

    WindowTableFunctionDef wFnDef = (WindowTableFunctionDef) getTableDef();
    PTFPartitionIterator<Object> pItr = iPart.iterator();
    PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, pItr);

    if ( outputPartition == null ) {
      outputPartition = new PTFPartition(getPartitionClass(),
          getPartitionMemSize(), wFnDef.getOutputFromWdwFnProcessing().getSerde(), OI);
    }
    else {
      outputPartition.reset();
    }
View Full Code Here
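The create-or-reset branch above lets one PTFPartition be reused across input partitions rather than reallocated each time. A hedged usage sketch of the rest of that life cycle, pieced together from the calls visible in these snippets (processRow is a hypothetical stand-in for the function's per-row work):

    // Hedged sketch of how the freshly created or reset outputPartition is
    // typically used next; processRow is hypothetical.
    while (pItr.hasNext()) {
      outputPartition.append(processRow(pItr.next())); // buffer each transformed row
    }

    // The buffered rows are read back through the same iterator API the input partition exposes.
    PTFPartitionIterator<Object> outItr = outputPartition.iterator();
    while (outItr.hasNext()) {
      Object oRow = outItr.next(); // hand oRow to the downstream operator
    }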

Examples of org.apache.hadoop.hive.ql.exec.PTFPartition

  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException
  {
    ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
    PTFPartition iPart = pItr.getPartition();
    StructObjectInspector inputOI;
    try {
      inputOI = (StructObjectInspector) iPart.getSerDe().getObjectInspector();
    } catch (SerDeException se) {
      throw new HiveException(se);
    }

    WindowTableFunctionDef wTFnDef = (WindowTableFunctionDef) getTableDef();
    Order order = wTFnDef.getOrder().getExpressions().get(0).getOrder();

    for(WindowFunctionDef wFn : wTFnDef.getWindowFunctions())
    {
      boolean processWindow = processWindow(wFn);
      pItr.reset();
      if ( !processWindow )
      {
        GenericUDAFEvaluator fEval = wFn.getWFnEval();
        Object[] args = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()];
        AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
        while(pItr.hasNext())
        {
          Object row = pItr.next();
          int i = 0;
          if ( wFn.getArgs() != null ) {
            for(PTFExpressionDef arg : wFn.getArgs())
            {
              args[i++] = arg.getExprEvaluator().evaluate(row);
            }
          }
          fEval.aggregate(aggBuffer, args);
        }
        Object out = fEval.evaluate(aggBuffer);
        if ( !wFn.isPivotResult())
        {
          out = new SameList(iPart.size(), out);
        }
        oColumns.add((List<?>)out);
      }
      else
      {
        oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart, order));
      }
    }

    /*
     * Output Columns in the following order
     * - the columns representing the output from Window Fns
     * - the input Rows columns
     */

    for(int i=0; i < iPart.size(); i++)
    {
      ArrayList oRow = new ArrayList();
      Object iRow = iPart.getAt(i);

      for(int j=0; j < oColumns.size(); j++)
      {
        oRow.add(oColumns.get(j).get(i));
      }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFPartition

  {
    PTFPartitionIterator<Object> pItr = iPart.iterator();
    PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, pItr);

    if ( outputPartition == null ) {
      outputPartition = new PTFPartition(getPartitionClass(),
          getPartitionMemSize(), tDef.getOutputShape().getSerde(), OI);
    }
    else {
      outputPartition.reset();
    }
View Full Code Here
