Package org.apache.hadoop.hive.ql.exec

Examples of org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator


      // Assert that the stack is not empty; this should never
      // be called for leaf nodes.
      assert(!stack.isEmpty());

      // LineageCtx
      LineageCtx lCtx = (LineageCtx) procCtx;
      LateralViewJoinOperator op = (LateralViewJoinOperator)nd;
      boolean isUdtfPath = true;
      Operator<? extends OperatorDesc> inpOp = getParent(stack);
      ArrayList<ColumnInfo> cols = inpOp.getSchema().getSignature();

      if (inpOp instanceof SelectOperator) {
        isUdtfPath = false;
      }

      // Dirty hack!!
      // For the select path the columns are the ones at the end of the
      // current operator's schema, and for the UDTF path the columns are
      // at the beginning of the operator's schema.
      ArrayList<ColumnInfo> out_cols = op.getSchema().getSignature();
      int out_cols_size = out_cols.size();
      int cols_size = cols.size();
      if (isUdtfPath) {
        int cnt = 0;
        while (cnt < cols_size) {
View Full Code Here
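The excerpt stops right after the while loop opens; the elided body (and a matching branch for the select path) records, for each parent column, which LateralViewJoinOperator output column it feeds in the lineage index. Below is a standalone sketch of just the index pairing, with plain strings in place of ColumnInfo and a println in place of the LineageCtx bookkeeping; which branch starts at offset 0 is taken from the "Dirty hack" comment above, not from the elided code.

import java.util.Arrays;
import java.util.List;

// Sketch only: pairs a parent branch's columns with one contiguous slice of the
// LateralViewJoinOperator output schema. Plain strings stand in for ColumnInfo,
// and a println stands in for the lineage dependency bookkeeping the excerpt elides.
public class LvjLineageMappingSketch {
  static void mapBranch(List<String> outCols, List<String> branchCols, int offset) {
    for (int cnt = 0; cnt < branchCols.size(); cnt++) {
      System.out.println(branchCols.get(cnt) + " -> " + outCols.get(offset + cnt));
    }
  }

  public static void main(String[] args) {
    // Hypothetical four-column LVJ schema fed by two two-column branches.
    List<String> lvjSchema = Arrays.asList("_col0", "_col1", "_col2", "_col3");
    List<String> branchCols = Arrays.asList("x", "y");

    // Per the "Dirty hack" comment above (an assumption about the elided loop body):
    // one branch maps onto the beginning of the schema, the other onto the end.
    mapBranch(lvjSchema, branchCols, 0);                                    // beginning of schema
    mapBranch(lvjSchema, branchCols, lvjSchema.size() - branchCols.size()); // end of schema
  }
}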


   * The Node Processor for Column Pruning on Lateral View Join Operators.
   */
  public static class ColumnPrunerLateralViewJoinProc implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {
      LateralViewJoinOperator op = (LateralViewJoinOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      List<String> cols = cppCtx.genColLists(op);
      if (cols == null) {
        return null;
      }

      Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();
      // As columns go down the DAG, the LVJ will transform internal column
      // names from something like 'key' to '_col0'. Because of this, we need
      // to undo this transformation using the column expression map as the
      // column names propagate up the DAG.

      // this is SEL(*) cols + UDTF cols
      List<String> outputCols = op.getConf().getOutputInternalColNames();

      // Because we cannot currently prune columns from the UDTF branch, extract
      // columns from the SEL(*) branch only and append all UDTF branch columns to the result
      int numSelColumns = op.getConf().getNumSelColumns();

      List<String> colsAfterReplacement = new ArrayList<String>();
      ArrayList<String> newColNames = new ArrayList<String>();
      for (String col : cols) {
        int index = outputCols.indexOf(col);
        // indices below numSelColumns are the cols from the SEL(*) branch
        if (index >= 0 && index < numSelColumns) {
          ExprNodeDesc transformed = colExprMap.get(col);
          Utilities.mergeUniqElems(colsAfterReplacement, transformed.getCols());
          newColNames.add(col);
        }
      }
      // update number of columns from sel(*)
      op.getConf().setNumSelColumns(newColNames.size());

      // add all UDTF columns
      // the following SEL will do column pruning for the UDTF columns, so no SEL is added here
      newColNames.addAll(outputCols.subList(numSelColumns, outputCols.size()));
      op.getConf().setOutputInternalColNames(newColNames);

      cppCtx.getPrunedColLists().put(op, colsAfterReplacement);
      return null;
    }
View Full Code Here
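To make the index bookkeeping above concrete, here is a small self-contained walk-through of the same pruning rule with hypothetical internal names: only requested columns whose index in outputInternalColNames falls inside the SEL(*) block (index below numSelColumns) are kept and mapped back through colExprMap, numSelColumns shrinks to match, and the UDTF block is appended untouched. Plain collections stand in for the operator state, and a Map to source-column lists stands in for colExprMap and ExprNodeDesc.getCols().

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch only: hypothetical column names; plain collections in place of the LVJ desc.
public class LvjColumnPrunerSketch {
  public static void main(String[] args) {
    // outputInternalColNames: SEL(*) block first, then the UDTF block.
    List<String> outputCols = Arrays.asList("_col0", "_col1", "_col2", "_col3");
    int numSelColumns = 3; // _col0.._col2 come from the SEL(*) branch

    // Internal name -> source column(s), standing in for colExprMap / ExprNodeDesc.getCols().
    Map<String, List<String>> colExprMap = new HashMap<String, List<String>>();
    colExprMap.put("_col0", Arrays.asList("key"));
    colExprMap.put("_col1", Arrays.asList("value"));
    colExprMap.put("_col2", Arrays.asList("ds"));

    // Columns requested by the children (what genColLists would return).
    List<String> cols = Arrays.asList("_col1", "_col3");

    List<String> colsAfterReplacement = new ArrayList<String>();
    List<String> newColNames = new ArrayList<String>();
    for (String col : cols) {
      int index = outputCols.indexOf(col);
      if (index >= 0 && index < numSelColumns) { // only the SEL(*) block is prunable
        colsAfterReplacement.addAll(colExprMap.get(col));
        newColNames.add(col);
      }
    }
    int newNumSelColumns = newColNames.size(); // the SEL(*) block shrinks to the requested cols
    newColNames.addAll(outputCols.subList(numSelColumns, outputCols.size())); // keep every UDTF col

    System.out.println("newColNames = " + newColNames);                   // [_col1, _col3]
    System.out.println("numSelColumns = " + newNumSelColumns);            // 1
    System.out.println("colsAfterReplacement = " + colsAfterReplacement); // [value]
  }
}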

            return null;
          }
        }
      }

      LateralViewJoinOperator lvJoin = null;
      if (op.getConf().isSelStarNoCompute()) {
        assert op.getNumChild() == 1;
        Operator<? extends OperatorDesc> child = op.getChildOperators().get(0);
        if (child instanceof LateralViewJoinOperator) { // this SEL is SEL(*) for LV
View Full Code Here
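This fragment is the select-side half of the same pruning pass: a SELECT with isSelStarNoCompute() whose single child is a LateralViewJoinOperator is the SEL(*) branch of a lateral view and gets special handling. A small sketch of just that detection follows, using only accessor calls that appear in these excerpts; the helper class and method names are hypothetical, not part of Hive.

import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;

// Sketch only: a hypothetical helper bundling the inline check from the excerpt.
public final class LateralViewSelectCheck {
  private LateralViewSelectCheck() {
  }

  /** Returns the LVJ child if this SELECT is the SEL(*) branch of a lateral view, else null. */
  public static LateralViewJoinOperator asSelStarForLateralView(SelectOperator sel) {
    if (!sel.getConf().isSelStarNoCompute() || sel.getNumChild() != 1) {
      return null;
    }
    Operator<? extends OperatorDesc> child = sel.getChildOperators().get(0);
    return (child instanceof LateralViewJoinOperator) ? (LateralViewJoinOperator) child : null;
  }
}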


   * The Node Processor for Column Pruning on Lateral View Join Operators.
   */
  public static class ColumnPrunerLateralViewJoinProc implements NodeProcessor {
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {
      LateralViewJoinOperator op = (LateralViewJoinOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      List<String> cols = cppCtx.genColLists(op);

      Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();

      // As columns go down the DAG, the LVJ will transform internal column
      // names from something like 'key' to '_col0'. Because of this, we need
      // to undo this transformation using the column expression map as the
      // column names propagate up the DAG.
      List<String> colsAfterReplacement = new ArrayList<String>();
      for (String col : cols) {
        if (colExprMap.containsKey(col)) {
          ExprNodeDesc expr = colExprMap.get(col);
          colsAfterReplacement.addAll(expr.getCols());
        } else {
          colsAfterReplacement.add(col);
        }
      }
      // this is SEL(*) cols + UDTF cols
      List<String> outputCols = op.getConf().getOutputInternalColNames();
      if (outputCols.size() != cols.size()) {
        // Because we cannot currently prune columns from the UDTF branch, extract
        // columns from the SEL(*) branch only and append all UDTF branch columns to the result
        ArrayList<String> newColNames = new ArrayList<String>();
        for (String col : cols) {
          int index = outputCols.indexOf(col);
          // colExprMap.size() == size of cols from SEL(*) branch
          if (index >= 0 && index < colExprMap.size()) {
            newColNames.add(col);
          }
        }
        newColNames.addAll(outputCols.subList(colExprMap.size(), outputCols.size()));
        op.getConf().setOutputInternalColNames(newColNames);
      }

      cppCtx.getPrunedColLists().put(op,
          colsAfterReplacement);
      return null;
View Full Code Here
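This earlier revision of the same processor differs from the one above in two ways: it only rewrites outputInternalColNames when the requested list is shorter than the full output, and it uses colExprMap.size() rather than a stored numSelColumns as the boundary of the SEL(*) block. Its first loop, which undoes the internal renaming, is worth seeing in isolation: any internal name with an entry in colExprMap is expanded to its expression's source columns, everything else (the UDTF output) passes through. A plain-collections sketch with hypothetical names, where a Map to source-column lists stands in for colExprMap and ExprNodeDesc.getCols().

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch only: the "undo '_colN' back to its source columns" step from the excerpt above.
public class InternalNameReplacementSketch {
  static List<String> undoInternalNames(List<String> cols, Map<String, List<String>> colExprMap) {
    List<String> colsAfterReplacement = new ArrayList<String>();
    for (String col : cols) {
      if (colExprMap.containsKey(col)) {
        colsAfterReplacement.addAll(colExprMap.get(col)); // SEL(*) column: map back to its sources
      } else {
        colsAfterReplacement.add(col);                    // UDTF column: no mapping, pass through
      }
    }
    return colsAfterReplacement;
  }

  public static void main(String[] args) {
    Map<String, List<String>> colExprMap = new HashMap<String, List<String>>();
    colExprMap.put("_col0", Arrays.asList("key"));
    colExprMap.put("_col1", Arrays.asList("value"));

    // "_col2" has no entry, so it is treated as UDTF output and kept as-is.
    System.out.println(undoInternalNames(Arrays.asList("_col0", "_col2"), colExprMap)); // [key, _col2]
  }
}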

    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {
      SelectOperator op = (SelectOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;

      LateralViewJoinOperator lvJoin = null;
      if (op.getChildOperators() != null) {
        for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
          // If one of my children is a FileSink or Script, return all columns.
          // Without this break, a bug in ReduceSink to Extract edge column
          // pruning will manifest
View Full Code Here
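The guard at the top of this fragment carries the reasoning: if any child of the SELECT is a FileSink or Script operator, pruning is skipped and all columns are returned, because column pruning across the ReduceSink-to-Extract edge had a known bug. A sketch of just that check follows; the helper class and method names are hypothetical, and the "return all columns" action itself is elided in the excerpt, so only the detection is shown.

import java.util.List;

import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ScriptOperator;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;

// Sketch only: hypothetical helper mirroring the guard described in the comment above.
public final class SelectPrunerGuard {
  private SelectPrunerGuard() {
  }

  /** True if this SELECT feeds a FileSink or Script and should keep all of its columns. */
  public static boolean mustKeepAllColumns(SelectOperator op) {
    List<Operator<? extends OperatorDesc>> children = op.getChildOperators();
    if (children == null) {
      return false;
    }
    for (Operator<? extends OperatorDesc> child : children) {
      if (child instanceof FileSinkOperator || child instanceof ScriptOperator) {
        return true;
      }
    }
    return false;
  }
}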
