Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

   */
  public static class ColumnPrunerPTFProc implements NodeProcessor {
    /**
     * Column-pruning rule for a {@code PTFOperator}.
     *
     * For windowing invocations it rebuilds the operator's row resolver and
     * schema signature from the columns its child actually needs; for general
     * PTF chains it falls back to the default processor (see inline comment).
     *
     * @param nd          the PTFOperator node being visited
     * @param stack       operator stack maintained by the graph walker
     * @param ctx         expected to be a {@code ColumnPrunerProcCtx}
     * @param nodeOutputs outputs of previously processed nodes (unused here)
     * @return {@code null} after pruning, or the default processor's result
     * @throws SemanticException propagated from the default processor
     */
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {

      PTFOperator op = (PTFOperator) nd;
      PTFDesc conf = op.getConf();
      //Since we cannot know what columns will be needed by a PTF chain,
      //we do not prune columns on PTFOperator for PTF chains.
      if (!conf.forWindowing()) {
        return getDefaultProc().process(nd, stack, ctx, nodeOutputs);
      }

      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
      // sig starts empty; buildPrunedRR below fills it in as a side effect,
      // and it becomes the operator's new schema signature at the end.
      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();

      // Columns required by the (single) child operator drive the pruning.
      List<String> prunedCols = cppCtx.getPrunedColList(op.getChildOperators().get(0));
      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
      prunedCols = new ArrayList<String>(prunedCols);
      // presumably mutates prunedCols in place to account for the window
      // function definition — hence the defensive copy above; TODO confirm.
      prunedColumnsList(prunedCols, def);
      RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
      RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
      // Record what this operator needs from ITS parent, and install the
      // pruned row resolver / schema on the operator itself.
      cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
      cppCtx.getOpToParseCtxMap().get(op).setRowResolver(newRR);
      op.getSchema().setSignature(sig);
      return null;
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

         *   by the PTFOp.
         */

        assert parentOperators.size() == 1;

        PTFOperator ptfOp = (PTFOperator) childOperators.get(0).getChildOperators().get(0);
        List<String> childCols = cppCtx.getPrunedColList(ptfOp);
        boolean[] flags = new boolean[conf.getValueCols().size()];
        for (int i = 0; i < flags.length; i++) {
          flags[i] = false;
        }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

  public static class ColumnPrunerPTFProc implements NodeProcessor {
    /**
     * Column-pruning rule for a {@code PTFOperator}.
     *
     * Windowing invocations get a real pruned row resolver/schema; for PTF
     * chains (where the needed columns cannot be known) this variant instead
     * records the parent's full column list as "pruned", i.e. prunes nothing.
     *
     * @param nd          the PTFOperator node being visited
     * @param stack       operator stack maintained by the graph walker
     * @param ctx         expected to be a {@code ColumnPrunerProcCtx}
     * @param nodeOutputs outputs of previously processed nodes (unused here)
     * @return always {@code null}
     * @throws SemanticException declared for the NodeProcessor contract
     */
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {

      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      PTFOperator op = (PTFOperator) nd;
      PTFDesc conf = op.getConf();
      //Since we cannot know what columns will be needed by a PTF chain,
      //we do not prune columns on PTFOperator for PTF chains.
      if (!conf.forWindowing()) {
       
        // No pruning: publish every column of the (single) parent's row
        // schema as this operator's required-column list.
        Operator<? extends OperatorDesc> parent = op.getParentOperators().get(0);
        RowResolver parentRR = cppCtx.getParseContext().getOpParseCtx().get(parent).getRowResolver();
        List<ColumnInfo> sig = parentRR.getRowSchema().getSignature();
        List<String> colList = new ArrayList<String>();
        for(ColumnInfo cI : sig) {
          colList.add(cI.getInternalName());
        }
        cppCtx.getPrunedColLists().put(op, colList);
        return null;
      }

      WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
      // sig starts empty; buildPrunedRR below fills it in as a side effect,
      // and it becomes the operator's new schema signature at the end.
      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();

      // Columns required by the (single) child operator drive the pruning.
      List<String> prunedCols = cppCtx.getPrunedColList(op.getChildOperators().get(0));
      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
      prunedCols = new ArrayList<String>(prunedCols);
      // presumably mutates prunedCols in place to account for the window
      // function definition — hence the defensive copy above; TODO confirm.
      prunedColumnsList(prunedCols, def);
      RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
      RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
      // Record what this operator needs from ITS parent, and install the
      // pruned row resolver / schema on the operator itself.
      cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
      cppCtx.getOpToParseCtxMap().get(op).setRowResolver(newRR);
      op.getSchema().setSignature(sig);
      return null;
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      LOG.info("Processing for " + nd.getName() + "("
          + ((Operator) nd).getIdentifier() + ")");
      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
      PTFOperator ptfOp = (PTFOperator) nd;

      pushRankLimit(ptfOp, owi);
      return super.process(nd, stack, procCtx, nodeOutputs);
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

    /**
     * Column-pruning rule for a {@code PTFOperator}.
     *
     * For windowing invocations it rebuilds the operator's row resolver and
     * schema signature from the columns its child actually needs; for general
     * PTF chains it defers to the superclass (see inline comment).
     *
     * @param nd          the PTFOperator node being visited
     * @param stack       operator stack maintained by the graph walker
     * @param ctx         expected to be a {@code ColumnPrunerProcCtx}
     * @param nodeOutputs outputs of previously processed nodes (unused here)
     * @return {@code null} after pruning, or the superclass's result
     * @throws SemanticException propagated from the superclass
     */
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {

      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      PTFOperator op = (PTFOperator) nd;
      PTFDesc conf = op.getConf();
      //Since we cannot know what columns will be needed by a PTF chain,
      //we do not prune columns on PTFOperator for PTF chains.
      if (!conf.forWindowing()) {
        return super.process(nd, stack, cppCtx, nodeOutputs);
      }

      WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
      // sig starts empty; buildPrunedRR below fills it in as a side effect,
      // and it becomes the operator's new schema signature at the end.
      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();

      // Columns required by the (single) child operator drive the pruning.
      List<String> prunedCols = cppCtx.getPrunedColList(op.getChildOperators().get(0));
      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
      prunedCols = new ArrayList<String>(prunedCols);
      // presumably mutates prunedCols in place to account for the window
      // function definition — hence the defensive copy above; TODO confirm.
      prunedColumnsList(prunedCols, def);
      RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
      RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
      // Record what this operator needs from ITS parent, and install the
      // pruned row resolver / schema on the operator itself.
      cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
      cppCtx.getOpToParseCtxMap().get(op).setRowResolver(newRR);
      op.getSchema().setSignature(sig);
      return null;
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

  public static class ColumnPrunerPTFProc implements NodeProcessor {
    /**
     * Column-pruning rule for a {@code PTFOperator}.
     *
     * Windowing invocations get a real pruned row resolver/schema; for PTF
     * chains (where the needed columns cannot be known) this variant instead
     * records the parent's full column list as "pruned", i.e. prunes nothing.
     *
     * @param nd          the PTFOperator node being visited
     * @param stack       operator stack maintained by the graph walker
     * @param ctx         expected to be a {@code ColumnPrunerProcCtx}
     * @param nodeOutputs outputs of previously processed nodes (unused here)
     * @return always {@code null}
     * @throws SemanticException declared for the NodeProcessor contract
     */
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {

      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      PTFOperator op = (PTFOperator) nd;
      PTFDesc conf = op.getConf();
      //Since we cannot know what columns will be needed by a PTF chain,
      //we do not prune columns on PTFOperator for PTF chains.
      if (!conf.forWindowing()) {
       
        // No pruning: publish every column of the (single) parent's row
        // schema as this operator's required-column list.
        Operator<? extends OperatorDesc> parent = op.getParentOperators().get(0);
        RowResolver parentRR = cppCtx.getParseContext().getOpParseCtx().get(parent).getRowResolver();
        List<ColumnInfo> sig = parentRR.getRowSchema().getSignature();
        List<String> colList = new ArrayList<String>();
        for(ColumnInfo cI : sig) {
          colList.add(cI.getInternalName());
        }
        cppCtx.getPrunedColLists().put(op, colList);
        return null;
      }

      WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
      // sig starts empty; buildPrunedRR below fills it in as a side effect,
      // and it becomes the operator's new schema signature at the end.
      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();

      // Columns required by the (single) child operator drive the pruning.
      List<String> prunedCols = cppCtx.getPrunedColList(op.getChildOperators().get(0));
      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
      prunedCols = new ArrayList<String>(prunedCols);
      // presumably mutates prunedCols in place to account for the window
      // function definition — hence the defensive copy above; TODO confirm.
      prunedColumnsList(prunedCols, def);
      RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
      RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
      // Record what this operator needs from ITS parent, and install the
      // pruned row resolver / schema on the operator itself.
      cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
      cppCtx.getOpToParseCtxMap().get(op).setRowResolver(newRR);
      op.getSchema().setSignature(sig);
      return null;
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

   */
  public static class ColumnPrunerPTFProc implements NodeProcessor {
    /**
     * Column-pruning rule for a {@code PTFOperator}.
     *
     * For windowing invocations it rebuilds the operator's row resolver and
     * schema signature from the columns its child actually needs; for general
     * PTF chains it falls back to the default processor (see inline comment).
     *
     * @param nd          the PTFOperator node being visited
     * @param stack       operator stack maintained by the graph walker
     * @param ctx         expected to be a {@code ColumnPrunerProcCtx}
     * @param nodeOutputs outputs of previously processed nodes (unused here)
     * @return {@code null} after pruning, or the default processor's result
     * @throws SemanticException propagated from the default processor
     */
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {

      PTFOperator op = (PTFOperator) nd;
      PTFDesc conf = op.getConf();
      //Since we cannot know what columns will be needed by a PTF chain,
      //we do not prune columns on PTFOperator for PTF chains.
      if (!conf.forWindowing()) {
        return getDefaultProc().process(nd, stack, ctx, nodeOutputs);
      }

      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
      // sig starts empty; buildPrunedRR below fills it in as a side effect,
      // and it becomes the operator's new schema signature at the end.
      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();

      // Columns required by the (single) child operator drive the pruning.
      List<String> prunedCols = cppCtx.getPrunedColList(op.getChildOperators().get(0));
      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
      prunedCols = new ArrayList<String>(prunedCols);
      // presumably mutates prunedCols in place to account for the window
      // function definition — hence the defensive copy above; TODO confirm.
      prunedColumnsList(prunedCols, def);
      RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
      RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
      // Record what this operator needs from ITS parent, and install the
      // pruned row resolver / schema on the operator itself.
      cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
      cppCtx.getOpToParseCtxMap().get(op).setRowResolver(newRR);
      op.getSchema().setSignature(sig);
      return null;
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

         *   by the PTFOp.
         */

        assert parentOperators.size() == 1;

        PTFOperator ptfOp = (PTFOperator) childOperators.get(0).getChildOperators().get(0);
        List<String> childCols = cppCtx.getPrunedColList(ptfOp);
        boolean[] flags = new boolean[conf.getValueCols().size()];
        for (int i = 0; i < flags.length; i++) {
          flags[i] = false;
        }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

         *   by the PTFOp.
         */

        assert parentOperators.size() == 1;

        PTFOperator ptfOp = (PTFOperator) childOperators.get(0).getChildOperators().get(0);
        List<String> childCols = cppCtx.getPrunedColList(ptfOp);
        boolean[] flags = new boolean[conf.getValueCols().size()];
        for (int i = 0; i < flags.length; i++) {
          flags[i] = false;
        }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.PTFOperator

   */
  public static class ColumnPrunerPTFProc implements NodeProcessor {
    /**
     * Column-pruning rule for a {@code PTFOperator}.
     *
     * For windowing invocations it rebuilds the operator's row resolver and
     * schema signature from the columns its child actually needs; for general
     * PTF chains it falls back to the default processor (see inline comment).
     *
     * @param nd          the PTFOperator node being visited
     * @param stack       operator stack maintained by the graph walker
     * @param ctx         expected to be a {@code ColumnPrunerProcCtx}
     * @param nodeOutputs outputs of previously processed nodes (unused here)
     * @return {@code null} after pruning, or the default processor's result
     * @throws SemanticException propagated from the default processor
     */
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {

      PTFOperator op = (PTFOperator) nd;
      PTFDesc conf = op.getConf();
      //Since we cannot know what columns will be needed by a PTF chain,
      //we do not prune columns on PTFOperator for PTF chains.
      if (!conf.forWindowing()) {
        return getDefaultProc().process(nd, stack, ctx, nodeOutputs);
      }

      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
      // sig starts empty; buildPrunedRR below fills it in as a side effect,
      // and it becomes the operator's new schema signature at the end.
      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();

      // Columns required by the (single) child operator drive the pruning.
      List<String> prunedCols = cppCtx.getPrunedColList(op.getChildOperators().get(0));
      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
      prunedCols = new ArrayList<String>(prunedCols);
      // presumably mutates prunedCols in place to account for the window
      // function definition — hence the defensive copy above; TODO confirm.
      prunedColumnsList(prunedCols, def);
      RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
      RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
      // Record what this operator needs from ITS parent, and install the
      // pruned row resolver / schema on the operator itself.
      cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
      cppCtx.getOpToParseCtxMap().get(op).setRowResolver(newRR);
      op.getSchema().setSignature(sig);
      return null;
    }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.