Examples of FilterDesc


Examples of org.apache.hadoop.hive.ql.plan.filterDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      FilterOperator filOp       = (FilterOperator)nd;
      filterDesc     filOpDesc   = filOp.getConf();
      sampleDesc     sampleDescr = filOpDesc.getSampleDescr();

      // nothing to prune unless this filter carries a sampling descriptor
      // that was created with input pruning enabled
      if ((sampleDescr == null) || !sampleDescr.getInputPruning())
        return null;

      assert stack.size() == 3;
View Full Code Here
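
The processor above bails out unless the filter was built with a sampling descriptor that has input pruning enabled. Below is a minimal sketch of how such a filterDesc is assembled, reusing only the constructors that appear in these examples; the predicate, numerator, denominator, and bucket-column list are placeholder parameters, not code from any one project.

    import java.util.List;

    import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.filterDesc;
    import org.apache.hadoop.hive.ql.plan.sampleDesc;

    public class SamplingFilterSketch {
      // Builds a filterDesc the processor above would accept:
      // getSampleDescr() != null and getInputPruning() == true.
      public static filterDesc makeSamplingFilter(exprNodeDesc samplePred,
          int numerator, int denominator, List<String> bucketCols) {
        // 'true' in both places marks this as a sampling predicate, matching
        // the new filterDesc(samplePredicate, true, ...) calls further down.
        sampleDesc sd = new sampleDesc(numerator, denominator, bucketCols, true);
        return new filterDesc(samplePred, true, sd);
      }
    }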

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

    // add new filter op
    List<Operator<? extends Serializable>> originalChildren = op.getChildOperators();
    op.setChildOperators(null);
    Operator<filterDesc> output =
      OperatorFactory.getAndMakeChild(new filterDesc(condn, false),
                                      new RowSchema(inputRR.getColumnInfos()),
                                      op);
    output.setChildOperators(originalChildren);
    for (Operator<? extends Serializable> ch : originalChildren) {
      List<Operator<? extends Serializable>> parentOperators = ch.getParentOperators();
View Full Code Here
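
The excerpt stops inside the rewiring loop: each former child still lists op as a parent and presumably must be repointed at the new filter. A hedged sketch of the remaining step, written under that assumption rather than taken from the project's code:

    // Assumed completion of the loop above: splice 'output' in place of 'op'
    // in every former child's parent list.
    for (Operator<? extends Serializable> ch : originalChildren) {
      List<Operator<? extends Serializable>> parentOperators = ch.getParentOperators();
      int pos = parentOperators.indexOf(op);  // where the old parent sat
      parentOperators.set(pos, output);       // repoint the child at the new filter
    }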

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

      exprNodeDesc zero = new exprNodeConstantDesc("0");
      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(">", col2, col1);
      exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("and", func1, func2);
      assert(func3 != null);
      filterDesc filterCtx = new filterDesc(func3, false);

      // Configuration
      Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
      op.setConf(filterCtx);
View Full Code Here
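
Two construction styles appear across these examples: the detached form above (OperatorFactory.get followed by setConf) and the wired form used in the planner snippets (OperatorFactory.getAndMakeChild, which configures the operator and attaches it under a parent in one call). A sketch of the wired equivalent of the code above; parentOp and inputRR are placeholders:

    // One-step alternative: configure the filter and hang it under
    // 'parentOp' in a single call.
    Operator<filterDesc> op =
        OperatorFactory.getAndMakeChild(new filterDesc(func3, false),
                                        new RowSchema(inputRR.getColumnInfos()),
                                        parentOp);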

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

    OpParseContext inputCtx = opParseCtx.get(input);
    RowResolver inputRR = inputCtx.getRR();
    Operator output = putOpInsertMap(
      OperatorFactory.getAndMakeChild(
        new filterDesc(genExprNodeDesc(condn, inputRR), false),
          new RowSchema(inputRR.getColumnInfos()), input), inputRR);

    LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: " + inputRR.toString());
    return output;
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

        // input pruning is enough; add the filter for the optimizer to use it later
        LOG.info("No need for sample filter");
        exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null);
        tableOp = OperatorFactory.getAndMakeChild(
          new filterDesc(samplePredicate, true, new sampleDesc(ts.getNumerator(), ts.getDenominator(), tabBucketCols, true)),
          top);
      }
      else {
        // need to add filter
        // create tableOp to be filterDesc and set as child to 'top'
        LOG.info("Need sample filter");
        exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null);
        tableOp = OperatorFactory.getAndMakeChild(
            new filterDesc(samplePredicate, true),
            top);
      }
    }
    else {
      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
      if (testMode) {
        String tabName = tab.getName();

        // has the user explicitly asked not to sample this table
        String   unSampleTblList = conf.getVar(HiveConf.ConfVars.HIVETESTMODENOSAMPLE);
        String[] unSampleTbls    = unSampleTblList.split(",");
        boolean unsample = false;
        for (String unSampleTbl : unSampleTbls)
          if (tabName.equalsIgnoreCase(unSampleTbl))
            unsample = true;

        if (!unsample) {
          int numBuckets = tab.getNumBuckets();

          // If the input table is bucketed, choose the first bucket
          if (numBuckets > 0) {
            TableSample tsSample = new TableSample(1, numBuckets);
            tsSample.setInputPruning(true);
            qb.getParseInfo().setTabSample(alias, tsSample);
            exprNodeDesc samplePred = genSamplePredicate(tsSample, tab.getBucketCols(), true, alias, rwsch, qb.getMetaData(), null);
            tableOp = OperatorFactory.getAndMakeChild(
              new filterDesc(samplePred, true,
                             new sampleDesc(tsSample.getNumerator(), tsSample.getDenominator(), tab.getBucketCols(), true)),
              top);
            LOG.info("No need for sample filter");
          }
          // The table is not bucketed, add a dummy filter :: rand()
          else {
            int freq = conf.getIntVar(HiveConf.ConfVars.HIVETESTMODESAMPLEFREQ);
            TableSample tsSample = new TableSample(1, freq);
            tsSample.setInputPruning(false);
            qb.getParseInfo().setTabSample(alias, tsSample);
            LOG.info("Need sample filter");
            exprNodeDesc randFunc = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("rand", new exprNodeConstantDesc(Integer.valueOf(460476415)));
            exprNodeDesc samplePred = genSamplePredicate(tsSample, null, false, alias, rwsch, qb.getMetaData(), randFunc);
            tableOp = OperatorFactory.getAndMakeChild(new filterDesc(samplePred, true), top);
          }
        }
      }
    }
View Full Code Here
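
genSamplePredicate itself is not shown here, but for TABLESAMPLE(BUCKET n OUT OF d) the predicate it builds plausibly has the shape ((hash(bucket columns) & Integer.MAX_VALUE) % d) == n - 1, so only rows hashing into the sampled bucket pass the filter. A hedged sketch using the same getFuncExprNodeDesc helper seen in these snippets; the column expression bucketColExpr and the UDF names "hash", "&", and "%" are assumptions about this Hive version:

    // Hedged sketch of a bucket-sampling predicate:
    //   ((hash(bucketCol) & Integer.MAX_VALUE) % denominator) == numerator - 1
    exprNodeDesc hashExpr = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("hash", bucketColExpr);
    exprNodeDesc nonNegative = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("&", hashExpr,
            new exprNodeConstantDesc(Integer.valueOf(Integer.MAX_VALUE)));
    exprNodeDesc bucketId = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("%", nonNegative,
            new exprNodeConstantDesc(Integer.valueOf(ts.getDenominator())));
    exprNodeDesc samplePred = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("==", bucketId,
            new exprNodeConstantDesc(Integer.valueOf(ts.getNumerator() - 1)));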

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

    exprNodeDesc desc = new exprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo,
        FunctionRegistry.getFunctionInfo("<").getGenericUDF(),
        children3);
   
    return new filterDesc(desc, false);
  }
View Full Code Here
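
children3 is built before this excerpt begins; presumably it holds the two operands of the < comparison. A hypothetical setup, where the column names, the table alias, the double type, and the four-argument exprNodeColumnDesc constructor are all assumptions:

    // Hypothetical operands for the '<' comparison above.
    List<exprNodeDesc> children3 = new ArrayList<exprNodeDesc>();
    children3.add(new exprNodeColumnDesc(TypeInfoFactory.doubleTypeInfo, "col1", "a", false));
    children3.add(new exprNodeColumnDesc(TypeInfoFactory.doubleTypeInfo, "col2", "a", false));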

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

        // TODO sample predicate is not needed, but we are adding it anyway since
        // input pruning is broken for subqueries. will remove this once we move
        // compilation of sampling to use the operator tree
        exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null);
        tableOp = OperatorFactory.getAndMakeChild(
            new filterDesc(samplePredicate, true),
            top);
      }
      else {
        // need to add filter
        // create tableOp to be filterDesc and set as child to 'top'
        LOG.info("Need sample filter");
        exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null);
        tableOp = OperatorFactory.getAndMakeChild(
            new filterDesc(samplePredicate, true),
            top);
      }
    }
    else {
      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
      if (testMode) {
        String tabName = tab.getName();
       
        // has the user explicitly asked not to sample this table
        String   unSampleTblList = conf.getVar(HiveConf.ConfVars.HIVETESTMODENOSAMPLE);
        String[] unSampleTbls    = unSampleTblList.split(",");
        boolean unsample = false;
        for (String unSampleTbl : unSampleTbls)
          if (tabName.equalsIgnoreCase(unSampleTbl))
            unsample = true;
       
        if (!unsample) {
          int numBuckets = tab.getNumBuckets();
       
          // If the input table is bucketed, choose the first bucket
          if (numBuckets > 0) {
            TableSample tsSample = new TableSample(1, numBuckets);
            tsSample.setInputPruning(true);
            qb.getParseInfo().setTabSample(alias, tsSample);
            LOG.info("No need for sample filter");
          }
          // The table is not bucketed, add a dummy filter :: rand()
          else {
            int freq = conf.getIntVar(HiveConf.ConfVars.HIVETESTMODESAMPLEFREQ);
            TableSample tsSample = new TableSample(1, freq);
            tsSample.setInputPruning(false);
            qb.getParseInfo().setTabSample(alias, tsSample);
            LOG.info("Need sample filter");
            exprNodeDesc randFunc = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("rand", new exprNodeConstantDesc(Integer.valueOf(460476415)));
            exprNodeDesc samplePred = genSamplePredicate(tsSample, null, false, alias, rwsch, qb.getMetaData(), randFunc);
            tableOp = OperatorFactory.getAndMakeChild(new filterDesc(samplePred, true), top);
          }
        }
      }
    }
   
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

    // (opening line reconstructed; this is the older, reflection-based API,
    // cf. the exprNodeGenericFuncDesc variant above)
    exprNodeDesc desc = new exprNodeFuncDesc(
        TypeInfoFactory.booleanTypeInfo,
        FunctionRegistry.getUDFClass("<"),
        FunctionRegistry.getUDFMethod("<", TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.doubleTypeInfo),
        children3);
   
    return new filterDesc(desc, false);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.filterDesc

      exprNodeDesc zero = new exprNodeConstantDesc("0");
      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(">", col2, col1);
      exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&&", func1, func2);
      assert(func3 != null);
      filterDesc filterCtx = new filterDesc(func3, false);

      // Configuration
      Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
      op.setConf(filterCtx);
View Full Code Here