Package org.apache.hadoop.hive.ql.parse

Examples of org.apache.hadoop.hive.ql.parse.OpParseContext


      int pos = parentOperators.indexOf(op);
      assert pos != -1;
      parentOperators.remove(pos);
      parentOperators.add(pos, output); // add the new op at the old op's position
    }
    OpParseContext ctx = new OpParseContext(inputRR);
    owi.put(output, ctx);

    if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
        HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
      // remove the candidate filter ops
View Full Code Here
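
OpParseContext is a small wrapper the Hive compiler attaches to each operator to carry its RowResolver, the mapping from table and column aliases to internal ColumnInfo entries. A minimal sketch of the pattern the snippet above relies on: build a RowResolver, wrap it in an OpParseContext, and read it back. The alias and column names are made up for illustration; the constructor and getter are the ones shown throughout these examples.

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class OpParseContextSketch {
  public static void main(String[] args) {
    // Resolve "t.key" to the internal column "_col0" of type string.
    RowResolver rr = new RowResolver();
    rr.put("t", "key",
        new ColumnInfo("_col0", TypeInfoFactory.stringTypeInfo, "t", false));

    // The context simply carries the resolver for one operator.
    OpParseContext ctx = new OpParseContext(rr);
    System.out.println(ctx.getRowResolver().getColumnInfos());
  }
}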


   * @return map of ColumnInfo to ExprNodeDesc; the values must be either
   *         ExprNodeConstantDesc or ExprNodeNullDesc.
   */
  public Map<ColumnInfo, ExprNodeDesc> getPropagatedConstants(
      Operator<? extends Serializable> op) {
    Map<ColumnInfo, ExprNodeDesc> constants = new HashMap<ColumnInfo, ExprNodeDesc>();
    OpParseContext parseCtx = opToParseCtx.get(op);
    if (parseCtx == null) {
      return constants;
    }
    RowResolver rr = parseCtx.getRowResolver();
    LOG.debug("Getting constants of op:" + op + " with rr:" + rr);
   
    try {
      if (op.getParentOperators() == null) {
        return constants;
View Full Code Here

      throw new RuntimeException(e);
    }
  }

  public RowResolver getRowResolver(Operator<? extends Serializable> op) {
    OpParseContext parseCtx = opToParseCtx.get(op);
    if (parseCtx == null) {
      return null;
    }
    return parseCtx.getRowResolver();
  }
View Full Code Here

      HashMap<String, Operator<? extends OperatorDesc>> topOps =
        rewriteQueryCtx.getParseContext().getTopOps();
      HashMap<TableScanOperator, Table> topToTable =
        rewriteQueryCtx.getParseContext().getTopToTable();
      Map<Operator<? extends OperatorDesc>, OpParseContext> opParseContext =
        rewriteQueryCtx.getParseContext().getOpParseCtx();

      //need this to set rowResolver for new scanOperator
      OpParseContext operatorContext = opParseContext.get(scanOperator);

      //remove original TableScanOperator
      topOps.remove(alias);
      topToTable.remove(scanOperator);
      opParseContext.remove(scanOperator);

      //construct a new descriptor for the index table scan
      TableScanDesc indexTableScanDesc = new TableScanDesc();
      indexTableScanDesc.setGatherStats(false);

      String indexTableName = rewriteQueryCtx.getIndexName();
      Table indexTableHandle = null;
      try {
        indexTableHandle = rewriteQueryCtx.getHiveDb().getTable(indexTableName);
      } catch (HiveException e) {
        LOG.error("Error while getting the table handle for index table.");
        LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
        throw new SemanticException(e.getMessage(), e);
      }

      String k = indexTableName + Path.SEPARATOR;
      indexTableScanDesc.setStatsAggPrefix(k);
      scanOperator.setConf(indexTableScanDesc);

      //Construct the new RowResolver for the new TableScanOperator
      RowResolver rr = new RowResolver();
      try {
        StructObjectInspector rowObjectInspector =
          (StructObjectInspector) indexTableHandle.getDeserializer().getObjectInspector();
        for (String column : rewriteQueryCtx.getColumns()) {
          StructField field = rowObjectInspector.getStructFieldRef(column);
          rr.put(indexTableName, field.getFieldName(), new ColumnInfo(field.getFieldName(),
              TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()),
              indexTableName, false));
        }
      } catch (SerDeException e) {
        LOG.error("Error while creating the RowResolver for new TableScanOperator.");
        LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
        throw new SemanticException(e.getMessage(), e);
      }

      //Set row resolver for new table
      operatorContext.setRowResolver(rr);

      String newAlias = indexTableName;
      int index = alias.lastIndexOf(":");
      if (index >= 0) {
        newAlias = alias.substring(0, index) + ":" + indexTableName;
View Full Code Here
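
The RowResolver construction above generalizes to any table: walk the fields of the deserializer's StructObjectInspector and register one ColumnInfo per field. A hedged sketch of that loop, where rowResolverFor is a hypothetical helper name, not a Hive API:

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class RowResolverSketch {
  // Hypothetical helper: build a RowResolver covering every field of a row OI.
  static RowResolver rowResolverFor(String tableAlias, StructObjectInspector rowOI) {
    RowResolver rr = new RowResolver();
    for (StructField field : rowOI.getAllStructFieldRefs()) {
      rr.put(tableAlias, field.getFieldName(),
          new ColumnInfo(field.getFieldName(),
              TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()),
              tableAlias, false));
    }
    return rr;
  }
}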

          }
        }

        //Now the GroupByOperator has the new AggregationList; sum(`_count_of_indexed_key`)
        //instead of count(indexed_key)
        OpParseContext gbyOPC = rewriteQueryCtx.getOpc().get(operator);
        RowResolver gbyRR = newDAGContext.getOpParseCtx().get(newGbyOperator).getRowResolver();
        gbyOPC.setRowResolver(gbyRR);
        rewriteQueryCtx.getOpc().put(operator, gbyOPC);

        oldConf.setAggregators((ArrayList<AggregationDesc>) newAggrList);
        operator.setConf(oldConf);
View Full Code Here

  }

  @SuppressWarnings("nls")
  private Operator<? extends OperatorDesc>
    putOpInsertMap(Operator<? extends OperatorDesc> op, RowResolver rr) {
    OpParseContext ctx = new OpParseContext(rr);
    pGraphContext.getOpParseCtx().put(op, ctx);
    return op;
  }
View Full Code Here
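
Several of these snippets repeat the same bookkeeping: create an operator, wrap its RowResolver in an OpParseContext, and record the pair in the operator-to-context map held by the ParseContext. A standalone sketch of that registry, where register is a hypothetical name for the putOpInsertMap pattern above:

import java.util.LinkedHashMap;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;

public class OpRegistrySketch {
  // Mirrors ParseContext.getOpParseCtx(): one OpParseContext per operator.
  private final LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx =
      new LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext>();

  // Same contract as putOpInsertMap: record the mapping, return the operator.
  <T extends Operator<? extends OperatorDesc>> T register(T op, RowResolver rr) {
    opParseCtx.put(op, new OpParseContext(rr));
    return op;
  }
}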

    RowResolver outputRS = opParseCtxMap.get(op).getRowResolver();

    MapJoinOperator mapJoinOp = (MapJoinOperator) OperatorFactory.getAndMakeChild(
        mapJoinDescriptor, new RowSchema(outputRS.getColumnInfos()), op.getParentOperators());

    OpParseContext ctx = new OpParseContext(outputRS);
    opParseCtxMap.put(mapJoinOp, ctx);

    mapJoinOp.getConf().setReversedExprs(op.getConf().getReversedExprs());
    Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();
    mapJoinOp.setColumnExprMap(colExprMap);
View Full Code Here

    // The mapjoin has the same schema as the join operator
    MapJoinOperator mapJoinOp = (MapJoinOperator) OperatorFactory.getAndMakeChild(
        mapJoinDesc, joinRS.getRowSchema(),
        new ArrayList<Operator<? extends OperatorDesc>>());

    OpParseContext ctx = new OpParseContext(joinRS);
    opParseCtxMap.put(mapJoinOp, ctx);

    // change the children of the original join operator to point to the map
    // join operator
    List<Operator<? extends OperatorDesc>> childOps = smbJoinOp.getChildOperators();
View Full Code Here

   *          parse context
   */
  @SuppressWarnings("nls")
  public static Operator<? extends OperatorDesc> putOpInsertMap(
      Operator<? extends OperatorDesc> op, RowResolver rr, ParseContext parseCtx) {
    OpParseContext ctx = new OpParseContext(rr);
    parseCtx.getOpParseCtx().put(op, ctx);
    return op;
  }
View Full Code Here

        List<ExprNodeDesc> backtrackedKeyCols =
            ExprNodeDescUtils.backtrack(childKeyCols, child, current);
        List<ExprNodeDesc> backtrackedPartitionCols =
            ExprNodeDescUtils.backtrack(childPartitionCols, child, current);

        OpParseContext opCtx = pCtx.getOpParseCtx().get(current);
        RowResolver rowResolver = opCtx.getRowResolver();
        Set<String> tableNeedToCheck = new HashSet<String>();
        for (ExprNodeDesc expr: childKeyCols) {
          if (!(expr instanceof ExprNodeColumnDesc)) {
            return correlatedReduceSinkOperators;
          }
View Full Code Here
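
The table check in this last snippet depends on mapping an ExprNodeColumnDesc back to the alias it came from. RowResolver supports that direction through reverseLookup, which returns the {table alias, column alias} pair for an internal column name. A small sketch, assuming reverseLookup behaves as in the Hive versions these snippets come from; tableAliasOf is a hypothetical helper:

import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

public class ReverseLookupSketch {
  // Hypothetical helper: which table alias does this column expression resolve to?
  static String tableAliasOf(RowResolver rr, ExprNodeColumnDesc col) {
    String[] origin = rr.reverseLookup(col.getColumn()); // [tabAlias, colAlias], or null
    return origin == null ? null : origin[0];
  }
}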
