Package org.apache.hadoop.hive.ql.parse

Examples of org.apache.hadoop.hive.ql.parse.OpParseContext
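
OpParseContext is essentially a holder for the RowResolver that describes an operator's output row during semantic analysis; ParseContext keeps a Map<Operator<? extends OperatorDesc>, OpParseContext> (returned by getOpParseCtx()) that the optimizer code in the excerpts below reads and updates whenever operators are added or replaced. A minimal lookup sketch, assuming a ParseContext pCtx and an already registered operator op (both hypothetical here):

    // minimal sketch: fetch the RowResolver recorded for an operator
    // (pCtx and op are assumed to exist; see the excerpts below for real call sites)
    OpParseContext opCtx = pCtx.getOpParseCtx().get(op);
    RowResolver rr = opCtx.getRowResolver();
    for (ColumnInfo cinfo : rr.getColumnInfos()) {
      System.out.println(cinfo.getInternalName() + " -> " + cinfo.getTabAlias());
    }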


    // The mapjoin has the same schema as the join operator
    MapJoinOperator mapJoinOp = (MapJoinOperator) OperatorFactory.getAndMakeChild(
        mapJoinDesc, joinRS.getRowSchema(),
        new ArrayList<Operator<? extends OperatorDesc>>());

    OpParseContext ctx = new OpParseContext(joinRS);
    opParseCtxMap.put(mapJoinOp, ctx);

    // change the children of the original join operator to point to the map
    // join operator
    List<Operator<? extends OperatorDesc>> childOps = smbJoinOp.getChildOperators();


        for (ExprNodeDesc expr: childKeyCols) {
          if (!(expr instanceof ExprNodeColumnDesc)) {
            return correlatedReduceSinkOperators;
          } else {
            String colName = ((ExprNodeColumnDesc)expr).getColumn();
            OpParseContext opCtx = pCtx.getOpParseCtx().get(current);
            for (ColumnInfo cinfo : opCtx.getRowResolver().getColumnInfos()) {
              if (colName.equals(cinfo.getInternalName())) {
                tableNeedToCheck.add(cinfo.getTabAlias());
              }
            }
          }

      Map<String, Operator<? extends OperatorDesc>> topOps =
        rewriteQueryCtx.getParseContext().getTopOps();
      Map<Operator<? extends OperatorDesc>, OpParseContext> opParseContext =
        rewriteQueryCtx.getParseContext().getOpParseCtx();

      // needed below to set the RowResolver for the new TableScanOperator
      OpParseContext operatorContext = opParseContext.get(scanOperator);

      //remove original TableScanOperator
      topToTable.remove(scanOperator);
      topOps.remove(baseTableName);
      opParseContext.remove(scanOperator);

      //construct a new descriptor for the index table scan
      TableScanDesc indexTableScanDesc = new TableScanDesc();
      indexTableScanDesc.setGatherStats(false);

      String indexTableName = rewriteQueryCtx.getIndexName();
      Table indexTableHandle = null;
      try {
        indexTableHandle = rewriteQueryCtx.getHiveDb().getTable(indexTableName);
      } catch (HiveException e) {
        LOG.error("Error while getting the table handle for index table.");
        LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
        throw new SemanticException(e.getMessage(), e);
      }

      String k = indexTableName + Path.SEPARATOR;
      indexTableScanDesc.setStatsAggPrefix(k);
      scanOperator.setConf(indexTableScanDesc);

      //Construct the new RowResolver for the new TableScanOperator
      RowResolver rr = new RowResolver();
      try {
        StructObjectInspector rowObjectInspector =
            (StructObjectInspector) indexTableHandle.getDeserializer().getObjectInspector();
        List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
        for (StructField field : fields) {
          rr.put(indexTableName, field.getFieldName(),
              new ColumnInfo(field.getFieldName(),
                  TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()),
                  indexTableName, false));
        }
      } catch (SerDeException e) {
        LOG.error("Error while creating the RowResolver for new TableScanOperator.");
        LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
        throw new SemanticException(e.getMessage(), e);
      }

      // set the RowResolver of the new TableScanOperator
      operatorContext.setRowResolver(rr);
      String tabNameWithAlias = null;
      if (alias != null) {
        tabNameWithAlias = alias + ":" + indexTableName;
      } else {
        tabNameWithAlias = indexTableName;
      }

          }
        }

        //Now the GroupByOperator has the new AggregationList; sum(`_count_of_indexed_key`)
        //instead of count(indexed_key)
        OpParseContext gbyOPC = rewriteQueryCtx.getOpc().get(operator);
        RowResolver gbyRR = newDAGContext.getOpParseCtx().get(newGbyOperator).getRowResolver();
        gbyOPC.setRowResolver(gbyRR);
        rewriteQueryCtx.getOpc().put(operator, gbyOPC);

        oldConf.setAggregators((ArrayList<AggregationDesc>) newAggrList);
        operator.setConf(oldConf);

  /**
   * Records the operator's RowResolver in the ParseContext's
   * operator-to-OpParseContext map and returns the operator.
   *
   * @param op operator to record
   * @param rr row resolver for the operator
   * @param parseCtx parse context
   */
  @SuppressWarnings("nls")
  public static Operator<? extends OperatorDesc> putOpInsertMap(
      Operator<? extends OperatorDesc> op, RowResolver rr, ParseContext parseCtx) {
    OpParseContext ctx = new OpParseContext(rr);
    parseCtx.getOpParseCtx().put(op, ctx);
    return op;
  }
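
Such a helper is typically called right after an operator is created, so the new node's RowResolver lands in the same map the lookups elsewhere rely on. A hedged usage sketch; selDesc, inputRR, parentOp and parseCtx are illustrative names, not taken from the excerpt above:

    // hypothetical call site: create a SelectOperator and register its RowResolver
    SelectOperator sel = (SelectOperator) OperatorFactory.getAndMakeChild(
        selDesc, new RowSchema(inputRR.getColumnInfos()), parentOp);
    putOpInsertMap(sel, inputRR, parseCtx);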

      int pos = parentOperators.indexOf(op);
      assert pos != -1;
      parentOperators.remove(pos);
      parentOperators.add(pos, output); // put the new op in the old op's position
    }
    OpParseContext ctx = new OpParseContext(inputRR);
    owi.put(output, ctx);
    return output;
  }

      return cols;
    }

    private Operator<? extends Serializable> putOpInsertMap(Operator<?> op, RowResolver rr,
        ParseContext context) {
      OpParseContext ctx = new OpParseContext(rr);
      context.getOpParseCtx().put(op, ctx);
      return op;
    }

            syntheticExpr = syntheticInExpr;
          }
        }

        Operator<FilterDesc> newFilter = createFilter(source, parent, parentRR, syntheticExpr);
        pCtx.getOpParseCtx().put(newFilter, new OpParseContext(parentRR));
        parent = newFilter;
      }

      return null;
    }

        op.removeChild(child);
        SelectOperator sel = (SelectOperator) OperatorFactory.getAndMakeChild(
            select, new RowSchema(outputRS.getColumnInfos()), op);
        OperatorFactory.makeChild(sel, child);

        OpParseContext parseCtx = new OpParseContext(outputRS);
        cppCtx.getParseContext().getOpParseCtx().put(sel, parseCtx);

        sel.setColumnExprMap(colExprMap);
      }

        ((FilterOperator)parent).getConf().setPredicate(merged);
      } else {
        ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
        RowResolver parentRR = pGraphContext.getOpParseCtx().get(parent).getRowResolver();
        Operator<FilterDesc> newFilter = createFilter(reducer, parent, parentRR, merged);
        pGraphContext.getOpParseCtx().put(newFilter, new OpParseContext(parentRR));
      }
    }

    return pGraphContext;
  }
