Examples of QBJoinTree
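
QBJoinTree is the parse-time description of a join that the Hive optimizer rules consult: which base aliases feed the join, which side is the left input, which aliases are loaded map-side, and, for nested joins, the QBJoinTree of the join source. The examples below all begin the same way: look the tree up in the ParseContext's join context (getJoinContext() for a JoinOperator, getMapJoinContext() for a MapJoinOperator) and read its aliases. Here is a minimal sketch of that lookup, assuming a Hive release that still ships QBJoinTree and using only methods that appear in the examples; the class name QBJoinTreeLookup is purely illustrative.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.QB;
import org.apache.hadoop.hive.ql.parse.QBJoinTree;

public class QBJoinTreeLookup {

  /**
   * Returns the query-id-qualified base source aliases of a join, or null if
   * no QBJoinTree is registered for the operator in the ParseContext.
   */
  public static List<String> baseAliases(ParseContext parseCtx, JoinOperator joinOp) {
    // Joins produced by the parser are recorded in the join context.
    QBJoinTree joinTree = parseCtx.getJoinContext().get(joinOp);
    if (joinTree == null) {
      return null;
    }
    List<String> aliases = new ArrayList<String>();
    for (String src : joinTree.getBaseSrc()) {
      if (src != null) {
        // Qualify each alias with the (sub)query id, as the optimizer rules below do.
        aliases.add(QB.getAppendedAliasFromId(joinTree.getId(), src));
      }
    }
    return aliases;
  }
}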


Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

      // Physical-optimizer fragment: collect the current task's path-to-alias mapping
      // and the QBJoinTree for this join before generating the candidate map-join tasks.
      HashMap<String, String> aliasToPath = new HashMap<String, String>();
      HashMap<String, ArrayList<String>> pathToAliases = currTask.getWork().getPathToAliases();

      // get parseCtx for this Join Operator
      ParseContext parseCtx = physicalContext.getParseContext();
      QBJoinTree joinTree = parseCtx.getJoinContext().get(joinOp);

      // start to generate multiple map join tasks
      JoinDesc joinDesc = joinOp.getConf();
      Byte[] order = joinDesc.getTagOrder();
      int numAliases = order.length;
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

  public static String genMapJoinOpAndLocalWork(MapredWork newWork, JoinOperator op, int mapJoinPos)
      throws SemanticException {
    LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap =
        newWork.getMapWork().getOpParseCtxMap();
    QBJoinTree newJoinTree = newWork.getMapWork().getJoinTree();
    // generate the map join operator; already checked the map join
    MapJoinOperator newMapJoinOp = MapJoinProcessor.convertMapJoin(opParseCtxMap, op,
        newJoinTree, mapJoinPos, true, false);
    return genLocalWorkForMapJoin(newWork, newMapJoinOp, mapJoinPos);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

    Map<Byte, List<ExprNodeDesc>> keyExprMap = new HashMap<Byte, List<ExprNodeDesc>>();

    // Walk over all the sources (which are guaranteed to be reduce sink
    // operators).
    // The join outputs a concatenation of all the inputs.
    QBJoinTree leftSrc = joinTree.getJoinSrc();

    List<Operator<? extends OperatorDesc>> parentOps = op.getParentOperators();
    List<Operator<? extends OperatorDesc>> newParentOps =
      new ArrayList<Operator<? extends OperatorDesc>>();
    List<Operator<? extends OperatorDesc>> oldReduceSinkParentOps =
View Full Code Here
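
The fragment above is cut off before the walk its leading comment describes. Below is a minimal sketch of that loop, following the same pattern a later example on this page uses; the helper name collectJoinKeys is purely illustrative.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

public class JoinKeyCollector {

  /**
   * Collects, per join tag, the key expressions emitted by each parent
   * ReduceSinkOperator feeding the join.
   */
  public static Map<Byte, List<ExprNodeDesc>> collectJoinKeys(JoinOperator joinOp) {
    Map<Byte, List<ExprNodeDesc>> keyExprMap = new HashMap<Byte, List<ExprNodeDesc>>();
    for (Operator<? extends OperatorDesc> parentOp : joinOp.getParentOperators()) {
      // The parents of a join are reduce sink operators; each one carries its
      // tag (input position) and the key columns it emits.
      ReduceSinkDesc rsConf = ((ReduceSinkOperator) parentOp).getConf();
      keyExprMap.put((byte) rsConf.getTag(), rsConf.getKeyCols());
    }
    return keyExprMap;
  }
}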

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

      // Walk every JoinOperator recorded in the parse context's join context and
      // convert each one that qualifies into a map join.
      Set<Map.Entry<JoinOperator, QBJoinTree>> joinCtx = pGraphContext.getJoinContext().entrySet();
      Iterator<Map.Entry<JoinOperator, QBJoinTree>> joinCtxIter = joinCtx.iterator();
      while (joinCtxIter.hasNext()) {
        Map.Entry<JoinOperator, QBJoinTree> joinEntry = joinCtxIter.next();
        JoinOperator joinOp = joinEntry.getKey();
        QBJoinTree qbJoin = joinEntry.getValue();
        int mapJoinPos = mapSideJoin(joinOp, qbJoin);
        if (mapJoinPos >= 0) {
          MapJoinOperator mapJoinOp = generateMapJoinOperator(pactx, joinOp, qbJoin, mapJoinPos);
          listMapJoinOps.add(mapJoinOp);
          mapJoinMap.put(mapJoinOp, qbJoin);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

    // Returns true when the parent map join feeds this map join directly as its
    // join source, rather than through a subquery.
    private boolean nonSubqueryMapJoin(ParseContext pGraphContext, MapJoinOperator mapJoin,
        MapJoinOperator parentMapJoin) {
      QBJoinTree joinTree = pGraphContext.getMapJoinContext().get(mapJoin);
      QBJoinTree parentJoinTree = pGraphContext.getMapJoinContext().get(parentMapJoin);
      if (joinTree.getJoinSrc() != null && joinTree.getJoinSrc().equals(parentJoinTree)) {
        return true;
      }
      return false;
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

      || mapJoinOp.getConf().getAliasBucketFileNameMapping().size() == 0) {
      return false;
    }

    boolean tableEligibleForBucketedSortMergeJoin = true;
    QBJoinTree joinCxt = this.pGraphContext.getMapJoinContext()
      .get(mapJoinOp);
    if (joinCxt == null) {
      return false;
    }
    String[] srcs = joinCxt.getBaseSrc();
    for (int srcPos = 0; srcPos < srcs.length; srcPos++) {
      srcs[srcPos] = QB.getAppendedAliasFromId(joinCxt.getId(), srcs[srcPos]);
    }

    // All the tables/partitions columns should be sorted in the same order
    // For example, if tables A and B are being joined on columns c1, c2 and c3
    // which are the sorted and bucketed columns. The join would work, as long
    // c1, c2 and c3 are sorted in the same order.
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

  protected boolean checkConvertJoinToSMBJoin(
      JoinOperator joinOperator,
      SortBucketJoinProcCtx smbJoinContext,
      ParseContext pGraphContext) throws SemanticException {

    QBJoinTree joinCtx = pGraphContext.getJoinContext().get(joinOperator);

    if (joinCtx == null) {
      return false;
    }
    String[] srcs = joinCtx.getBaseSrc();

    // All the tables/partitions columns should be sorted in the same order
    // For example, if tables A and B are being joined on columns c1, c2 and c3
    // which are the sorted and bucketed columns. The join would work, as long
    // c1, c2 and c3 are sorted in the same order.
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

    // This has already been inspected and rejected
    if (context.getRejectedJoinOps().contains(joinOp)) {
      return false;
    }

    QBJoinTree joinCtx = pGraphContext.getJoinContext().get(joinOp);
    if (joinCtx == null) {
      return false;
    }

    Class<? extends BigTableSelectorForAutoSMJ> bigTableMatcherClass = null;
    try {
      bigTableMatcherClass =
        (Class<? extends BigTableSelectorForAutoSMJ>)
          (Class.forName(HiveConf.getVar(pGraphContext.getConf(),
            HiveConf.ConfVars.HIVE_AUTO_SORTMERGE_JOIN_BIGTABLE_SELECTOR)));
    } catch (ClassNotFoundException e) {
      throw new SemanticException(e.getMessage());
    }

    BigTableSelectorForAutoSMJ bigTableMatcher =
      (BigTableSelectorForAutoSMJ) ReflectionUtils.newInstance(bigTableMatcherClass, null);
    JoinDesc joinDesc = joinOp.getConf();
    JoinCondDesc[] joinCondns = joinDesc.getConds();
    Set<Integer> joinCandidates = MapJoinProcessor.getBigTableCandidates(joinCondns);
    if (joinCandidates == null) {
      // This is a full outer join. This can never be a map-join
      // of any type. So return false.
      return false;
    }
    int bigTablePosition =
      bigTableMatcher.getBigTablePosition(pGraphContext, joinOp, joinCandidates);
    if (bigTablePosition < 0) {
      // contains aliases from sub-query
      return false;
    }
    context.setBigTablePosition(bigTablePosition);
    String joinAlias =
      bigTablePosition == 0 ?
        joinCtx.getLeftAlias() : joinCtx.getRightAliases()[bigTablePosition - 1];
    joinAlias = QB.getAppendedAliasFromId(joinCtx.getId(), joinAlias);

    Map<Byte, List<ExprNodeDesc>> keyExprMap  = new HashMap<Byte, List<ExprNodeDesc>>();
    List<Operator<? extends OperatorDesc>> parentOps = joinOp.getParentOperators();
    // get the join keys from parent ReduceSink operators
    for (Operator<? extends OperatorDesc> parentOp : parentOps) {
      ReduceSinkDesc rsconf = ((ReduceSinkOperator)parentOp).getConf();
      Byte tag = (byte) rsconf.getTag();
      List<ExprNodeDesc> keys = rsconf.getKeyCols();
      keyExprMap.put(tag, keys);
    }

    context.setKeyExprMap(keyExprMap);
    // Make a deep copy of the aliases so that they are not changed in the context
    String[] joinSrcs = joinCtx.getBaseSrc();
    String[] srcs = new String[joinSrcs.length];
    for (int srcPos = 0; srcPos < joinSrcs.length; srcPos++) {
      joinSrcs[srcPos] = QB.getAppendedAliasFromId(joinCtx.getId(), joinSrcs[srcPos]);
      srcs[srcPos] = new String(joinSrcs[srcPos]);
    }

    // Given a candidate map-join, can this join be converted.
    // The candidate map-join was derived from the pluggable sort merge join big
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

  protected boolean canConvertMapJoinToBucketMapJoin(
      MapJoinOperator mapJoinOp,
      ParseContext pGraphContext,
      BucketJoinProcCtx context) throws SemanticException {

    QBJoinTree joinCtx = this.pGraphContext.getMapJoinContext().get(mapJoinOp);
    if (joinCtx == null) {
      return false;
    }

    List<String> joinAliases = new ArrayList<String>();
    String[] srcs = joinCtx.getBaseSrc();
    String[] left = joinCtx.getLeftAliases();
    List<String> mapAlias = joinCtx.getMapAliases();
    String baseBigAlias = null;

    // Expand each left-input alias with the query id; an alias that is not one of
    // the map-side (small-table) aliases is the big-table candidate.
    for (String s : left) {
      if (s != null) {
        String subQueryAlias = QB.getAppendedAliasFromId(joinCtx.getId(), s);
        if (!joinAliases.contains(subQueryAlias)) {
          joinAliases.add(subQueryAlias);
          if (!mapAlias.contains(s)) {
            baseBigAlias = subQueryAlias;
          }
        }
      }
    }

    for (String s : srcs) {
      if (s != null) {
        String subQueryAlias = QB.getAppendedAliasFromId(joinCtx.getId(), s);
        if (!joinAliases.contains(subQueryAlias)) {
          joinAliases.add(subQueryAlias);
          if (!mapAlias.contains(s)) {
            baseBigAlias = subQueryAlias;
          }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.QBJoinTree

  public static String genMapJoinOpAndLocalWork(MapredWork newWork, JoinOperator op, int mapJoinPos)
      throws SemanticException {
    try {
      LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap = newWork
          .getOpParseCtxMap();
      QBJoinTree newJoinTree = newWork.getJoinTree();
      // generate the map join operator; already checked the map join
      MapJoinOperator newMapJoinOp = MapJoinProcessor.convertMapJoin(opParseCtxMap, op,
          newJoinTree, mapJoinPos, true);
      // generate the local work and return the big table alias
      String bigTableAlias = MapJoinProcessor
View Full Code Here