Examples of TypeCheckCtx


Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    public Object process(Node nd, Stack<Node> stack,
        NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException
    {

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
      if (ctx.getError() != null)
      {
        return null;
      }

      ExprNodeDesc desc = WindowingTypeCheckProcFactory
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    public Object process(Node nd, Stack<Node> stack,
        NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException
    {

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
      if (ctx.getError() != null)
      {
        return null;
      }

      ExprNodeDesc desc = WindowingTypeCheckProcFactory  .processGByExpr(nd, procCtx);
      if (desc != null)
      {
        return desc;
      }

      ASTNode expr = (ASTNode) nd;
      ASTNode parent = stack.size() > 1 ? (ASTNode) stack.get(stack.size() - 2) : null;
      RowResolver input = ctx.getInputRR();

      if (expr.getType() != Windowing2Parser.TABLEORCOL)
      {
        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr), expr);
        return null;
      }

      assert (expr.getChildCount() == 1);
      String tableOrCol = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText());

      boolean isTableAlias = input.hasTableAlias(tableOrCol);
      ColumnInfo colInfo = input.get(null, tableOrCol);

      if (isTableAlias)
      {
        if (colInfo != null)
        {
          if (parent != null && parent.getType() == Windowing2Parser.DOT)
          {
            // It's a table alias.
            return null;
          }
          // It's a column.
          return new ExprNodeColumnDesc(colInfo.getType(),
              colInfo.getInternalName(), colInfo.getTabAlias(),
              colInfo.getIsVirtualCol());
        }
        else
        {
          // It's a table alias.
          // We will process that later in DOT.
          return null;
        }
      }
      else
      {
        if (colInfo == null)
        {
          // It's not a column or a table alias.
          if (input.getIsExprResolver())
          {
            ASTNode exprNode = expr;
            if (!stack.empty())
            {
              ASTNode tmp = (ASTNode) stack.pop();
              if (!stack.empty())
              {
                exprNode = (ASTNode) stack.peek();
              }
              stack.push(tmp);
            }
            ctx.setError(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY
                .getMsg(exprNode), expr);
            return null;
          }
          else
          {
            List<String> possibleColumnNames = input
                .getReferenceableColumnAliases(tableOrCol, -1);
            String reason = String.format(
                "(possible column names are: %s)",
                StringUtils.join(possibleColumnNames, ", "));
            ctx.setError(
                ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg(
                    expr.getChild(0), reason), expr);
            LOG.debug(ErrorMsg.INVALID_TABLE_OR_COLUMN.toString()
                + ":" + input.toString());
            return null;
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

  }
 
  private void setupSelectListInputInfo() throws WindowingException
  {
    // Build a RowResolver over the select-list input's ObjectInspector,
    // using the fixed alias "npathInput" (the NPath UDTF's input relation).
    selectListInputRowResolver = HiveUtils.getRowResolver("npathInput", selectListInputOI);
    // Wrap the resolver in a TypeCheckCtx so select-list expressions can be
    // type-checked against the input row's columns.
    selectListInputTypeCheckCtx = new TypeCheckCtx(selectListInputRowResolver);
    // No unparse translation is needed here; a null translator disables it.
    // NOTE(review): assumes selectListInputOI was initialized before this call — confirm against caller.
    selectListInputTypeCheckCtx.setUnparseTranslator(null);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    public Object process(Node nd, Stack<Node> stack,
        NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException
    {

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;

      ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null)
      {
        // Here we know nd represents a group by expression.

        // During the DFS traversal of the AST, a descendant of nd
        // likely set an
        // error because a sub-tree of nd is unlikely to also be a group
        // by
        // expression. For example, in a query such as
        // SELECT *concat(key)* FROM src GROUP BY concat(key), 'key'
        // will be
        // processed before 'concat(key)' and since 'key' is not a group
        // by
        // expression, an error will be set in ctx by
        // ColumnExprProcessor.

        // We can clear the global error when we see that it was set in
        // a
        // descendant node of a group by expression because
        // processGByExpr() returns a ExprNodeDesc that effectively
        // ignores
        // its children. Although the error can be set multiple times by
        // descendant nodes, DFS traversal ensures that the error only
        // needs to
        // be cleared once. Also, for a case like
        // SELECT concat(value, concat(value))... the logic still works
        // as the
        // error is only set with the first 'value'; all node processors
        // quit
        // early if the global error is set.

        if (isDescendant(nd, ctx.getErrorSrcNode()))
        {
          ctx.setError(null, null);
        }
        return desc;
      }

      if (ctx.getError() != null)
      {
        return null;
      }

      ASTNode expr = (ASTNode) nd;

      // If the first child is a TOK_TABLE_OR_COL, and nodeOutput[0] is
      // NULL,
      // and the operator is a DOT, then it's a table column reference.
      if (expr.getType() == Windowing2Parser.DOT
          && expr.getChild(0).getType() == Windowing2Parser.TABLEORCOL
          && nodeOutputs[0] == null)
      {

        RowResolver input = ctx.getInputRR();
        String tableAlias = BaseSemanticAnalyzer
            .unescapeIdentifier(expr.getChild(0).getChild(0)
                .getText());
        // NOTE: tableAlias must be a valid non-ambiguous table alias,
        // because we've checked that in TOK_TABLE_OR_COL's process
        // method.
        ColumnInfo colInfo = input.get(tableAlias,
            ((ExprNodeConstantDesc) nodeOutputs[1]).getValue()
                .toString());

        if (colInfo == null)
        {
          ctx.setError(
              ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)),
              expr);
          return null;
        }
        return new ExprNodeColumnDesc(colInfo.getType(),
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    // a constant, we convert that into an exprNodeConstantDesc. For others
    // we
    // just
    // build the exprNodeFuncDesc with recursively built children.
    ASTNode expr = (ASTNode) nd;
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    RowResolver input = ctx.getInputRR();
    ExprNodeDesc desc = null;

    // If the current subExpression is pre-calculated, as in Group-By etc.
    ColumnInfo colInfo = input.getExpression(expr);
    if (colInfo != null)
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    public Object process(Node nd, Stack<Node> stack,
        NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException
    {

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
      if (ctx.getError() != null)
      {
        return null;
      }

      ExprNodeDesc desc = WindowingTypeCheckProcFactory.processGByExpr(nd, procCtx);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    public Object process(Node nd, Stack<Node> stack,
        NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException
    {

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
      if (ctx.getError() != null)
      {
        return null;
      }

      ExprNodeDesc desc = WindowingTypeCheckProcFactory
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    public Object process(Node nd, Stack<Node> stack,
        NodeProcessorCtx procCtx, Object... nodeOutputs)
        throws SemanticException
    {

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
      if (ctx.getError() != null)
      {
        return null;
      }

      ExprNodeDesc desc = WindowingTypeCheckProcFactory
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

    {
      this.inpDef = input;
      this.forMapPhase = mapOI != null;
      OI = forMapPhase ? mapOI : inpDef.getOI();
      rr = HiveUtils.getRowResolver(inpDef.getAlias(), OI);
      tCtx = new TypeCheckCtx(rr);
      tCtx.setUnparseTranslator(null);
    }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.parse.TypeCheckCtx

          resultExprInfo.resultExprNames, selectListExprOIs);
    }

    private void setupSelectListInputInfo() throws SemanticException
    {
      selectListInputTypeCheckCtx = new TypeCheckCtx(selectListInputRowResolver);
      selectListInputTypeCheckCtx.setUnparseTranslator(null);
      /*
       * create SelectListOI
       */
      selectListInputOI = (StructObjectInspector)
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.