Package org.apache.hadoop.hive.ql.parse

Examples of org.apache.hadoop.hive.ql.parse.ASTNode
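
The snippets below are collected from the Apache Hive and HCatalog sources. They show the recurring ASTNode idioms: casting the generic getChild() result back to ASTNode, branching on HiveParser token types, and handing a parsed tree to a semantic analyzer. Several snippets are fragments cut from longer methods, so a few begin or end mid-statement.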


      HashSet<WriteEntity> outputs) throws SemanticException {
    List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
    List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
    PrivilegeObjectDesc hiveObj = null;
    if (ast.getChildCount() > 2) {
      ASTNode astChild = (ASTNode) ast.getChild(2);
      hiveObj = analyzePrivilegeObject(astChild, outputs);
    }
    RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj);
    return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc), conf);
  }
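As context for the snippets, here is a minimal standalone sketch (not taken from Hive; AstDumper and dumpTree are hypothetical names) of the most basic ASTNode operation, a depth-first walk. The accessors are inherited from ANTLR's CommonTree:

import org.apache.hadoop.hive.ql.parse.ASTNode;

public class AstDumper {
  // Depth-first print of a parsed tree: each line shows the token text and
  // its numeric type (a HiveParser constant).
  public static void dumpTree(ASTNode node, int depth) {
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      indent.append("  ");
    }
    System.out.println(indent + node.getText() + " (type=" + node.getType() + ")");
    for (int i = 0; i < node.getChildCount(); i++) {
      // getChild() returns the generic ANTLR Tree, hence the cast -- the
      // same idiom used throughout the snippets on this page.
      dumpTree((ASTNode) node.getChild(i), depth + 1);
    }
  }
}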


    PrincipalDesc principalDesc = null;
    PrivilegeObjectDesc privHiveObj = null;
    List<String> cols = null;

    ASTNode param = null;
    if (ast.getChildCount() > 0) {
      param = (ASTNode) ast.getChild(0);
      principalDesc = AuthorizationParseUtils.getPrincipalDesc(param);
      if (principalDesc != null) {
        param = (ASTNode) ast.getChild(1); // shift one
      }
    }

    if (param != null) {
      if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
        privHiveObj = new PrivilegeObjectDesc();
      } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
        privHiveObj = new PrivilegeObjectDesc();
        //set object name
        String text = param.getChild(0).getText();
        privHiveObj.setObject(BaseSemanticAnalyzer.unescapeIdentifier(text));
        //set object type
        ASTNode objTypeNode = (ASTNode) param.getChild(1);
        privHiveObj.setTable(objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE);

        //set col and partition spec if specified
        for (int i = 2; i < param.getChildCount(); i++) {
          ASTNode partOrCol = (ASTNode) param.getChild(i);
          if (partOrCol.getType() == HiveParser.TOK_PARTSPEC) {
            privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(partOrCol));
          } else if (partOrCol.getType() == HiveParser.TOK_TABCOLNAME) {
            cols = BaseSemanticAnalyzer.getColumnNames(partOrCol);
          } else {
            throw new SemanticException("Invalid token type " + partOrCol.getType());
          }
        }
      }
    }
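The shift-by-one step above (read child 0, and if it named a principal, re-read child 1) is a common pattern for optional children. A hedged helper with a hypothetical name makes the bounds check explicit:

// Hypothetical convenience helper, not part of Hive: fetch the i-th child
// as an ASTNode, or null when the optional child is absent.
static ASTNode childOrNull(ASTNode ast, int i) {
  return i < ast.getChildCount() ? (ASTNode) ast.getChild(i) : null;
}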

    List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef(
        (ASTNode) ast.getChild(0));

    //check if admin option has been specified
    int rolesStartPos = 1;
    ASTNode wAdminOption = (ASTNode) ast.getChild(1);
    boolean isAdmin = false;
    if (wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION) {
      rolesStartPos = 2; //start reading role names from next position
      isAdmin = true;
    }

    List<String> roles = new ArrayList<String>();
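The snippet is cut off after declaring roles. Given the rolesStartPos bookkeeping above, a plausible continuation (an assumption, not verified against this exact revision) reads the remaining children as role names:

    for (int i = rolesStartPos; i < ast.getChildCount(); i++) {
      // Each remaining child is a role identifier, possibly backquoted.
      roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
    }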

    PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
    //set object identifier
    subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()));
    //set object type
    ASTNode objTypeNode =  (ASTNode) ast.getChild(1);
    subject.setTable(objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE);
    if (ast.getChildCount() == 3) {
      //if partition spec node is present, set partition spec
      ASTNode partSpecNode = (ASTNode) ast.getChild(2);
      subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(partSpecNode));
    }

    if (subject.getTable()) {
      Table tbl = getTable(SessionState.get().getCurrentDatabase(), subject.getObject());
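Two snippets on this page decode the object-type child the same way. As a hedged sketch, that shared idiom could be factored into a helper (isTableType is a hypothetical name):

  // True when the grant/revoke object is a table, false when it is a database.
  static boolean isTableType(ASTNode ast, int childIndex) {
    ASTNode objTypeNode = (ASTNode) ast.getChild(childIndex);
    return objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE;
  }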

  private List<PrivilegeDesc> analyzePrivilegeListDef(ASTNode node)
      throws SemanticException {
    List<PrivilegeDesc> ret = new ArrayList<PrivilegeDesc>();
    for (int i = 0; i < node.getChildCount(); i++) {
      ASTNode privilegeDef = (ASTNode) node.getChild(i);
      ASTNode privilegeType = (ASTNode) privilegeDef.getChild(0);
      Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeType.getType());

      if (privObj == null) {
        throw new SemanticException("Undefined privilege " + PrivilegeType.
            getPrivTypeByToken(privilegeType.getType()));
      }
      List<String> cols = null;
      if (privilegeDef.getChildCount() > 1) {
        cols = BaseSemanticAnalyzer.getColumnNames((ASTNode) privilegeDef.getChild(1));
      }
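The loop body is truncated here; a plausible completion (assuming PrivilegeDesc pairs the privilege with its optional column list, as its use elsewhere on this page suggests) is:

      ret.add(new PrivilegeDesc(privObj, cols));
    }
    return ret;
  }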

    try {
      command = new VariableSubstitution().substitute(conf, command);
      ctx = new Context(conf);

      ParseDriver pd = new ParseDriver();
      ASTNode tree = pd.parse(command, ctx);
      tree = ParseUtils.findRootNonNullToken(tree);

      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
      List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
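For experimenting with the parser outside the driver, a minimal self-contained sketch (the query string is illustrative): ParseDriver also offers a context-free parse(String) overload in these Hive versions, and findRootNonNullToken strips the imaginary wrapper token, exactly as above:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.ParseUtils;

public class ParseSketch {
  public static void main(String[] args) throws ParseException {
    ParseDriver pd = new ParseDriver();
    ASTNode tree = pd.parse("SELECT key, value FROM src WHERE key > 10");
    tree = ParseUtils.findRootNonNullToken(tree);
    System.out.println(tree.dump()); // indented rendering of the whole AST
  }
}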

      ctx.setCmd(command);
      ctx.setHDFSCleanup(true);

      perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARSE);
      ParseDriver pd = new ParseDriver();
      ASTNode tree = pd.parse(command, ctx);
      tree = ParseUtils.findRootNonNullToken(tree);
      perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARSE);

      // Initialize the transaction manager.  This must be done before analyze is called.  Also
      // record the valid transactions for this query.  We have to do this at compile time
      // because we use the information in planning the query.  Also,
      // we want to record it at this point so that users see data valid at the point that they
      // submit the query.
      SessionState.get().initTxnMgr(conf);
      recordValidTxns();

      perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ANALYZE);
      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
      List<HiveSemanticAnalyzerHook> saHooks =
          getHooks(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
              HiveSemanticAnalyzerHook.class);

      // Do semantic analysis and plan generation
      if (saHooks != null) {
        HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
        hookCtx.setConf(conf);
        hookCtx.setUserName(userName);
        hookCtx.setIpAddress(SessionState.get().getUserIpAddress());
        hookCtx.setCommand(command);
        for (HiveSemanticAnalyzerHook hook : saHooks) {
          tree = hook.preAnalyze(hookCtx, tree);
        }
        sem.analyze(tree, ctx);
        hookCtx.update(sem);
        for (HiveSemanticAnalyzerHook hook : saHooks) {
          hook.postAnalyze(hookCtx, sem.getRootTasks());
        }
      } else {
        sem.analyze(tree, ctx);
      }
      // Record any ACID compliant FileSinkOperators we saw so we can add our transaction ID to
      // them later.
      acidSinks = sem.getAcidFileSinks();

      LOG.info("Semantic Analysis Completed");

      // validate the plan
      sem.validate();
      perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ANALYZE);

      plan = new QueryPlan(command, sem, perfLogger.getStartTime(PerfLogger.DRIVER_RUN), queryId,
        SessionState.get().getCommandType());

      String queryStr = plan.getQueryStr();
      conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, queryStr);

      conf.set("mapreduce.workflow.id", "hive_" + queryId);
      conf.set("mapreduce.workflow.name", queryStr);

      // initialize FetchTask right here
      if (plan.getFetchTask() != null) {
        plan.getFetchTask().initialize(conf, plan, null);
      }

      // get the output schema
      schema = getSchema(sem, conf);

      //do the authorization check
      if (!sem.skipAuthorization() &&
          HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {

        try {
          perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
          doAuthorization(sem, command);
        } catch (AuthorizationException authExp) {
          console.printError("Authorization failed:" + authExp.getMessage()
              + ". Use SHOW GRANT to get more details.");
          errorMessage = authExp.getMessage();
          SQLState = "42000";
          return 403;
        } finally {
          perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
        }
      }

      if (conf.getBoolVar(ConfVars.HIVE_LOG_EXPLAIN_OUTPUT)) {
        String explainOutput = getExplainOutput(sem, plan, tree.dump());
        if (explainOutput != null) {
          LOG.info("EXPLAIN output for queryid " + queryId + " : "
              + explainOutput);
        }
      }
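The hook protocol the driver exercises above (preAnalyze may rewrite the tree before analysis; postAnalyze sees the generated root tasks) takes only a few lines to implement. A hedged sketch follows; LoggingHook is a hypothetical class, and the signatures follow AbstractSemanticAnalyzerHook:

import java.io.Serializable;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class LoggingHook extends AbstractSemanticAnalyzerHook {
  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
      throws SemanticException {
    System.out.println("About to analyze: " + ast.dump());
    return ast; // return the (possibly rewritten) tree
  }

  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    System.out.println("Generated " + rootTasks.size() + " root task(s)");
  }
}

Hooks like this are picked up through the hive.semantic.analyzer.hook property, the SEMANTIC_ANALYZER_HOOK conf var read in the snippet above.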

      .getChild(0));
    boolean likeTable = false;
    StorageFormat format = new StorageFormat(context.getConf());

    for (int num = 1; num < numCh; num++) {
      ASTNode child = (ASTNode) ast.getChild(num);
      if (format.fillStorageFormat(child)) {
        if (org.apache.commons.lang.StringUtils
            .isNotEmpty(format.getStorageHandler())) {
          return ast;
        }
        continue;
      }
      switch (child.getToken().getType()) {

      case HiveParser.TOK_QUERY: // CTAS
        throw new SemanticException(
          "Operation not supported. Create table as " +
            "Select is not a valid operation.");

      case HiveParser.TOK_ALTERTABLE_BUCKETS:
        break;

      case HiveParser.TOK_LIKETABLE:
        likeTable = true;
        break;

      case HiveParser.TOK_IFNOTEXISTS:
        try {
          List<String> tables = db.getTablesByPattern(tableName);
          if (tables != null && tables.size() > 0) { // table exists
            return ast;
          }
        } catch (HiveException e) {
          throw new SemanticException(e);
        }
        break;

      case HiveParser.TOK_TABLEPARTCOLS:
        List<FieldSchema> partCols = BaseSemanticAnalyzer
          .getColumns((ASTNode) child.getChild(0), false);
        for (FieldSchema fs : partCols) {
          if (!fs.getType().equalsIgnoreCase("string")) {
            throw new SemanticException(
              "Operation not supported. HCatalog only " +
                "supports partition columns of type string. "
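The error message is cut off mid-string; a plausible completion (an assumption based on the shape of the check, naming the offending column and its type) would be:

                + "For column: " + fs.getName()
                + " Found type: " + fs.getType());
          }
        }
        break;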

      JoinTypeCheckCtx ctx = (JoinTypeCheckCtx) procCtx;
      if (ctx.getError() != null) {
        return null;
      }

      ASTNode expr = (ASTNode) nd;
      ASTNode parent = stack.size() > 1 ? (ASTNode) stack.get(stack.size() - 2) : null;

      if (expr.getType() != HiveParser.TOK_TABLE_OR_COL) {
        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr), expr);
        return null;
      }

      assert (expr.getChildCount() == 1);
      String tableOrCol = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText());

      boolean qualifiedAccess = (parent != null && parent.getType() == HiveParser.DOT);

      ColumnInfo colInfo = null;
      if (!qualifiedAccess) {
        colInfo = getColInfo(ctx, null, tableOrCol, expr);
        // It's a column.
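BaseSemanticAnalyzer.unescapeIdentifier, used here and in several snippets above, strips the backquotes HiveQL allows around identifiers. A quick illustration (values assumed):

String raw = "`key`";
String clean = BaseSemanticAnalyzer.unescapeIdentifier(raw); // "key"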

    Context ctx;
    ParseContext subPCtx = null;
    try {
      ctx = new Context(conf);
      ParseDriver pd = new ParseDriver();
      ASTNode tree = pd.parse(command, ctx);
      tree = ParseUtils.findRootNonNullToken(tree);

      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
      assert(sem instanceof SemanticAnalyzer);
      doSemanticAnalysis((SemanticAnalyzer) sem, tree, ctx);


