Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.DDLWork
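
Every example below follows the same pattern: the semantic analyzer builds a *Desc descriptor for the DDL statement, wraps it in a DDLWork together with the read/write entity sets (getInputs()/getOutputs()), turns that work into a task with TaskFactory.get(..., conf), and adds the task to the plan (usually rootTasks). A minimal sketch of the pattern, reusing the CREATE ROLE descriptor from the last example below; the analyzer members rootTasks, conf, getInputs() and getOutputs() are assumed from the surrounding snippets rather than shown here:

      // Descriptor for the DDL operation, here CREATE ROLE (roleName is a placeholder value).
      String roleName = "example_role";
      RoleDDLDesc createRoleDesc = new RoleDDLDesc(roleName,
          RoleDDLDesc.RoleOperation.CREATE_ROLE);

      // Wrap the descriptor in a DDLWork with the analyzer's read/write entities,
      // let TaskFactory turn it into a DDLTask, and add it to the plan's root tasks.
      DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), createRoleDesc);
      rootTasks.add(TaskFactory.get(ddlWork, conf));

The first snippet is from the CTAS (create table as select) path: the outputs are cleared, a DDLTask is created for the CreateTableDesc, and that task is then attached as a dependent of every leaf task of the query plan.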


      // Clear the output for CTAS since we don't need the output from the
      // mapredWork; the DDLWork at the tail of the chain will have the output.
      getOutputs().clear();

      Task<? extends Serializable> crtTblTask = TaskFactory.get(new DDLWork(
          getInputs(), getOutputs(), crtTblDesc), conf);

      // Find all leaf tasks and make the DDLTask a dependent task of each of them.
      HashSet<Task<? extends Serializable>> leaves = new HashSet<Task<? extends Serializable>>();
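
The next snippet shows the switch over CREATE TABLE variants: the plain CREATE TABLE and CREATE TABLE LIKE branches each build their descriptor, record the command type, and add a DDLWork-backed root task; the CTAS case is handled separately, as in the previous example.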


      validateCreateTable(crtTblDesc);
      // outputs is empty, which means this create table happens in the current
      // database.
      SessionState.get().setCommandType(HiveOperation.CREATETABLE);
      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
          crtTblDesc), conf));
      break;

    case CTLT: // create table like <tbl_name>
      CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName,
          isExt, location, ifNotExists, likeTableName);
      SessionState.get().setCommandType(HiveOperation.CREATETABLE);
      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
          crtTblLikeDesc), conf));
      break;

    case CTAS: // create table as select
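
The CREATE VIEW case: a CreateViewDesc is built, the unparse translator is enabled, and the DDLWork task is added before the underlying SELECT statement is returned.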

    }

    createVwDesc = new CreateViewDesc(
      tableName, cols, comment, tblProps, ifNotExists);
    unparseTranslator.enable();
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        createVwDesc), conf));
    return selectStmt;
  }
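
Archiving and unarchiving a partition: an AlterTableSimpleDesc carrying either ARCHIVE or UNARCHIVE is wrapped in a DDLWork and added as a root task.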

      Map<String, String> partSpec = partSpecs.get(0);
      AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(
          db.getCurrentDatabase(), tblName, partSpec,
          (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
          archiveDesc), conf));

  }
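
The MSCK (metastore check) case builds an MsckDesc from the table name, the partition specs, the result file, and the repair flag, and schedules it the same way.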

      }
    }
    List<Map<String, String>> specs = getPartitionSpecs(ast);
    MsckDesc checkDesc = new MsckDesc(tableName, specs, ctx.getResFile(),
        repair);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        checkDesc), conf));
  }
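
GRANT/REVOKE ROLE: the role owner is taken from the session's authenticator and a GrantRevokeRoleDDL descriptor becomes the DDLWork payload.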

        && SessionState.get().getAuthenticator() != null) {
      roleOwnerName = SessionState.get().getAuthenticator().getUserName();
    }
    GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(grant,
        roles, principalDesc, roleOwnerName, PrincipalType.USER, true);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        grantRevokeRoleDDL), conf));
  }
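
SHOW GRANT: a ShowGrantDesc pointing at the session's result file is wrapped in a DDLWork.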

              + cols.toString());
    }
   
    ShowGrantDesc showGrant = new ShowGrantDesc(ctx.getResFile().toString(),
        principalDesc, privHiveObj, cols);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        showGrant), conf));
  }
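
GRANT privileges: the grantor user name comes from the authenticator, and a GrantDesc (including the grant option) is scheduled through a DDLWork.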

      userName = SessionState.get().getAuthenticator().getUserName();
    }

    GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc,
        principalDesc, userName, PrincipalType.USER, grantOption);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        grantDesc), conf));
  }
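
REVOKE privileges: the privilege object is resolved from the AST and a RevokeDesc is added as a DDLWork task.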

      ASTNode astChild = (ASTNode) ast.getChild(2);
      hiveObj = analyzePrivilegeObject(astChild);
    }
   
    RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        revokeDesc), conf));
  }
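
Finally, CREATE ROLE: a RoleDDLDesc with the CREATE_ROLE operation, again added as a root task through a DDLWork.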

  private void analyzeCreateRole(ASTNode ast) {
    String roleName = unescapeIdentifier(ast.getChild(0).getText());
    RoleDDLDesc createRoleDesc = new RoleDDLDesc(roleName,
        RoleDDLDesc.RoleOperation.CREATE_ROLE);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        createRoleDesc), conf));
  }
