Examples of ReadEntity
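ReadEntity marks a table or partition that an operation reads; its counterpart WriteEntity marks what the operation writes. Hive's semantic analyzers and DDL tasks add these objects to the inputs/outputs sets, which are later handed to execution hooks and authorization checks. The snippets below all follow that pattern. As a minimal sketch of the pattern in isolation (the class and method names here are illustrative, not part of Hive; newer Hive releases may also require a WriteEntity.WriteType argument for writes):

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

// Illustrative helper, not part of Hive itself.
public class EntityTracking {
  private final Set<ReadEntity> inputs = new HashSet<ReadEntity>();
  private final Set<WriteEntity> outputs = new HashSet<WriteEntity>();

  // An operation that reads and rewrites a whole table registers it on both sides.
  void trackTable(Table tab) {
    inputs.add(new ReadEntity(tab));
    outputs.add(new WriteEntity(tab));
  }

  // The same pattern at partition granularity.
  void trackPartition(Partition part) {
    inputs.add(new ReadEntity(part));
    outputs.add(new WriteEntity(part));
  }

  public Set<ReadEntity> getInputs() {
    return inputs;
  }

  public Set<WriteEntity> getOutputs() {
    return outputs;
  }
}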


Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    // The write entity contains the new table. This is needed for rename:
    // both the old and the new table names are passed.
    if (part != null) {
      work.getInputs().add(new ReadEntity(part));
      work.getOutputs().add(new WriteEntity(part));
    } else if (allPartitions != null) {
      for (Partition tmpPart : allPartitions) {
        work.getInputs().add(new ReadEntity(tmpPart));
        work.getOutputs().add(new WriteEntity(tmpPart));
      }
    } else {
      work.getInputs().add(new ReadEntity(oldTbl));
      work.getOutputs().add(new WriteEntity(tbl));
    }
    return 0;
  }

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    // Signal an error if the table/view does not exist and we are
    // configured not to fail silently.
    boolean throwException =
        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
    Table tab = getTable(tableName, throwException);
    if (tab != null) {
      inputs.add(new ReadEntity(tab));
      outputs.add(new WriteEntity(tab));
    }

    DropTableDesc dropTblDesc = new DropTableDesc(
        tableName, expectView, ifExists, true);

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
      AlterTableDesc desc) throws SemanticException {
    Table tab = getTable(tableName, true);
    if (partSpec == null || partSpec.isEmpty()) {
      inputs.add(new ReadEntity(tab));
      outputs.add(new WriteEntity(tab));
    } else {
      inputs.add(new ReadEntity(tab));
      if (desc == null || desc.getOp() != AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE) {
        Partition part = getPartition(tab, partSpec, true);
        outputs.add(new WriteEntity(part));
      } else {
        // ... (rest of this branch omitted in the excerpt)

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    Table tab = getTable(tableName);
    if (tab.getTableType() == org.apache.hadoop.hive.metastore.TableType.INDEX_TABLE) {
      throw new SemanticException(ErrorMsg.SHOW_CREATETABLE_INDEX.getMsg(tableName
          + " has table type INDEX_TABLE"));
    }
    inputs.add(new ReadEntity(tab));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        showCreateTblDesc), conf));
    setFetchTask(createFetchTask(showCreateTblDesc.getSchema()));
  }

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    default:
      break;
    }

    Table tab = getTable(dbName, tableName, true);
    inputs.add(new ReadEntity(tab));

    showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), dbName, tableName);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        showColumnsDesc), conf));
    setFetchTask(createFetchTask(showColumnsDesc.getSchema()));

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    if (newPartSpec == null) {
      throw new SemanticException("RENAME PARTITION Missing Destination" + ast);
    }
    Table tab = getTable(tblName, true);
    validateAlterTableType(tab, AlterTableTypes.RENAMEPARTITION);
    inputs.add(new ReadEntity(tab));

    List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
    partSpecs.add(oldPartSpec);
    partSpecs.add(newPartSpec);
    addTablePartsOutputs(tblName, partSpecs);

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    Table tab = getTable(tblName, true);
    if (tab.getBucketCols() == null || tab.getBucketCols().isEmpty()) {
      throw new SemanticException(ErrorMsg.ALTER_BUCKETNUM_NONBUCKETIZED_TBL.getMsg());
    }
    validateAlterTableType(tab, AlterTableTypes.ALTERBUCKETNUM);
    inputs.add(new ReadEntity(tab));

    int bucketNum = Integer.parseInt(ast.getChild(0).getText());
    AlterTableDesc alterBucketNum = new AlterTableDesc(tblName, partSpec, bucketNum);

    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        alterBucketNum), conf));

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
    // get table metadata
    List<PartitionSpec> partSpecs = getFullPartitionSpecs(ast);
    Table tab = getTable(tblName, true);
    validateAlterTableType(tab, AlterTableTypes.DROPPARTITION, expectView);
    inputs.add(new ReadEntity(tab));

    // Find out if all partition columns are strings. This is needed for JDO
    boolean stringPartitionColumns = true;
    List<FieldSchema> partCols = tab.getPartCols();

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    Table tab = null;

    // check if table exists.
    try {
      tab = db.getTable(SessionState.get().getCurrentDatabase(), tblName, true);
      inputs.add(new ReadEntity(tab));
    } catch (HiveException e) {
      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
    }

    // validate the DDL is a valid operation on the table.

Examples of org.apache.hadoop.hive.ql.hooks.ReadEntity

    boolean ifNotExists = ast.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS;

    Table tab = getTable(tblName, true);
    boolean isView = tab.isView();
    validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView);
    inputs.add(new ReadEntity(tab));

    List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();

    int numCh = ast.getChildCount();
    int start = ifNotExists ? 2 : 1;
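The inputs/outputs sets populated in the examples above are what Hive's execution hooks ultimately receive. For context on the consuming side, here is a rough sketch of a post-execution hook that walks both sets, assuming the standard ExecuteWithHookContext interface; the hook class name is made up, and it would be registered through hive.exec.post.hooks:

import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

// Illustrative hook that prints every entity the query read or wrote.
public class LogEntitiesHook implements ExecuteWithHookContext {
  @Override
  public void run(HookContext hookContext) throws Exception {
    Set<ReadEntity> inputs = hookContext.getInputs();
    Set<WriteEntity> outputs = hookContext.getOutputs();
    for (ReadEntity input : inputs) {
      System.out.println("read:  " + input.getName());
    }
    for (WriteEntity output : outputs) {
      System.out.println("write: " + output.getName());
    }
  }
}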