Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.CreateTableDesc
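
All of the snippets on this page revolve around the same pattern: the semantic analyzer (or a hook) builds or inspects a CreateTableDesc and hands it to the DDL layer wrapped in a DDLWork. Below is a minimal sketch of that hand-off, not taken from the page itself; the helper name and its parameters are invented for illustration, and it assumes the same era of Hive APIs that the snippets below use.

import java.io.Serializable;
import java.util.HashSet;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;

public class CreateTableTaskSketch {

  // Wraps an already-populated CreateTableDesc in a DDLWork and queues it as a
  // root task, mirroring the CREATE_TABLE branch in the first snippet below.
  static void addCreateTableTask(CreateTableDesc crtTblDesc,
      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
      List<Task<? extends Serializable>> rootTasks, HiveConf conf) {
    rootTasks.add(TaskFactory.get(new DDLWork(inputs, outputs, crtTblDesc), conf));
  }
}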


        e.printStackTrace();
      }
    }

    // Handle the different types of CREATE TABLE commands
    CreateTableDesc crtTblDesc = null;
    switch (command_type) {

    case CREATE_TABLE: // REGULAR CREATE TABLE DDL
      tblProps = addDefaultProperties(tblProps);

      crtTblDesc = new CreateTableDesc(tableName, isExt, cols, partCols,
          bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape,
          rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment,
          storageFormat.inputFormat, storageFormat.outputFormat, location, shared.serde,
          storageFormat.storageHandler, shared.serdeProps, tblProps, ifNotExists);

      validateCreateTable(crtTblDesc);
      // outputs is empty, which means this create table happens in the current
      // database.
      SessionState.get().setCommandType(HiveOperation.CREATETABLE);
      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
          crtTblDesc), conf));
      break;

    case CTLT: // create table like <tbl_name>
      CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt,
          storageFormat.inputFormat, storageFormat.outputFormat, location,
          shared.serde, shared.serdeProps, ifNotExists, likeTableName);
      SessionState.get().setCommandType(HiveOperation.CREATETABLE);
      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
          crtTblLikeDesc), conf));
      break;

    case CTAS: // create table as select

      // Verify that the table does not already exist
      String databaseName;
      try {
        Table dumpTable = db.newTable(tableName);
        databaseName = dumpTable.getDbName();
        if (null == db.getDatabase(dumpTable.getDbName())) {
          throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dumpTable.getDbName()));
        }
        if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false)) {
          throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
        }
      } catch (HiveException e) {
        throw new SemanticException(e);
      }

      tblProps = addDefaultProperties(tblProps);

      crtTblDesc = new CreateTableDesc(databaseName, tableName, isExt, cols, partCols,
          bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape,
          rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment, storageFormat.inputFormat,
          storageFormat.outputFormat, location, shared.serde, storageFormat.storageHandler, shared.serdeProps,
          tblProps, ifNotExists);
      qb.setTableDesc(crtTblDesc);
View Full Code Here


      // CTAS case: the file output format and serde are defined by the create
      // table command
      // rather than taking the default value
      List<FieldSchema> field_schemas = null;
      CreateTableDesc tblDesc = qb.getTableDesc();
      if (tblDesc != null) {
        field_schemas = new ArrayList<FieldSchema>();
      }

      boolean first = true;
      for (ColumnInfo colInfo : colInfos) {
        String[] nm = inputRR.reverseLookup(colInfo.getInternalName());

        if (nm[1] != null) { // non-null column alias
          colInfo.setAlias(nm[1]);
        }

        if (field_schemas != null) {
          FieldSchema col = new FieldSchema();
          if (nm[1] != null) {
            col.setName(unescapeIdentifier(colInfo.getAlias()).toLowerCase()); // remove ``
          } else {
            col.setName(colInfo.getInternalName());
          }
          col.setType(colInfo.getType().getTypeName());
          field_schemas.add(col);
        }

        if (!first) {
          cols = cols.concat(",");
          colTypes = colTypes.concat(":");
        }

        first = false;
        cols = cols.concat(colInfo.getInternalName());

        // Replace VOID type with string when the output is a temp table or
        // local files.
        // A VOID type can be generated under the query:
        //
        // select NULL from tt;
        // or
        // insert overwrite local directory "abc" select NULL from tt;
        //
        // where there is no column type to which the NULL value should be
        // converted.
        //
        String tName = colInfo.getType().getTypeName();
        if (tName.equals(Constants.VOID_TYPE_NAME)) {
          colTypes = colTypes.concat(Constants.STRING_TYPE_NAME);
        } else {
          colTypes = colTypes.concat(tName);
        }
      }

      // update the create table descriptor with the resulting schema.
      if (tblDesc != null) {
        tblDesc.setCols(new ArrayList<FieldSchema>(field_schemas));
      }

      if (!ctx.isMRTmpFileURI(destStr)) {
        idToTableNameMap.put(String.valueOf(destTableId), destStr);
        currentTableId = destTableId;
View Full Code Here

    decideExecMode(rootTasks, ctx, globalLimitCtx);

    if (qb.isCTAS()) {
      // generate a DDL task and make it a dependent task of the leaf
      CreateTableDesc crtTblDesc = qb.getTableDesc();

      validateCreateTable(crtTblDesc);

      // Clear the output for CTAS since we don't need the output from the
      // mapredWork, the
View Full Code Here
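
The snippet above only notes that, for CTAS, a DDL task is generated and made dependent on the leaf task of the query plan, so the table is created only after the query has produced its data. A rough sketch of that chaining, assuming crtTblDesc, inputs, outputs, conf, and the plan's leafTasks are available from the surrounding analyzer (the variable names are illustrative only):

    Task<? extends Serializable> crtTblTask =
        TaskFactory.get(new DDLWork(inputs, outputs, crtTblDesc), conf);
    for (Task<? extends Serializable> leafTask : leafTasks) {
      // the CREATE TABLE DDL runs once the task producing the data has finished
      leafTask.addDependentTask(crtTblTask);
    }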


        if (rootTasks.size() == 0) {
            // There will be no DDL task created in the
            // CREATE TABLE IF NOT EXISTS case.
            return;
        }
        CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1))
                .getWork().getCreateTblDesc();
        if (desc == null) {
          // Desc will be null if it's CREATE TABLE LIKE; the information is
          // carried in a CreateTableLikeDesc instead. Currently, HCat disallows
          // CTLT in the pre-hook, so desc should never actually be null here.
          return;
        }
        Map<String, String> tblProps = desc.getTblProps();
        if (tblProps == null) {
            // tblProps will be null if the user didn't specify TBLPROPERTIES
            // in the CREATE TABLE command.
            tblProps = new HashMap<String, String>();
        }

        // first check if we will allow the user to create table.
        String storageHandler = desc.getStorageHandler();
        if (!StringUtils.isEmpty(storageHandler)) {
            try {
                HCatStorageHandler storageHandlerInst = HCatUtil
                        .getStorageHandler(context.getConf(),
                                                     desc.getStorageHandler(),
                                                     desc.getSerName(),
                                                     desc.getInputFormat(),
                                                     desc.getOutputFormat());
                //Authorization checks are performed by the storageHandler.getAuthorizationProvider(), if 
                //StorageDelegationAuthorizationProvider is used.
            } catch (IOException e) {
                throw new SemanticException(e);
            }
        }

        if (desc != null) {
          try {
            Table table = context.getHive().newTable(desc.getTableName());
            if (desc.getLocation() != null) {
              table.setDataLocation(new Path(desc.getLocation()).toUri());
            }
            if (desc.getStorageHandler() != null) {
              table.setProperty(
                org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
                desc.getStorageHandler());
            }
            for (Map.Entry<String, String> prop : tblProps.entrySet()) {
              table.setProperty(prop.getKey(), prop.getValue());
            }
            for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
              table.setSerdeParam(prop.getKey(), prop.getValue());
            }
            //TODO: set other Table properties as needed
 
            //authorize against the table operation so that location permissions can be checked if any
           
            if (HiveConf.getBoolVar(context.getConf(),
                HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
              authorize(table, Privilege.CREATE);
            }
          } catch (HiveException ex) {
            throw new SemanticException(ex);
          }
        }

        desc.setTblProps(tblProps);
        context.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, tableName);
    }
View Full Code Here

    if (rootTasks.size() == 0) {
      // There will be no DDL task created in the
      // CREATE TABLE IF NOT EXISTS case.
      return;
    }
    CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1))
      .getWork().getCreateTblDesc();
    if (desc == null) {
      // Desc will be null if it's CREATE TABLE LIKE; the information is
      // carried in a CreateTableLikeDesc instead. Currently, HCat disallows
      // CTLT in the pre-hook, so desc should never actually be null here.
      return;
    }
    Map<String, String> tblProps = desc.getTblProps();
    if (tblProps == null) {
      // tblProps will be null if the user didn't specify TBLPROPERTIES
      // in the CREATE TABLE command.
      tblProps = new HashMap<String, String>();
    }

    // first check if we will allow the user to create table.
    String storageHandler = desc.getStorageHandler();
    if (!StringUtils.isEmpty(storageHandler)) {
      try {
        HiveStorageHandler storageHandlerInst = HCatUtil
          .getStorageHandler(context.getConf(),
            desc.getStorageHandler(),
            desc.getSerName(),
            desc.getInputFormat(),
            desc.getOutputFormat());
        //Authorization checks are performed by the storageHandler.getAuthorizationProvider(), if
        //StorageDelegationAuthorizationProvider is used.
      } catch (IOException e) {
        throw new SemanticException(e);
      }
    }

    if (desc != null) {
      try {
        Table table = context.getHive().newTable(desc.getTableName());
        if (desc.getLocation() != null) {
          table.setDataLocation(new Path(desc.getLocation()));
        }
        if (desc.getStorageHandler() != null) {
          table.setProperty(
            org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
            desc.getStorageHandler());
        }
        for (Map.Entry<String, String> prop : tblProps.entrySet()) {
          table.setProperty(prop.getKey(), prop.getValue());
        }
        for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
          table.setSerdeParam(prop.getKey(), prop.getValue());
        }
        //TODO: set other Table properties as needed

        //authorize against the table operation so that location permissions can be checked if any

        if (HiveConf.getBoolVar(context.getConf(),
          HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
          authorize(table, Privilege.CREATE);
        }
      } catch (HiveException ex) {
        throw new SemanticException(ex);
      }
    }

    desc.setTblProps(tblProps);
    context.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, tableName);
  }
View Full Code Here

    if (rootTasks.size() == 0) {
      // There will be no DDL task created in the
      // CREATE TABLE IF NOT EXISTS case.
      return;
    }
    CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1))
      .getWork().getCreateTblDesc();
    if (desc == null) {
      // Desc will be null if it's CREATE TABLE LIKE; the information is
      // carried in a CreateTableLikeDesc instead. Currently, HCat disallows
      // CTLT in the pre-hook, so desc should never actually be null here.
      return;
    }
    Map<String, String> tblProps = desc.getTblProps();
    if (tblProps == null) {
      // tblProps will be null if the user didn't specify TBLPROPERTIES
      // in the CREATE TABLE command.
      tblProps = new HashMap<String, String>();
    }

    // first check if we will allow the user to create table.
    String storageHandler = desc.getStorageHandler();
    if (!StringUtils.isEmpty(storageHandler)) {
      try {
        HCatStorageHandler storageHandlerInst = HCatUtil
          .getStorageHandler(context.getConf(),
            desc.getStorageHandler(),
            desc.getSerName(),
            desc.getInputFormat(),
            desc.getOutputFormat());
        //Authorization checks are performed by the storageHandler.getAuthorizationProvider(), if
        //StorageDelegationAuthorizationProvider is used.
      } catch (IOException e) {
        throw new SemanticException(e);
      }
    }

    if (desc != null) {
      try {
        Table table = context.getHive().newTable(desc.getTableName());
        if (desc.getLocation() != null) {
          table.setDataLocation(new Path(desc.getLocation()));
        }
        if (desc.getStorageHandler() != null) {
          table.setProperty(
            org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
            desc.getStorageHandler());
        }
        for (Map.Entry<String, String> prop : tblProps.entrySet()) {
          table.setProperty(prop.getKey(), prop.getValue());
        }
        for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
          table.setSerdeParam(prop.getKey(), prop.getValue());
        }
        //TODO: set other Table properties as needed

        //authorize against the table operation so that location permissions can be checked if any

        if (HiveConf.getBoolVar(context.getConf(),
          HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
          authorize(table, Privilege.CREATE);
        }
      } catch (HiveException ex) {
        throw new SemanticException(ex);
      }
    }

    desc.setTblProps(tblProps);
    context.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, tableName);
  }
View Full Code Here

    count = 0;
    if (!isCreateTable) {
      return;
    }

    CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1)).getWork()
        .getCreateTblDesc();
    Map<String, String> tblProps = desc.getTblProps();
    if (tblProps == null) {
      tblProps = new HashMap<String, String>();
    }
    tblProps.put("createdBy", DummyCreateTableHook.class.getName());
    tblProps.put("Message", "Hive rocks!! Count: " + myCount);
View Full Code Here

  }

  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    CreateTableDesc desc = ((DDLTask)rootTasks.get(rootTasks.size()-1)).getWork().getCreateTblDesc();
    Map<String,String> tblProps = desc.getTblProps();
    if(tblProps == null) {
      tblProps = new HashMap<String, String>();
    }
    tblProps.put("createdBy", DummyCreateTableHook.class.getName());
    tblProps.put("Message", "Open Source rocks!!");
    desc.setTblProps(tblProps);
  }
View Full Code Here
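
The two hook snippets above share the same postAnalyze pattern: fetch the CreateTableDesc from the last root task (a DDLTask), adjust its table properties, and write them back. Below is a self-contained sketch in that spirit; the class name and the property it sets are invented, and such a hook would typically be registered through the hive.semantic.analyzer.hook configuration property.

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.ql.exec.DDLTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;

public class TagCreateTableHook extends AbstractSemanticAnalyzerHook {

  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    if (rootTasks.isEmpty()) {
      return; // nothing queued, e.g. CREATE TABLE IF NOT EXISTS on an existing table
    }
    Task<? extends Serializable> last = rootTasks.get(rootTasks.size() - 1);
    if (!(last instanceof DDLTask)) {
      return; // not a DDL statement this hook cares about
    }
    CreateTableDesc desc = ((DDLTask) last).getWork().getCreateTblDesc();
    if (desc == null) {
      return; // e.g. CREATE TABLE LIKE, which carries a CreateTableLikeDesc
    }
    Map<String, String> tblProps = desc.getTblProps();
    if (tblProps == null) {
      tblProps = new HashMap<String, String>();
    }
    // record which hook touched the table, like DummyCreateTableHook does above
    tblProps.put("createdBy", TagCreateTableHook.class.getName());
    desc.setTblProps(tblProps);
  }
}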

      AlterDatabaseDesc alterDatabaseDesc = work.getAlterDatabaseDesc();
      if (alterDatabaseDesc != null) {
        return alterDatabase(alterDatabaseDesc);
      }

      CreateTableDesc crtTbl = work.getCreateTblDesc();
      if (crtTbl != null) {
        return createTable(db, crtTbl);
      }

      CreateIndexDesc crtIndex = work.getCreateIndexDesc();
View Full Code Here
