Examples of CreateTableDesc


Examples of org.apache.hadoop.hive.ql.plan.CreateTableDesc

          qb.getMetaData().setDestForAlias(name, fname,
              (ast.getToken().getType() == HiveParser.TOK_DIR));

          CreateTableDesc localDirectoryDesc = new CreateTableDesc();
          boolean localDirectoryDescIsSet = false;
          int numCh = ast.getChildCount();
          for (int num = 1; num < numCh; num++) {
            ASTNode child = (ASTNode) ast.getChild(num);
            if (ast.getChild(num) != null) {
              switch (child.getToken().getType()) {
                case HiveParser.TOK_TABLEROWFORMAT:
                  rowFormatParams.analyzeRowFormat(shared, child);
                  localDirectoryDesc.setFieldDelim(rowFormatParams.fieldDelim);
                  localDirectoryDesc.setLineDelim(rowFormatParams.lineDelim);
                  localDirectoryDesc.setCollItemDelim(rowFormatParams.collItemDelim);
                  localDirectoryDesc.setMapKeyDelim(rowFormatParams.mapKeyDelim);
                  localDirectoryDesc.setFieldEscape(rowFormatParams.fieldEscape);
                  localDirectoryDesc.setNullFormat(rowFormatParams.nullFormat);
                  localDirectoryDescIsSet = true;
                  break;
                case HiveParser.TOK_TABLESERIALIZER:
                  ASTNode serdeChild = (ASTNode) child.getChild(0);
                  shared.serde = unescapeSQLString(serdeChild.getChild(0).getText());
                  localDirectoryDesc.setSerName(shared.serde);
                  localDirectoryDescIsSet = true;
                  break;
                case HiveParser.TOK_TBLSEQUENCEFILE:
                case HiveParser.TOK_TBLTEXTFILE:
                case HiveParser.TOK_TBLRCFILE:
                case HiveParser.TOK_TBLORCFILE:
                case HiveParser.TOK_TABLEFILEFORMAT:
                  storageFormat.fillStorageFormat(child, shared);
                  localDirectoryDesc.setOutputFormat(storageFormat.outputFormat);
                  localDirectoryDesc.setSerName(shared.serde);
                  localDirectoryDescIsSet = true;
                  break;
              }
            }
          }
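The analyzer loop above only fills in the descriptor as ROW FORMAT and SERDE clauses are parsed. As a minimal standalone sketch, the same setters can be exercised directly; the delimiter values and the SerDe class name below are illustrative and do not come from the source above:

import org.apache.hadoop.hive.ql.plan.CreateTableDesc;

public class LocalDirectoryDescSketch {
  public static void main(String[] args) {
    // Mirrors the setters the analyzer calls for the ROW FORMAT / SERDE
    // clauses of INSERT OVERWRITE [LOCAL] DIRECTORY.
    CreateTableDesc desc = new CreateTableDesc();
    desc.setFieldDelim(",");     // FIELDS TERMINATED BY ','
    desc.setCollItemDelim(":");  // COLLECTION ITEMS TERMINATED BY ':'
    desc.setMapKeyDelim("=");    // MAP KEYS TERMINATED BY '='
    desc.setLineDelim("\n");     // LINES TERMINATED BY '\n'
    desc.setFieldEscape("\\");   // ESCAPED BY '\'
    desc.setNullFormat("\\N");   // NULL DEFINED AS '\N'
    desc.setSerName("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"); // example SerDe
  }
}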

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

    createTableDesc crtTblDesc =
      new createTableDesc(tableName, isExt, cols, partCols, bucketCols,
                          sortCols, numBuckets,
                          fieldDelim, collItemDelim, mapKeyDelim, lineDelim,
                          comment, inputFormat, outputFormat, location, serde,
                          mapProp, ifNotExists);
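For orientation, a hedged sketch of calling this legacy constructor directly (requires java.util and org.apache.hadoop.hive.metastore.api.FieldSchema imports). The argument order is copied from the call site above; the element types (List<FieldSchema> for columns, a Map for table properties) and the use of null/-1 for unspecified clauses are inferred assumptions, not verified against this exact Hive revision:

    // Sketch only: types and placeholder values are assumptions.
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("viewtime", "int", null));
    cols.add(new FieldSchema("userid", "bigint", null));

    createTableDesc crtTblDesc =
        new createTableDesc("page_view",            // tableName
                            false,                  // isExt: managed, not EXTERNAL
                            cols,
                            null,                   // partCols: no PARTITIONED BY
                            null, null, -1,         // bucketCols, sortCols, numBuckets (unset)
                            "\t", null, null, "\n", // field/coll/mapKey/line delimiters
                            "sketch only",          // comment
                            null, null,             // inputFormat, outputFormat: defaults
                            null,                   // location: default warehouse path
                            null,                   // serde: default
                            null,                   // mapProp: no table properties
                            false);                 // ifNotExists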

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

    FileSystem fs;
    try {
      db = Hive.get(conf);
      fs = FileSystem.get(conf);

      createTableDesc crtTbl = work.getCreateTblDesc();
      if (crtTbl != null) {
        return createTable(db, crtTbl);
      }

      dropTableDesc dropTbl = work.getDropTblDesc();

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

    // Create the db
    Hive db;
    try {
      db = Hive.get(conf);

      createTableDesc crtTbl = work.getCreateTblDesc();
      if (crtTbl != null) {
        return createTable(db, crtTbl);
      }

      createTableLikeDesc crtTblLike = work.getCreateTblLikeDesc();

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

        Vector<ColumnInfo> colInfos = inputRR.getColumnInfos();

        // CTAS case: the file output format and serde are defined by the create table command
        // rather than taking the default value
        List<FieldSchema> field_schemas = null;
        createTableDesc tblDesc = qb.getTableDesc();
        if ( tblDesc != null )
          field_schemas = new ArrayList<FieldSchema>();

        boolean first = true;
        for (ColumnInfo colInfo : colInfos) {
          String[] nm = inputRR.reverseLookup(colInfo.getInternalName());

          if ( nm[1] != null ) { // non-null column alias
            colInfo.setAlias(nm[1]);
          }

          if ( field_schemas != null ) {
            FieldSchema col = new FieldSchema();
            if ( nm[1] != null ) {
              col.setName(colInfo.getAlias());
            } else {
              col.setName(colInfo.getInternalName());
            }
            col.setType(colInfo.getType().getTypeName());
            field_schemas.add(col);
          }

          if (!first) {
            cols = cols.concat(",");
            colTypes = colTypes.concat(":");
          }

          first = false;
          cols = cols.concat(colInfo.getInternalName());

          // Replace VOID type with string when the output is a temp table or local files.
          // A VOID type can be generated under the query:
          //
          //     select NULL from tt;
          // or
          //     insert overwrite local directory "abc" select NULL from tt;
          //
          // where there is no column type to which the NULL value should be converted.
          //
          String tName = colInfo.getType().getTypeName();
          if ( tName.equals(Constants.VOID_TYPE_NAME) )
            colTypes = colTypes.concat(Constants.STRING_TYPE_NAME);
          else
            colTypes = colTypes.concat(tName);
        }

        // update the create table descriptor with the resulting schema.
        if ( tblDesc != null )
          tblDesc.setCols(field_schemas);

        if (!ctx.isMRTmpFileURI(destStr)) {
          this.idToTableNameMap.put( String.valueOf(this.destTableId), destStr);
          currentTableId = this.destTableId;
          this.destTableId++;
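The VOID-to-STRING rule above, isolated as a fragment (uses the same FieldSchema and serde Constants classes imported by the analyzer; the column name is illustrative):

    FieldSchema col = new FieldSchema();
    col.setName("c0");                          // illustrative internal name
    String tName = Constants.VOID_TYPE_NAME;    // e.g. produced by "select NULL from tt"
    // A CTAS target declares no column types, so a NULL-only (VOID)
    // column is materialized as STRING.
    col.setType(tName.equals(Constants.VOID_TYPE_NAME)
                ? Constants.STRING_TYPE_NAME : tName);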

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

      for (Task<? extends Serializable> rootTask : rootTasks)
        generateCountersTask(rootTask);

    if ( qb.isCTAS() ) {
      // generate a DDL task and make it a dependent task of the leaf
      createTableDesc crtTblDesc = qb.getTableDesc();

      validateCreateTable(crtTblDesc);

      // Clear the output for CTAS since we don't need the output from the mapredWork, the
      // DDLWork at the tail of the chain will have the output
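A sketch of the chaining the comment describes, assuming leafTask is a handle to the plan's final task; addDependentTask is the standard Task API, and the DDLWork/TaskFactory calls match those used elsewhere in this analyzer:

    Task<? extends Serializable> ddlTask =
        TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblDesc), conf);
    leafTask.addDependentTask(ddlTask); // the DDL runs only after the query tasks finish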

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

    // Handle different types of CREATE TABLE command
    createTableDesc crtTblDesc = null;
    switch ( command_type ) {

      case CREATE_TABLE: // REGULAR CREATE TABLE DDL
        crtTblDesc =
          new createTableDesc(tableName, isExt, cols, partCols, bucketCols,
                              sortCols, numBuckets,
                              fieldDelim, fieldEscape,
                              collItemDelim, mapKeyDelim, lineDelim,
                              comment, inputFormat, outputFormat, location, serde,
                              mapProp, ifNotExists);

        validateCreateTable(crtTblDesc);
        rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblDesc), conf));
        break;

      case CTLT: // create table like <tbl_name>
        createTableLikeDesc crtTblLikeDesc =
          new createTableLikeDesc(tableName, isExt, location, ifNotExists, likeTableName);
        rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblLikeDesc), conf));
        break;

      case CTAS: // create table as select

        // check for existence of table. Throw an exception if it exists.
        try {
          Table tab = this.db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName,
                                       false); // do not throw exception if table does not exist

          if ( tab != null ) {
            throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
          }
        } catch (HiveException e) { // may be unable to get meta data
          throw new SemanticException(e);
        }

        crtTblDesc =
          new createTableDesc(tableName, isExt, cols, partCols, bucketCols,
                              sortCols, numBuckets,
                              fieldDelim, fieldEscape,
                              collItemDelim, mapKeyDelim, lineDelim,
                              comment, inputFormat, outputFormat, location, serde,
                              mapProp, ifNotExists);
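The CTAS branch's existence probe, repackaged as a standalone helper for clarity (the method name ensureTableAbsent is hypothetical; the getTable, ErrorMsg, and exception types are the ones used above):

    // Hypothetical helper wrapping the CTAS pre-check above.
    private void ensureTableAbsent(Hive db, String tableName) throws SemanticException {
      try {
        // Third argument false: return null instead of throwing when the table is absent.
        Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
        if (tab != null) {
          throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
        }
      } catch (HiveException e) { // the metastore may be unreachable
        throw new SemanticException(e);
      }
    }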

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

    if (likeTableName == null) {
      createTableDesc crtTblDesc =
        new createTableDesc(tableName, isExt, cols, partCols, bucketCols,
                            sortCols, numBuckets,
                            fieldDelim, fieldEscape,
                            collItemDelim, mapKeyDelim, lineDelim,
                            comment, inputFormat, outputFormat, location, serde,
                            mapProp, ifNotExists);

Examples of org.apache.hadoop.hive.ql.plan.createTableDesc

    createTableDesc crtTblDesc =
      new createTableDesc(tableName, isExt, cols, partCols, bucketCols,
                          sortCols, numBuckets,
                          fieldDelim, collItemDelim, mapKeyDelim, lineDelim,
                          comment, isSequenceFile, location, serde, mapProp);

    validateCreateTable(crtTblDesc);