Package org.apache.hadoop.hive.metastore.api

Examples of org.apache.hadoop.hive.metastore.api.Table$Isset
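
The snippets below all repeat one construction pattern: populate a Thrift-generated Table, attach a StorageDescriptor carrying the columns and SerDeInfo, and hand the result to a metastore client. A minimal sketch of that pattern follows, assuming a connected HiveMetaStoreClient named client and an existing database name dbName (both stand-in names, not taken from any single snippet; Constants is org.apache.hadoop.hive.serde.Constants, as in the snippets):

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName("example_table");

      StorageDescriptor sd = new StorageDescriptor();
      sd.setCols(java.util.Arrays.asList(
          new FieldSchema("name", Constants.STRING_TYPE_NAME, ""),
          new FieldSchema("income", Constants.INT_TYPE_NAME, "")));
      sd.setNumBuckets(1);
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setSerializationLib(
          org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      tbl.setSd(sd);

      // Fails with InvalidObjectException if the table name or a column name
      // is not valid, as the last snippet on this page exercises.
      client.createTable(tbl);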


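      // From a metastore client round-trip test: register a composite type
      // (typ1 is created in the elided lines above), create a table that
      // reuses its fields as columns and is partitioned by (ds, hr), then
      // read it back and verify what was stored.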
          new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
      typ1.getFields().add(
          new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
      client.createType(typ1);

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(tblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(typ1.getFields());
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.getParameters().put("test_param_1", "Use this for comments etc");
      sd.setBucketCols(new ArrayList<String>(2));
      sd.getBucketCols().add("name");
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters().put(
          org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "9");
      sd.getSerdeInfo().setSerializationLib(
          org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());

      tbl.setPartitionKeys(new ArrayList<FieldSchema>(2));
      tbl.getPartitionKeys().add(
          new FieldSchema("ds",
              org.apache.hadoop.hive.serde.Constants.DATE_TYPE_NAME, ""));
      tbl.getPartitionKeys().add(
          new FieldSchema("hr",
              org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME, ""));

      client.createTable(tbl);

      Table tbl2 = client.getTable(dbName, tblName);
      assertEquals(dbName, tbl2.getDbName());
      assertEquals(tblName, tbl2.getTableName());
      assertEquals(typ1.getFields().size(), tbl2.getSd().getCols().size());
      assertFalse(tbl2.getSd().isCompressed());
      assertEquals(1, tbl2.getSd().getNumBuckets());

      assertEquals("Use this for comments etc", tbl2.getSd().getParameters()
          .get("test_param_1"));
      assertEquals("name", tbl2.getSd().getBucketCols().get(0));

      assertNotNull(tbl2.getPartitionKeys());
      assertEquals(2, tbl2.getPartitionKeys().size());
      assertEquals(Constants.DATE_TYPE_NAME, tbl2.getPartitionKeys().get(0)
          .getType());
      assertEquals(Constants.INT_TYPE_NAME, tbl2.getPartitionKeys().get(1)
          .getType());
      assertEquals("ds", tbl2.getPartitionKeys().get(0).getName());
      assertEquals("hr", tbl2.getPartitionKeys().get(1).getName());

      List<FieldSchema> fieldSchemas = client.getFields(dbName, tblName);
      assertNotNull(fieldSchemas);
      assertEquals(tbl.getSd().getCols().size(), fieldSchemas.size());
      for (FieldSchema fs : tbl.getSd().getCols()) {
        assertTrue(fieldSchemas.contains(fs));
      }


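      // From a partition test: the same table-building pattern, this time
      // partitioned by (p1, p2, p3); "cols" and "vals" come from the elided
      // part of the test.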
      ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>(3);
      partCols.add(new FieldSchema("p1", Constants.STRING_TYPE_NAME, ""));
      partCols.add(new FieldSchema("p2", Constants.STRING_TYPE_NAME, ""));
      partCols.add(new FieldSchema("p3", Constants.INT_TYPE_NAME, ""));

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(tblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(cols);
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.setBucketCols(new ArrayList<String>());
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters()
          .put(Constants.SERIALIZATION_FORMAT, "1");
      sd.setSortCols(new ArrayList<Order>());

      tbl.setPartitionKeys(partCols);
      client.createTable(tbl);

      tbl = client.getTable(dbName, tblName);

      add_partition(client, tbl, vals, "part1");

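      // Variant with a single string partition column p1; "cols" is built
      // in the elided lines above (only the c2 column is visible here).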
      cols.add(new FieldSchema("c2", Constants.INT_TYPE_NAME, ""));

      ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>(1);
      partCols.add(new FieldSchema("p1", Constants.STRING_TYPE_NAME, ""));

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(tblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(cols);
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.setBucketCols(new ArrayList<String>());
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters()
          .put(Constants.SERIALIZATION_FORMAT, "1");
      sd.setSortCols(new ArrayList<Order>());

      tbl.setPartitionKeys(partCols);
      client.createTable(tbl);

      tbl = client.getTable(dbName, tblName);

      add_partition(client, tbl, vals, "part1");

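      // Variant with two string partition columns, p1 and p2.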
      ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>(2);
      partCols.add(new FieldSchema("p1", Constants.STRING_TYPE_NAME, ""));
      partCols.add(new FieldSchema("p2", Constants.STRING_TYPE_NAME, ""));

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(tblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(cols);
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.setBucketCols(new ArrayList<String>());
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters()
          .put(Constants.SERIALIZATION_FORMAT, "1");
      sd.setSortCols(new ArrayList<Order>());

      tbl.setPartitionKeys(partCols);
      client.createTable(tbl);

      tbl = client.getTable(dbName, tblName);

      add_partition(client, tbl, vals, "part1");

    return success;
  }

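  // From ObjectStore: look up a table inside a metastore transaction and
  // convert the JDO-backed MTable into a Thrift Table.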
  public Table getTable(String dbName, String tableName) throws MetaException {
    boolean commited = false;
    Table tbl = null;
    try {
      openTransaction();
      tbl = convertToTable(getMTable(dbName, tableName));
      commited = commitTransaction();
    } finally {
      if (!commited) {
        rollbackTransaction();
      }
    }
    return tbl;
  }

    return mtbl;
  }

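  // Conversion from the JDO model class (MTable) to the Thrift Table.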
  private Table convertToTable(MTable mtbl) throws MetaException {
    if(mtbl == null) return null;
    return new Table(mtbl.getTableName(),
        mtbl.getDatabase().getName(),
        mtbl.getOwner(),
        mtbl.getCreateTime(),
        mtbl.getLastAccessTime(),
        mtbl.getRetention(),
        convertToStorageDescriptor(mtbl.getSd()),
        convertToFieldSchemas(mtbl.getPartitionKeys()),
        mtbl.getParameters());
  }

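  // Builds an in-memory Table from plain column names; every column and
  // partition key defaults to the string type.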
    if (columns == null) {
      throw new MetaException("columns not specified for table " + name);
    }
   
    Table tTable = new Table();
    tTable.setTableName(name);
    tTable.setSd(new StorageDescriptor());
    StorageDescriptor sd = tTable.getSd();
    sd.setSerdeInfo(new SerDeInfo());
    SerDeInfo serdeInfo = sd.getSerdeInfo();
    serdeInfo.setSerializationLib(LazySimpleSerDe.class.getName());
    serdeInfo.setParameters(new HashMap<String, String>());
    serdeInfo.getParameters().put(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
   
    List<FieldSchema>  fields = new ArrayList<FieldSchema>();
    sd.setCols(fields);
    for (String col: columns) {
      FieldSchema field = new FieldSchema(col, org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME, "'default'");
      fields.add(field);
    }

    tTable.setPartitionKeys(new ArrayList<FieldSchema>());
    for (String partCol : partCols) {
      FieldSchema part = new FieldSchema();
      part.setName(partCol);
      part.setType(org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME); // default partition key
      tTable.getPartitionKeys().add(part);
    }
    sd.setNumBuckets(-1);
    return tTable;
  }

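  // From MetaStoreUtils: reconstruct a Table from a Properties-based schema
  // (name, location, I/O formats, partition columns, serde settings).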
  public static String getMapType(String k, String v) {
    return "map<" + k +"," + v + ">";
  }
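
  // e.g. getMapType("string", "int") returns "map<string,int>"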

  public static Table getTable(Configuration conf, Properties schema) throws MetaException {
    Table t = new Table();
    t.setSd(new StorageDescriptor());
    t.setTableName(schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME));
    t.getSd().setLocation(schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_LOCATION));
    t.getSd().setInputFormat(schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_INPUT_FORMAT,
          org.apache.hadoop.mapred.SequenceFileInputFormat.class.getName()));
    t.getSd().setOutputFormat(schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_OUTPUT_FORMAT,
          org.apache.hadoop.mapred.SequenceFileOutputFormat.class.getName()));
    t.setPartitionKeys(new ArrayList<FieldSchema>());
    t.setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
    String part_cols_str = schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
    if (part_cols_str != null && (part_cols_str.trim().length() != 0)) {
      String [] part_keys = part_cols_str.trim().split("/");
      for (String key: part_keys) {
        FieldSchema part = new FieldSchema();
        part.setName(key);
        part.setType(org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME); // default partition key
        t.getPartitionKeys().add(part);
      }
    }
    t.getSd().setNumBuckets(Integer.parseInt(schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.BUCKET_COUNT, "-1")));
    String bucketFieldName = schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.BUCKET_FIELD_NAME);
    t.getSd().setBucketCols(new ArrayList<String>(1));
    if ((bucketFieldName != null) && (bucketFieldName.trim().length() != 0)) {
      t.getSd().getBucketCols().add(bucketFieldName);
    }
   
    t.getSd().setSerdeInfo(new SerDeInfo());
    t.getSd().getSerdeInfo().setParameters(new HashMap<String, String>());
    t.getSd().getSerdeInfo().setName(t.getTableName());
    t.getSd().getSerdeInfo().setSerializationLib(schema.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB));
    setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.serde.Constants.SERIALIZATION_CLASS);
    setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT);
    if(org.apache.commons.lang.StringUtils.isNotBlank(schema.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_CLASS))) {
      setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_SERDE);
    }
    // needed for MetadataTypedColumnSetSerDe and LazySimpleSerDe
    setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS);
    // needed for LazySimpleSerDe
    setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMN_TYPES);
    // needed for DynamicSerDe
    setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL);
     
    String colstr = schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS);
    List<FieldSchema>  fields = new ArrayList<FieldSchema>();
    if(colstr != null) {
      String[] cols =  colstr.split(",");
      for (String colName : cols) {
        FieldSchema col = new FieldSchema(colName, org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME, "'default'");
        fields.add(col);
      }
    }
   
    if(fields.size() == 0) {
      // get the fields from serde
      try {
        fields = getFieldsFromDeserializer(t.getTableName(), getDeserializer(conf, schema));
      } catch (SerDeException e) {
        LOG.error(StringUtils.stringifyException(e));
        throw new MetaException("Invalid serde or schema. " + e.getMessage());
      }
    }
    t.getSd().setCols(fields);
   
    t.setOwner(schema.getProperty("creator"));
   
    // remove all the used up parameters to find out the remaining parameters
    schema.remove(Constants.META_TABLE_NAME);
    schema.remove(Constants.META_TABLE_LOCATION);
    schema.remove(Constants.FILE_INPUT_FORMAT);
    schema.remove(Constants.FILE_OUTPUT_FORMAT);
    schema.remove(Constants.META_TABLE_PARTITION_COLUMNS);
    schema.remove(Constants.BUCKET_COUNT);
    schema.remove(Constants.BUCKET_FIELD_NAME);
    schema.remove(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_CLASS);
    schema.remove(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT);
    schema.remove(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
    schema.remove(Constants.META_TABLE_SERDE);
    schema.remove(Constants.META_TABLE_COLUMNS);
    schema.remove(Constants.META_TABLE_COLUMN_TYPES);
   
    // add the remaining unknown parameters to the table's parameters
    t.setParameters(new HashMap<String, String>());
    for(Entry<Object, Object> e : schema.entrySet()) {
     t.getParameters().put(e.getKey().toString(), e.getValue().toString());
    }

    return t;
  }

  /**
   * Builds a Partition object for the given table and partition values.
   * @throws NoSuchObjectException
   */
  private Partition getPartitionObject(String dbName, String tableName, List<String> partVals)
      throws MetaException, TException, NoSuchObjectException {
    Properties schema = this.getSchema(tableName);
    Table tbl = MetaStoreUtils.getTable(conf, schema);
    List<FieldSchema> partKeys = tbl.getPartitionKeys();
    if(partKeys.size() != partVals.size()) {
      throw new MetaException("Invalid partition key values: " + partVals);
    }
    LinkedHashMap<String, String> pm = new LinkedHashMap<String, String>(partKeys.size());
    for (int i=0; i < partKeys.size(); i++) {
      if(partVals.get(i) == null || partVals.get(i).length() == 0) {
        throw new MetaException("Invalid partition spec: " + partVals);
      }
      pm.put(partKeys.get(i).getName(), partVals.get(i));
    }
    Path partPath = wh.getPartitionPath(dbName, tableName, pm);
    Partition tPartition = new Partition();
    tPartition.setValues(partVals);
    tPartition.setSd(tbl.getSd()); // TODO: get a copy
    tPartition.setParameters(new HashMap<String, String>());
    tPartition.getSd().setLocation(partPath.toString());
    return tPartition;
  }

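      // Negative test: createTable and alter_table must reject invalid
      // column and table names such as "n-ame", "in.come", and invTblName.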
      ArrayList<FieldSchema> invCols = new ArrayList<FieldSchema>(2);
      invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, ""));
      invCols.add(new FieldSchema("in.come", Constants.INT_TYPE_NAME, ""));

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(invTblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(invCols);
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.getParameters().put("test_param_1", "Use this for comments etc");
      sd.setBucketCols(new ArrayList<String>(2));
      sd.getBucketCols().add("name");
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters().put(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
      boolean failed = false;
      try {
        client.createTable(tbl);
      } catch (InvalidObjectException ex) {
        failed = true;
      }
      if (!failed) {
        fail("Able to create table with invalid name: " + invTblName);
      }
      ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
      cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
      cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));

      // create a valid table
      tbl.setTableName(tblName);
      tbl.getSd().setCols(cols);
      client.createTable(tbl);
     
      // now try an invalid alter_table
      Table tbl2 = client.getTable(dbName, tblName);
      failed = false;
      try {
        tbl2.setTableName(invTblName);
        tbl2.getSd().setCols(invCols);
        client.alter_table(dbName, tblName, tbl2);
      } catch (InvalidOperationException ex) {
        failed = true;
      }
      if (!failed) {
        fail("Able to rename table with invalid name: " + invTblName);
      }
      // try a valid alter table
      tbl2.setTableName(tblName);
      tbl2.getSd().setCols(cols);
      tbl2.getSd().setNumBuckets(32);
      client.alter_table(dbName, tblName, tbl2);
      Table tbl3 = client.getTable(dbName, tblName);
      assertEquals("Alter table didn't succeed. Num buckets ", tbl2.getSd().getNumBuckets(), tbl3.getSd().getNumBuckets());
    } catch (Exception e) {
      System.err.println(StringUtils.stringifyException(e));
      System.err.println("testSimpleTable() failed.");
      throw e;
    }
