Package org.apache.hadoop.hive.metastore.api

Examples of org.apache.hadoop.hive.metastore.api.Table$Isset

The snippets below, drawn from Hive metastore and Howl (pre-HCatalog) code, show the Thrift-generated Table struct, whose nested Isset class records which of its optional fields have been set, being built by hand, fetched and inspected, altered, and dropped through the metastore client and server APIs.


  public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException{

    howlDriver.run("drop table junit_sem_analysis");
    howlDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");

    Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
    assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
    assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());

    Map<String,String> tblParams = tbl.getParameters();
    assertEquals(RCFileInputDriver.class.getName(), tblParams.get("howl.isd"));
    assertEquals(RCFileOutputDriver.class.getName(), tblParams.get("howl.osd"));

    howlDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
        "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
    howlDriver.run("desc extended junit_sem_analysis");

    tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
    assertEquals("org.apache.hadoop.hive.ql.io.RCFileInputFormat",tbl.getSd().getInputFormat());
    assertEquals("org.apache.hadoop.hive.ql.io.RCFileOutputFormat",tbl.getSd().getOutputFormat());
    tblParams = tbl.getParameters();
    assertEquals("mydriver", tblParams.get("howl.isd"));
    assertEquals("yourdriver", tblParams.get("howl.osd"));

    howlDriver.run("drop table junit_sem_analysis");
  }


    query =  "create table junit_sem_analysis (a int) partitioned by (b string)  stored as " +
        "INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
        "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
    assertEquals(0,howlDriver.run(query).getResponseCode());

    Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
    assertEquals("org.apache.hadoop.hive.ql.io.RCFileInputFormat",tbl.getSd().getInputFormat());
    assertEquals("org.apache.hadoop.hive.ql.io.RCFileOutputFormat",tbl.getSd().getOutputFormat());
    Map<String, String> tblParams = tbl.getParameters();
    assertEquals("mydriver", tblParams.get("howl.isd"));
    assertEquals("yourdriver", tblParams.get("howl.osd"));

    howlDriver.run("drop table junit_sem_analysis");
  }
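
The two tests above read the input/output formats off the StorageDescriptor and the howl.isd/howl.osd driver parameters off the parameter map of the fetched Table. Because Table is a Thrift-generated struct (the nested Isset class this page is indexed under tracks exactly these optional fields), the same inspection can be written defensively with the generated isSet* accessors. A minimal sketch under that assumption; the helper name inspectTable and the printed output are illustrative only:

import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.Table;

public class TableInspector {

  // Illustrative helper: prints the storage formats and the Howl driver parameters
  // of a table in the default database, guarding each optional field with the
  // Thrift-generated isSet* accessors instead of assuming it is populated.
  public static void inspectTable(HiveConf conf, String tableName) throws Exception {
    HiveMetaStoreClient msc = new HiveMetaStoreClient(conf);
    Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);

    if (tbl.isSetSd()) {                    // storage descriptor is absent only on corrupted metadata
      System.out.println("input format:  " + tbl.getSd().getInputFormat());
      System.out.println("output format: " + tbl.getSd().getOutputFormat());
    }
    if (tbl.isSetParameters()) {            // table-level parameters are optional
      Map<String, String> params = tbl.getParameters();
      System.out.println("howl.isd = " + params.get("howl.isd"));
      System.out.println("howl.osd = " + params.get("howl.osd"));
    }
    msc.close();
  }
}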

    assertNotNull(client.getDatabase(dbName).getLocationUri());

    List<FieldSchema> fields = new ArrayList<FieldSchema>();
    fields.add(new FieldSchema("colname", Constants.STRING_TYPE_NAME, ""));

    Table tbl = new Table();
    tbl.setDbName(dbName);
    tbl.setTableName(tblName);
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(fields);
    tbl.setSd(sd);

    //sd.setLocation("hdfs://tmp");
    sd.setParameters(new HashMap<String, String>());
    sd.getParameters().put("test_param_1", "Use this for comments etc");
    sd.setBucketCols(new ArrayList<String>(2));
    sd.getBucketCols().add("name");
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tbl.getTableName());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(
        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(
        org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
    tbl.setPartitionKeys(fields);

    Map<String, String> tableParams = new HashMap<String, String>();
    tableParams.put(HowlConstants.HOWL_OSD_CLASS, RCFileOutputDriver.class.getName());
    tableParams.put(HowlConstants.HOWL_ISD_CLASS, "testInputClass");
    tableParams.put("howl.testarg", "testArgValue");

    tbl.setParameters(tableParams);

    client.createTable(tbl);
    Path tblPath = new Path(client.getTable(dbName, tblName).getSd().getLocation());
    assertTrue(tblPath.getFileSystem(hiveConf).mkdirs(new Path(tblPath,"colname=p1")));
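
Condensed from the snippet above, a self-contained sketch of the same construction pattern: the column list doubles as the partition keys (as it does in the test), the StorageDescriptor and SerDeInfo are filled in by hand, and the assembled Table is handed to the metastore client. The database and table names are placeholders, and the metastore is assumed to supply a default location since none is set:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;

public class CreateTableSketch {

  public static void main(String[] args) throws Exception {
    HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());

    // one string column, reused as the partition key as in the snippet above
    List<FieldSchema> fields = new ArrayList<FieldSchema>();
    fields.add(new FieldSchema("colname", Constants.STRING_TYPE_NAME, ""));

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(fields);
    sd.setParameters(new HashMap<String, String>());
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName("example_table");
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(Constants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());

    Table tbl = new Table();
    tbl.setDbName("default");
    tbl.setTableName("example_table");
    tbl.setSd(sd);
    tbl.setPartitionKeys(fields);

    Map<String, String> tableParams = new HashMap<String, String>();
    tableParams.put("howl.testarg", "testArgValue");
    tbl.setParameters(tableParams);

    client.createTable(tbl);
    client.close();
  }
}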

      client.dropTable(databaseName, tableName);
    } catch (Exception e) {
      // ignore: dropTable can fail with NoSuchObjectException when the table does not exist
    }


    Table tbl = new Table();
    tbl.setDbName(databaseName);
    tbl.setTableName(tableName);
    tbl.setTableType("MANAGED_TABLE");
    StorageDescriptor sd = new StorageDescriptor();

    sd.setCols(getTableColumns());
    tbl.setPartitionKeys(getPartitionKeys());

    tbl.setSd(sd);

    sd.setBucketCols(new ArrayList<String>(2));
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tbl.getTableName());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(
        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(serdeClass);
    sd.setInputFormat(inputFormat);
    sd.setOutputFormat(outputFormat);

    Map<String, String> tableParams = new HashMap<String, String>();
    tableParams.put(HowlConstants.HOWL_ISD_CLASS, inputSD);
    tableParams.put(HowlConstants.HOWL_OSD_CLASS, outputSD);
    tbl.setParameters(tableParams);

    client.createTable(tbl);
  }
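
The helper above swallows the drop failure with an empty catch block. A slightly more explicit sketch of the same idempotent drop-then-create pattern, catching only the NoSuchObjectException that the comment anticipates (the method name recreate is illustrative):

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Table;

public class RecreateTableSketch {

  // Drops the table if it exists, then creates it from the supplied descriptor.
  static void recreate(HiveMetaStoreClient client, Table tbl) throws Exception {
    try {
      client.dropTable(tbl.getDbName(), tbl.getTableName());
    } catch (NoSuchObjectException e) {
      // nothing to drop: the table did not exist yet
    }
    client.createTable(tbl);
  }
}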

    if(retCode != 0) {
      throw new IOException("Failed to create table.");
    }

    // assert that the table created has no howl instrumentation, and that we're still able to read it.
    Table table = client.getTable("default", "junit_unparted_noisd");
    assertFalse(table.getParameters().containsKey(HowlConstants.HOWL_ISD_CLASS));
    assertTrue(table.getSd().getInputFormat().equals(HowlConstants.HIVE_RCFILE_IF_CLASS));
   
    PigServer server = new PigServer(ExecType.LOCAL, props);
    UDFContext.getUDFContext().setClientSystemProps();
    server.registerQuery("A = load '"+fullFileName+"' as (a:int);");
    server.registerQuery("store A into 'default.junit_unparted_noisd' using org.apache.howl.pig.HowlStorer();");
    server.registerQuery("B = load 'default.junit_unparted_noisd' using "+HowlLoader.class.getName()+"();");
    Iterator<Tuple> itr= server.openIterator("B");

    int i = 0;

    while(itr.hasNext()){
      Tuple t = itr.next();
      assertEquals(1, t.size());
      assertEquals(t.get(0), i);
      i++;
    }

    assertFalse(itr.hasNext());
    assertEquals(11, i);
   
    // assert that the table created still has no howl instrumentation
    Table table2 = client.getTable("default", "junit_unparted_noisd");
    assertFalse(table2.getParameters().containsKey(HowlConstants.HOWL_ISD_CLASS));
    assertTrue(table2.getSd().getInputFormat().equals(HowlConstants.HIVE_RCFILE_IF_CLASS));

    driver.run("drop table junit_unparted_noisd");
  }

    if(retCode != 0) {
      throw new IOException("Failed to create table.");
    }

    // assert that the table created has no howl instrumentation, and that we're still able to read it.
    Table table = client.getTable("default", "junit_parted_noisd");
   
    assertFalse(table.getParameters().containsKey(HowlConstants.HOWL_ISD_CLASS));
    assertTrue(table.getSd().getInputFormat().equals(HowlConstants.HIVE_RCFILE_IF_CLASS));

    PigServer server = new PigServer(ExecType.LOCAL, props);
    UDFContext.getUDFContext().setClientSystemProps();
    server.registerQuery("A = load '"+fullFileName+"' as (a:int);");
    server.registerQuery("store A into 'default.junit_parted_noisd' using org.apache.howl.pig.HowlStorer('b=42');");
    server.registerQuery("B = load 'default.junit_parted_noisd' using "+HowlLoader.class.getName()+"();");
    Iterator<Tuple> itr= server.openIterator("B");

    int i = 0;

    while(itr.hasNext()){
      Tuple t = itr.next();
      assertEquals(2, t.size());
      assertEquals(t.get(0), i);
      assertEquals(t.get(1), "42");
      i++;
    }

    assertFalse(itr.hasNext());
    assertEquals(11, i);
   
    // assert that the table created still has no howl instrumentation
    Table table2 = client.getTable("default", "junit_parted_noisd");
    assertFalse(table2.getParameters().containsKey(HowlConstants.HOWL_ISD_CLASS));
    assertTrue(table2.getSd().getInputFormat().equals(HowlConstants.HIVE_RCFILE_IF_CLASS));
   
    // assert that there is one partition present, and it had howl instrumentation inserted when it was created.
    Partition ptn = client.getPartition("default", "junit_parted_noisd", Arrays.asList("42"));

    assertNotNull(ptn);
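
Partition is generated from the same Thrift IDL as Table, so the same Isset bookkeeping applies to its optional fields. A small illustrative sketch of inspecting the partition fetched above defensively; the helper name and the printed output are assumptions, not part of the test:

import java.util.Arrays;

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Partition;

public class PartitionInspector {

  // Illustrative helper: fetches the single partition created above and prints its
  // location and parameters, guarding the optional fields with the generated
  // isSet* accessors.
  static void describePartition(HiveMetaStoreClient client) throws Exception {
    Partition ptn = client.getPartition("default", "junit_parted_noisd", Arrays.asList("42"));
    if (ptn.isSetSd()) {
      System.out.println("partition location: " + ptn.getSd().getLocation());
    }
    if (ptn.isSetParameters()) {
      System.out.println("partition parameters: " + ptn.getParameters());
    }
  }
}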

        throws NoSuchObjectException, MetaException {

      boolean success = false;
      boolean isExternal = false;
      Path tblPath = null;
      Table tbl = null;
      isExternal = false;
      boolean isIndexTable = false;
      try {
        ms.openTransaction();
        // drop any partitions
        tbl = get_table(dbname, name);
        if (tbl == null) {
          throw new NoSuchObjectException(name + " doesn't exist");
        }
        if (tbl.getSd() == null) {
          throw new MetaException("Table metadata is corrupted");
        }

        isIndexTable = isIndexTable(tbl);
        if (isIndexTable) {
          throw new RuntimeException(
              "The table " + name + " is an index table. Please do drop index instead.");
        }

        if (!isIndexTable) {
          try {
            List<Index> indexes = ms.getIndexes(dbname, name, Short.MAX_VALUE);
            while(indexes != null && indexes.size()>0) {
              for (Index idx : indexes) {
                this.drop_index_by_name(dbname, name, idx.getIndexName(), true);
              }
              indexes = ms.getIndexes(dbname, name, Short.MAX_VALUE);
            }
          } catch (TException e) {
            throw new MetaException(e.getMessage());
          }
        }
        isExternal = isExternal(tbl);
        if (tbl.getSd().getLocation() != null) {
          tblPath = new Path(tbl.getSd().getLocation());
        }

        if (!ms.dropTable(dbname, name)) {
          throw new MetaException("Unable to drop table");
        }
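
Before touching the filesystem, the handler above null-checks getSd() and getLocation() by hand and records whether the table is external, since in Hive an external table's data is left in place on drop. A condensed client-side sketch of the same guards, assuming MetaStoreUtils.isExternalTable is available in this Hive version (the helper name locationToDelete is illustrative):

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.Table;

public class DropTableChecks {

  // Sketch of the same defensive pattern as the handler above: only build a Path
  // when a location is actually recorded, and only return one for managed tables,
  // since external tables keep their data when dropped.
  static Path locationToDelete(Table tbl) {
    if (!tbl.isSetSd() || tbl.getSd().getLocation() == null) {
      return null;                       // nothing on disk to clean up
    }
    if (MetaStoreUtils.isExternalTable(tbl)) {
      return null;                       // external table: leave the data in place
    }
    return new Path(tbl.getSd().getLocation());
  }
}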

      return MetaStoreUtils.isIndexTable(table);
    }

    public Table get_table(final String dbname, final String name) throws MetaException,
        NoSuchObjectException {
      Table t = null;
      incrementCounter("get_table");
      logStartTableFunction("get_table", dbname, name);
      try {
        t = executeWithRetry(new Command<Table>() {
          @Override
          Table run(RawStore ms) throws Exception {
            Table t = ms.getTable(dbname, name);
            if (t == null) {
              throw new NoSuchObjectException(dbname + "." + name
                  + " table not found");
            }
            return t;
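
On the server side a missing table surfaces as NoSuchObjectException, as shown above. A minimal sketch of the corresponding caller-side pattern for an optional lookup through the metastore client (the helper name getTableOrNull is illustrative):

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Table;

public class GetTableSketch {

  // Sketch: a missing table is reported as NoSuchObjectException, so an
  // "optional" lookup is just a narrow catch around getTable.
  static Table getTableOrNull(HiveMetaStoreClient client, String db, String name) throws Exception {
    try {
      return client.getTable(db, name);
    } catch (NoSuchObjectException e) {
      return null;                        // table (or database) does not exist
    }
  }
}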

        part = new Partition();
        part.setDbName(dbName);
        part.setTableName(tableName);
        part.setValues(part_vals);

        Table tbl = ms.getTable(part.getDbName(), part.getTableName());
        if (tbl == null) {
          throw new InvalidObjectException(
              "Unable to add partition because table or database do not exist");
        }

        part.setSd(tbl.getSd());
        partLocation = new Path(tbl.getSd().getLocation(), Warehouse
            .makePartName(tbl.getPartitionKeys(), part_vals));
        part.getSd().setLocation(partLocation.toString());

        Partition old_part = null;
        try {
          old_part = get_partition(part.getDbName(), part

          old_part = null;
        }
        if (old_part != null) {
          throw new AlreadyExistsException("Partition already exists:" + part);
        }
        Table tbl = ms.getTable(part.getDbName(), part.getTableName());
        if (tbl == null) {
          throw new InvalidObjectException(
              "Unable to add partition because table or database do not exist");
        }

        String partLocationStr = part.getSd().getLocation();
        if (partLocationStr == null || partLocationStr.isEmpty()) {
          // set default location if not specified
          partLocation = new Path(tbl.getSd().getLocation(), Warehouse
              .makePartName(tbl.getPartitionKeys(), part.getValues()));

        } else {
          partLocation = wh.getDnsPath(new Path(partLocationStr));
        }
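
Both add_partition paths above derive the default partition location by appending Warehouse.makePartName(partition keys, values) to the table's storage location. A condensed sketch of that derivation, assuming the table's StorageDescriptor location and partition keys are set:

import java.util.List;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;

public class PartitionLocationSketch {

  // Sketch: default location for a new partition, e.g. <table location>/b=42
  // for a table partitioned by (b string) and values ["42"].
  static Path defaultPartitionLocation(Table tbl, List<String> partVals) throws MetaException {
    String partName = Warehouse.makePartName(tbl.getPartitionKeys(), partVals);
    return new Path(tbl.getSd().getLocation(), partName);
  }
}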
