Package com.facebook.hiveio.common

Examples of com.facebook.hiveio.common.HiveTableDesc
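
As the snippets below show, HiveTableDesc pairs a database name with a table name to identify a Hive table. A minimal sketch of that usage, relying only on the two-argument constructor and the getDatabaseName()/getTableName() accessors that appear in the snippets (imports are omitted here, as in the snippets themselves):

    // Sketch: describe the test table used elsewhere on this page.
    HiveTableDesc tableDesc = new HiveTableDesc("default", "hive_io_test");
    String db = tableDesc.getDatabaseName();   // "default"
    String tbl = tableDesc.getTableName();     // "hive_io_test"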


    Map<String, Integer> partitionToIndex = Maps.newHashMap();
    for (String partitionName : partitionNames) {
      partitionToIndex.put(partitionName, index++);
    }

    HiveTableDesc hiveTableDesc = new HiveTableDesc(table.getDbName(), table.getTableName());
    return new HiveTableSchemaImpl(hiveTableDesc, partitionToIndex, columnToIndex, hiveTypes);
  }


   * Get the name of the table we're writing to
   *
   * @return HiveTableDesc for the table
   */
  private static HiveTableDesc getHiveTableName() {
    return new HiveTableDesc("default", "hive_io_test");
  }

    hapi.setMyProfileId(DEFAULT_PROFILE_ID);

    List<InputSplit> splits = hapi.getSplits(new JobContext(hiveConf, new JobID()));
    LOG.info("Have {} splits to read", splits.size());

    HiveTableDesc hiveTableDesc = new HiveTableDesc(args.inputTable.database,
        args.inputTable.table);
    HiveTableSchema schema = HiveTableSchemas.lookup(client, hiveConf,
        hiveTableDesc);
    chooseRowParser(schema);
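
Stripped of the surrounding job setup, the schema lookup in the snippet above reduces to the sketch below; the HiveTableSchemas.lookup(client, hiveConf, tableDesc) call and its arguments are the ones used there, while the literal database and table names are only illustrative:

    // Sketch: resolve the table's schema through the metastore client,
    // as in the snippet above ('client' and 'hiveConf' come from context).
    HiveTableDesc tableDesc = new HiveTableDesc("default", "hive_io_test");
    HiveTableSchema schema = HiveTableSchemas.lookup(client, hiveConf, tableDesc);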

   */
  public List<InputSplit> getSplits(Configuration conf,
    HiveInputDescription inputDesc, ThriftHiveMetastore.Iface client)
    throws IOException
  {
    HiveTableDesc tableDesc = inputDesc.getTableDesc();
    Table table;
    try {
      table = client.get_table(tableDesc.getDatabaseName(), tableDesc.getTableName());
      // CHECKSTYLE: stop IllegalCatch
    } catch (Exception e) {
      // CHECKSTYLE: resume IllegalCatch
      throw new IOException(e);
    }

      // table without partitions
      partitions.add(InputPartition.newFromHiveTable(table));
    } else {
      // table with partitions, find matches to user filter.
      List<Partition> hivePartitions;
      HiveTableDesc tableDesc = inputDesc.getTableDesc();
      try {
        hivePartitions = client.get_partitions_by_filter(tableDesc.getDatabaseName(),
            tableDesc.getTableName(), inputDesc.getPartitionFilter(), (short) -1);
        // CHECKSTYLE: stop IllegalCatch
      } catch (Exception e) {
        // CHECKSTYLE: resume IllegalCatch
        throw new IOException(e);
      }
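
The third argument to get_partitions_by_filter above is a Hive partition-filter expression, and the trailing (short) -1 asks the metastore for all matching partitions. A hypothetical filter value plugged into the same call (the ds partition column is an assumption, not something these snippets define; the database and table names are the test ones from earlier):

    // Hypothetical: inside the same try/catch as above, fetch every partition
    // of default.hive_io_test whose "ds" partition column equals 2013-01-01.
    hivePartitions = client.get_partitions_by_filter(
        "default", "hive_io_test", "ds=\"2013-01-01\"", (short) -1);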

  /**
   * Constructor
   */
  public HiveTableSchemaImpl() {
    tableName = new HiveTableDesc("_unknown_", "_unknown_");
    partitionPositions = Maps.newHashMap();
    columnPositions = Maps.newHashMap();
    hiveTypes = new HiveType[0];
  }

    Writables.readStrIntMap(in, partitionPositions);
    Writables.readStrIntMap(in, columnPositions);
    numColumns = sizeFromIndexes(columnPositions);
    String dbName = WritableUtils.readString(in);
    String tblName = WritableUtils.readString(in);
    tableName = new HiveTableDesc(dbName, tblName);
    hiveTypes = Writables.readEnumArray(in, HiveType.class);
  }
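
The no-argument constructor shown earlier (with its "_unknown_"/"_unknown_" placeholder HiveTableDesc) and this readFields-style method together suggest the usual Hadoop Writable pattern: build an empty schema, then populate it from a DataInput. A hedged sketch of that round trip, assuming HiveTableSchemaImpl exposes readFields(DataInput) publicly as Writable implementations normally do:

    // Sketch (assumption: HiveTableSchemaImpl implements Hadoop's Writable).
    HiveTableSchemaImpl schema = new HiveTableSchemaImpl();  // placeholder table name
    schema.readFields(in);  // 'in' is a DataInput; fills positions, table name, types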
