Package: com.facebook.hiveio.schema

Examples of com.facebook.hiveio.schema.HiveTableSchema


    outputDesc.setTableDesc(hiveTableDesc);

    HiveInputDescription inputDesc = new HiveInputDescription();
    inputDesc.setTableDesc(hiveTableDesc);

    HiveTableSchema schema = HiveTableSchemas.lookup(hiveServer.getClient(),
        null, hiveTableDesc);

    List<HiveWritableRecord> writeRecords = Lists.newArrayList();
    HiveWritableRecord r1 = HiveRecordFactory.newWritableRecord(schema);
    writeRecords.add(r1);
View Full Code Here


        " ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'");

    HiveOutputDescription outputDesc = new HiveOutputDescription();
    outputDesc.setTableDesc(hiveTableDesc);

    HiveTableSchema schema = HiveTableSchemas.lookup(hiveServer.getClient(),
        null, hiveTableDesc);

    writeData(outputDesc, schema);

    HiveInputDescription inputDesc = new HiveInputDescription();
View Full Code Here

    HiveOutputDescription outputDesc = new HiveOutputDescription();
    outputDesc.putPartitionValue("ds", "foobar");
    outputDesc.setTableDesc(hiveTableDesc);

    HiveTableSchema schema = HiveTableSchemas.lookup(hiveServer.getClient(),
        null, hiveTableDesc);

    writeData(outputDesc, schema);

    HiveInputDescription inputDesc = new HiveInputDescription();
View Full Code Here

   * @return RowToBean
   */
  public static <X> RowToBean<X> rowToBean(HiveInputDescription inputDesc, Class<X> rowClass) {
    try {
      HiveConf conf = HiveUtils.newHiveConf(HiveInput.class);
      HiveTableSchema schema = HiveTableSchemas.lookup(conf, inputDesc.getTableDesc());
      RowToBean<X> rowToBean = new UnsafeRowToBean<X>(rowClass, schema);
      return rowToBean;
    } catch (IOException e) {
      throw new IllegalStateException(e);
    }
View Full Code Here

   * @param value data
   * @return hive record
   */
  public static HiveWritableRecord mapToHiveRecord(Configuration conf, MapWritable value) {
    try {
      HiveTableSchema schema = HiveTableSchemas.lookup(conf, getHiveTableName());
      HiveWritableRecord record = HiveRecordFactory.newWritableRecord(schema);
      for (Map.Entry<Writable, Writable> entry : value.entrySet()) {
        IntWritable intKey = (IntWritable) entry.getKey();
        LongWritable longValue = (LongWritable) entry.getValue();
        record.set(intKey.get(), longValue.get());
View Full Code Here

    List<InputSplit> splits = hapi.getSplits(new JobContext(hiveConf, new JobID()));
    LOG.info("Have {} splits to read", splits.size());

    HiveTableDesc hiveTableDesc = new HiveTableDesc(args.inputTable.database,
        args.inputTable.table);
    HiveTableSchema schema = HiveTableSchemas.lookup(client, hiveConf,
        hiveTableDesc);
    chooseRowParser(schema);

    Stats stats = Stats.create(hiveStats);
    Context context = new Context(hapi, hiveConf, schema, hiveStats, stats);
View Full Code Here

  @Test
  public void testCheckEdge() throws Exception {
    HiveToEdge hiveToEdge = new HiveIntNullEdge();
    HiveInputDescription inputDesc = new HiveInputDescription();
    HiveTableSchema schema = TestSchema.builder()
        .addColumn("foo", HiveType.INT)
        .addColumn("bar", HiveType.INT)
        .build();
    hiveToEdge.checkInput(inputDesc, schema);
View Full Code Here

  @Test
  public void testCheckVertex() throws Exception {
    HiveToVertex hiveToVertex = new HiveIntNullNullVertex();
    HiveInputDescription inputDesc = new HiveInputDescription();
    HiveTableSchema schema = TestSchema.builder()
        .addColumn("foo", HiveType.INT)
        .addColumn("bar", HiveType.LIST)
        .build();
    hiveToVertex.checkInput(inputDesc, schema);
View Full Code Here

  @Test
  public void testCheck() throws Exception {
    VertexToHive vertexToHive = new HiveOutputIntIntVertex();
    HiveOutputDescription outputDesc = new HiveOutputDescription();
    HiveTableSchema schema = TestSchema.builder()
        .addColumn("foo", HiveType.LONG)
        .addColumn("bar", HiveType.LONG)
        .build();
    vertexToHive.checkOutput(outputDesc, schema, newWritableRecord(schema));
View Full Code Here

    } catch (Exception e) {
      // CHECKSTYLE: resume IllegalCatch
      throw new IOException(e);
    }

    final HiveTableSchema tableSchema = HiveTableSchemaImpl.fromTable(conf, table);
    HiveTableSchemas.put(conf, myProfileId, tableSchema);

    List<InputPartition> partitions = computePartitions(inputDesc, client, table);

    List<InputSplit> splits = computeSplits(conf, inputDesc, tableSchema, partitions);
View Full Code Here

TOP

Related Classes of com.facebook.hiveio.schema.HiveTableSchema

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.