Package com.facebook.hiveio.input

Examples of com.facebook.hiveio.input.HiveInputDescription

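HiveInputDescription describes a read from a Hive table: the database and table names, an optional partition filter, how many input splits to request, and where the metastore lives. The snippets below, drawn from the hive-io benchmark, tailer, and test code, show the usual ways it is configured.

For orientation, a minimal read might look like the following sketch. It is not taken from the snippets on this page: it assumes the HiveInput.readTable entry point and the HiveReadableRecord type from this library, the table and partition values are placeholders, and exception handling is omitted.

    HiveInputDescription inputDesc = new HiveInputDescription();
    inputDesc.setDbName("default");                    // placeholder database
    inputDesc.setTableName("sample_table");            // placeholder table
    inputDesc.setPartitionFilter("ds='2013-01-01'");   // optional; placeholder partition filter
    // Iterate over every record matching the description (HiveInput.readTable is assumed here)
    for (HiveReadableRecord record : HiveInput.readTable(inputDesc)) {
      System.out.println(record.get(0));
    }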

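From the InputBenchmark tool: load Hadoop's native libraries, start a Yammer Metrics timer around the whole run, fill the input description from command-line table options, and open a Thrift metastore client with an explicit host and port.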
    HadoopNative.requireHadoopNative();

    Timer allTime = Metrics.newTimer(InputBenchmark.class, "all-time", MILLISECONDS, MILLISECONDS);
    TimerContext allTimerContext = allTime.time();

    HiveInputDescription input = new HiveInputDescription();
    input.setDbName(args.tableOpts.database);
    input.setTableName(args.tableOpts.table);
    input.setPartitionFilter(args.tableOpts.partitionFilter);

    HiveConf hiveConf = new HiveConf(InputBenchmark.class);
    ThriftHiveMetastore.Iface client = HiveMetastores.create(args.metastoreOpts.hiveHost, args.metastoreOpts.hivePort);

    System.err.println("Initialize profile with input data");


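From the Tailer: build the input description from parsed options, default the requested number of splits to threads * splits-per-thread when none is given, and log the table's HiveStats obtained through the metastore client.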
    LOG.info("Creating Hive client for Metastore at {}", metastoreHostPort);
    ThriftHiveMetastore.Iface client = HiveMetastores.create(metastoreHostPort.host,
        metastoreHostPort.port);

    HiveInputDescription inputDesc = new HiveInputDescription();
    inputDesc.setDbName(opts.tableOpts.database);
    inputDesc.setTableName(opts.tableOpts.table);
    inputDesc.setPartitionFilter(opts.tableOpts.partitionFilter);
    if (opts.requestNumSplits == 0) {
      opts.requestNumSplits = opts.threads * opts.requestSplitsPerThread;
    }
    inputDesc.setNumSplits(opts.requestNumSplits);

    HiveStats hiveStats = HiveUtils.statsOf(client, inputDesc);
    LOG.info("{}", hiveStats);

    HiveConf hiveConf = new HiveConf(Tailer.class);

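A variant of the benchmark setup: the metastore host and port are set on the description's own metastore descriptor instead of on a separate client, the HiveConf comes from HiveUtils.newHiveConf, and the description is registered with HiveApiInputFormat under a profile ID.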
    HadoopNative.requireHadoopNative();

    Timer allTime = Metrics.newTimer(InputBenchmark.class, "all-time", MILLISECONDS, MILLISECONDS);
    TimerContext allTimerContext = allTime.time();

    HiveInputDescription input = new HiveInputDescription();
    input.setDbName(args.tableOpts.database);
    input.setTableName(args.tableOpts.table);
    input.setPartitionFilter(args.tableOpts.partitionFilter);
    input.getMetastoreDesc().setHost(args.metastoreOpts.host);
    input.getMetastoreDesc().setPort(args.metastoreOpts.port);

    HiveConf hiveConf = HiveUtils.newHiveConf(InputBenchmark.class);

    System.err.println("Initialize profile with input data");
    HiveApiInputFormat.setProfileInputDesc(hiveConf, input, DEFAULT_PROFILE_ID);

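A variant of the tailer (TailerCmd) that combines both approaches: a Thrift client is used to fetch HiveStats, while the metastore host and port are also recorded on the input description itself.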
    LOG.info("Creating Hive client for Metastore at {}", metastoreHostPort);
    ThriftHiveMetastore.Iface client = HiveMetastores.create(
        metastoreHostPort.host, metastoreHostPort.port);

    HiveInputDescription inputDesc = new HiveInputDescription();
    inputDesc.setDbName(args.inputTable.database);
    inputDesc.setTableName(args.inputTable.table);
    inputDesc.setPartitionFilter(args.inputTable.partitionFilter);
    if (args.requestNumSplits == 0) {
      args.requestNumSplits = args.multiThread.threads * args.requestSplitsPerThread;
    }
    inputDesc.setNumSplits(args.requestNumSplits);
    inputDesc.getMetastoreDesc().setHost(metastoreHostPort.host);
    inputDesc.getMetastoreDesc().setPort(metastoreHostPort.port);

    HiveStats hiveStats = HiveUtils.statsOf(client, inputDesc);
    LOG.info("{}", hiveStats);

    HiveConf hiveConf = HiveUtils.newHiveConf(TailerCmd.class);

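From the tests: after writing data to the table, read it back by pointing an input description at the same HiveTableDesc and verifying the result.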
    HiveTableSchema schema = HiveTableSchemas.lookup(hiveServer.getClient(),
        null, hiveTableDesc);

    writeData(outputDesc, schema);

    HiveInputDescription inputDesc = new HiveInputDescription();
    inputDesc.setTableDesc(hiveTableDesc);

    verifyData(inputDesc);
  }

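The same write-then-verify round trip, but with a partition filter so only rows in the ds='foobar' partition are read back.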
    HiveTableSchema schema = HiveTableSchemas.lookup(hiveServer.getClient(),
        null, hiveTableDesc);

    writeData(outputDesc, schema);

    HiveInputDescription inputDesc = new HiveInputDescription();
    inputDesc.setPartitionFilter("ds='foobar'");
    inputDesc.setTableDesc(hiveTableDesc);

    verifyData(inputDesc);
  }

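Test setup that creates a test table, builds output and input descriptions from the same HiveTableDesc, looks up the table schema, and prepares a list of writable records.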
    createTestTable();

    HiveOutputDescription outputDesc = new HiveOutputDescription();
    outputDesc.setTableDesc(hiveTableDesc);

    HiveInputDescription inputDesc = new HiveInputDescription();
    inputDesc.setTableDesc(hiveTableDesc);

    HiveTableSchema schema = HiveTableSchemas.lookup(hiveServer.getClient(),
        null, hiveTableDesc);

    List<HiveWritableRecord> writeRecords = Lists.newArrayList();
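The snippet ends where the records are about to be filled in. A plausible completion of the round trip is sketched below; HiveRecordFactory.newWritableRecord, HiveOutput.writeTable, and HiveInput.readTable are assumed APIs of this library, the column indices and values are made up, and exception handling is omitted.

    // Build one record matching the table schema and fill in made-up values (assumed factory API)
    HiveWritableRecord record = HiveRecordFactory.newWritableRecord(schema);
    record.set(0, 1L);
    record.set(1, 2.2);
    writeRecords.add(record);

    // Write the records through the output description, then read them back through the input description
    HiveOutput.writeTable(outputDesc, writeRecords);
    for (HiveReadableRecord readRecord : HiveInput.readTable(inputDesc)) {
      System.out.println(readRecord.get(0) + " " + readRecord.get(1));
    }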
