// Connect to the Hive metastore Thrift service at the configured host/port.
ThriftHiveMetastore.Iface client = HiveMetastores.create(args.metastoreOpts.hiveHost, args.metastoreOpts.hivePort);
System.err.println("Initialize profile with input data");
// Register the input description under the default profile so the input
// format knows which table/partitions to read when computing splits.
HiveApiInputFormat.setProfileInputDesc(hiveConf, input, HiveApiInputFormat.DEFAULT_PROFILE_ID);
HiveApiInputFormat defaultInputFormat = new HiveApiInputFormat();
if (args.trackMetrics) {
  // Optional per-record metrics, reported every recordPrintPeriod records.
  defaultInputFormat.setObserver(new MetricsObserver("default", args.recordPrintPeriod));
}
List<InputSplit> splits = defaultInputFormat.getSplits(hiveConf, client);
System.err.println("getSplits returned " + splits.size() + " splits");
// Read every split sequentially, counting the total rows parsed.
long numRows = 0;
for (int i = 0; i < splits.size(); ++i) {
  InputSplit split = splits.get(i);
  // Fresh (synthetic) task attempt per split; this tool runs outside a real
  // MapReduce job, so a default TaskAttemptID is sufficient.
  TaskAttemptID taskID = new TaskAttemptID();
  TaskAttemptContext taskContext = new TaskAttemptContext(hiveConf, taskID);
  if (i % args.splitPrintPeriod == 0) {
    System.err.println("Handling split " + i + " of " + splits.size());
  }
  // RecordReader is Closeable; try-with-resources guarantees the underlying
  // file handles are released per split, even if readFully() throws.
  // (Previously the reader was never closed, leaking a handle per split.)
  try (RecordReader<WritableComparable, HiveReadableRecord> reader =
      defaultInputFormat.createRecordReader(split, taskContext)) {
    reader.initialize(split, taskContext);
    numRows += readFully(reader);
  }
}
System.err.println("Parsed " + numRows + " rows");