final List<ClusterCriteria> clusterCriterias = new ArrayList<>();
final Set<String> commandCriteria = new HashSet<>();
clusterCriterias.add(criteria);
commandCriteria.add("hive");
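// Construct the job request with the command-line args plus the cluster and command criteria.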
Job job = new Job(
userName,
jobName,
"-f hive.q",
commandCriteria,
clusterCriterias,
null);
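// Optional free-form description for the job.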
job.setDescription("This is a test");
// Add some tags as metadata about the job. This helps with reporting on and
// categorizing jobs.
final Set<String> jobTags = new HashSet<>();
jobTags.add("testgenie");
jobTags.add("sample");
job.setTags(jobTags);
// send the query as an attachment
final File query = File.createTempFile("hive", ".q");
try (PrintWriter pw = new PrintWriter(query, "UTF-8")) {
pw.println("select count(*) from counters where dateint=20120430 and hour=10;");
}
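// Wrap the query file in a FileAttachment so it ships with the job.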
final Set<FileAttachment> attachments = new HashSet<>();
final FileAttachment attachment = new FileAttachment();
attachment.setName("hive.q");
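// Read the query file contents into the attachment's data byte array.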
try (FileInputStream fin = new FileInputStream(query);
ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
final byte[] buf = new byte[4096];
int read;
while ((read = fin.read(buf)) != -1) {
bos.write(buf, 0, read);
}
attachment.setData(bos.toByteArray());
}
attachments.add(attachment);
job.setAttachments(attachments);
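// Submit the job to Genie; the returned Job carries server-populated fields such as the id and output URI.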
job = client.submitJob(job);
final String jobID = job.getId();
final String outputURI = job.getOutputURI();
LOG.info("Job ID: " + jobID);
LOG.info("Output URL: " + outputURI);
LOG.info("Getting jobInfo by jobID");
job = client.getJob(jobID);
LOG.info(job.toString());
LOG.info("Waiting for job to finish");
job = client.waitForCompletion(jobID, 600000, 5000);
LOG.info("Job status: " + job.getStatus());
LOG.info("Killing jobs using jobID");
final Job killedJob = client.killJob(jobID);
LOG.info("Job status: " + killedJob.getStatus());
LOG.info("Done");
}