throws HadoopException {
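  // Fail fast if the caller passed an empty user name or a null configuration/executor.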
  Check.notEmpty(user, "user");
  Check.notNull(conf, "conf");
  Check.notNull(executor, "executor");
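  // The JobTracker and NameNode addresses must be present and non-blank in the configuration (error H06 otherwise).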
  if (conf.get(JOB_TRACKER_PROPERTY) == null || conf.getTrimmed(JOB_TRACKER_PROPERTY).length() == 0) {
    throw new HadoopException(HadoopException.ERROR.H06, JOB_TRACKER_PROPERTY);
  }
  if (conf.get(NAME_NODE_PROPERTY) == null || conf.getTrimmed(NAME_NODE_PROPERTY).length() == 0) {
    throw new HadoopException(HadoopException.ERROR.H06, NAME_NODE_PROPERTY);
  }
  try {
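    // Validate the authority (host:port) portion of the configured JobTracker and NameNode URIs.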
    validateJobtracker(new URI(conf.get(JOB_TRACKER_PROPERTY)).getAuthority());
    validateNamenode(new URI(conf.get(NAME_NODE_PROPERTY)).getAuthority());
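    // Run the executor with the privileges of the requested user.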
    UserGroupInformation ugi = getUGI(user);
    return ugi.doAs(new PrivilegedExceptionAction<T>() {
      public T run() throws Exception {
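        // Derive per-service configurations and a JobClient from the request configuration.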
        JobConf jobtrackerConf = createJobTrackerConf(conf);
        Configuration namenodeConf = createNameNodeConf(conf);
        JobClient jobClient = createJobClient(jobtrackerConf);
        try {
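          // Verify the JobTracker is healthy before opening the FileSystem.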
          checkJobTrackerHealth(jobClient);
          FileSystem fs = createFileSystem(namenodeConf);
          Instrumentation instrumentation = getServer().get(Instrumentation.class);
          Instrumentation.Cron cron = instrumentation.createCron();
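          // The cron is started only after the NameNode health check, so it times just the executor call.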
          try {
            checkNameNodeHealth(fs);
            cron.start();
            return executor.execute(jobClient, fs);
          }
          finally {
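            // Record the timing sample (keyed by the executor's class name) and release the FileSystem, even on failure.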
            cron.stop();
            instrumentation.addCron(INSTRUMENTATION_GROUP, executor.getClass().getSimpleName(), cron);
            closeFileSystem(fs);
          }
        }
        finally {
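          // The JobClient is closed regardless of the outcome.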
          closeJobClient(jobClient);
        }
      }
    });
  }
  catch (HadoopException ex) {
    // HadoopExceptions already carry a specific error code; rethrow them unchanged.
    throw ex;
  }
  catch (Exception ex) {
    // Anything else is unexpected here and is wrapped as an H04 error.
    throw new HadoopException(HadoopException.ERROR.H04, ex);
  }
}