  errorMessage = null;
  SQLState = null;
  downstreamError = null;
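
  // Bail out early if conflicting configuration variables are set; 12 is the
  // return code this method uses for internal errors.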
  if (!validateConfVariables()) {
    return new CommandProcessorResponse(12, errorMessage, SQLState);
  }
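
  // A single hook context is shared by the pre- and post-driver-run hooks below.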
  HiveDriverRunHookContext hookContext = new HiveDriverRunHookContextImpl(conf, command);

  // Get all the driver run hooks and pre-execute them.
  List<HiveDriverRunHook> driverRunHooks;
  try {
    driverRunHooks = getHooks(HiveConf.ConfVars.HIVE_DRIVER_RUN_HOOKS,
        HiveDriverRunHook.class);
    for (HiveDriverRunHook driverRunHook : driverRunHooks) {
      driverRunHook.preDriverRun(hookContext);
    }
  } catch (Exception e) {
    errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
    SQLState = ErrorMsg.findSQLState(e.getMessage());
    downstreamError = e;
    console.printError(errorMessage + "\n"
        + org.apache.hadoop.util.StringUtils.stringifyException(e));
    return new CommandProcessorResponse(12, errorMessage, SQLState);
  }

  // Reset the perf logger (the 'true' argument discards timings from any
  // earlier command on this Driver), then start the run and submit timers.
  PerfLogger perfLogger = PerfLogger.getPerfLogger(true);
  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_RUN);
  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.TIME_TO_SUBMIT);
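
  // Track the run's return code, whether any task needs a lock, and whether
  // concurrency support (hive.support.concurrency) is enabled.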
  int ret;
  boolean requireLock = false;
  boolean ckLock = false;
  try {
    ckLock = checkConcurrency();
    createTxnManager();
  } catch (SemanticException e) {
    errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
    SQLState = ErrorMsg.findSQLState(e.getMessage());
    downstreamError = e;
    console.printError(errorMessage, "\n"
        + org.apache.hadoop.util.StringUtils.stringifyException(e));
    ret = 10;
    return new CommandProcessorResponse(ret, errorMessage, SQLState);
  }
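
  // Record the set of transactions that are valid at this point so the query
  // reads a consistent snapshot.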
  ret = recordValidTxns();
  if (ret != 0) {
    return new CommandProcessorResponse(ret, errorMessage, SQLState);
  }
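
  // Compile the command into a query plan unless the caller compiled it already.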
  if (!alreadyCompiled) {
    ret = compileInternal(command);
    if (ret != 0) {
      return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }
  }

  // Each query has its own ctx object, but the txn manager is shared across
  // all queries run by this Driver instance, so attach it to the ctx here.
  ctx.setHiveTxnManager(txnMgr);
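
  // Decide whether this query needs locks. When HIVE_LOCK_MAPRED_ONLY is set,
  // locks are taken only if some task in the plan requires them (e.g. launches
  // a MapReduce job); otherwise any query takes locks while concurrency is on.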
  if (ckLock) {
    boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
    if (lockOnlyMapred) {
      // Breadth-first walk of the task DAG, stopping at the first task that
      // requires a lock.
      Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
      taskQueue.addAll(plan.getRootTasks());
      while (taskQueue.peek() != null) {
        Task<? extends Serializable> tsk = taskQueue.remove();
        requireLock = requireLock || tsk.requireLock();
        if (requireLock) {
          break;
        }
        if (tsk instanceof ConditionalTask) {
          taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
        }
        if (tsk.getChildTasks() != null) {
          taskQueue.addAll(tsk.getChildTasks());
        }
        // Backup tasks are not added here because a backup task has the same
        // type, and therefore the same lock requirement, as its original task.
      }
    } else {
      requireLock = true;
    }
  }
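
  // Acquire read/write locks before executing; if acquisition fails, release
  // any locks that were partially taken and give up.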
  if (requireLock) {
    ret = acquireReadWriteLocks();
    if (ret != 0) {
      try {
        releaseLocks(ctx.getHiveLocks());
      } catch (LockException e) {
        // Not much to do here
      }
      return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }
  }
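
  // Run the compiled plan.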
  ret = execute();
  if (ret != 0) {
    // If requireLock is false, this release is a no-op because no locks were taken.
    try {
      releaseLocks(ctx.getHiveLocks());
    } catch (LockException e) {
      // Nothing to do here
    }
    return new CommandProcessorResponse(ret, errorMessage, SQLState);
  }

  // If requireLock is false, this release is a no-op because no locks were taken.
  try {
    releaseLocks(ctx.getHiveLocks());
  } catch (LockException e) {
    errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
    SQLState = ErrorMsg.findSQLState(e.getMessage());
    downstreamError = e;
    console.printError(errorMessage + "\n"
        + org.apache.hadoop.util.StringUtils.stringifyException(e));
    return new CommandProcessorResponse(12, errorMessage, SQLState);
  }
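
  // End the overall run timer and close out perf logging for this plan.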
  perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DRIVER_RUN);
  perfLogger.close(LOG, plan);

  // Run the driver run hooks' post-execute callbacks.
  try {
    for (HiveDriverRunHook driverRunHook : driverRunHooks) {
      driverRunHook.postDriverRun(hookContext);
    }
  } catch (Exception e) {
    errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
    SQLState = ErrorMsg.findSQLState(e.getMessage());
    downstreamError = e;
    console.printError(errorMessage + "\n"
        + org.apache.hadoop.util.StringUtils.stringifyException(e));
    return new CommandProcessorResponse(12, errorMessage, SQLState);
  }
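
  // All steps succeeded, so ret is 0 here; report success with no error state.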
  return new CommandProcessorResponse(ret);
}