return rp;
}
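/**
 * Updates an existing custom map/reduce job, looked up either by its ObjectId or by its title.
 * For the String parameters, passing null or the literal string "null" leaves the corresponding
 * field unchanged (the one exception is "json": omitting it clears any existing arguments).
 * The caller must be an admin or the job's original submitter, and the job must not currently be
 * running (unless the update is turning the schedule off, in which case an attempt is made to kill it).
 *
 * @param userid        id of the calling user
 * @param jobidortitle  ObjectId or title of the job to update
 * @param nextRunTime   next scheduled run time in ms since the epoch (values >= DONT_RUN_TIME mean "don't run")
 * @param schedFreq     schedule frequency, one of the SCHEDULE_FREQUENCY enum values (e.g. NONE/DAILY/WEEKLY/MONTHLY)
 * @param bQuickRun     passed through to runJobAndWaitForCompletion if the job is due to run immediately
 * @return a ResponsePojo indicating success or failure; on success the job id is returned in the data field
 */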
public ResponsePojo updateJob(String userid, String jobidortitle, String title, String desc, String communityIds, String jarURL, String nextRunTime, String schedFreq, String mapperClass, String reducerClass, String combinerClass, String query, String inputColl, String outputKey, String outputValue, String appendResults, String ageOutInDays, Boolean incrementalMode, String jobsToDependOn, String json, Boolean exportToHdfs, boolean bQuickRun, Boolean selfMerge)
{
ResponsePojo rp = new ResponsePojo();
//first make sure the job exists and the user is allowed to edit it
List<Object> searchTerms = new ArrayList<Object>();
try
{
ObjectId jid = new ObjectId(jobidortitle);
searchTerms.add(new BasicDBObject(CustomMapReduceJobPojo._id_,jid));
}
catch (Exception ex)
{
//not a valid ObjectId, will only search by title
}
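//always search by title as well, in case a title rather than an id was supplied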
searchTerms.add(new BasicDBObject(CustomMapReduceJobPojo.jobtitle_,jobidortitle));
DBObject dbo = DbManager.getCustom().getLookup().findOne(new BasicDBObject(DbManager.or_,searchTerms.toArray()));
if ( dbo != null )
{
CustomMapReduceJobPojo cmr = CustomMapReduceJobPojo.fromDb(dbo, CustomMapReduceJobPojo.class);
//verify user can update this job
if ( RESTTools.adminLookup(userid) || cmr.submitterID.toString().equals(userid) )
{
//check if job is already running
if ( ( cmr.jobidS != null ) && !cmr.jobidS.equals( "CHECKING_COMPLETION" ) && !cmr.jobidS.equals( "" ) ) // (< robustness, sometimes server gets stuck here...)
{
// If it is running and we're trying to turn it off .. .then kill the job:
com.ikanow.infinit.e.processing.custom.utils.PropertiesManager customProps = new com.ikanow.infinit.e.processing.custom.utils.PropertiesManager();
boolean bLocalMode = customProps.getHadoopLocalMode();
boolean tryToKillJob = false;
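// Work out whether this update is trying to disable a currently-running job, in which case we kill it below rather than rejecting the update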
if (!bLocalMode) { // else not possible
// This line means: either we're NONE already (and it hasn't changed), or we've been changed to NONE
if ((((null == schedFreq) || (schedFreq.equalsIgnoreCase("null")))
&& (CustomMapReduceJobPojo.SCHEDULE_FREQUENCY.NONE == cmr.scheduleFreq))
||
((null != schedFreq) && (schedFreq.equalsIgnoreCase("none"))))
{
long candidateNextRuntime = 0L;
try {
candidateNextRuntime = Long.parseLong(nextRunTime);
}
catch (Exception e) {}
if (candidateNextRuntime >= DONT_RUN_TIME) {
tryToKillJob = true;
}
}
}//TESTED - (don't run/daily/once-only) - covers all the cases, except the "theoretical" null cases
if (tryToKillJob) {
// (i.e. the job is running and the update means "don't run anymore" .. that 4e12 number (DONT_RUN_TIME) is the year 2099 in ms, anything bigger than that is assumed to mean "don't run")
CustomProcessingController pxController = new CustomProcessingController();
if (pxController.killRunningJob(cmr)) {
rp.setResponse(new ResponseObject("Update MapReduce Job",true,"Killed job, may take a few moments for the status to update."));
}
else {
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Failed to kill the job - it may not have started yet, try again in a few moments."));
}
return rp;
}//TODO (INF-2395): TOTEST
else {
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Job is currently running (or not yet marked as completed). Please wait until the job completes to update it."));
return rp;
}
}
if (cmr.jobidS != null) { // (must be checking completion, ie in bug state, so reset...)
cmr.jobidS = null;
cmr.jobidN = 0;
}
//check each parameter to see if it needs to/can be updated
if ( (null != communityIds) && !communityIds.equals("null") )
{
List<ObjectId> commids = new ArrayList<ObjectId>();
for ( String s : communityIds.split(","))
commids.add(new ObjectId(s));
boolean bAdmin = RESTTools.adminLookup(userid);
//make sure user is allowed to submit on behalf of the commids given
if ( bAdmin || isInAllCommunities(commids, userid) )
{
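//if the job's output is being indexed, re-point the index aliases at the new set of communities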
ElasticSearchManager customIndex = CustomOutputIndexingEngine.getExistingIndex(cmr);
if (null != customIndex) {
CustomOutputIndexingEngine.swapAliases(customIndex, commids, true);
}//TESTED (by hand - removal and deletion)
cmr.communityIds = commids;
}
else
{
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"You do have permissions for all the communities given."));
return rp;
}
}
if ( (null != inputColl) && !inputColl.equals("null"))
{
//make sure user can use the input collection
String inputCollection = getStandardInputCollection(inputColl);
if ( inputCollection != null )
{
cmr.isCustomTable = false;
}
else
{
inputCollection = getCustomInputCollection(inputColl, cmr.communityIds);
cmr.isCustomTable = true;
}
if ( inputCollection != null)
{
cmr.inputCollection = inputCollection;
}
else
{
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"You do not have permission to use the given input collection."));
return rp;
}
}
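//update the remaining fields, wrapped in a try so that parse/enum errors below are reported back cleanly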
try
{
if ( (null != title) && !title.equals("null"))
{
// If this is indexed then can't change the title
if (null != CustomOutputIndexingEngine.getExistingIndex(cmr)) {
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"You cannot change the title of a non-empty indexed job - you can turn indexing off and then change the title"));
return rp;
}//TESTED (by hand)
cmr.jobtitle = title;
//make sure the new title hasn't been used before
DBObject dbo1 = DbManager.getCustom().getLookup().findOne(new BasicDBObject(CustomMapReduceJobPojo.jobtitle_,title));
if ( dbo1 != null )
{
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"A job already matches that title, please choose another title"));
return rp;
}
}
if ( (null != desc) && !desc.equals("null"))
{
cmr.jobdesc = desc;
}
if ( (null != jarURL) && !jarURL.equals("null"))
{
cmr.jarURL = jarURL;
}
if ( (null != nextRunTime) && !nextRunTime.equals("null"))
{
cmr.nextRunTime = Long.parseLong(nextRunTime);
long nNow = new Date().getTime();
cmr.firstSchedule = new Date(cmr.nextRunTime);
if (cmr.nextRunTime < nNow) { // i.e. leave firstSchedule alone since that affects when we next run, but just set nextRunTime to now...
cmr.nextRunTime = nNow - 1;
}//TESTED
cmr.timesRan = 0;
cmr.timesFailed = 0;
}
if ( (null != schedFreq) && !schedFreq.equals("null"))
{
cmr.scheduleFreq = SCHEDULE_FREQUENCY.valueOf(schedFreq);
}
if ( (null != mapperClass) && !mapperClass.equals("null"))
{
cmr.mapper = mapperClass;
}
if ( (null != reducerClass) && !reducerClass.equals("null"))
{
cmr.reducer = reducerClass;
}
if ( (null != combinerClass) && !combinerClass.equals("null"))
{
cmr.combiner = combinerClass;
}
if ( (null != query) && !query.equals("null"))
{
boolean wasIndexed = CustomOutputIndexingEngine.isIndexed(cmr);
if ( !query.isEmpty() )
cmr.query = query;
else
cmr.query = "{}";
// If we're in indexing mode, check if the index has been turned off, in which case delete the index
if (wasIndexed && !CustomOutputIndexingEngine.isIndexed(cmr)) {
CustomOutputIndexingEngine.deleteOutput(cmr);
}//TESTED (by hand)
}
if (null == cmr.jarURL) { // (if in savedQuery mode, force types to be Text/BSONWritable)
// Force the types:
outputKey = "org.apache.hadoop.io.Text";
outputValue = "com.mongodb.hadoop.io.BSONWritable";
}
if ( (null != outputKey) && !outputKey.equals("null"))
{
cmr.outputKey = outputKey;
}
if ( (null != outputValue) && !outputValue.equals("null"))
{
cmr.outputValue = outputValue;
}
if ( (null != appendResults) && !appendResults.equals("null"))
{
try
{
cmr.appendResults = Boolean.parseBoolean(appendResults);
}
catch (Exception ex)
{
cmr.appendResults = false;
}
}
if ( (null != ageOutInDays) && !ageOutInDays.equals("null"))
{
try
{
cmr.appendAgeOutInDays = Double.parseDouble(ageOutInDays);
}
catch (Exception ex)
{
cmr.appendAgeOutInDays = 0.0;
}
}
if (null != incrementalMode)
{
cmr.incrementalMode = incrementalMode;
}
if (null != selfMerge)
{
cmr.selfMerge = selfMerge;
}
if (null != exportToHdfs) {
cmr.exportToHdfs = exportToHdfs;
}
//try to work out dependencies, error out if they fail
if ( (null != jobsToDependOn) && !jobsToDependOn.equals("null"))
{
try
{
cmr.jobDependencies = getJobDependencies(jobsToDependOn);
cmr.waitingOn = cmr.jobDependencies;
}
catch (Exception ex)
{
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Error parsing the job dependencies, did a title or id get set incorrectly or did a job not exist?"));
return rp;
}
}
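//update the job arguments (note: passing no json, or the string "null", clears any existing arguments)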
if ( json != null && !json.equals("null"))
{
cmr.arguments = json;
}
else
{
cmr.arguments = null;
}
if ((null == cmr.jarURL) && (null != cmr.arguments) && !cmr.arguments.isEmpty()) {
// In saved-query mode, if the arguments parse as a JSON object then copy them over the query
try {
Object tmpQuery = com.mongodb.util.JSON.parse(cmr.arguments);
if (tmpQuery instanceof BasicDBObject) {
cmr.query = cmr.arguments;
}
}
catch (Exception e) {} // fine just carry on
}
else if ((null == cmr.jarURL)) { // i.e. args == null/empty, copy from query
cmr.arguments = cmr.query;
}
}
catch (IllegalArgumentException e)
{
// If an exception occurs log the error
logger.error("Exception Message: " + e.getMessage(), e);
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Illegal arg (enum needs to be DAILY/WEEKLY/MONTHLY/NONE?): " + e.getMessage()));
return rp;
}
catch (Exception e)
{
// If an exception occurs log the error
logger.error("Exception Message: " + e.getMessage(), e);
rp.setResponse(new ResponseObject("Update MapReduce Job",false,"error scheduling job: " + e.getMessage()));
return rp;
}
// Setup post-processing
String nextRunString = new Date(cmr.nextRunTime).toString();
boolean bRunNowIfPossible = false;
if ( cmr.nextRunTime < new Date().getTime() ) {
nextRunString = "next available timeslot";
bRunNowIfPossible = true;
}
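// a next-run time in the past means "run at the next available timeslot", which also triggers an immediate submission below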
rp.setResponse(new ResponseObject("Update MapReduce Job",true,"Job updated successfully, will run on: " + nextRunString));
rp.setData(cmr._id.toString(), null);
if (bRunNowIfPossible) {
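// (assumption: runJobAndWaitForCompletion persists the updated job itself, hence no explicit save on this path)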
runJobAndWaitForCompletion(cmr, bQuickRun);
}//TESTED
else {
DbManager.getCustom().getLookup().save(cmr.toDb());
}
}
else
{
rp.setResponse(new ResponseObject("Update MapReduce Job", false, "You do not have permission to submit this job"));
}
}
else
{
rp.setResponse(new ResponseObject("Update MapReduce Job", false, "No jobs with this ID exist"));
}
return rp;
}