        // Restore each property saved in the UDF context back into the
        // job configuration.
        PigHCatUtil.getConfigFromUDFProperties(udfProps,
          job.getConfiguration(), emr.nextElement().toString());
      }
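      // Credentials are re-merged only on the frontend (i.e., when this is
      // not running from within a backend task).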
      if (!HCatUtil.checkJobContextIfRunningFromBackend(job)) {
        // Merge the cached credentials with the job's credentials; entries
        // from the job take precedence since they are fresher.
        Credentials crd = jobCredentials.get(INNER_SIGNATURE_PREFIX + "_" + signature);
        crd.addAll(job.getCredentials());
        job.getCredentials().addAll(crd);
      }
    } else {
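      // Snapshot the configuration before calling setInput so that the loop
      // below can capture exactly the properties setInput added or changed.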
      Job clone = new Job(job.getConfiguration());
      HCatInputFormat.setInput(job, dbName, tableName).setFilter(getPartitionFilterString());
      // Store all new or changed properties from the job in the UDF context
      // so that HCatInputFormat.setInput need not be called many times.
      for (Entry<String, String> keyValue : job.getConfiguration()) {
        String oldValue = clone.getConfiguration().getRaw(keyValue.getKey());
        if (oldValue == null || !keyValue.getValue().equals(oldValue)) {
          udfProps.put(keyValue.getKey(), keyValue.getValue());
        }
      }
      udfProps.put(HCatConstants.HCAT_PIG_LOADER_LOCATION_SET, true);
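      // This flag marks the location as set for this loader, presumably
      // checked at the top of this method so that later calls take the
      // restore-from-UDF-context branch above instead of re-running setInput.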
      // Store credentials in a private map rather than in the UDF context,
      // to make sure they are not exposed publicly.
      Credentials crd = new Credentials();
      crd.addAll(job.getCredentials());
      jobCredentials.put(INNER_SIGNATURE_PREFIX + "_" + signature, crd);
    }
    // Need to also push projections by calling setOutputSchema on
    // HCatInputFormat - we have to get the RequiredFields information