// input location into the configuration (for example,
// FileInputFormat stores this in mapred.input.dir in the conf),
// then for different inputs, the loader's don't end up
// over-writing the same conf.
FuncSpec loadFuncSpec = inputs.get(i).getFuncSpec();
// Instantiate the loader for this input from its FuncSpec.
LoadFunc loadFunc = (LoadFunc) PigContext.instantiateFuncFromSpec(
loadFuncSpec);
// Splits are combinable unless the loader is one of the types that
// depends on split identity/ordering: a merge-join indexer, an
// indexable loader, or a loader that is both collectable and ordered.
boolean combinable = !(loadFunc instanceof MergeJoinIndexer
|| loadFunc instanceof IndexableLoadFunc
|| (loadFunc instanceof CollectableLoadFunc && loadFunc instanceof OrderedLoadFunc));
// The "pig.noSplitCombination" property can globally disable split
// combination even for otherwise-combinable loaders.
if (combinable)
combinable = !conf.getBoolean("pig.noSplitCombination", false);
// NOTE(review): `combinable` is not consumed within this chunk —
// presumably used further down when building the Pig splits; verify.
// Clone the conf so this input's setLocation() writes stay isolated
// from other inputs (see comment at the top of this block).
JobConf confClone = new JobConf(conf);
Job inputSpecificJob = new Job(confClone);
// Pass loader signature to LoadFunc and to InputFormat through
// the conf
passLoadSignature(loadFunc, i, inputSpecificJob.getConfiguration());
loadFunc.setLocation(inputs.get(i).getFileName(),
inputSpecificJob);
// The above setLocation call could write to the conf within
// the inputSpecificJob - use this updated conf
// get the InputFormat from it and ask for splits
InputFormat inpFormat = loadFunc.getInputFormat();
// Ask the underlying InputFormat for this input's raw splits, using a
// job context built on the per-input (post-setLocation) configuration.
List<InputSplit> oneInputSplits = inpFormat.getSplits(
HadoopShims.createJobContext(inputSpecificJob.getConfiguration(),
jobcontext.getJobID()));
// Wrap the raw splits into Pig-specific splits for input index i.
// (Call continues past the end of this chunk.)
List<InputSplit> oneInputPigSplits = getPigSplits(
oneInputSplits, i, inpTargets.get(i),