// Rehydrate front-end state that was serialized into the job configuration:
// the list of input file specs, the per-input target operator keys, the
// PigContext, and the UDF package import list.
// NOTE(review): fragment starts mid-method; "jobcontext" is presumably a
// Hadoop JobContext parameter of the enclosing method — confirm in full file.
Configuration conf = jobcontext.getConfiguration();
ArrayList<FileSpec> inputs;
ArrayList<ArrayList<OperatorKey>> inpTargets;
PigContext pigContext;
try {
// Unchecked casts: these values were written by the launcher via
// ObjectSerializer, so the runtime types are assumed to match the
// declared generic types.
inputs = (ArrayList<FileSpec>) ObjectSerializer
.deserialize(conf.get("pig.inputs"));
inpTargets = (ArrayList<ArrayList<OperatorKey>>) ObjectSerializer
.deserialize(conf.get("pig.inpTargets"));
pigContext = (PigContext) ObjectSerializer.deserialize(conf
.get("pig.pigContext"));
// Restore the UDF import search path so later UDF class resolution works.
PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(conf.get("udf.import.list")));
} catch (Exception e) {
// Any failure here means the serialized job state is corrupt or missing,
// which is an internal (BUG-class) error, not a user error.
int errCode = 2094;
String msg = "Unable to deserialize object.";
throw new ExecException(msg, errCode, PigException.BUG, e);
}
ArrayList<InputSplit> splits = new ArrayList<InputSplit>();
// One pass per input: resolve its FileSystem, then (below, beyond this
// fragment) hand the location to the loader to produce splits.
for (int i = 0; i < inputs.size(); i++) {
try {
Path path = new Path(inputs.get(i).getFileName());
FileSystem fs;
try {
fs = path.getFileSystem(conf);
} catch (Exception e) {
// If an application specific
// scheme was used
// (e.g.: "hbase://table") we will fail
// getting the file system. That's
// ok, we just use the dfs in that case.
// NOTE(review): broad catch is deliberate best-effort fallback;
// the root path "/" resolves to the default (distributed) FS.
fs = new Path("/").getFileSystem(conf);
}
// if the execution is against Mapred DFS, set
// working dir to /user/<userid>
if(pigContext.getExecType() == ExecType.MAPREDUCE) {
fs.setWorkingDirectory(jobcontext.getWorkingDirectory());
}
// first pass input location to the loader - for this send a
// clone of the configuration we have - this is so that if the