    return lastConf;
}

public RecordReader<Text, Tuple> makeReader(JobConf job) throws IOException {
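    // Hold on to this job's configuration; the accessor above returns it.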
    lastConf = job;
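    // Build a DataStorage view of the file system from the job's properties.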
    DataStorage store = new HDataStorage(ConfigurationUtil.toProperties(job));
    // if the execution is against Mapred DFS, set
    // working dir to /user/<userid>
    if (execType == ExecType.MAPREDUCE) {
        store.setActiveContainer(store.asContainer("/user/" + job.getUser()));
    }
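    // Restore the UDF package import list serialized into the job configuration.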
    PigContext.setPackageImportList(
            (ArrayList<String>) ObjectSerializer.deserialize(job.get("udf.import.list")));
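    // Initialize the wrapped slice with the data store.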
    wrapped.init(store);
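    // Make the serialized target operators available to the map tasks via the job.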
    job.set("map.target.ops", ObjectSerializer.serialize(targetOps));
    // Mimic org.apache.hadoop.mapred.FileSplit if feasible...