public void store(DataBag data, FuncSpec storeFuncSpec, PigContext pigContext) throws IOException {
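    // Convert Pig's session properties into a Hadoop Configuration so the
    // OutputFormat machinery below can consume them.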
    Configuration conf = ConfigurationUtil.toConfiguration(pigContext.getProperties());
    // create a simulated JobContext
    JobContext jc = HadoopShims.createJobContext(conf, new JobID());
    StoreFuncInterface sfunc =
            (StoreFuncInterface) PigContext.instantiateFuncFromSpec(storeFuncSpec);
    OutputFormat<?, ?> of = sfunc.getOutputFormat();
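    // Wrap the target location in a POStore so PigOutputFormat.setLocation can
    // hand it to the StoreFunc's setStoreLocation; 'file' is presumably the
    // enclosing class's field holding the output path.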
    POStore store = new POStore(new OperatorKey());
    store.setSFile(new FileSpec(file, storeFuncSpec));
    PigOutputFormat.setLocation(jc, store);
    OutputCommitter oc;
    // create a simulated TaskAttemptContext
    TaskAttemptContext tac = HadoopShims.createTaskAttemptContext(conf, new TaskAttemptID());
    PigOutputFormat.setLocation(tac, store);
    RecordWriter<?, ?> rw;
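    // Drive the OutputFormat lifecycle by hand, the way a MapReduce task
    // would: validate the output spec, set up the job and task, then stream
    // each tuple through the StoreFunc into the RecordWriter.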
    try {
        of.checkOutputSpecs(jc);
        oc = of.getOutputCommitter(tac);
        oc.setupJob(jc);
        oc.setupTask(tac);
        rw = of.getRecordWriter(tac);
        sfunc.prepareToWrite(rw);
        for (Iterator<Tuple> it = data.iterator(); it.hasNext();) {
            Tuple row = it.next();
            sfunc.putNext(row);
        }
        rw.close(tac);
    } catch (InterruptedException e) {
        // The Hadoop lifecycle calls declare InterruptedException; rethrow it
        // as the IOException this method is declared to throw.
        throw new IOException(e);
    }
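    // Finalize the output: a minimal sketch assuming the standard Hadoop
    // OutputCommitter contract. Without a task and job commit, committers that
    // stage records under a temporary attempt directory (e.g. FileOutputCommitter)
    // never promote them to the final output location.
    if (oc.needsTaskCommit(tac)) {
        oc.commitTask(tac);
    }
    oc.commitJob(jc);
}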