// simulate Pig front-end planning ('loader' and 'tempFilename' are assumed
// to have been set up earlier in the test)
final Job job = new Job();
loader.setUDFContextSignature("12345");
loader.setLocation(tempFilename, job);
// simulate Pig back-end runtime
final RecordReader<DataInputBuffer, DataInputBuffer> reader = new RawSequenceFileRecordReader();
final FileSplit fileSplit = new FileSplit(new Path(tempFilename), 0,
    new File(tempFilename).length(), new String[] { "localhost" });
final TaskAttemptContext context = HadoopCompat.newTaskAttemptContext(
    HadoopCompat.getConfiguration(job), new TaskAttemptID());
reader.initialize(fileSplit, context);
// wrap the Hadoop split in a PigSplit so it can be handed to the loader
final InputSplit[] wrappedSplits = new InputSplit[] { fileSplit };
final int inputIndex = 0;
final List<OperatorKey> targetOps = Arrays.asList(new OperatorKey("54321", 0));
final int splitIndex = 0;
final PigSplit split = new PigSplit(wrappedSplits, inputIndex, targetOps, splitIndex);
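// A sketch of the step that typically follows (not part of the original
// snippet): hand the reader and split to the loader and pull tuples through
// Pig's standard LoadFunc API. prepareToRead() and getNext() are LoadFunc
// methods; the loop body here is an illustrative assumption.
loader.prepareToRead(reader, split);
Tuple tuple;
while ((tuple = loader.getNext()) != null) {
  // inspect or assert on each tuple read back from the sequence file
}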