this(null);
}
@Override
public List<Event> initialize() throws Exception {
// Keep a reference to the initializer context; its user payload carries the
// serialized MRInput configuration for this vertex's root input.
InputInitializerContext rootInputContext = getContext();
context = rootInputContext;
// Decode the MRInput payload, then inflate the embedded Configuration bytes.
MRInputUserPayloadProto userPayloadProto =
MRInputHelpers.parseMRInputPayload(rootInputContext.getInputUserPayload());
Configuration conf =
TezUtils.createConfFromByteString(userPayloadProto.getConfigurationBytes());
// Whether split events should be sent in serialized (payload) form.
// NOTE(review): read here but not used in the visible portion — presumably
// consumed further down when emitting events; confirm against the rest of
// the method.
boolean sendSerializedEvents =
conf.getBoolean("mapreduce.tez.input.initializer.serialize.event.payload", true);
// Read all credentials into the credentials instance stored in JobConf.
JobConf jobConf = new JobConf(conf);
ShimLoader.getHadoopShims().getMergedCredentials(jobConf);
// MapWork describes the map-side plan for this vertex (looked up via the conf).
MapWork work = Utilities.getMapWork(jobConf);
// perform dynamic partition pruning
// NOTE(review): pruner is a field not visible in this chunk; assumed to be
// initialized before initialize() runs — verify construction order.
pruner.prune(work, jobConf, context);
InputSplitInfoMem inputSplitInfo = null;
// Fully-qualified class name of the real (wrapped) InputFormat to instantiate.
String realInputFormatName = conf.get("mapred.input.format.class");
boolean groupingEnabled = userPayloadProto.getGroupingEnabled();
if (groupingEnabled) {
// Need to instantiate the realInputFormat
InputFormat<?, ?> inputFormat =
(InputFormat<?, ?>) ReflectionUtils.newInstance(Class.forName(realInputFormatName),
jobConf);
// Estimate cluster parallelism: how many tasks of this vertex's size fit
// into the total resource available to the DAG (integer division; assumes
// taskResource > 0 — a zero task resource would throw ArithmeticException).
int totalResource = rootInputContext.getTotalAvailableResource().getMemory();
int taskResource = rootInputContext.getVertexTaskResource().getMemory();
int availableSlots = totalResource / taskResource;
// Create the un-grouped splits
// "waves" scales the desired split count relative to available slots
// (statement continues beyond this chunk).
float waves =
conf.getFloat(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_WAVES,