    }
    jobContext.getJobRepository().addPartitionExecution(stepExecution, partitionExecution1);
    jobContext.getBatchEnvironment().submitTask(runner1);
}

// consolidate the outcome of all partitions into a single batch status for this step
BatchStatus consolidatedBatchStatus = BatchStatus.STARTED;
final List<PartitionExecutionImpl> fromAllPartitions = new ArrayList<PartitionExecutionImpl>();
tm.begin();
try {
    // block until every partition has reported back through the collector data queue
    while (fromAllPartitions.size() < numOfPartitions) {
        final Serializable data = collectorDataQueue.take();
        if (data instanceof PartitionExecutionImpl) {
            // a PartitionExecutionImpl on the queue signals that a partition has terminated
            final PartitionExecutionImpl s = (PartitionExecutionImpl) data;
            if (step.getChunk() != null) {
                // merge the partition's chunk metrics into the top-level step metrics
                stepExecution.getStepMetrics().addStepMetrics(s.getStepMetrics());
            }
            // save status and data for the terminated partition
            jobContext.getJobRepository().savePersistentData(jobContext.getJobExecution(), s);
            fromAllPartitions.add(s);
            final BatchStatus bs = s.getBatchStatus();
            if (bs == BatchStatus.FAILED || bs == BatchStatus.STOPPED) {
                // FAILED takes precedence over STOPPED when consolidating
                if (consolidatedBatchStatus != BatchStatus.FAILED) {
                    consolidatedBatchStatus = bs;
                }