                    // Capture the latest sample for each roll-over feed before the old partition buffer is closed.
                    rowOverData = toBeClosedBuffer.getLastData(rowOverFeedIDs, TimeUnit.NANOSECONDS, 0, Long.MAX_VALUE);
                }
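                // Close the outgoing partition buffer now that its roll-over data has been captured.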
                closeBuffer(toBeClosedBuffer);
            }
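            // Advance the buffer environment to the next partition and create its backing partition buffer.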
            DataBufferEnv newBufferEnv = this.currentParition.getBufferEnv().advanceBufferPartition();
            PartitionDataBuffer newPartitionBuffer = dataBufferHelper.newPartitionBuffer(newBufferEnv);
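            // Seed the new partition with the data carried over from the closed one, if any was captured.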
            if (rowOverData != null) {
                Map<String, Map<Long, Map<String, String>>> data = new HashMap<String, Map<Long, Map<String, String>>>();
                for (Entry<String, SortedMap<Long, Map<String, String>>> entry : rowOverData.entrySet()) {
                    Map<Long, Map<String, String>> feedData = new HashMap<Long, Map<String, String>>(entry.getValue());
                    data.put(entry.getKey(), feedData);
                }
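                // Write the carried-over data into the new partition and record its time stamps in the partition metadata.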
                try {
                    Map<String, PartitionTimestamps> timeStamps = putData(newPartitionBuffer, data, TimeUnit.NANOSECONDS);
                    if (timeStamps != null) {
                        metaDataBuffer.updatePartitionMetaData(newBufferPartition, timeStamps);
                    }
                } catch (BufferFullException e) {
                    LOGGER.error("Buffer full during prepareForNextPartition", e);
                }
            }
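            // Publish the new partition buffer so subsequent reads and writes use it.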
            this.partitionDataBuffers[newBufferEnv.getCurrentBufferPartition()].set(newPartitionBuffer);
        } finally {
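            // Always clear the in-progress flag and wake any threads waiting for the partition switch to complete.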
            synchronized (resetLock) {
                prepareNewPartitionInProgress = false;
                resetLock.notifyAll();
            }