    }
    return (Map<String, Map<Long, Map<String, String>>>[]) groupInputData;
}
public Map<String, PartitionTimestamps> putData(Map<String, Map<Long, Map<String, String>>> value, final TimeUnit timeUnit) throws BufferFullException {
    final ElapsedTimer timer = new ElapsedTimer();
    timer.startInterval();
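    // Group the incoming data by feed, one bucket per backing database, so each bucket can be written by its own task.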
    final Map<String, Map<Long, Map<String, String>>>[] groupData = groupInputDataByFeed(value);
    // Results are written concurrently by the write tasks, so use a thread-safe map
    // (requires java.util.concurrent.ConcurrentHashMap) rather than a plain HashMap.
    final Map<String, PartitionTimestamps> timestamps = new ConcurrentHashMap<String, PartitionTimestamps>();
    final AtomicBoolean bufferFull = new AtomicBoolean(false);
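    // Each write task counts down this latch when it finishes, whether or not its writes succeed.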
    final CountDownLatch latch = new CountDownLatch(groupData.length);
    for (int i = 0; i < groupData.length; i++) {
        final int dataIndex = i;
        Runnable r = new Runnable() {
            @Override
            public void run() {
                try {
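                    // Write each feed's data in this group to the corresponding database partition.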
                    for (Entry<String, Map<Long, Map<String, String>>> feedData : groupData[dataIndex].entrySet()) {
                        PartitionTimestamps timeStamp = null;
                        try {
                            timeStamp = putData(null, feedData.getKey(), databases[dataIndex], timeUnit, feedData.getValue());
                        } catch (BufferFullException e) {
                            // Record that at least one partition filled up; the exception is rethrown after all tasks complete.
                            bufferFull.compareAndSet(false, true);
                        }
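                        // A null timestamp means the write did not complete (e.g. the buffer filled up); stop processing this group.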
                        if (timeStamp == null) {
                            break;
                        } else {
                            timestamps.put(feedData.getKey(), timeStamp);
                            LOGGER.debug("feedData.getKey(): {}, timeStamp: {}", feedData.getKey(), timeStamp);
                        }
                    }
                } finally {
                    latch.countDown();
                }
            }
        };
        writeThreads.execute(r);
    }
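    // Block until every write task has finished before reporting results or errors.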
    try {
        latch.await();
    } catch (InterruptedException e) {
        LOGGER.warn("Interrupted while waiting for putData write tasks to complete", e);
        // Preserve the interrupt status for callers higher up the stack.
        Thread.currentThread().interrupt();
    }
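    // If any partition reported a full buffer, propagate it to the caller now that all tasks are done.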
    if (bufferFull.get()) {
        throw new BufferFullException(env.getErrorMsg());
    }
    timer.stopInterval();
    WRITE_PERF_LOGGER.debug("Time to write {} feeds: {}", value.size(), timer.getIntervalInMillis());
    return timestamps;
}