/**
 * Refills the (currently empty) event bundle buffer from the cluster
 * journal, starting either at {@code lastRevision} (if known) or at the
 * beginning of the journal.
 * <p>
 * Records are skipped unless they were written by this {@code producerId}
 * and belong to this session's workspace. Reading stops once at least
 * {@code MIN_BUFFER_SIZE} events have been collected; in that case the
 * (timestamp -&gt; revision) pair of the last processed record is remembered
 * in the skip map so later refills can fast-forward.
 * <p>
 * Journal read failures are logged and swallowed — presumably a later
 * refill attempt will retry (NOTE(review): confirm this best-effort
 * behavior is intended by callers).
 */
private void refill() {
    // caller contract: only invoked when the buffer has been drained
    assert eventBundleBuffer.isEmpty();
    try {
        RecordProcessor processor = new RecordProcessor();
        ClusterRecordDeserializer deserializer = new ClusterRecordDeserializer();
        RecordIterator records;
        if (lastRevision != null) {
            log.debug("refilling event bundle buffer starting at revision {}",
                    lastRevision);
            records = journal.getRecords(lastRevision.longValue());
        } else {
            log.debug("refilling event bundle buffer starting at journal beginning");
            records = journal.getRecords();
        }
        try {
            while (processor.getNumEvents() < MIN_BUFFER_SIZE && records.hasNext()) {
                Record record = records.nextRecord();
                // only records written by this producer are relevant here
                if (record.getProducerId().equals(producerId)) {
                    ClusterRecord cr = deserializer.deserialize(record);
                    // ignore records targeting other workspaces
                    if (!session.getWorkspace().getName().equals(cr.getWorkspace())) {
                        continue;
                    }
                    cr.process(processor);
                    // Long.valueOf instead of the deprecated Long constructor
                    lastRevision = Long.valueOf(cr.getRevision());
                }
            }
            if (processor.getNumEvents() >= MIN_BUFFER_SIZE) {
                // remember in skip map
                SortedMap<Long, Long> skipMap = getSkipMap();
                Long timestamp = Long.valueOf(processor.getLastTimestamp());
                synchronized (skipMap) {
                    if (log.isDebugEnabled()) {
                        DateFormat df = DateFormat.getDateTimeInstance();
                        log.debug("remember record in skip map: {} -> {}",
                                df.format(new Date(timestamp.longValue())),
                                lastRevision);
                    }
                    skipMap.put(timestamp, lastRevision);
                }
            }
        } finally {
            // always release the journal iterator, even on deserialization errors
            records.close();
        }
    } catch (JournalException e) {
        // best-effort: leave the buffer empty rather than propagate
        log.warn("Unable to read journal records", e);
    }
}