// Holder for the record pulled from Kafka; assigned further down in this loop
// (continuation not visible in this chunk).
Message message = null;
// Outer fetch loop: advances through the split's partition requests, creating a
// new KafkaReader per request, until popRequest() returns null (end of input).
while (true) {
try {
// No active reader, or the current reader's partition is exhausted:
// advance to the next EtlRequest queued in this mapper's split.
if (reader == null || !reader.hasNext()) {
EtlRequest request = (EtlRequest) split.popRequest();
if (request == null) {
// No more partition requests for this split — end of input.
return false;
}
// NOTE(review): when a max-pull window is configured, the end timestamp
// is reset to 0 for each new request — presumably recomputed lazily on
// the first message of the partition; confirm where endTimeStamp is
// assigned elsewhere in this class.
if (maxPullHours > 0) {
endTimeStamp = 0;
}
// Seed the reusable output key with this request's coordinates.
// request.getOffset() is passed twice — looks like (beginOffset,
// currentOffset) both start at the request's start offset, with the
// trailing 0 as a checksum/size placeholder; TODO confirm against
// EtlKey.set's parameter order.
key.set(request.getTopic(), request.getLeaderId(), request.getPartition(),
request.getOffset(), request.getOffset(), 0);
value = null;
log.info("\n\ntopic:" + request.getTopic() + " partition:"
+ request.getPartition() + " beginOffset:" + request.getOffset()
+ " estimatedLastOffset:" + request.getLastOffset());
// Append this partition's identity to the running task status string
// ("; "-separated), surfaced through the Hadoop task UI via setStatus.
statusMsg += statusMsg.length() > 0 ? "; " : "";
statusMsg += request.getTopic() + ":" + request.getLeaderId() + ":"
+ request.getPartition();
context.setStatus(statusMsg);
// Release the previous partition's reader before opening a new one.
if (reader != null) {
closeReader();
}
reader = new KafkaReader(context, request,
CamusJob.getKafkaTimeoutValue(mapperContext),
CamusJob.getKafkaBufferSize(mapperContext));
// Per-topic decoder for turning raw Kafka payloads into records.
decoder = MessageDecoderFactory.createMessageDecoder(context, request.getTopic());
}
// Inner loop: drain messages from the current reader, tracking the number
// of records (count) and cumulative bytes read (readBytes).
int count = 0;
while (reader.getNext(key, msgValue, msgKey)) {
readBytes += key.getMessageSize();
count++;