}
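
// Advances to the next Kafka message. When the current KafkaReader is exhausted
// (or has not been created yet), the next EtlRequest is popped off the split and a
// fresh reader and decoder are built for it; returns false once no requests remain.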
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  Message message = null;

  while (true) {
    try {
      if (reader == null || !reader.hasNext()) {
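        // The current reader is exhausted (or missing): pull the next
        // topic/partition request off this split.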
        EtlRequest request = (EtlRequest) split.popRequest();
        if (request == null) {
          return false;
        }

        if (maxPullHours > 0) {
          endTimeStamp = 0;
        }
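        // Seed the key with the request's starting offset; nothing has been read yet.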
        key.set(request.getTopic(), request.getLeaderId(), request.getPartition(),
            request.getOffset(), request.getOffset(), 0);
        value = null;
        log.info("\n\ntopic:" + request.getTopic() + " partition:"
            + request.getPartition() + " beginOffset:" + request.getOffset()
            + " estimatedLastOffset:" + request.getLastOffset());

        statusMsg += statusMsg.length() > 0 ? "; " : "";
        statusMsg += request.getTopic() + ":" + request.getLeaderId() + ":"
            + request.getPartition();
        context.setStatus(statusMsg);

        if (reader != null) {
          closeReader();
        }
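        // Open a reader for the new request, using the job's configured Kafka
        // fetch timeout and buffer size.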
        reader = new KafkaReader(context, request,
            CamusJob.getKafkaTimeoutValue(mapperContext),
            CamusJob.getKafkaBufferSize(mapperContext));
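        // Build a topic-specific decoder for deserializing the raw message payloads.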
        decoder = MessageDecoderFactory.createMessageDecoder(context, request.getTopic());
      }
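
      // Drain messages from the current reader, reporting progress and
      // updating byte/event counters for each record read.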
      int count = 0;
      while (reader.getNext(key, msgValue, msgKey)) {
        readBytes += key.getMessageSize();
        count++;
        context.progress();
        mapperContext.getCounter("total", "data-read").increment(msgValue.getLength());
        mapperContext.getCounter("total", "event-count").increment(1);
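
        // Extract the raw payload and key bytes for the message just read.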
        byte[] bytes = getBytes(msgValue);
        byte[] keyBytes = getBytes(msgKey);

        // Verify the message checksum. If the message was produced with a partition
        // key, the Message must be reconstructed with that key for the checksum to
        // match, so both the keyed and key-less forms are checked.
        Message messageWithKey = new Message(bytes, keyBytes);
        Message messageWithoutKey = new Message(bytes);
        long checksum = key.getChecksum();
        if (checksum != messageWithKey.checksum() && checksum != messageWithoutKey.checksum()) {
          throw new ChecksumException("Invalid message checksum : MessageWithKey : "
              + messageWithKey.checksum() + " MessageWithoutKey checksum : "
              + messageWithoutKey.checksum()
              + ". Expected " + key.getChecksum(), key.getOffset());
        }

        long tempTime = System.currentTimeMillis();