// Fetch a batch of messages from the broker; time spent waiting on the
// fetch is accounted separately from decode and output time.
tempTime = System.currentTimeMillis();
MultiFetchResponse response = _consumer.multifetch(array);
requestTime += (System.currentTimeMillis() - tempTime);

while (response.hasNext()) {
    ByteBufferMessageSet messages = response.next();
    // Stop this pass on any broker-side error code for the message set.
    _toContinue = checkErrorCode(messages, input);
    if (!_toContinue)
        break;

    Iterator<Message> iter = (Iterator<Message>) messages.iterator();
    // Byte offset within this message set, used only for diagnostics below.
    long messageOffset = 0;
    while (iter.hasNext()) {
        Message message = iter.next();
        messageOffset += MessageSet.entrySize(message);
        reporter.incrCounter("topic-counters", _topic, 1);
        _count++;

        try {
            tempTime = System.currentTimeMillis();
            _timestamp = getTimestamp(message);
            decodeTime += (System.currentTimeMillis() - tempTime);
        } catch (IOException e) {
            System.err.println("SetOffset=" + _offset
                    + "messageOffset=" + messageOffset
                    + ": ignore message with exception: ");
            // Log the cause in BOTH paths: previously an "ignored" parse
            // failure was counted but its exception was never printed,
            // making these errors invisible in task logs.
            e.printStackTrace(System.err);
            if (_ignoreErrors) {
                reporter.incrCounter(_topic, _topic + "_PARSING_ERROR", 1);
                continue;
            } else {
                throw e;
            }
        }

        // Decide whether this message is emitted and/or consumption stops.
        Status status = getStatus(message, reporter);

        // Emit the payload for both OUTPUT_* statuses.
        if (status == Status.OUTPUT_AND_CONTINUE
                || status == Status.OUTPUT_AND_BREAK) {
            tempTime = System.currentTimeMillis();
            ByteBuffer buffer = message.payload();
            byte[] bytes = new byte[buffer.remaining()];
            // BUG FIX: the offset argument of ByteBuffer.get(byte[], int, int)
            // indexes into the DESTINATION array, not the buffer. Passing
            // buffer.position() threw IndexOutOfBoundsException whenever the
            // payload buffer's position was non-zero. get(byte[]) copies
            // exactly remaining() bytes from the current position.
            buffer.get(bytes);
            collector.collect(new KafkaETLKey(_timestamp, _granularity),
                    new BytesWritable(bytes));
            outputTime += (System.currentTimeMillis() - tempTime);
        }

        // Report progress to the framework.
        float percentage = getProgress();
        reporter.setStatus("collected " + percentage + "%");

        // BUG FIX: the original "switch (status) { case ...: break; }" only
        // exited the switch statement itself, so BREAK statuses never
        // stopped consumption. Exit the per-message loop explicitly.
        // NOTE(review): the enclosing fetch loop is outside this view — if
        // BREAK is meant to end consumption entirely, the outer loop's
        // condition (e.g. _toContinue) may also need to be cleared; confirm
        // against the full method.
        if (status == Status.OUTPUT_AND_BREAK || status == Status.BREAK) {
            break;
        }
    }
    // Advance the consumer offset past the bytes of this message set.
    _offset += messages.validBytes();
}
}
_consumer.close();
long endTime = System.currentTimeMillis();