/**
 * For each Kafka partition leader, opens a {@code SimpleConsumer} and issues
 * two offset requests (latest and earliest available offset) covering every
 * topic/partition that leader owns, then builds the list of
 * {@link CamusRequest}s from those offset ranges.
 *
 * @param context            job context supplying Kafka timeout, buffer size,
 *                           and client-name configuration
 * @param offsetRequestInfo  map from each partition leader to the list of
 *                           topic/partitions it leads
 * @return the ETL requests assembled from the fetched offsets
 */
public ArrayList<CamusRequest> fetchLatestOffsetAndCreateEtlRequests(
JobContext context,
HashMap<LeaderInfo, ArrayList<TopicAndPartition>> offsetRequestInfo) {
ArrayList<CamusRequest> finalRequests = new ArrayList<CamusRequest>();
// One consumer connection per leader: all topic/partitions owned by the
// same broker are batched into a single pair of offset requests.
for (LeaderInfo leader : offsetRequestInfo.keySet()) {
SimpleConsumer consumer = new SimpleConsumer(leader.getUri()
.getHost(), leader.getUri().getPort(),
CamusJob.getKafkaTimeoutValue(context),
CamusJob.getKafkaBufferSize(context),
CamusJob.getKafkaClientName(context));
// Request info for the latest (most recent) offset; maxNumOffsets=1
// asks the broker for a single offset value per partition.
PartitionOffsetRequestInfo partitionLatestOffsetRequestInfo = new PartitionOffsetRequestInfo(
kafka.api.OffsetRequest.LatestTime(), 1);
// Request info for the earliest still-retained offset, likewise
// capped at one offset per partition.
PartitionOffsetRequestInfo partitionEarliestOffsetRequestInfo = new PartitionOffsetRequestInfo(
kafka.api.OffsetRequest.EarliestTime(), 1);
Map<TopicAndPartition, PartitionOffsetRequestInfo> latestOffsetInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
Map<TopicAndPartition, PartitionOffsetRequestInfo> earliestOffsetInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
ArrayList<TopicAndPartition> topicAndPartitions = offsetRequestInfo
.get(leader);
// The same request-info instance is shared across partitions: both
// maps use every partition this leader owns as keys.
for (TopicAndPartition topicAndPartition : topicAndPartitions) {
latestOffsetInfo.put(topicAndPartition,
partitionLatestOffsetRequestInfo);
earliestOffsetInfo.put(topicAndPartition,
partitionEarliestOffsetRequestInfo);
}
// NOTE(review): if either getOffsetsBefore call throws, consumer.close()
// below is skipped and the connection leaks — consider try/finally.
OffsetResponse latestOffsetResponse = consumer
.getOffsetsBefore(new OffsetRequest(latestOffsetInfo,
kafka.api.OffsetRequest.CurrentVersion(), CamusJob
.getKafkaClientName(context)));
OffsetResponse earliestOffsetResponse = consumer
.getOffsetsBefore(new OffsetRequest(earliestOffsetInfo,
kafka.api.OffsetRequest.CurrentVersion(), CamusJob
.getKafkaClientName(context)));
consumer.close();
// Unpack the single offset returned per partition ([0] relies on the
// maxNumOffsets=1 requested above).
for (TopicAndPartition topicAndPartition : topicAndPartitions) {
long latestOffset = latestOffsetResponse.offsets(
topicAndPartition.topic(),
topicAndPartition.partition())[0];
long earliestOffset = earliestOffsetResponse.offsets(