// NOTE(review): this is the interior of a larger test method — the lines below
// are the tail of a JSON sink-description string whose opening lines are above
// this chunk. "request.required.acks": 1 asks Kafka to ack after the leader
// has the message.
" \"request.required.acks\": 1,\n" +
fileQueue + ",\n" +
keyTopicMap + "\n" +
"}";
// Build a mapper and register KafkaSink under the polymorphic type name
// "kafka" so the JSON description above deserializes into a KafkaSink
// through the Sink base type.
ObjectMapper jsonMapper = new DefaultObjectMapper();
jsonMapper.registerSubtypes(new NamedType(KafkaSink.class, "kafka"));
KafkaSink sink = jsonMapper.readValue(description, new TypeReference<Sink>(){});
sink.open();
// Produce 10 messages whose "key" field cycles over 0..numPartitions-1, so
// partition-by-key routing should spread them across all partitions.
int messageCount = 10;
for (int i = 0; i < messageCount; ++i) {
Map<String, Object> msgMap = new ImmutableMap.Builder<String, Object>()
.put("key", Integer.toString(i % numPartitions))
.put("value", "message:" + i).build();
sink.writeTo(new DefaultMessageContainer(
new Message(TOPIC_NAME_PARTITION_BY_KEY, jsonMapper.writeValueAsBytes(msgMap)),
jsonMapper));
}
// close() flushes the sink before we try to consume the messages back.
sink.close();
System.out.println(sink.getStat());
// Read everything back through the (old, high-level) Kafka consumer API to
// verify delivery. NOTE(review): "gropuid" is a misspelling of "groupid";
// it is only a consumer-group name, so the test still works, but it should
// be fixed in a code (non-doc) change.
ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
createConsumerConfig("localhost:" + zk.getServerPort(), "gropuid"));
// Request a single consumer stream for the topic under test.
Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
topicCountMap.put(TOPIC_NAME_PARTITION_BY_KEY, 1);
Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
KafkaStream<byte[], byte[]> stream = consumerMap.get(TOPIC_NAME_PARTITION_BY_KEY).get(0);
// Group consumed messages by the partition they arrived on:
// partition id -> set of decoded JSON messages (presumably asserted on
// below this chunk — verify against the rest of the method).
Map<Integer, Set<Map<String, Object>>> resultSet = new HashMap<Integer, Set<Map<String, Object>>>();
for (int i = 0; i < messageCount; ++i) {
// NOTE(review): stream.iterator() is called on every loop iteration;
// this relies on KafkaStream returning the same underlying blocking
// iterator each time — confirm, or hoist the iterator out of the loop.
MessageAndMetadata<byte[], byte[]> msgAndMeta = stream.iterator().next();
System.out.println(new String(msgAndMeta.message()));
Map<String, Object> msg = jsonMapper.readValue(new String(msgAndMeta.message()), new TypeReference<Map<String, Object>>() {});
// Pre-Java-8 get/null/put grouping idiom (computeIfAbsent equivalent).
Set<Map<String, Object>> s = resultSet.get(msgAndMeta.partition());
if (s == null) {
s = new HashSet<Map<String, Object>>();
resultSet.put(msgAndMeta.partition(), s);
}