Package kafka.javaapi.consumer

Examples of kafka.javaapi.consumer.ConsumerConnector
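The fragments below come from larger test suites, so helpers such as createConsumerConfig(...) and the various sink classes are defined elsewhere. For orientation, here is a minimal self-contained sketch of the high-level consumer pattern all of them follow, assuming a Kafka 0.8.x broker, a local ZooKeeper at localhost:2181, and placeholder topic/group names:

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class MinimalConsumerExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("zookeeper.connect", "localhost:2181"); // assumed local ZooKeeper
        props.put("group.id", "example-group");           // placeholder group id
        props.put("auto.offset.reset", "smallest");       // start from the earliest offset

        ConsumerConnector consumer =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

        // Ask for one stream (one consumer thread) for the topic.
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                consumer.createMessageStreams(Collections.singletonMap("mytopic", 1));

        ConsumerIterator<byte[], byte[]> it = streams.get("mytopic").get(0).iterator();
        while (it.hasNext()) { // blocks indefinitely unless consumer.timeout.ms is set
            System.out.println(new String(it.next().message()));
        }
        consumer.shutdown();
    }
}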


        curator.start();

        // Create five consumers in the same group on "mytopic"; each new
        // connector triggers a rebalance of partition ownership.
        RebalanceListener listener = null;
        for (int i = 0; i < 5; i++) {
            System.out.format("%nCreating consumer #%d%n", i + 1);
            ConsumerConnector connector = Consumer.createJavaConsumerConnector(config);
            connector.createMessageStreams(Collections.singletonMap("mytopic", 1));
            if (i == 0) {
                // Watch the ZooKeeper znodes that record partition ownership
                // (/consumers/[group]/owners/[topic]) to observe the rebalances.
                PathChildrenCache cache = new PathChildrenCache(curator, "/consumers/foo/owners/mytopic", true);
                listener = new RebalanceListener(5);
                cache.getListenable().addListener(listener);
                cache.start(PathChildrenCache.StartMode.POST_INITIALIZED_EVENT);
View Full Code Here
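RebalanceListener is a custom class from the full source, not part of Kafka or Curator. A plausible reconstruction, given how it is registered above, is a PathChildrenCacheListener that counts ownership changes; the CountDownLatch detail and the Apache Curator 2.x package names are assumptions:

import java.util.concurrent.CountDownLatch;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;

// Hypothetical sketch: counts ownership-znode changes under the watched path.
class RebalanceListener implements PathChildrenCacheListener {
    final CountDownLatch latch;

    RebalanceListener(int expectedEvents) {
        this.latch = new CountDownLatch(expectedEvents);
    }

    @Override
    public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) {
        switch (event.getType()) {
            case CHILD_ADDED:
            case CHILD_UPDATED:
            case CHILD_REMOVED:
                latch.countDown(); // a partition's owner changed during a rebalance
                break;
            default:
                break; // connection-state events are ignored here
        }
    }
}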


        assertTrue(sink.getNumOfPendingMessages() > 0);
        sink.close();
        System.out.println(sink.getStat());
        assertEquals(sink.getNumOfPendingMessages(), 0);

        ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
                createConsumerConfig("localhost:" + zk.getServerPort(), "groupid_multithread"));
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(TOPIC_NAME_MULTITHREAD, 1);
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = consumerMap.get(TOPIC_NAME_MULTITHREAD).get(0);
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
        for (int i = 0; i < msgCount; ++i) {
            iterator.next();
        }

        try {
            iterator.next();
            fail();
        } catch (ConsumerTimeoutException e) {
            // expected: consumer.timeout.ms elapsed, so the topic is fully drained
            consumer.shutdown();
        }
    }
View Full Code Here
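These tests call a two-argument createConsumerConfig(zkConnect, groupId) helper that is not shown. A plausible sketch: the drain check at the end of each test relies on iterator.next() throwing ConsumerTimeoutException, which only happens when consumer.timeout.ms is set, so the helper presumably looks roughly like this (the exact timeout value is an assumption):

    // Hypothetical helper; the real one lives in the full source.
    private static ConsumerConfig createConsumerConfig(String zkConnect, String groupId) {
        Properties props = new Properties();
        props.put("zookeeper.connect", zkConnect);
        props.put("group.id", groupId);
        props.put("auto.offset.reset", "smallest"); // consume the topic from the beginning
        props.put("consumer.timeout.ms", "3000");   // make next() throw once the topic is drained
        return new ConsumerConfig(props);
    }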

        }
        sink.close();
        System.out.println(sink.getStat());

        // read data back from Kafka
        ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
                createConsumerConfig("localhost:" + zk.getServerPort(), "groupid"));
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(TOPIC_NAME_PARTITION_BY_KEY, 1);
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = consumerMap.get(TOPIC_NAME_PARTITION_BY_KEY).get(0);
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();

        // group the received messages by partition
        Map<Integer, Set<Map<String, Object>>> resultSet = new HashMap<Integer, Set<Map<String, Object>>>();
        for (int i = 0; i < messageCount; ++i) {
            MessageAndMetadata<byte[], byte[]> msgAndMeta = iterator.next();
            System.out.println(new String(msgAndMeta.message()));

            Map<String, Object> msg = jsonMapper.readValue(new String(msgAndMeta.message()), new TypeReference<Map<String, Object>>() {});
            Set<Map<String, Object>> s = resultSet.get(msgAndMeta.partition());
            if (s == null) {
                s = new HashSet<Map<String, Object>>();
                resultSet.put(msgAndMeta.partition(), s);
            }
            s.add(msg);
        }

        // verify we received what was sent: every message in a partition shares the same key
        int sizeSum = 0;
        for (Map.Entry<Integer, Set<Map<String, Object>>> e : resultSet.entrySet()) {
            sizeSum += e.getValue().size();
            String key = (String) e.getValue().iterator().next().get("key");
            for (Map<String, Object> m : e.getValue()) {
                assertEquals(key, (String) m.get("key"));
            }
        }
        assertEquals(sizeSum, messageCount);

        try {
            iterator.next();
            fail();
        } catch (ConsumerTimeoutException e) {
            // expected: consumer.timeout.ms elapsed, so the topic is fully drained
            consumer.shutdown();
        }
    }
View Full Code Here
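The null-check-then-put grouping inside the loop above predates Java 8; on Java 8+ the same bookkeeping collapses to one line (a stylistic alternative, not what the original source uses):

            // Java 8+ equivalent of the grouping block inside the loop above.
            resultSet.computeIfAbsent(msgAndMeta.partition(), p -> new HashSet<>()).add(msg);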

        List<Sink> sinks = new ArrayList<Sink>();
        sinks.add(sinkV1);
        sinks.add(sinkV2);

        // set up a Kafka consumer to read the messages back
        ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
            createConsumerConfig("localhost:" + zk.getServerPort(), "groupid"));
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(TOPIC_NAME_BACKWARD_COMPAT, 1);
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                                                consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = consumerMap.get(TOPIC_NAME_BACKWARD_COMPAT).get(0);
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();

        // Send 20 test messages through both the old and the new Kafka sink.
        // Retrieve the messages and ensure that each pair is identical and went to the same partition.
        Random rand = new Random();
        int messageCount = 20;
        for (int i = 0; i < messageCount; ++i) {
            Map<String, Object> msgMap = new ImmutableMap.Builder<String, Object>()
                    .put("key", Long.valueOf(rand.nextLong()))
                    .put("value", "message:" + i).build();

            // send the message to both sinks
            for (Sink sink : sinks) {
                sink.writeTo(new DefaultMessageContainer(
                        new Message(TOPIC_NAME_BACKWARD_COMPAT, jsonMapper.writeValueAsBytes(msgMap)),
                        jsonMapper));
            }

            // read the two copies back from Kafka and check that partitions and payloads match
            MessageAndMetadata<byte[], byte[]> msgAndMeta1 = iterator.next();
            MessageAndMetadata<byte[], byte[]> msgAndMeta2 = iterator.next();
            System.out.println("iteration: " + i + " partition1: " + msgAndMeta1.partition());
            System.out.println("iteration: " + i + " partition2: " + msgAndMeta2.partition());
            assertEquals(msgAndMeta1.partition(), msgAndMeta2.partition());
            String msg1Str = new String(msgAndMeta1.message());
            String msg2Str = new String(msgAndMeta2.message());
            System.out.println("iteration: " + i + " message1: " + msg1Str);
            System.out.println("iteration: " + i + " message2: " + msg2Str);
            assertEquals(msg1Str, msg2Str);
        }

        // close the sinks, then drain the consumer
        sinkV1.close();
        sinkV2.close();
        try {
            iterator.next();
            fail(); // there should be no data left to consume
        } catch (ConsumerTimeoutException e) {
            // expected: consumer.timeout.ms elapsed, so the topic is fully drained
            consumer.shutdown();
        }
    }
View Full Code Here
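The same drain-and-assert block closes every test above; it extracts cleanly into a helper. A sketch, using the JUnit-style assertions already in scope; note that moving shutdown() into finally also releases the connector when the assertion fails:

    // Hypothetical helper for the repeated end-of-test pattern: the topic must be
    // fully drained, signalled by ConsumerTimeoutException from consumer.timeout.ms.
    private static void assertDrainedAndShutdown(ConsumerIterator<byte[], byte[]> iterator,
                                                 ConsumerConnector consumer) {
        try {
            iterator.next();
            fail("expected no messages left to consume");
        } catch (ConsumerTimeoutException e) {
            // expected
        } finally {
            consumer.shutdown();
        }
    }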


    public List<String> consumeMessages() {
        System.out.println("topic in kafka consumer: " + topic);
        try {
            final ConsumerConnector consumer =
                    kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());
            final Map<String, Integer> topicCountMap = ImmutableMap.of(topic, 1);

            StringDecoder decoder = new StringDecoder(new VerifiableProperties());
            final Map<String, List<KafkaStream<String, String>>> consumerMap =
                    consumer.createMessageStreams(topicCountMap, decoder, decoder);

            final KafkaStream<String, String> stream = consumerMap.get(topic).get(0);
            final ConsumerIterator<String, String> iterator = stream.iterator();
            while (iterator.hasNext()) {
                String msg = iterator.next().message();
                msg = (msg == null ? "<null>" : msg);
                System.out.println("got message: " + msg);
                messagesReceived.add(msg);
                if (msg.contains("SHUTDOWN")) {
                    consumer.shutdown();
                    return messagesReceived;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
View Full Code Here

        }
        System.out.println("SUCCESS -- WE ARE HAPPY !...");
    }

    private void consumeMessages() {
        final ConsumerConnector consumer =
                kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());
        final Map<String, Integer> topicCountMap =
                ImmutableMap.of(topic, 1);
        final StringDecoder decoder =
                new StringDecoder(new VerifiableProperties());
        final Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, decoder, decoder);
        final KafkaStream<String, String> stream =
                consumerMap.get(topic).get(0);
        final ConsumerIterator<String, String> iterator = stream.iterator();

        kafkaMessageReceiverThread = new Thread(
                new Runnable() {
                    @Override
                    public void run() {
                        while (iterator.hasNext()) {
                            String msg = iterator.next().message();
                            msg = msg == null ? "<null>" : msg;
                            System.out.println("got message" + msg);
                            if (msg.equals("SHUTDOWN")) {
                                consumer.shutdown();
                                return;
                            }
                            messagesReceived.add(msg);
                        }
                    }
View Full Code Here
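Both consumer loops above stop on a "SHUTDOWN" sentinel message. The producer side is not shown; with the matching 0.8-era producer API, sending the sentinel would look roughly like this (the broker address and serializer choice are assumptions):

        // 0.8-era producer API: kafka.javaapi.producer.Producer, kafka.producer.KeyedMessage
        Properties props = new Properties();
        props.put("metadata.broker.list", "localhost:9092");             // assumed broker address
        props.put("serializer.class", "kafka.serializer.StringEncoder"); // plain string payloads

        Producer<String, String> producer = new Producer<String, String>(new ProducerConfig(props));
        producer.send(new KeyedMessage<String, String>(topic, "SHUTDOWN")); // sentinel stops the receiver
        producer.close();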

        kafkaReporter.report();
    }

    @Then("Kafka consumer should be able to read this data.")
    public void consumerReadsMetrics() throws IOException {
        ConsumerConnector consumer = Consumer.createJavaConsumerConnector(createConsumerConfig());
        String message = readMessage(consumer);
        assertNotNull(message);
        ObjectMapper objectMapper = new ObjectMapper();
        KafkaMetricsReport report = objectMapper.readValue(message, KafkaMetricsReport.class);
        assertNotNull(report);
View Full Code Here
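readMessage(consumer) is a helper from the full source. A minimal sketch of what it presumably does, assuming the step class holds the topic name in a field (the names here are hypothetical):

    // Hypothetical reconstruction of the readMessage helper used above.
    private String readMessage(ConsumerConnector consumer) {
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                consumer.createMessageStreams(Collections.singletonMap(topic, 1));
        ConsumerIterator<byte[], byte[]> it = streams.get(topic).get(0).iterator();
        return it.hasNext() ? new String(it.next().message()) : null;
    }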
