Package: kafka.javaapi.consumer

Usage examples of kafka.javaapi.consumer.ConsumerConnector


  }


  @Test
  public void testGetConsumer() {
    ConsumerConnector cc = KafkaSourceUtil.getConsumer(props);
    assertNotNull(cc);

  }
View Full Code Here


  }

  public static ConsumerConnector getConsumer(Properties kafkaProps) {
    ConsumerConfig consumerConfig =
            new ConsumerConfig(kafkaProps);
    ConsumerConnector consumer =
            Consumer.createJavaConsumerConnector(consumerConfig);
    return consumer;
  }
View Full Code Here

  }

  // NOTE(review): this snippet is truncated by the source page — the catch/finally
  // of the try block and the method's return statement are not visible here.
  private synchronized ConsumerAndIterator createConsumerAndIter() {
    try {
      // Build a high-level consumer from the channel's Kafka properties.
      ConsumerConfig consumerConfig = new ConsumerConfig(kafkaConf);
      ConsumerConnector consumer =
        Consumer.createJavaConsumerConnector(consumerConfig);
      // Ask Kafka for message streams per topic as described by topicCountMap.
      Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
        consumer.createMessageStreams(topicCountMap);
      final List<KafkaStream<byte[], byte[]>> streamList = consumerMap
        .get(topic.get());
      // Take ownership of the first stream. NOTE(review): remove(0) throws if
      // the topic is absent (streamList null) or empty — presumably handled by
      // the catch clause that is cut off below; confirm in the full source.
      KafkaStream<byte[], byte[]> stream = streamList.remove(0);
      ConsumerAndIterator ret =
        new ConsumerAndIterator(consumer, stream.iterator(), channelUUID);
View Full Code Here

                                            newDimExclus
                                        )
                      )
    );

    // Create the high-level consumer connector from the configured properties.
    final ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));

    // Request exactly one stream for the configured feed (topic).
    final Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams(
        ImmutableMap.of(
            feed,
            1
        )
    );

    // Defensive check: we asked for one stream, so anything else is a failure.
    final List<KafkaStream<byte[], byte[]>> streamList = streams.get(feed);
    if (streamList == null || streamList.size() != 1) {
      return null;
    }

    final KafkaStream<byte[], byte[]> stream = streamList.get(0);
    final ConsumerIterator<byte[], byte[]> iter = stream.iterator();

    // Adapt the Kafka iterator to the Firehose interface. The returned object
    // owns the connector: close() shuts it down, commit() commits its offsets.
    return new Firehose()
    {
      @Override
      public boolean hasMore()
      {
        // Blocks until a message arrives (default Kafka consumer behavior
        // unless a consumer timeout is configured — confirm in consumerProps).
        return iter.hasNext();
      }

      @Override
      public InputRow nextRow()
      {
        final byte[] message = iter.next().message();

        // Null payloads are skipped by signalling "no row" to the caller.
        if (message == null) {
          return null;
        }

        return theParser.parse(ByteBuffer.wrap(message));
      }

      @Override
      public Runnable commit()
      {
        return new Runnable()
        {
          @Override
          public void run()
          {
               /*
                 This is actually not going to do exactly what we want, cause it will be called asynchronously
                 after the persist is complete.  So, it's going to commit that it's processed more than was actually
                 persisted.  This is unfortunate, but good enough for now.  Should revisit along with an upgrade
                 of our Kafka version.
               */

            log.info("committing offsets");
            connector.commitOffsets();
          }
        };
      }

      @Override
      public void close() throws IOException
      {
        connector.shutdown();
      }
    };
  }
View Full Code Here

                                            newDimExclus
                                        )
                      )
    );

    // Create the high-level consumer connector from the configured properties.
    // (Older Kafka API variant: streams carry Message objects, not byte[].)
    final ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps));

    // Request exactly one stream for the configured feed (topic).
    final Map<String, List<KafkaStream<Message>>> streams = connector.createMessageStreams(ImmutableMap.of(feed, 1));

    // Defensive check: we asked for one stream, so anything else is a failure.
    final List<KafkaStream<Message>> streamList = streams.get(feed);
    if (streamList == null || streamList.size() != 1) {
      return null;
    }

    final KafkaStream<Message> stream = streamList.get(0);
    final Iterator<MessageAndMetadata<Message>> iter = stream.iterator();

    // Adapt the Kafka iterator to the Firehose interface. The returned object
    // owns the connector: close() shuts it down, commit() commits its offsets.
    return new Firehose()
    {
      @Override
      public boolean hasMore()
      {
        // Blocks until a message arrives (default Kafka consumer behavior
        // unless a consumer timeout is configured — confirm in consumerProps).
        return iter.hasNext();
      }

      @Override
      public InputRow nextRow()
      {
        final Message message = iter.next().message();

        // Null payloads are skipped by signalling "no row" to the caller.
        if (message == null) {
          return null;
        }

        return parseMessage(message);
      }

      // Parses the raw Kafka message payload into an InputRow.
      public InputRow parseMessage(Message message)
      {
        return theParser.parse(message.payload());
      }

      @Override
      public Runnable commit()
      {
        return new Runnable()
        {
          @Override
          public void run()
          {
                /*
                 * This is actually not going to do exactly what we want, cause it
                 * will be called asynchronously after the persist is complete. So,
                 * it's going to commit that it's processed more than was actually
                 * persisted. This is unfortunate, but good enough for now. Should
                 * revisit along with an upgrade of our Kafka version.
                 */

            log.info("committing offsets");
            connector.commitOffsets();
          }
        };
      }

      @Override
      public void close() throws IOException
      {
        connector.shutdown();
      }
    };
  }
View Full Code Here

    // NOTE(review): snippet is truncated by the source page — the construction of
    // the unit under test and the assertions are not visible here. Only the mock
    // setup (connector, Jetty server, executor) is shown.
    public void testIsRunningAfterUnrecoverableException() throws Exception {

        //Mock a KafkaStream
        KafkaStream messageStream = Mockito.mock(KafkaStream.class);
        // The mocked connector returns a single stream for "TesTopic" regardless
        // of the topic map / decoders it is called with.
        Map<String, List<KafkaStream>> mockedMessageStreams = Collections.singletonMap("TesTopic",Collections.singletonList(messageStream));
        ConsumerConnector consumerConnector = Mockito.mock(ConsumerConnector.class);
        when(consumerConnector.createMessageStreams(Mockito.anyMap(),Mockito.any(Decoder.class),Mockito.any(Decoder.class))).thenReturn(mockedMessageStreams);
        // Jetty server is mocked so stop() is a no-op we can verify against.
        Server jettyServer = PowerMockito.mock(Server.class);
        Mockito.doNothing().when(jettyServer).stop();
        //Mock an Executor to simply run the StreamProcessorRunnable
        ExecutorService executor = Mockito.mock(ExecutorService.class);
        when(executor.isTerminated()).thenReturn(false);
View Full Code Here

    // NOTE(review): snippet is truncated by the source page — the construction of
    // the unit under test and the assertions are not visible here. Only the mock
    // setup (connector, Jetty server, executor) is shown.
    public void testIsRunningAfterRecoverableException() throws Exception {

        //Mock a KafkaStream
        KafkaStream messageStream = Mockito.mock(KafkaStream.class);
        // The mocked connector returns a single stream for "TesTopic" regardless
        // of the topic map / decoders it is called with.
        Map<String, List<KafkaStream>> mockedMessageStreams = Collections.singletonMap("TesTopic",Collections.singletonList(messageStream));
        ConsumerConnector consumerConnector = Mockito.mock(ConsumerConnector.class);
        when(consumerConnector.createMessageStreams(Mockito.anyMap(),Mockito.any(Decoder.class),Mockito.any(Decoder.class))).thenReturn(mockedMessageStreams);
        // Jetty server is mocked so stop() is a no-op we can verify against.
        Server jettyServer = PowerMockito.mock(Server.class);
        Mockito.doNothing().when(jettyServer).stop();
        //Mock an Executor to simply run the StreamProcessorRunnable
        ExecutorService executor = Mockito.mock(ExecutorService.class);
        when(executor.isTerminated()).thenReturn(false);
View Full Code Here

    // NOTE(review): snippet is truncated by the source page — the construction of
    // the unit under test and the assertions are not visible here. Only the mock
    // setup (connector, Jetty server, executor) is shown.
    public void testIsRunningAfterMaximumRecoverableException() throws Exception {

        //Mock a KafkaStream
        KafkaStream messageStream = Mockito.mock(KafkaStream.class);
        // The mocked connector returns a single stream for "TesTopic" regardless
        // of the topic map / decoders it is called with.
        Map<String, List<KafkaStream>> mockedMessageStreams = Collections.singletonMap("TesTopic",Collections.singletonList(messageStream));
        ConsumerConnector consumerConnector = Mockito.mock(ConsumerConnector.class);
        when(consumerConnector.createMessageStreams(Mockito.anyMap(),Mockito.any(Decoder.class),Mockito.any(Decoder.class))).thenReturn(mockedMessageStreams);
        // Jetty server is mocked so stop() is a no-op we can verify against.
        Server jettyServer = PowerMockito.mock(Server.class);
        Mockito.doNothing().when(jettyServer).stop();
        //Mock an Executor to simply run the StreamProcessorRunnable
        ExecutorService executor = Mockito.mock(ExecutorService.class);
        when(executor.isTerminated()).thenReturn(false);
View Full Code Here

    // NOTE(review): snippet is truncated by the source page — the construction of
    // the unit under test and the assertions are not visible here. Only the mock
    // setup (connector, Jetty server, executor) is shown.
    public void testServerShutDownOnUnrecoverableException() throws Exception {

        //Mock a KafkaStream
        KafkaStream messageStream = Mockito.mock(KafkaStream.class);
        // The mocked connector returns a single stream for "TesTopic" regardless
        // of the topic map / decoders it is called with.
        Map<String, List<KafkaStream>> mockedMessageStreams = Collections.singletonMap("TesTopic",Collections.singletonList(messageStream));
        ConsumerConnector consumerConnector = Mockito.mock(ConsumerConnector.class);
        when(consumerConnector.createMessageStreams(Mockito.anyMap(),Mockito.any(Decoder.class),Mockito.any(Decoder.class))).thenReturn(mockedMessageStreams);
        // Jetty server is mocked so stop() is a no-op we can verify against.
        Server jettyServer = PowerMockito.mock(Server.class);
        Mockito.doNothing().when(jettyServer).stop();
        //Mock an Executor to simply run the StreamProcessorRunnable
        ExecutorService executor = Mockito.mock(ExecutorService.class);
        when(executor.isTerminated()).thenReturn(false);
View Full Code Here

    // NOTE(review): snippet is truncated by the source page — the construction of
    // the unit under test and the assertions are not visible here. Only the mock
    // setup (connector, Jetty server, executor) is shown.
    public void testServerShutDownAfterMaximumRecoverableException() throws Exception {

        //Mock a KafkaStream
        KafkaStream messageStream = Mockito.mock(KafkaStream.class);
        // The mocked connector returns a single stream for "TesTopic" regardless
        // of the topic map / decoders it is called with.
        Map<String, List<KafkaStream>> mockedMessageStreams = Collections.singletonMap("TesTopic",Collections.singletonList(messageStream));
        ConsumerConnector consumerConnector = Mockito.mock(ConsumerConnector.class);
        when(consumerConnector.createMessageStreams(Mockito.anyMap(),Mockito.any(Decoder.class),Mockito.any(Decoder.class))).thenReturn(mockedMessageStreams);
        // Jetty server is mocked so stop() is a no-op we can verify against.
        Server jettyServer = PowerMockito.mock(Server.class);
        Mockito.doNothing().when(jettyServer).stop();
        //Mock an Executor to simply run the StreamProcessorRunnable
        ExecutorService executor = Mockito.mock(ExecutorService.class);
        when(executor.isTerminated()).thenReturn(false);
View Full Code Here

TOP

Related Classes of kafka.javaapi.consumer.ConsumerConnector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware@gmail.com.