Package kafka.api

Examples of kafka.api.FetchRequestBuilder
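
All of the snippets below are truncated previews taken from larger projects. As a point of reference, a minimal, self-contained fetch with the old SimpleConsumer API might look roughly like the sketch below; the broker address, topic name, client id, timeouts and fetch sizes are placeholders rather than values taken from any of the examples.

    import java.nio.ByteBuffer;

    import kafka.api.FetchRequest;
    import kafka.api.FetchRequestBuilder;
    import kafka.javaapi.FetchResponse;
    import kafka.javaapi.consumer.SimpleConsumer;
    import kafka.javaapi.message.ByteBufferMessageSet;
    import kafka.message.MessageAndOffset;

    public class FetchRequestBuilderSketch {
        public static void main(String[] args) {
            // Placeholder broker address, timeouts, buffer size and client id.
            SimpleConsumer consumer = new SimpleConsumer("localhost", 9092, 100000, 64 * 1024, "example-client");
            try {
                FetchRequest request = new FetchRequestBuilder()
                        .clientId("example-client")
                        .addFetch("example-topic", 0, 0L, 100000) // topic, partition, start offset, max bytes
                        .maxWait(1000)                            // let the broker wait up to 1s for data
                        .minBytes(1)                              // return as soon as any data is available
                        .build();
                FetchResponse response = consumer.fetch(request);
                if (response.hasError()) {
                    System.err.println("Fetch failed, error code: " + response.errorCode("example-topic", 0));
                    return;
                }
                ByteBufferMessageSet messageSet = response.messageSet("example-topic", 0);
                for (MessageAndOffset messageAndOffset : messageSet) {
                    ByteBuffer payload = messageAndOffset.message().payload();
                    byte[] bytes = new byte[payload.limit()];
                    payload.get(bytes);
                    System.out.println(messageAndOffset.offset() + ": " + new String(bytes));
                }
            } finally {
                consumer.close();
            }
        }
    }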


    public static ByteBufferMessageSet fetchMessages(KafkaConfig config, SimpleConsumer consumer, Partition partition, long offset) throws UpdateOffsetException {
        ByteBufferMessageSet msgs = null;
        String topic = config.topic;
        int partitionId = partition.partition;
        FetchRequestBuilder builder = new FetchRequestBuilder();
        FetchRequest fetchRequest = builder.addFetch(topic, partitionId, offset, config.fetchSizeBytes).
                clientId(config.clientId).maxWait(config.fetchMaxWait).build();
        FetchResponse fetchResponse;
        try {
            fetchResponse = consumer.fetch(fetchRequest);
        } catch (Exception e) {
            // ...


    public static ByteBufferMessageSet fetchMessages(KafkaConfig config, SimpleConsumer consumer, Partition partition, long offset) {
        ByteBufferMessageSet msgs = null;
        String topic = config.topic;
        int partitionId = partition.partition;
        for (int errors = 0; errors < 2 && msgs == null; errors++) {
            FetchRequestBuilder builder = new FetchRequestBuilder();
            FetchRequest fetchRequest = builder.addFetch(topic, partitionId, offset, config.fetchSizeBytes).
                    clientId(config.clientId).build();
            FetchResponse fetchResponse;
            try {
                fetchResponse = consumer.fetch(fetchRequest);
            } catch (Exception e) {
                // ...

                               SimpleConsumer consumer) {
        LOG.info("fetching message topic " + topicPartition.getTopic() + " partition " +
                topicPartition.getPartition() + " offset " + offset);
        final int MAX_MESSAGE_SIZE_BYTES = mConfig.getMaxMessageSizeBytes();
        final String clientName = getClientName(topicPartition);
        kafka.api.FetchRequest request = new FetchRequestBuilder().clientId(clientName)
                .addFetch(topicPartition.getTopic(), topicPartition.getPartition(), offset,
                          MAX_MESSAGE_SIZE_BYTES)
                .build();
        FetchResponse response = consumer.fetch(request);
        if (response.hasError()) {
            // ...
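
The snippet above is cut off right at the hasError() check. With this API, a common follow-up is to look up the per-partition error code and treat an out-of-range offset differently from other failures. The block below is a sketch of that pattern only, not the error handling from the project above:

        if (response.hasError()) {
            // Sketch of a typical follow-up; not the original project's handling.
            short code = response.errorCode(topicPartition.getTopic(), topicPartition.getPartition());
            if (code == kafka.common.ErrorMapping.OffsetOutOfRangeCode()) {
                // The requested offset is no longer (or not yet) available on the broker;
                // callers usually reset to the earliest or latest offset and retry the fetch.
            } else {
                throw new RuntimeException("Fetch failed for " + topicPartition + " with error code " + code);
            }
        }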

        private void openFetchRequest()
        {
            if (messageAndOffsetIterator == null) {
                log.debug("Fetching %d bytes from offset %d (%d - %d). %d messages read so far", KAFKA_READ_BUFFER_SIZE, cursorOffset, split.getStart(), split.getEnd(), totalMessages);
                FetchRequest req = new FetchRequestBuilder()
                        .clientId("presto-worker-" + Thread.currentThread().getName())
                        .addFetch(split.getTopicName(), split.getPartitionId(), cursorOffset, KAFKA_READ_BUFFER_SIZE)
                        .build();

                // TODO - this should look at the actual node this is running on and prefer
                // ...

    public static ByteBufferMessageSet fetchMessages(KafkaConfig config, SimpleConsumer consumer, Partition partition, long offset) {
        ByteBufferMessageSet msgs = null;
        String topic = config.topic;
        int partitionId = partition.partition;
        for (int errors = 0; errors < 2 && msgs == null; errors++) {
            FetchRequestBuilder builder = new FetchRequestBuilder();
            FetchRequest fetchRequest = builder.addFetch(topic, partitionId, offset, config.fetchSizeBytes).
                    clientId(config.clientId).maxWait(config.fetchMaxWait).build();
            FetchResponse fetchResponse;
            try {
                fetchResponse = consumer.fetch(fetchRequest);
            } catch (Exception e) {
                // ...

    /**
     * Makes a call to kafka to fetch messages.
     */
    private FetchResponse fetchMessages(SimpleConsumer consumer, long offset) {
      FetchRequest request = new FetchRequestBuilder()
        .clientId(consumer.clientId())
        .addFetch(topicPart.getTopic(), topicPart.getPartition(), offset, FETCH_SIZE)
        .maxWait(MAX_WAIT)
        .build();
      return consumer.fetch(request);
    }

        private void openFetchRequest()
        {
            if (messageAndOffsetIterator == null) {
                log.debug("Fetching %d bytes from offset %d (%d - %d). %d messages read so far", KAFKA_READ_BUFFER_SIZE, cursorOffset, split.getStart(), split.getEnd(), totalMessages);
                FetchRequest req = new FetchRequestBuilder()
                        .clientId("presto-worker-" + Thread.currentThread().getName())
                        .addFetch(split.getTopicName(), split.getPartitionId(), cursorOffset, KAFKA_READ_BUFFER_SIZE)
                        .build();

                // TODO - this should look at the actual node this is running on and prefer
                // ...

                                                       KafkaProperties.connectionTimeOut,
                                                       KafkaProperties.kafkaProducerBufferSize,
                                                       KafkaProperties.clientId);

    System.out.println("Testing single fetch");
    FetchRequest req = new FetchRequestBuilder()
            .clientId(KafkaProperties.clientId)
            .addFetch(KafkaProperties.topic2, 0, 0L, 100)
            .build();
    FetchResponse fetchResponse = simpleConsumer.fetch(req);
    printMessages((ByteBufferMessageSet) fetchResponse.messageSet(KafkaProperties.topic2, 0));

    System.out.println("Testing single multi-fetch");
    Map<String, List<Integer>> topicMap = new HashMap<String, List<Integer>>() {{
        put(KafkaProperties.topic2, new ArrayList<Integer>(){{ add(0); }});
        put(KafkaProperties.topic3, new ArrayList<Integer>(){{ add(0); }});
    }};
    req = new FetchRequestBuilder()
            .clientId(KafkaProperties.clientId)
            .addFetch(KafkaProperties.topic2, 0, 0L, 100)
            .addFetch(KafkaProperties.topic3, 0, 0L, 100)
            .build();
    fetchResponse = simpleConsumer.fetch(req);
    // ...
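
The demo above is truncated before the multi-fetch response is read back. A response built from several addFetch calls carries one message set per topic/partition pair, so consuming it might look roughly like this (a sketch reusing the names from the snippet):

    // Sketch only: iterate the message set returned for each topic/partition added above.
    for (String topic : new String[] { KafkaProperties.topic2, KafkaProperties.topic3 }) {
        ByteBufferMessageSet messages = (ByteBufferMessageSet) fetchResponse.messageSet(topic, 0);
        for (MessageAndOffset messageAndOffset : messages) {
            System.out.println(topic + " @ offset " + messageAndOffset.offset());
        }
    }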

            config = kafkaServer.getServer(0).config();
        } else {
            config = kafkaServer.getServer(1).config();
        }
        SimpleConsumer consumer = new SimpleConsumer(config.hostName(), config.port(), 100000, 100000, "clientId");
        FetchResponse response = consumer.fetch(new FetchRequestBuilder().addFetch(TOPIC_NAME, 0, 0, 100000).build());

        List<MessageAndOffset> messageSet = Lists.newArrayList(response.messageSet(TOPIC_NAME, 0).iterator());
        assertEquals("Should have fetched 2 messages", 2, messageSet.size());

        assertEquals(new String(extractMessage(messageSet, 0)), "testMessage" + 0);
        // ...

        } else {
            config = kafkaServer.getServer(1).config();
        }
        // get data back from Kafka
        SimpleConsumer consumer = new SimpleConsumer(config.hostName(), config.port(), 100000, 100000, "clientId");
        FetchResponse response = consumer.fetch(new FetchRequestBuilder().addFetch(TOPIC_NAME, 0, 0, 100000).build());

        List<MessageAndOffset> messageSet = Lists.newArrayList(response.messageSet(TOPIC_NAME, 0).iterator());
        assertEquals("Should have fetched 2 messages", 2, messageSet.size());

        for (int i = 0; i < messageSet.size(); i++) {
            // ...
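
The extractMessage helper used by these tests is not included in the listing. A typical payload-extraction routine over a List<MessageAndOffset> might look like the following sketch (illustrative only, not the tests' actual helper):

    // Illustrative sketch, not the original extractMessage from the test class.
    private static byte[] extractMessage(List<MessageAndOffset> messageSet, int index) {
        ByteBuffer payload = messageSet.get(index).message().payload();
        byte[] bytes = new byte[payload.remaining()];
        payload.get(bytes);
        return bytes;
    }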
