kafka.consumer.ConsumerIterator Java Examples

The following examples show how to use kafka.consumer.ConsumerIterator, the blocking message iterator of Kafka's legacy high-level consumer API (superseded by the new consumer in Kafka 0.9 and removed in Kafka 2.0). Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
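
Because this is the legacy high-level consumer, a ConsumerIterator is obtained from a ConsumerConnector rather than constructed directly. Below is a minimal sketch of the typical setup; the ZooKeeper address, group id, and topic name are illustrative assumptions, not values taken from the projects that follow.

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.ConsumerTimeoutException;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

public class ConsumerIteratorSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("zookeeper.connect", "localhost:2181"); // assumed local ZooKeeper
        props.put("group.id", "example-group");           // any consumer group id
        props.put("consumer.timeout.ms", "5000");         // make the iterator throw instead of blocking forever

        ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
        List<KafkaStream<byte[], byte[]>> streams =
                connector.createMessageStreams(Collections.singletonMap("example-topic", 1)).get("example-topic");
        ConsumerIterator<byte[], byte[]> it = streams.get(0).iterator();

        try {
            while (it.hasNext()) {
                MessageAndMetadata<byte[], byte[]> mm = it.next();
                System.out.printf("partition=%d offset=%d value=%s%n",
                        mm.partition(), mm.offset(), new String(mm.message(), StandardCharsets.UTF_8));
            }
        } catch (ConsumerTimeoutException e) {
            // no message arrived within consumer.timeout.ms; fall through and shut down
        }
        connector.shutdown();
    }
}
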
Example #1
Source File: KafkaPublisherTest.java    From nifi with Apache License 2.0
@Test
public void validateSuccessfulSendAsWhole() throws Exception {
    InputStream contentStream = new ByteArrayInputStream("Hello Kafka".getBytes(StandardCharsets.UTF_8));
    String topicName = "validateSuccessfulSendAsWhole";

    Properties kafkaProperties = this.buildProducerProperties();
    KafkaPublisher publisher = new KafkaPublisher(kafkaProperties, mock(ComponentLog.class));

    PublishingContext publishingContext = new PublishingContext(contentStream, topicName);
    KafkaPublisherResult result = publisher.publish(publishingContext);

    assertEquals(0, result.getLastMessageAcked());
    assertEquals(1, result.getMessagesSent());
    contentStream.close();
    publisher.close();

    ConsumerIterator<byte[], byte[]> iter = this.buildConsumer(topicName);
    assertNotNull(iter.next());
    try {
        iter.next();
    } catch (ConsumerTimeoutException e) {
        // that's OK since this is the Kafka mechanism to unblock
    }
}
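
The buildConsumer(topicName) helper called here (and in the other NiFi tests below) is not shown in these snippets. A plausible sketch of such a helper, assuming the test harness exposes the embedded broker's ZooKeeper port through a hypothetical kafkaLocal.getZookeeperPort() accessor; the property values are assumptions, not the projects' actual code:

private ConsumerIterator<byte[], byte[]> buildConsumer(String topic) {
    Properties props = new Properties();
    props.put("zookeeper.connect", "localhost:" + kafkaLocal.getZookeeperPort()); // hypothetical harness accessor
    props.put("group.id", topic + "-consumer");
    props.put("auto.offset.reset", "smallest"); // read the topic from the beginning
    props.put("consumer.timeout.ms", "5000");   // unblock next() via ConsumerTimeoutException
    ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
    Map<String, List<KafkaStream<byte[], byte[]>>> streams =
            connector.createMessageStreams(Collections.singletonMap(topic, 1));
    return streams.get(topic).get(0).iterator();
}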
 
Example #2
Source File: PutKafkaTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateMultiCharacterDelimitedMessages() {
    String topicName = "validateMultiCharacterDemarcatedMessagesAndCustomPartitioner";
    PutKafka putKafka = new PutKafka();
    TestRunner runner = TestRunners.newTestRunner(putKafka);
    runner.setProperty(PutKafka.TOPIC, topicName);
    runner.setProperty(PutKafka.CLIENT_NAME, "foo");
    runner.setProperty(PutKafka.KEY, "key1");
    runner.setProperty(PutKafka.SEED_BROKERS, "localhost:" + kafkaLocal.getKafkaPort());
    runner.setProperty(PutKafka.MESSAGE_DELIMITER, "foo");

    runner.enqueue("Hello WorldfooGoodbyefoo1foo2foo3foo4foo5".getBytes(StandardCharsets.UTF_8));
    runner.run(1, false);

    runner.assertAllFlowFilesTransferred(PutKafka.REL_SUCCESS, 1);
    ConsumerIterator<byte[], byte[]> consumer = this.buildConsumer(topicName);
    assertEquals("Hello World", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("Goodbye", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("1", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("2", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("3", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("4", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("5", new String(consumer.next().message(), StandardCharsets.UTF_8));

    runner.shutdown();
}
 
Example #3
Source File: PutKafkaTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateComplexLeftPartialDemarcatedMessages() {
    String topicName = "validateComplexLeftPartialDemarcatedMessages";
    PutKafka putKafka = new PutKafka();
    TestRunner runner = TestRunners.newTestRunner(putKafka);
    runner.setProperty(PutKafka.TOPIC, topicName);
    runner.setProperty(PutKafka.CLIENT_NAME, "foo");
    runner.setProperty(PutKafka.SEED_BROKERS, "localhost:" + kafkaLocal.getKafkaPort());
    runner.setProperty(PutKafka.MESSAGE_DELIMITER, "僠<僠WILDSTUFF僠>僠");

    runner.enqueue("Hello World僠<僠WILDSTUFF僠>僠Goodbye僠<僠WILDSTUFF僠>僠I Mean IT!僠<僠WILDSTUFF僠>僠<僠WILDSTUFF僠>僠".getBytes(StandardCharsets.UTF_8));
    runner.run(1, false);

    runner.assertAllFlowFilesTransferred(PutKafka.REL_SUCCESS, 1);
    ConsumerIterator<byte[], byte[]> consumer = this.buildConsumer(topicName);
    byte[] message = consumer.next().message();
    assertEquals("Hello World", new String(message, StandardCharsets.UTF_8));
    assertEquals("Goodbye", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("I Mean IT!", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("<僠WILDSTUFF僠>僠", new String(consumer.next().message(), StandardCharsets.UTF_8));
    runner.shutdown();
}
 
Example #4
Source File: PutKafkaTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateComplexPartialMatchDemarcatedMessages() {
    String topicName = "validateComplexPartialMatchDemarcatedMessages";
    PutKafka putKafka = new PutKafka();
    TestRunner runner = TestRunners.newTestRunner(putKafka);
    runner.setProperty(PutKafka.TOPIC, topicName);
    runner.setProperty(PutKafka.CLIENT_NAME, "foo");
    runner.setProperty(PutKafka.SEED_BROKERS, "localhost:" + kafkaLocal.getKafkaPort());
    runner.setProperty(PutKafka.MESSAGE_DELIMITER, "僠<僠WILDSTUFF僠>僠");

    runner.enqueue("Hello World僠<僠WILDSTUFF僠>僠Goodbye僠<僠WILDBOOMSTUFF僠>僠".getBytes(StandardCharsets.UTF_8));
    runner.run(1, false);

    runner.assertAllFlowFilesTransferred(PutKafka.REL_SUCCESS, 1);
    ConsumerIterator<byte[], byte[]> consumer = this.buildConsumer(topicName);
    assertEquals("Hello World", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("Goodbye僠<僠WILDBOOMSTUFF僠>僠", new String(consumer.next().message(), StandardCharsets.UTF_8));
    runner.shutdown();
}
 
Example #5
Source File: Consumer.java    From cep with GNU Affero General Public License v3.0
/**
 * Starts the consumer thread.
 */
@Override
public void run() {
    log.debug("Starting consumer for topic {}", topic);
    ConsumerIterator<byte[], byte[]> it = stream.iterator();

    // For each message present on the partition...
    while (it.hasNext()) {
        Map<String, Object> event = null;

        // Parse it with the parser associated with the topic
        try {
            event = parser.parse(new String(it.next().message(), "UTF-8"));
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }

        // Send it to the source
        if (event != null) {
            source.send(topic.getName(), event);
        }
    }

    log.debug("Finished consumer for topic {}", topic);
}
 
Example #6
Source File: KafkaPublisherTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateWithMultiByteCharactersNoDelimiter() throws Exception {
    String data = "僠THIS IS MY NEW TEXT.僠IT HAS A NEWLINE.";
    InputStream contentStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
    String topicName = "validateWithMultiByteCharacters";

    Properties kafkaProperties = this.buildProducerProperties();

    KafkaPublisher publisher = new KafkaPublisher(kafkaProperties, mock(ComponentLog.class));
    PublishingContext publishingContext = new PublishingContext(contentStream, topicName);

    publisher.publish(publishingContext);
    publisher.close();

    ConsumerIterator<byte[], byte[]> iter = this.buildConsumer(topicName);
    String r = new String(iter.next().message(), StandardCharsets.UTF_8);
    assertEquals(data, r);
}
 
Example #7
Source File: PutKafkaTest.java    From nifi with Apache License 2.0
@Test
public void validateComplexRightPartialDemarcatedMessages() {
    String topicName = "validateComplexRightPartialDemarcatedMessages";
    PutKafka putKafka = new PutKafka();
    TestRunner runner = TestRunners.newTestRunner(putKafka);
    runner.setProperty(PutKafka.TOPIC, topicName);
    runner.setProperty(PutKafka.CLIENT_NAME, "foo");
    runner.setProperty(PutKafka.SEED_BROKERS, "localhost:" + kafkaLocal.getKafkaPort());
    runner.setProperty(PutKafka.MESSAGE_DELIMITER, "僠<僠WILDSTUFF僠>僠");

    runner.enqueue("Hello World僠<僠WILDSTUFF僠>僠Goodbye僠<僠WILDSTUFF僠>僠I Mean IT!僠<僠WILDSTUFF僠>".getBytes(StandardCharsets.UTF_8));
    runner.run(1, false);

    runner.assertAllFlowFilesTransferred(PutKafka.REL_SUCCESS, 1);
    ConsumerIterator<byte[], byte[]> consumer = this.buildConsumer(topicName);
    assertEquals("Hello World", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("Goodbye", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("I Mean IT!僠<僠WILDSTUFF僠>", new String(consumer.next().message(), StandardCharsets.UTF_8));
    runner.shutdown();
}
 
Example #8
Source File: KafkaPublisherTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateSuccessfulSendAsWhole() throws Exception {
    InputStream contentStream = new ByteArrayInputStream("Hello Kafka".getBytes(StandardCharsets.UTF_8));
    String topicName = "validateSuccessfulSendAsWhole";

    Properties kafkaProperties = this.buildProducerProperties();
    KafkaPublisher publisher = new KafkaPublisher(kafkaProperties, mock(ComponentLog.class));

    PublishingContext publishingContext = new PublishingContext(contentStream, topicName);
    KafkaPublisherResult result = publisher.publish(publishingContext);

    assertEquals(0, result.getLastMessageAcked());
    assertEquals(1, result.getMessagesSent());
    contentStream.close();
    publisher.close();

    ConsumerIterator<byte[], byte[]> iter = this.buildConsumer(topicName);
    assertNotNull(iter.next());
    try {
        iter.next();
    } catch (ConsumerTimeoutException e) {
        // that's OK since this is the Kafka mechanism to unblock
    }
}
 
Example #9
Source File: PutKafkaTest.java    From nifi with Apache License 2.0
@Test
public void validateComplexLeftPartialDemarcatedMessages() {
    String topicName = "validateComplexLeftPartialDemarcatedMessages";
    PutKafka putKafka = new PutKafka();
    TestRunner runner = TestRunners.newTestRunner(putKafka);
    runner.setProperty(PutKafka.TOPIC, topicName);
    runner.setProperty(PutKafka.CLIENT_NAME, "foo");
    runner.setProperty(PutKafka.SEED_BROKERS, "localhost:" + kafkaLocal.getKafkaPort());
    runner.setProperty(PutKafka.MESSAGE_DELIMITER, "僠<僠WILDSTUFF僠>僠");

    runner.enqueue("Hello World僠<僠WILDSTUFF僠>僠Goodbye僠<僠WILDSTUFF僠>僠I Mean IT!僠<僠WILDSTUFF僠>僠<僠WILDSTUFF僠>僠".getBytes(StandardCharsets.UTF_8));
    runner.run(1, false);

    runner.assertAllFlowFilesTransferred(PutKafka.REL_SUCCESS, 1);
    ConsumerIterator<byte[], byte[]> consumer = this.buildConsumer(topicName);
    byte[] message = consumer.next().message();
    assertEquals("Hello World", new String(message, StandardCharsets.UTF_8));
    assertEquals("Goodbye", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("I Mean IT!", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("<僠WILDSTUFF僠>僠", new String(consumer.next().message(), StandardCharsets.UTF_8));
    runner.shutdown();
}
 
Example #10
Source File: JavaKafkaConsumerHighAPIHbaseImpl.java    From dk-fitting with Apache License 2.0
public void run() {
    // 1. Get the message iterator for this stream
    ConsumerIterator<String, String> iter = this.stream.iterator();
    // 2. Iterate over the incoming messages
    while (iter.hasNext()) {
        // 2.1 Read the next message and its metadata
        MessageAndMetadata<String, String> value = iter.next();

        // 2.2 Write the message into HBase
        try {
            HbaseUtils.insertData(providerProp.getProperty("consumer.hbase.tablename"),
                    providerProp.getProperty("consumer.hbase.columnFamilyName"), value.message());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // 3. This thread is done
    logger.info("Shutdown Thread:" + this.threadNumber);
}
 
Example #11
Source File: JavaKafkaConsumerHighAPIESImpl.java    From dk-fitting with Apache License 2.0
public void run() {
    // 1. Get the message iterator for this stream
    ConsumerIterator<String, String> iter = this.stream.iterator();
    // 2. Iterate over the incoming messages
    while (iter.hasNext()) {
        // 2.1 Read the next message and its metadata
        MessageAndMetadata<String, String> value = iter.next();

        // 2.2 Forward the message to Elasticsearch
        try {
            ElasticsearchUtils.sendToES(esIps, esPort, esClusterName,
                    indexName, typeName,
                    value.message(), providerProp.getProperty("consumer.es.kafkaMessage.separator"),
                    Boolean.parseBoolean(providerProp.getProperty("consumer.es.kafkaMessage.isJsonMessage")));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // 3. This thread is done
    logger.info("Shutdown Thread:" + this.threadNumber);
}
 
Example #12
Source File: PutKafkaTest.java    From nifi with Apache License 2.0
@Test
public void validateComplexPartialMatchDemarcatedMessages() {
    String topicName = "validateComplexPartialMatchDemarcatedMessages";
    PutKafka putKafka = new PutKafka();
    TestRunner runner = TestRunners.newTestRunner(putKafka);
    runner.setProperty(PutKafka.TOPIC, topicName);
    runner.setProperty(PutKafka.CLIENT_NAME, "foo");
    runner.setProperty(PutKafka.SEED_BROKERS, "localhost:" + kafkaLocal.getKafkaPort());
    runner.setProperty(PutKafka.MESSAGE_DELIMITER, "僠<僠WILDSTUFF僠>僠");

    runner.enqueue("Hello World僠<僠WILDSTUFF僠>僠Goodbye僠<僠WILDBOOMSTUFF僠>僠".getBytes(StandardCharsets.UTF_8));
    runner.run(1, false);

    runner.assertAllFlowFilesTransferred(PutKafka.REL_SUCCESS, 1);
    ConsumerIterator<byte[], byte[]> consumer = this.buildConsumer(topicName);
    assertEquals("Hello World", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("Goodbye僠<僠WILDBOOMSTUFF僠>僠", new String(consumer.next().message(), StandardCharsets.UTF_8));
    runner.shutdown();
}
 
Example #13
Source File: PutKafkaTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateSingleCharacterDemarcatedMessages() {
    String topicName = "validateSingleCharacterDemarcatedMessages";
    PutKafka putKafka = new PutKafka();
    TestRunner runner = TestRunners.newTestRunner(putKafka);
    runner.setProperty(PutKafka.TOPIC, topicName);
    runner.setProperty(PutKafka.CLIENT_NAME, "foo");
    runner.setProperty(PutKafka.KEY, "key1");
    runner.setProperty(PutKafka.SEED_BROKERS, "localhost:" + kafkaLocal.getKafkaPort());
    runner.setProperty(PutKafka.MESSAGE_DELIMITER, "\n");

    runner.enqueue("Hello World\nGoodbye\n1\n2\n3\n4\n5".getBytes(StandardCharsets.UTF_8));
    runner.run(1, false);

    runner.assertAllFlowFilesTransferred(PutKafka.REL_SUCCESS, 1);
    ConsumerIterator<byte[], byte[]> consumer = this.buildConsumer(topicName);
    assertEquals("Hello World", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("Goodbye", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("1", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("2", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("3", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("4", new String(consumer.next().message(), StandardCharsets.UTF_8));
    assertEquals("5", new String(consumer.next().message(), StandardCharsets.UTF_8));

    runner.shutdown();
}
 
Example #14
Source File: KafkaPublisherTest.java    From nifi with Apache License 2.0
@Test
public void validateWithMultiByteCharactersNoDelimiter() throws Exception {
    String data = "僠THIS IS MY NEW TEXT.僠IT HAS A NEWLINE.";
    InputStream contentStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
    String topicName = "validateWithMultiByteCharacters";

    Properties kafkaProperties = this.buildProducerProperties();

    KafkaPublisher publisher = new KafkaPublisher(kafkaProperties, mock(ComponentLog.class));
    PublishingContext publishingContext = new PublishingContext(contentStream, topicName);

    publisher.publish(publishingContext);
    publisher.close();

    ConsumerIterator<byte[], byte[]> iter = this.buildConsumer(topicName);
    String r = new String(iter.next().message(), StandardCharsets.UTF_8);
    assertEquals(data, r);
}
 
Example #15
Source File: ConsumerWorker.java    From yuzhouwan with Apache License 2.0
@Override
public void run() {
    ConsumerIterator<byte[], byte[]> iter = kafkaStream.iterator();
    MessageAndMetadata<byte[], byte[]> msg;
    int total = 0, fail = 0, success = 0;
    long start = System.currentTimeMillis();
    while (iter.hasNext()) {
        try {
            msg = iter.next();
            _log.info("Thread {}: {}", threadNum, new String(msg.message(), StandardCharsets.UTF_8));
            _log.info("partition: {}, offset: {}", msg.partition(), msg.offset());
            success++;
        } catch (Exception e) {
            _log.error("", e);
            fail++;
        }
        _log.info("Count [fail/success/total]: [{}/{}/{}], Time: {}s", fail, success, ++total,
                (System.currentTimeMillis() - start) / 1000);
    }
}
 
Example #16
Source File: KafkaOffsetGetter.java    From Kafka-Insight with Apache License 2.0
/**
 * When an object implementing interface <code>Runnable</code> is used
 * to create a thread, starting the thread causes the object's
 * <code>run</code> method to be called in that separately executing
 * thread.
 * <p>
 * The general contract of the method <code>run</code> is that it may
 * take any action whatsoever.
 *
 * @see Thread#run()
 */
@Override
public void run() {
    ConsumerConnector consumerConnector = KafkaUtils.createConsumerConnector(zkAddr, group);
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(CONSUMER_OFFSET_TOPIC, new Integer(1));
    KafkaStream<byte[], byte[]> offsetMsgStream = consumerConnector.createMessageStreams(topicCountMap).get(CONSUMER_OFFSET_TOPIC).get(0);

    ConsumerIterator<byte[], byte[]> it = offsetMsgStream.iterator();
    while (true) {

        MessageAndMetadata<byte[], byte[]> offsetMsg = it.next();
        if (ByteBuffer.wrap(offsetMsg.key()).getShort() < 2) {
            try {
                GroupTopicPartition commitKey = readMessageKey(ByteBuffer.wrap(offsetMsg.key()));
                if (offsetMsg.message() == null) {
                    continue;
                }
                kafka.common.OffsetAndMetadata commitValue = readMessageValue(ByteBuffer.wrap(offsetMsg.message()));
                kafkaConsumerOffsets.put(commitKey, commitValue);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
 
Example #17
Source File: KafkaWorker.java    From elasticsearch-river-kafka with Apache License 2.0
/**
 * Consumes the messages from the partition via specified stream.
 */
private void consumeMessagesAndAddToBulkProcessor(final KafkaStream stream) {

    try {
        // by default it waits forever for message, but there is timeout configured
        final ConsumerIterator<byte[], byte[]> consumerIterator = stream.iterator();

        // Consume all the messages of the stream (partition)
        while (consumerIterator.hasNext() && consume) {

            final MessageAndMetadata messageAndMetadata = consumerIterator.next();
            logMessage(messageAndMetadata);

            elasticsearchProducer.addMessagesToBulkProcessor(messageAndMetadata);

            // StatsD reporting
            stats.messagesReceived.incrementAndGet();
            stats.lastCommitOffsetByPartitionId.put(messageAndMetadata.partition(), messageAndMetadata.offset());
        }
    } catch (ConsumerTimeoutException ex) {
        logger.debug("Nothing to be consumed for now. Consume flag is: {}", consume);
    }
}
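
As the comment in this snippet notes, hasNext() blocks indefinitely unless consumer.timeout.ms is set; the ConsumerTimeoutException caught above is only thrown when that property has a non-negative value. A minimal sketch of the relevant configuration (the connection string and group id are placeholders):

Properties props = new Properties();
props.put("zookeeper.connect", "localhost:2181"); // placeholder
props.put("group.id", "river-group");             // placeholder
// After 10s with no message, hasNext()/next() throw ConsumerTimeoutException
// instead of blocking forever (the default, consumer.timeout.ms = -1).
props.put("consumer.timeout.ms", "10000");
ConsumerConfig config = new ConsumerConfig(props);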
 
Example #18
Source File: KafkaSourceTest.java    From flume-ng-kafka-source with Apache License 2.0
@SuppressWarnings("unchecked")
@Before
public void setup() throws Exception {
	mockIt = mock(ConsumerIterator.class);
	mockMessageAndMetadata = mock(MessageAndMetadata.class);
	mockChannelProcessor = mock(ChannelProcessor.class);
	mockBuffer = mock(ByteBuffer.class);
	mockMessage = mock(Message.class);
	mockKafkaSource = new KafkaSource();
	
	when(mockMessage.payload()).thenReturn(mockBuffer);
	when(mockMessageAndMetadata.message()).thenReturn(mockMessage);
	
	Field field = AbstractSource.class.getDeclaredField("channelProcessor");
	field.setAccessible(true);
	field.set(mockKafkaSource, mockChannelProcessor);

	field = KafkaSource.class.getDeclaredField("it");
	field.setAccessible(true);
	field.set(mockKafkaSource, mockIt);
}
 
Example #19
Source File: KafkaConsumer.java    From opensoc-streaming with Apache License 2.0
public void run() {
    logger.debug("calling ConsumerTest.run()");
    ConsumerIterator<byte[], byte[]> it = m_stream.iterator();

    while (it.hasNext()) {
        String message = new String(it.next().message());
        try {
            remote.sendString(message);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    logger.debug("Shutting down Thread: " + m_threadNumber);
}
 
Example #20
Source File: Processer.java    From blog_demos with Apache License 2.0
public void run() {
    // 1. Get the message iterator for this stream
    ConsumerIterator<String, String> iter = this.stream.iterator();

    logger.info("server [{}] start run", TOMCAT_ID);

    // 2. Iterate over the incoming messages
    while (iter.hasNext()) {
        // 2.1 Read the next message and its metadata
        MessageAndMetadata<String, String> value = iter.next();

        // 2.2 Log it
        logger.info("server [{}], threadNumber [{}], offset [{}], key [{}], message[{}]",
                TOMCAT_ID,
                threadNumber,
                value.offset(),
                value.key(),
                value.message());
    }
    // 3. This thread is done
    logger.info("Shutdown Thread:" + this.threadNumber);
}
 
Example #21
Source File: PollingKafkaConsumer.java    From opensoc-streaming with Apache License 2.0
public void run() {
    logger.warn("calling PollingKafkaConsumer.run()");
    ConsumerIterator<byte[], byte[]> it = m_stream.iterator();

    try {
        while (it.hasNext()) {
            String message = new String(it.next().message());
            logger.warn("adding message: " + message);
            messages.add(message);
        }
    } catch (Exception e) {
        logger.error("Exception waiting on Kafka...", e);
    }

    latch.countDown();

    logger.warn("Shutting down Thread: " + m_threadNumber);
}
 
Example #22
Source File: KafkaConsumer.java    From flume-ng-kafka-sink with Apache License 2.0
public MessageAndMetadata getNextMessage(String topic){
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
    KafkaStream stream = streams.get(0); // it has only a single stream, because there is only one consumer
    final ConsumerIterator<byte[], byte[]> it = stream.iterator();
    int counter = 0;
    while (!it.hasNext()){
        // Wait time >= 10s, so return null and exit
        if(counter == 5){
            logger.error("0 messages available to fetch for the topic " + topic);
            return null;
        }
        // wait till a message is published. this is a blocking call.
        try {
            Thread.sleep(2 * 1000);
        } catch (InterruptedException e) {
            // ignore
        }
        counter++;
    }
    return it.next();
}
 
Example #23
Source File: KafkaConsumer.java    From sqoop-on-spark with Apache License 2.0
public MessageAndMetadata getNextMessage(String topic) {
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
  // it has only a single stream, because there is only one consumer
  KafkaStream stream = streams.get(0);
  final ConsumerIterator<byte[], byte[]> it = stream.iterator();
  int counter = 0;
  try {
    if (it.hasNext()) {
      return it.next();
    } else {
      return null;
    }
  } catch (ConsumerTimeoutException e) {
    logger.error("0 messages available to fetch for the topic " + topic);
    return null;
  }
}
 
Example #24
Source File: KafkaReceiver.java    From koper with Apache License 2.0
private void processStreamsByTopic(String topicKeys, List<KafkaStream<byte[], byte[]>> streamList) {
    // init stream thread pool
    ExecutorService streamPool = Executors.newFixedThreadPool(partitions);
    String[] topics = StringUtils.split(topicKeys, ",");
    if (log.isDebugEnabled())
        log.debug("about to process KafkaStreamList, topic count={}, topics={}, partitions/topic={}", topics.length, topicKeys, partitions);

    // iterate over the streams
    AtomicInteger index = new AtomicInteger(0);
    for (KafkaStream<byte[], byte[]> stream : streamList) {
        Thread streamThread = new Thread() {

            @Override
            public void run() {
                int i = index.getAndAdd(1);
                if (log.isDebugEnabled())
                    log.debug("processing KafkaStream -- No.={}, partitions={}", i, partitions + ":" + i);

                ConsumerIterator<byte[], byte[]> consumerIterator = stream.iterator();

                processStreamByConsumer(topicKeys, consumerIterator);
            }
        };
        streamPool.execute(streamThread);
    }
}
 
Example #25
Source File: KafkaMqCollect.java    From light_drtc with Apache License 2.0
public void collectMq() {
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(Constants.kfTopic, new Integer(1));

    StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
    StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

    Map<String, List<KafkaStream<String, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);

    KafkaStream<String, String> stream = consumerMap.get(Constants.kfTopic).get(0);
    ConsumerIterator<String, String> it = stream.iterator();
    MessageAndMetadata<String, String> msgMeta;
    while (it.hasNext()) {
        msgMeta = it.next();
        super.mqTimer.parseMqText(msgMeta.key(), msgMeta.message());
    }
}
 
Example #26
Source File: KafkaSpout.java    From monasca-thresh with Apache License 2.0
@Override
public void run() {
  while (this.shouldContinue) {
    final ConsumerIterator<byte[], byte[]> it = streams.get(0).iterator();
    if (it.hasNext()) {
      final byte[] message = it.next().message();
      synchronized (this) {
        this.message = message;
        // Wake up getMessage() if it is waiting
        if (this.waiting) {
          notify();
        }
        while (this.message != null && this.shouldContinue)
          try {
            wait();
          } catch (InterruptedException e) {
            logger.info("Wait interrupted", e);
          }
      }
    }
  }
  logger.info("readerThread {} exited", this.readerThread.getName());
  this.readerThread = null;
}
 
Example #27
Source File: KafkaProducerPusherTest.java    From incubator-gobblin with Apache License 2.0
@Test
public void test() throws IOException {
  // Test that the scoped config overrides the generic config
  Pusher pusher = new KafkaProducerPusher("127.0.0.1:dummy", TOPIC, Optional.of(ConfigFactory.parseMap(ImmutableMap.of(
      ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:" + this.kafkaTestHelper.getKafkaServerPort()))));

  String msg1 = "msg1";
  String msg2 = "msg2";

  pusher.pushMessages(Lists.newArrayList(msg1.getBytes(), msg2.getBytes()));

  try {
    Thread.sleep(1000);
  } catch(InterruptedException ex) {
    Thread.currentThread().interrupt();
  }

  ConsumerIterator<byte[], byte[]> iterator = this.kafkaTestHelper.getIteratorForTopic(TOPIC);

  assert(iterator.hasNext());
  Assert.assertEquals(new String(iterator.next().message()), msg1);
  assert(iterator.hasNext());
  Assert.assertEquals(new String(iterator.next().message()), msg2);

  pusher.close();
}
 
Example #28
Source File: Kafka.java    From jlogstash-input-plugin with Apache License 2.0
public void run() {
	try {
		while(true){
			ConsumerIterator<byte[], byte[]> it = m_stream.iterator();
			while (it.hasNext()) {
				String m = null;
				try {
					m = new String(it.next().message(),
							this.kafkaInput.encoding);
					Map<String, Object> event = this.decoder
							.decode(m);
					if (event!=null&&event.size()>0){
						this.kafkaInput.process(event);
					} 
				} catch (Exception e) {
					logger.error("process event:{} failed:{}",m,e.getCause());
				}
			}
		}
	} catch (Exception t) {
		logger.error("kakfa Consumer fetch is error:{}",t.getCause());
	}
}
 
Example #29
Source File: KafkaProducerServiceIntegrationTest.java    From vertx-kafka-service with Apache License 2.0
private void consumeMessages() {
    final Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(TOPIC, 1);
    final StringDecoder decoder =
            new StringDecoder(new VerifiableProperties());
    final Map<String, List<KafkaStream<String, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap, decoder, decoder);
    final KafkaStream<String, String> stream =
            consumerMap.get(TOPIC).get(0);
    final ConsumerIterator<String, String> iterator = stream.iterator();

    Thread kafkaMessageReceiverThread = new Thread(
            () -> {
                while (iterator.hasNext()) {
                    String msg = iterator.next().message();
                    msg = msg == null ? "<null>" : msg;
                    System.out.println("got message: " + msg);
                    messagesReceived.add(msg);
                }
            },
            "kafkaMessageReceiverThread"
    );
    kafkaMessageReceiverThread.start();

}
 
Example #30
Source File: KafkaDistributed.java    From jlogstash-input-plugin with Apache License 2.0
public void run() {
	try {
		while(true){
			ConsumerIterator<byte[], byte[]> it = m_stream.iterator();
			while (it.hasNext()) {
				String m = null;
				try {
					m = new String(it.next().message(),
							this.kafkaInput.encoding);
					Map<String, Object> event = this.decoder
							.decode(m);
					if(zkDistributed==null){
						this.kafkaInput.process(event);
					}else{
						zkDistributed.route(event);
					}
				} catch (Exception e) {
					logger.error("process event:{} failed:{}",m,ExceptionUtil.getErrorMessage(e));
				}
			}
		}
	} catch (Exception t) {
		logger.error("kakfa Consumer fetch is error:{}",ExceptionUtil.getErrorMessage(t));
	}
}