kafka.consumer.Consumer Java Examples

The following examples show how to use kafka.consumer.Consumer, the entry point of Kafka's legacy high-level (ZooKeeper-based) consumer API. Each example names the project and source file it was taken from.
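Most of the examples share one pattern: put connection settings in a Properties object, wrap them in a ConsumerConfig, call Consumer.createJavaConsumerConnector to obtain a ConsumerConnector, request one or more KafkaStreams per topic, and read messages through a ConsumerIterator. The following minimal sketch is not taken from any project below; the ZooKeeper address, group id, topic name, and class name are placeholders:

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class MinimalHighLevelConsumer {

    public static void main(String[] args) {
        // Placeholder connection settings; adjust for a real cluster
        Properties props = new Properties();
        props.put("zookeeper.connect", "localhost:2181");
        props.put("group.id", "example-group");

        // One connector per consumer-group member
        ConsumerConnector connector =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

        // Request a single stream for the topic and take its iterator
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                connector.createMessageStreams(Collections.singletonMap("example-topic", 1));
        ConsumerIterator<byte[], byte[]> it = streams.get("example-topic").get(0).iterator();

        // hasNext() blocks waiting for messages unless consumer.timeout.ms is set
        while (it.hasNext()) {
            System.out.println(new String(it.next().message()));
        }
        connector.shutdown();
    }
}

The examples differ mainly in how they populate the Properties (timeouts, offset reset, commit settings), how many streams they request, and whether they hand each stream to its own thread.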
Example #1
Source File: KafkaConsumerTest.java    From pentaho-kafka-consumer with Apache License 2.0
@Before
public void setUp() {
    data = new KafkaConsumerData();
    meta = new KafkaConsumerMeta();
    meta.setKafkaProperties(getDefaultKafkaProperties());
    meta.setLimit(STEP_LIMIT);

    stepMeta = new StepMeta("KafkaConsumer", meta);
    transMeta = new TransMeta();
    transMeta.addStep(stepMeta);
    trans = new Trans(transMeta);

    PowerMockito.mockStatic(Consumer.class);

    when(Consumer.createJavaConsumerConnector(any(ConsumerConfig.class))).thenReturn(zookeeperConsumerConnector);
    when(zookeeperConsumerConnector.createMessageStreams(anyMapOf(String.class, Integer.class))).thenReturn(streamsMap);
    when(streamsMap.get(anyObject())).thenReturn(stream);
    when(stream.get(anyInt())).thenReturn(kafkaStream);
    when(kafkaStream.iterator()).thenReturn(streamIterator);
    when(streamIterator.next()).thenReturn(generateKafkaMessage());
}
 
Example #2
Source File: KafkaConsumerModule.java    From heroic with Apache License 2.0
@Provides
@KafkaScope
ConsumerSchema.Consumer consumer(final IngestionManager ingestionManager) {
    // XXX: make target group configurable?
    final IngestionGroup ingestion = ingestionManager.useDefaultGroup();

    if (ingestion.isEmpty()) {
        throw new IllegalStateException("No backends are part of the ingestion group");
    }

    final ConsumerSchema.Depends d = DaggerConsumerSchema_Depends
        .builder()
        .primaryComponent(primary)
        .depends(depends)
        .dependsModule(new ConsumerSchema.DependsModule(ingestion))
        .build();

    final ConsumerSchema.Exposed exposed = schema.setup(d);
    return exposed.consumer();
}
 
Example #3
Source File: KafkaSourceOp.java    From PoseidonX with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void initialize()
    throws StreamingException
{
    ConsumerConfig consumerConfig = new ConsumerConfig(kafkaProperties);
    consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);

    Map<String, Integer> topicCountMap = Maps.newHashMap();
    topicCountMap.put(topic, TOPIC_COUNT);

    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
        consumerConnector.createMessageStreams(topicCountMap);
    KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
    consumerIterator = stream.iterator();
}
 
Example #4
Source File: LegacyKafkaMessageIterator.java    From secor with Apache License 2.0
@Override
public void init(SecorConfig config) throws UnknownHostException {
    this.mConfig = config;

    mConsumerConnector = Consumer.createJavaConsumerConnector(createConsumerConfig());

    if (!mConfig.getKafkaTopicBlacklist().isEmpty() && !mConfig.getKafkaTopicFilter().isEmpty()) {
        throw new RuntimeException("Topic filter and blacklist cannot be both specified.");
    }
    TopicFilter topicFilter = !mConfig.getKafkaTopicBlacklist().isEmpty() ? new Blacklist(mConfig.getKafkaTopicBlacklist()) :
            new Whitelist(mConfig.getKafkaTopicFilter());
    LOG.debug("Use TopicFilter {}({})", topicFilter.getClass(), topicFilter);
    List<KafkaStream<byte[], byte[]>> streams =
            mConsumerConnector.createMessageStreamsByFilter(topicFilter);
    KafkaStream<byte[], byte[]> stream = streams.get(0);
    mIterator = stream.iterator();
    mKafkaMessageTimestampFactory = new KafkaMessageTimestampFactory(mConfig.getKafkaMessageTimestampClass());
}
 
Example #5
Source File: KafkaTestBase.java    From incubator-gobblin with Apache License 2.0
KafkaConsumerSuite(String zkConnectString, String topic)
{
  _topic = topic;
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnectString);
  consumeProps.put("group.id", _topic+"-"+System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  consumeProps.put("_consumer.timeout.ms", "10000");

  _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));

  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this._topic);
  _stream = streams.get(0);
  _iterator = _stream.iterator();
}
 
Example #6
Source File: KafkaSpout.java    From monasca-thresh with Apache License 2.0
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
  logger.info("Opened");
  this.collector = collector;
  logger.info(" topic = " + kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
  this.spoutName = String.format("%s-%d", context.getThisComponentId(), context.getThisTaskId());

  Properties kafkaProperties =
      KafkaConsumerProperties.createKafkaProperties(kafkaSpoutConfig.kafkaConsumerConfiguration);
  // Have to use a different consumer.id for each spout so use the storm taskId. Otherwise,
  // zookeeper complains about a conflicted ephemeral node when there is more than one spout
  // reading from a topic
  kafkaProperties.setProperty("consumer.id", String.valueOf(context.getThisTaskId()));
  ConsumerConfig consumerConfig = new ConsumerConfig(kafkaProperties);
  this.consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
}
 
Example #7
Source File: KAFKAMessageListener.java    From micro-integrator with Apache License 2.0
/**
 * Create the connection with the zookeeper to consume the messages
 */
public boolean createKafkaConsumerConnector() throws Exception {

    log.debug("Create the connection and start to consume the streams");
    boolean isCreated;
    try {
        if (consumerConnector == null) {
            log.info("Creating Kafka Consumer Connector...");

            //set default consumer timeout to 3000ms if it is not set by the user
            if (!kafkaProperties.containsKey(KAFKAConstants.CONSUMER_TIMEOUT)) {
                kafkaProperties.put(KAFKAConstants.CONSUMER_TIMEOUT, "3000");
            }
            consumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(kafkaProperties));
            log.info("Kafka Consumer Connector is created");
            start();
        }
        isCreated = true;
    } catch (ZkTimeoutException toe) {
        log.error("Error in creating Kafka consumer connector | ZkTimeout: " + toe.getMessage());
        throw new SynapseException("Error in creating Kafka consumer connector | ZkTimeout", toe);

    } catch (Exception e) {
        log.error("Error in creating Kafka consumer connector: " + e.getMessage(), e);
        throw new SynapseException("Error in creating Kafka consumer connector", e);
    }
    return isCreated;
}
 
Example #8
Source File: KafkaSourceUtil.java    From flume-ng-extends-source with MIT License
public static ConsumerConnector getConsumer(Properties kafkaProps) {
  ConsumerConfig consumerConfig =
          new ConsumerConfig(kafkaProps);
  ConsumerConnector consumer =
          Consumer.createJavaConsumerConnector(consumerConfig);
  return consumer;
}
 
Example #9
Source File: KafkaConsumerModule.java    From heroic with Apache License 2.0
private List<ConsumerThread> buildThreads(
    final AsyncFramework async, final Clock clock, final ConsumerReporter reporter,
    final Map<String, List<KafkaStream<byte[]>>> streams,
    final ConsumerSchema.Consumer consumer, final AtomicInteger consuming,
    final AtomicLong errors, final LongAdder consumed, final boolean enablePeriodicCommit,
    final long periodicCommitInterval, final AtomicLong nextOffsetsCommitTS
) {
    final List<ConsumerThread> threads = new ArrayList<>();

    final Set<Map.Entry<String, List<KafkaStream<byte[]>>>> entries = streams.entrySet();

    for (final Map.Entry<String, List<KafkaStream<byte[]>>> entry : entries) {
        final String topic = entry.getKey();
        final List<KafkaStream<byte[]>> list = entry.getValue();

        int count = 0;

        for (final KafkaStream<byte[]> stream : list) {
            final String name = String.format("%s:%d", topic, count++);

            threads.add(
                new ConsumerThread(async, clock, name, reporter, stream, consumer, consuming,
                    errors, consumed, enablePeriodicCommit, periodicCommitInterval,
                    nextOffsetsCommitTS));
        }
    }

    return threads;
}
 
Example #10
Source File: AlertKafkaPublisherTest.java    From eagle with Apache License 2.0
private static void consumeWithOutput(final List<String> outputMessages) {
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            Properties props = new Properties();
            props.put("group.id", "B");
            props.put("zookeeper.connect", "127.0.0.1:" + + TEST_KAFKA_ZOOKEEPER_PORT);
            props.put("zookeeper.session.timeout.ms", "4000");
            props.put("zookeeper.sync.time.ms", "2000");
            props.put("auto.commit.interval.ms", "1000");
            props.put("auto.offset.reset", "smallest");

            ConsumerConnector jcc = null;
            try {
                ConsumerConfig ccfg = new ConsumerConfig(props);
                jcc = Consumer.createJavaConsumerConnector(ccfg);
                Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
                topicCountMap.put(TEST_TOPIC_NAME, 1);
                Map<String, List<KafkaStream<byte[], byte[]>>> topicMap = jcc.createMessageStreams(topicCountMap);
                KafkaStream<byte[], byte[]> cstrm = topicMap.get(TEST_TOPIC_NAME).get(0);
                for (MessageAndMetadata<byte[], byte[]> mm : cstrm) {
                    String message = new String(mm.message());
                    outputMessages.add(message);

                    try {
                        Thread.sleep(5000);
                    } catch (InterruptedException e) {
                    }
                }
            } finally {
                if (jcc != null) {
                    jcc.shutdown();
                }
            }
        }
    });
    t.start();
}
 
Example #11
Source File: KafkaChannel.java    From monasca-persister with Apache License 2.0
@Inject
public KafkaChannel(PersisterConfig configuration, @Assisted PipelineConfig pipelineConfig,
    @Assisted String threadId) {

  this.topic = pipelineConfig.getTopic();
  this.threadId = threadId;
  this.commitBatchtimeInMills = pipelineConfig.getCommitBatchTime();
  nextCommitTime = System.currentTimeMillis() + commitBatchtimeInMills;
  Properties kafkaProperties = createKafkaProperties(configuration.getKafkaConfig(), pipelineConfig);
  consumerConnector = Consumer.createJavaConsumerConnector(createConsumerConfig(kafkaProperties));
}
 
Example #12
Source File: SimpleKafkaConsumer.java    From incubator-gobblin with Apache License 2.0
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint)
{
  Config config = ConfigFactory.parseProperties(props);
  topic = config.getString("topic");
  String zkConnect = config.getString("zookeeper.connect");

  schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
  deserializer = new LiAvroDeserializer(schemaRegistry);
  /** TODO: Make Confluent schema registry integration configurable
   * HashMap<String, String> avroSerDeConfig = new HashMap<>();
   * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
   * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
   * deserializer.configure(avroSerDeConfig, false);
   *
   **/

  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnect);
  consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  consumeProps.put("auto.offset.reset", "smallest");
  consumeProps.put("auto.commit.enable", "false");
  //consumeProps.put("consumer.timeout.ms", "10000");

  consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));

  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(ImmutableMap.of(topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
  stream = streams.get(0);

  iterator = stream.iterator();
}
 
Example #13
Source File: KafkaTestBase.java    From incubator-gobblin with Apache License 2.0
public KafkaTestBase(String topic) throws InterruptedException, RuntimeException {

    startServer();

    this.topic = topic;

    AdminUtils.createTopic(zkClient, topic, 1, 1, new Properties());

    List<KafkaServer> servers = new ArrayList<>();
    servers.add(kafkaServer);
    TestUtils.waitUntilMetadataIsPropagated(scala.collection.JavaConversions.asScalaBuffer(servers), topic, 0, 5000);

    Properties consumeProps = new Properties();
    consumeProps.put("zookeeper.connect", zkConnect);
    consumeProps.put("group.id", "testConsumer");
    consumeProps.put("zookeeper.session.timeout.ms", "10000");
    consumeProps.put("zookeeper.sync.time.ms", "10000");
    consumeProps.put("auto.commit.interval.ms", "10000");
    consumeProps.put("consumer.timeout.ms", "10000");

    consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));

    Map<String, Integer> topicCountMap = new HashMap<>();
    topicCountMap.put(this.topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
    stream = streams.get(0);

    iterator = stream.iterator();
}
 
Example #14
Source File: MessageResource.java    From dropwizard-kafka-http with Apache License 2.0
@GET
@Timed
public Response consume(
        @QueryParam("topic") String topic,
        @QueryParam("timeout") Integer timeout
) {
    if (Strings.isNullOrEmpty(topic))
        return Response.status(400)
                .entity(new String[]{"Undefined topic"})
                .build();

    Properties props = (Properties) consumerCfg.clone();
    if (timeout != null) props.put("consumer.timeout.ms", "" + timeout);

    ConsumerConfig config = new ConsumerConfig(props);
    ConsumerConnector connector = Consumer.createJavaConsumerConnector(config);

    Map<String, Integer> streamCounts = Collections.singletonMap(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams(streamCounts);
    KafkaStream<byte[], byte[]> stream = streams.get(topic).get(0);

    List<Message> messages = new ArrayList<>();
    try {
        for (MessageAndMetadata<byte[], byte[]> messageAndMetadata : stream)
            messages.add(new Message(messageAndMetadata));
    } catch (ConsumerTimeoutException ignore) {
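        // thrown by the stream iterator once consumer.timeout.ms elapses with no
        // new message; stop reading and return what has been collected so far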
    } finally {
        connector.commitOffsets();
        connector.shutdown();
    }

    return Response.ok(messages).build();
}
 
Example #15
Source File: KafkaPublisherTest.java    From nifi with Apache License 2.0
private ConsumerIterator<byte[], byte[]> buildConsumer(String topic) {
    Properties props = new Properties();
    props.put("zookeeper.connect", "localhost:" + kafkaLocal.getZookeeperPort());
    props.put("group.id", "test");
    props.put("consumer.timeout.ms", "5000");
    props.put("auto.offset.reset", "smallest");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<>(1);
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
    ConsumerIterator<byte[], byte[]> iter = streams.get(0).iterator();
    return iter;
}
 
Example #16
Source File: PutKafkaTest.java    From nifi with Apache License 2.0
private ConsumerIterator<byte[], byte[]> buildConsumer(String topic) {
    Properties props = new Properties();
    props.put("zookeeper.connect", "0.0.0.0:" + kafkaLocal.getZookeeperPort());
    props.put("group.id", "test");
    props.put("consumer.timeout.ms", "5000");
    props.put("auto.offset.reset", "smallest");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<>(1);
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
    ConsumerIterator<byte[], byte[]> iter = streams.get(0).iterator();
    return iter;
}
 
Example #17
Source File: KafkaConsumer.java    From pentaho-kafka-consumer with Apache License 2.0
public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    super.init(smi, sdi);

    KafkaConsumerMeta meta = (KafkaConsumerMeta) smi;
    KafkaConsumerData data = (KafkaConsumerData) sdi;

    Properties properties = meta.getKafkaProperties();
    Properties substProperties = new Properties();
    for (Entry<Object, Object> e : properties.entrySet()) {
        substProperties.put(e.getKey(), environmentSubstitute(e.getValue().toString()));
    }
    if (meta.isStopOnEmptyTopic()) {

        // If there isn't already a provided value, set a default of 1s
        if (!substProperties.containsKey(CONSUMER_TIMEOUT_KEY)) {
            substProperties.put(CONSUMER_TIMEOUT_KEY, "1000");
        }
    } else {
        if (substProperties.containsKey(CONSUMER_TIMEOUT_KEY)) {
            logError(Messages.getString("KafkaConsumer.WarnConsumerTimeout"));
        }
    }
    ConsumerConfig consumerConfig = new ConsumerConfig(substProperties);

    logBasic(Messages.getString("KafkaConsumer.CreateKafkaConsumer.Message", consumerConfig.zkConnect()));
    data.consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    String topic = environmentSubstitute(meta.getTopic());
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> streamsMap = data.consumer.createMessageStreams(topicCountMap);
    logDebug("Received streams map: " + streamsMap);
    data.streamIterator = streamsMap.get(topic).get(0).iterator();

    return true;
}
 
Example #18
Source File: KafkaConsumerTest.java    From pentaho-kafka-consumer with Apache License 2.0
@Test
public void withStopOnEmptyTopic() throws KettleException {

    meta.setStopOnEmptyTopic(true);
    TransMeta tm = TransTestFactory.generateTestTransformation(new Variables(), meta, STEP_NAME);

    TransTestFactory.executeTestTransformation(tm, TransTestFactory.INJECTOR_STEPNAME,
            STEP_NAME, TransTestFactory.DUMMY_STEPNAME, new ArrayList<RowMetaAndData>());

    PowerMockito.verifyStatic();
    ArgumentCaptor<ConsumerConfig> consumerConfig = ArgumentCaptor.forClass(ConsumerConfig.class);
    Consumer.createJavaConsumerConnector(consumerConfig.capture());

    assertEquals(1000, consumerConfig.getValue().consumerTimeoutMs());
}
 
Example #19
Source File: HighLevelConsumerExample.java    From pulsar with Apache License 2.0
private static void consumeMessage(Arguments arguments) {

    Properties properties = new Properties();
    properties.put("zookeeper.connect", arguments.serviceUrl);
    properties.put("group.id", arguments.groupName);
    properties.put("consumer.id", "cons1");
    properties.put("auto.commit.enable", Boolean.toString(!arguments.autoCommitDisable));
    properties.put("auto.commit.interval.ms", "100");
    properties.put("queued.max.message.chunks", "100");

    ConsumerConfig consumerConfig = new ConsumerConfig(properties);
    ConsumerConnector connector = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = Collections.singletonMap(arguments.topicName, 2);
    Map<String, List<KafkaStream<String, Tweet>>> streams = connector.createMessageStreams(topicCountMap,
            new StringDecoder(null), new Tweet.TestDecoder());

    int count = 0;
    while (count < arguments.totalMessages || arguments.totalMessages == -1) {
        for (int i = 0; i < streams.size(); i++) {
            List<KafkaStream<String, Tweet>> kafkaStreams = streams.get(arguments.topicName);
            for (KafkaStream<String, Tweet> kafkaStream : kafkaStreams) {
                for (MessageAndMetadata<String, Tweet> record : kafkaStream) {
                    log.info("Received tweet: {}-{}", record.message().userName, record.message().message);
                    count++;
                }
            }
        }
    }

    connector.shutdown();

    log.info("successfully consumed message {}", count);
}
 
Example #20
Source File: DemoHighLevelConsumer.java    From KafkaExample with Apache License 2.0
public static void main(String[] args) {
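	// NOTE: the next line hard-codes the arguments for this demo, which means the
	// usage check below can never fail; remove it to honor real command-line arguments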
	args = new String[] { "zookeeper0:2181/kafka", "topic1", "group2", "consumer1" };
	if (args == null || args.length != 4) {
		System.err.println("Usage:\n\tjava -jar kafka_consumer.jar ${zookeeper_list} ${topic_name} ${group_name} ${consumer_id}");
		System.exit(1);
	}
	String zk = args[0];
	String topic = args[1];
	String groupid = args[2];
	String consumerid = args[3];
	Properties props = new Properties();
	props.put("zookeeper.connect", zk);
	props.put("group.id", groupid);
	props.put("client.id", "test");
	props.put("consumer.id", consumerid);
	props.put("auto.offset.reset", "largest");
	props.put("auto.commit.enable", "false");
	props.put("auto.commit.interval.ms", "60000");

	ConsumerConfig consumerConfig = new ConsumerConfig(props);
	ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);

	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	topicCountMap.put(topic, 1);
	Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumerConnector.createMessageStreams(topicCountMap);

	KafkaStream<byte[], byte[]> stream1 = consumerMap.get(topic).get(0);
	ConsumerIterator<byte[], byte[]> interator = stream1.iterator();
	while (interator.hasNext()) {
		MessageAndMetadata<byte[], byte[]> messageAndMetadata = interator.next();
		String message = String.format(
				"Topic:%s, GroupID:%s, Consumer ID:%s, PartitionID:%s, Offset:%s, Message Key:%s, Message Payload: %s",
				messageAndMetadata.topic(), groupid, consumerid, messageAndMetadata.partition(),
				messageAndMetadata.offset(), new String(messageAndMetadata.key()),
				new String(messageAndMetadata.message()));
		System.out.println(message);
		consumerConnector.commitOffsets();
	}
}
 
Example #21
Source File: KafkaMqCollect.java    From light_drtc with Apache License 2.0
public void init() {
    Properties props = new Properties();
    props.put("zookeeper.connect", Constants.kfZkServers);
    props.put("group.id", Constants.kfGroupId);
    props.put("auto.offset.reset", Constants.kfAutoOffsetReset);
    props.put("zookeeper.session.timeout.ms", "4000");
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    ConsumerConfig config = new ConsumerConfig(props);
    consumer = Consumer.createJavaConsumerConnector(config);
}
 
Example #22
Source File: KafkaDemoClient.java    From iotplatform with Apache License 2.0
private static ConsumerIterator<String, String> buildConsumer(String topic) {
    Map<String, Integer> topicCountMap = new HashMap<>();
    topicCountMap.put(topic, 1);
    ConsumerConfig consumerConfig = new ConsumerConfig(consumerProperties());
    ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, List<KafkaStream<String, String>>> consumers = consumerConnector.createMessageStreams(topicCountMap, new StringDecoder(null), new StringDecoder(null));
    KafkaStream<String, String> stream = consumers.get(topic).get(0);
    return stream.iterator();
}
 
Example #23
Source File: KafkaConsumer.java    From blog_demos with Apache License 2.0
/**
 * Start a consumer
 * @param topic
 */
public void startConsume(String topic){
    Properties props = new Properties();
    props.put("zookeeper.connect", zkConnect);
    props.put("group.id", groupId);
    props.put("zookeeper.session.timeout.ms", "40000");
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));


    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, new Integer(1));
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
    final ConsumerIterator<byte[], byte[]> it = stream.iterator();

    Runnable executor = new Runnable() {
        @Override
        public void run() {
            while (it.hasNext()) {
                System.out.println("************** receive:" + new String(it.next().message()));
                try {
                    Thread.sleep(3000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    };

    new Thread(executor).start();
}
 
Example #24
Source File: JavaKafkaConsumerHighAPIESImpl.java    From dk-fitting with Apache License 2.0
public JavaKafkaConsumerHighAPIESImpl(String topicName, int threads, String zookeeper, String groupId) throws TException {
    // 1. Create the Kafka consumer connector
    this.consumer = Consumer.createJavaConsumerConnector(createConsumerConfig(zookeeper, groupId));
    // 2. Assign the configuration values
    this.topic = topicName;
    this.numThreads = threads;
}
 
Example #25
Source File: KafkaExample.java    From pragmatic-java-engineer with GNU General Public License v3.0
public static void consumer() {
    Properties props = new Properties();
    props.put("zookeeper.connect", "zk1.dmp.com:2181,zk2.dmp.com:2181,zk3.dmp.com:2181");
    props.put("zookeeper.session.timeout.ms", "3000");
    props.put("zookeeper.sync.time.ms", "200");
    props.put("group.id", "test_group");
    props.put("auto.commit.interval.ms", "600");

    String topic = "test_topic";
    ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
    Map<String, Integer> topics = new HashMap<String, Integer>();
    int partitionNum = 3; // number of partitions
    topics.put(topic, partitionNum);
    Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams(topics);
    List<KafkaStream<byte[], byte[]>> partitions = streams.get(topic);
    Executor threadPool = Executors.newFixedThreadPool(partitionNum);
    for (final KafkaStream<byte[], byte[]> partition : partitions) {
        threadPool.execute(
                new Runnable() {
                    @Override
                    public void run() {
                        ConsumerIterator<byte[], byte[]> it = partition.iterator();
                        while (it.hasNext()) {
                            MessageAndMetadata<byte[], byte[]> item = it.next();
                            byte[] messageBody = item.message();
                        }
                    }
                });
    }
}
 
Example #26
Source File: JavaKafkaConsumerHighAPIHbaseImpl.java    From dk-fitting with Apache License 2.0
public JavaKafkaConsumerHighAPIHbaseImpl(String topic, int numThreads, String zookeeper, String groupId) throws TException {
    // 1. Create the Kafka consumer connector
    this.consumer = Consumer.createJavaConsumerConnector(createConsumerConfig(zookeeper, groupId));
    // 2. Assign the configuration values
    this.topic = topic;
    this.numThreads = numThreads;
}
 
Example #27
Source File: KafkaUtils.java    From Kafka-Insight with Apache License 2.0
public static ConsumerConnector createConsumerConnector(String zkAddr, String group) {
    Properties props = new Properties();
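    // NOTE: GROUP_ID_CONFIG and EXCLUDE_INTERNAL_TOPICS_CONFIG are constants from the
    // new client's org.apache.kafka.clients.consumer.ConsumerConfig; they resolve to
    // the plain strings "group.id" and "exclude.internal.topics", which the old
    // kafka.consumer.ConsumerConfig below accepts as well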
    props.put(ConsumerConfig.GROUP_ID_CONFIG, group);
    props.put(ConsumerConfig.EXCLUDE_INTERNAL_TOPICS_CONFIG, "false");
    props.put(KafkaConfig.ZkConnectProp(), zkAddr);
    ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(new kafka.consumer.ConsumerConfig(props));
    return consumerConnector;
}
 
Example #28
Source File: KafkaPublisherTest.java    From localization_nifi with Apache License 2.0
private ConsumerIterator<byte[], byte[]> buildConsumer(String topic) {
    Properties props = new Properties();
    props.put("zookeeper.connect", "localhost:" + kafkaLocal.getZookeeperPort());
    props.put("group.id", "test");
    props.put("consumer.timeout.ms", "5000");
    props.put("auto.offset.reset", "smallest");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<>(1);
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
    ConsumerIterator<byte[], byte[]> iter = streams.get(0).iterator();
    return iter;
}