kafka.consumer.ConsumerConfig Java Examples

The following examples show how to use kafka.consumer.ConsumerConfig, the configuration class for Kafka's old (0.8.x era) ZooKeeper-based high-level consumer. Each example comes from an open source project; the source file, project, and license are noted above it.
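Most of the examples below share the same lifecycle: build a java.util.Properties, wrap it in a ConsumerConfig, create a ConsumerConnector, ask it for message streams, and iterate. A minimal, self-contained sketch of that flow (the ZooKeeper address, group id, and topic name are placeholders):

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

public class MinimalHighLevelConsumer {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("zookeeper.connect", "localhost:2181"); // placeholder
        props.put("group.id", "example-group");           // placeholder
        props.put("auto.offset.reset", "smallest");       // start from the earliest offset

        ConsumerConnector connector =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                connector.createMessageStreams(Collections.singletonMap("my-topic", 1));
        KafkaStream<byte[], byte[]> stream = streams.get("my-topic").get(0);

        // blocks until messages arrive; set consumer.timeout.ms to make it give up
        for (MessageAndMetadata<byte[], byte[]> mm : stream) {
            System.out.println(new String(mm.message()));
        }
        connector.shutdown();
    }
}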
Example #1
Source File: PulsarKafkaConsumerTest.java    From pulsar with Apache License 2.0
@Test
public void testPulsarKafkaConsumerConfig() throws Exception {
    // https://kafka.apache.org/08/documentation.html#consumerconfigs
    Properties properties = new Properties();
    properties.put("zookeeper.connect", "http://localhost:8080/");
    properties.put("group.id", "group1");
    properties.put("consumer.id", "cons1");
    properties.put("auto.commit.enable", "true");
    properties.put("auto.commit.interval.ms", "100");
    properties.put("queued.max.message.chunks", "100");

    ConsumerConfig config = new ConsumerConfig(properties);
    ConsumerConnector connector = new ConsumerConnector(config);
    ConsumerBuilderImpl<byte[]> consumerBuilder = (ConsumerBuilderImpl<byte[]>) connector.getConsumerBuilder();
    Field confField = consumerBuilder.getClass().getDeclaredField("conf");
    confField.setAccessible(true);
    ConsumerConfigurationData conf = (ConsumerConfigurationData) confField.get(consumerBuilder);
    assertEquals(conf.getSubscriptionName(), "group1");
    assertEquals(conf.getReceiverQueueSize(), 100);
    assertEquals(conf.getAcknowledgementsGroupTimeMicros(), TimeUnit.MILLISECONDS.toMicros(100));
    System.out.println(conf);
}
 
Example #2
Source File: KafkaSpout.java    From monasca-thresh with Apache License 2.0
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
  logger.info("Opened");
  this.collector = collector;
  logger.info(" topic = " + kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
  this.spoutName = String.format("%s-%d", context.getThisComponentId(), context.getThisTaskId());

  Properties kafkaProperties =
      KafkaConsumerProperties.createKafkaProperties(kafkaSpoutConfig.kafkaConsumerConfiguration);
  // Have to use a different consumer.id for each spout so use the storm taskId. Otherwise,
  // zookeeper complains about a conflicted ephemeral node when there is more than one spout
  // reading from a topic
  kafkaProperties.setProperty("consumer.id", String.valueOf(context.getThisTaskId()));
  ConsumerConfig consumerConfig = new ConsumerConfig(kafkaProperties);
  this.consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
}
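The snippet above only opens the connector; the emit loop lives elsewhere in the spout. A hedged sketch of what such a loop typically looks like (the field names, and the assumption that consumer.timeout.ms is set, are mine, not monasca-thresh's code):

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.tuple.Values;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.ConsumerTimeoutException;
import kafka.message.MessageAndMetadata;

class SpoutEmitLoop {
    private SpoutOutputCollector collector;            // set in open()
    private ConsumerIterator<byte[], byte[]> iterator; // from createMessageStreams(...)

    public void nextTuple() {
        try {
            // with consumer.timeout.ms set, hasNext() throws instead of blocking forever
            if (iterator.hasNext()) {
                MessageAndMetadata<byte[], byte[]> mm = iterator.next();
                collector.emit(new Values(new String(mm.message())));
            }
        } catch (ConsumerTimeoutException e) {
            // nothing arrived within the timeout; Storm will call nextTuple() again
        }
    }
}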
 
Example #3
Source File: KafkaTestBase.java    From incubator-gobblin with Apache License 2.0
KafkaConsumerSuite(String zkConnectString, String topic)
{
  _topic = topic;
  Properties consumeProps = new Properties();
  consumeProps.put("zookeeper.connect", zkConnectString);
  consumeProps.put("group.id", _topic+"-"+System.nanoTime());
  consumeProps.put("zookeeper.session.timeout.ms", "10000");
  consumeProps.put("zookeeper.sync.time.ms", "10000");
  consumeProps.put("auto.commit.interval.ms", "10000");
  consumeProps.put("_consumer.timeout.ms", "10000");

  _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));

  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
      _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
  List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this._topic);
  _stream = streams.get(0);
  _iterator = _stream.iterator();
}
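A hedged helper for reading messages back from this suite's iterator in a test, assuming the consumer timeout is actually enabled (i.e. the key above without its leading underscore) so hasNext() can give up:

import java.util.ArrayList;
import java.util.List;

import kafka.consumer.ConsumerIterator;
import kafka.consumer.ConsumerTimeoutException;

class StreamDrainer {
    static List<String> drain(ConsumerIterator<byte[], byte[]> it, int max) {
        List<String> out = new ArrayList<>();
        try {
            while (out.size() < max && it.hasNext()) {
                out.add(new String(it.next().message()));
            }
        } catch (ConsumerTimeoutException e) {
            // no more messages within consumer.timeout.ms
        }
        return out;
    }
}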
 
Example #4
Source File: kafkaConsumer.java    From Transwarp-Sample-Code with MIT License
/**
 * Create a thread pool and run one Kafka consumer task per stream.
 */
public void go() {
    Constant constant = new Constant();
    kafkaProperties kafkaProperties = new kafkaProperties();
    ConsumerConfig config = new ConsumerConfig(kafkaProperties.properties());

    ExecutorService executorService = Executors.newFixedThreadPool(Integer.parseInt(constant.THREAD_POOL_SIZE));

    String topic = constant.TOPIC;
    Map<String, Integer> topicCountMap = new HashMap<>();
    topicCountMap.put(topic, Integer.valueOf(constant.THREAD_NUM));
    ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);

    for (KafkaStream<byte[], byte[]> stream : streams) {
        executorService.submit(new Task(stream));
    }

    executorService.shutdown();
}
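The Task class submitted to the pool is defined elsewhere in the project; a hedged sketch of the usual shape of such a worker (this body is an assumption, not the project's code):

import kafka.consumer.KafkaStream;
import kafka.message.MessageAndMetadata;

class Task implements Runnable {
    private final KafkaStream<byte[], byte[]> stream;

    Task(KafkaStream<byte[], byte[]> stream) {
        this.stream = stream;
    }

    @Override
    public void run() {
        // each thread owns one stream; iteration blocks until messages arrive
        for (MessageAndMetadata<byte[], byte[]> mm : stream) {
            System.out.println(Thread.currentThread().getName() + ": " + new String(mm.message()));
        }
    }
}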
 
Example #5
Source File: OldApiTopicConsumer.java    From azeroth with Apache License 2.0
/**
 * Builds the consumer from the given context.
 *
 * @param context the consumer context carrying the kafka properties and message handlers
 */
@SuppressWarnings("unchecked")
public OldApiTopicConsumer(ConsumerContext context) {

    this.consumerContext = context;
    try {
        Class<?> deserializerClass = Class
            .forName(context.getProperties().getProperty("value.deserializer"));
        deserializer = (Deserializer<Object>) deserializerClass.newInstance();
    } catch (Exception e) {
        logger.error("Failed to instantiate value.deserializer", e);
    }
    this.connector = kafka.consumer.Consumer
        .createJavaConsumerConnector(new ConsumerConfig(context.getProperties()));

    int poolSize = consumerContext.getMessageHandlers().size();
    this.fetchExecutor = new StandardThreadExecutor(poolSize, poolSize, 0, TimeUnit.SECONDS,
        poolSize, new StandardThreadFactory("KafkaFetcher"));

    this.defaultProcessExecutor = new StandardThreadExecutor(1, context.getMaxProcessThreads(),
        30, TimeUnit.SECONDS, context.getMaxProcessThreads(),
        new StandardThreadFactory("KafkaProcessor"), new PoolFullRunsPolicy());

    logger.info(
        "Kafka consumer thread pools initialized, fetchPool size: {}, defaultProcessPool size: {}",
        poolSize, context.getMaxProcessThreads());
}
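The fetch executor then typically drains each stream and hands deserialized values to the registered handler; a hedged sketch using the Deserializer created above (the dispatch step is an assumption):

import org.apache.kafka.common.serialization.Deserializer;

import kafka.consumer.KafkaStream;
import kafka.message.MessageAndMetadata;

class FetchLoop {
    static void drain(KafkaStream<byte[], byte[]> stream, Deserializer<Object> deserializer) {
        for (MessageAndMetadata<byte[], byte[]> mm : stream) {
            Object value = deserializer.deserialize(mm.topic(), mm.message());
            // dispatch 'value' to the MessageHandler registered for mm.topic()
        }
    }
}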
 
Example #6
Source File: KafkaDataProvider.java    From linden with Apache License 2.0
public KafkaDataProvider(String zookeeper, String topic, String groupId) {
  super(MessageAndMetadata.class);
  Properties props = new Properties();
  props.put("zookeeper.connect", zookeeper);
  props.put("group.id", groupId);
  props.put("zookeeper.session.timeout.ms", "30000");
  props.put("auto.commit.interval.ms", "1000");
  props.put("fetch.message.max.bytes", "4194304");
  consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
  Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
  topicCountMap.put(topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);

  iter = stream.iterator();
}
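Callers then pull documents off the provider's iterator; a minimal hedged sketch (the method name is an assumption about the surrounding class):

public MessageAndMetadata<byte[], byte[]> next() {
    // blocks until a message arrives; no consumer.timeout.ms is set above
    return iter.next();
}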
 
Example #7
Source File: KafkaDistributed.java    From jlogstash-input-plugin with Apache License 2.0
@SuppressWarnings("unchecked")
public void prepare() {
	Properties props = geneConsumerProp();
	
	for(String topicName : topic.keySet()){
		ConsumerConnector consumer = kafka.consumer.Consumer
				.createJavaConsumerConnector(new ConsumerConfig(props));
		
		consumerConnMap.put(topicName, consumer);
	}
	if (distributed != null) {
		try {
			logger.warn("zkDistributed is starting...");
			zkDistributed = ZkDistributed.getSingleZkDistributed(distributed);
			zkDistributed.zkRegistration();
		} catch (Exception e) {
			logger.error("zkRegistration failed: {}", ExceptionUtil.getErrorMessage(e));
		}
	}
}
 
Example #8
Source File: KafkaSourceOp.java    From PoseidonX with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void initialize()
    throws StreamingException
{
    ConsumerConfig consumerConfig = new ConsumerConfig(kafkaProperties);
    consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);

    Map<String, Integer> topicCountMap = Maps.newHashMap();
    topicCountMap.put(topic, TOPIC_COUNT);

    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
        consumerConnector.createMessageStreams(topicCountMap);
    KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
    consumerIterator = stream.iterator();
}
 
Example #9
Source File: KafkaConsumerMeta.java    From pentaho-kafka-consumer with Apache License 2.0
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
                  String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
                  IMetaStore metaStore) {

    if (topic == null) {
        remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
                Messages.getString("KafkaConsumerMeta.Check.InvalidTopic"), stepMeta));
    }
    if (field == null) {
        remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
                Messages.getString("KafkaConsumerMeta.Check.InvalidField"), stepMeta));
    }
    if (keyField == null) {
        remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
                Messages.getString("KafkaConsumerMeta.Check.InvalidKeyField"), stepMeta));
    }
    try {
        new ConsumerConfig(kafkaProperties);
    } catch (IllegalArgumentException e) {
        remarks.add(new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, e.getMessage(), stepMeta));
    }
}
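The bare new ConsumerConfig(kafkaProperties) works as a validity check because the constructor validates eagerly: required keys such as zookeeper.connect and group.id are checked up front, and a missing one raises IllegalArgumentException. A minimal demonstration:

import java.util.Properties;

import kafka.consumer.ConsumerConfig;

public class ConsumerConfigValidation {
    public static void main(String[] args) {
        try {
            new ConsumerConfig(new Properties()); // no zookeeper.connect, no group.id
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}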
 
Example #10
Source File: KafkaConsumerTest.java    From pentaho-kafka-consumer with Apache License 2.0
@Before
public void setUp() {
    data = new KafkaConsumerData();
    meta = new KafkaConsumerMeta();
    meta.setKafkaProperties(getDefaultKafkaProperties());
    meta.setLimit(STEP_LIMIT);

    stepMeta = new StepMeta("KafkaConsumer", meta);
    transMeta = new TransMeta();
    transMeta.addStep(stepMeta);
    trans = new Trans(transMeta);

    PowerMockito.mockStatic(Consumer.class);

    when(Consumer.createJavaConsumerConnector(any(ConsumerConfig.class))).thenReturn(zookeeperConsumerConnector);
    when(zookeeperConsumerConnector.createMessageStreams(anyMapOf(String.class, Integer.class))).thenReturn(streamsMap);
    when(streamsMap.get(anyObject())).thenReturn(stream);
    when(stream.get(anyInt())).thenReturn(kafkaStream);
    when(kafkaStream.iterator()).thenReturn(streamIterator);
    when(streamIterator.next()).thenReturn(generateKafkaMessage());
}
 
Example #11
Source File: KafkaDistributed.java    From jlogstash-input-plugin with Apache License 2.0
public void reconnConsumer(String topicName) {

	// shut down the existing connector for this topic
	ConsumerConnector consumerConn = consumerConnMap.get(topicName);
	consumerConn.commitOffsets(true);
	consumerConn.shutdown();
	consumerConnMap.remove(topicName);

	// stop the stream-consuming threads for this topic
	ExecutorService es = executorMap.get(topicName);
	es.shutdownNow();
	executorMap.remove(topicName);

	Properties prop = geneConsumerProp();
	ConsumerConnector newConsumerConn = kafka.consumer.Consumer
			.createJavaConsumerConnector(new ConsumerConfig(prop));
	consumerConnMap.put(topicName, newConsumerConn);

	addNewConsumer(topicName, topic.get(topicName));
}
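addNewConsumer(...) lives elsewhere in the class; a hedged sketch of its plausible shape, rebuilding the streams and worker threads for the topic (the executor sizing and worker body are assumptions):

void addNewConsumer(String topicName, Integer threadCount) {
    ConsumerConnector conn = consumerConnMap.get(topicName);
    Map<String, List<KafkaStream<byte[], byte[]>>> streams =
            conn.createMessageStreams(Collections.singletonMap(topicName, threadCount));
    ExecutorService es = Executors.newFixedThreadPool(threadCount);
    for (KafkaStream<byte[], byte[]> stream : streams.get(topicName)) {
        es.submit(() -> {
            for (MessageAndMetadata<byte[], byte[]> mm : stream) {
                // hand mm off to the plugin's event pipeline
            }
        });
    }
    executorMap.put(topicName, es);
}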
 
Example #12
Source File: Kafka.java    From jlogstash-input-plugin with Apache License 2.0
public void reconnConsumer(String topicName) {

	// shut down the existing connector for this topic
	ConsumerConnector consumerConn = consumerConnMap.get(topicName);
	consumerConn.commitOffsets(true);
	consumerConn.shutdown();
	consumerConnMap.remove(topicName);

	// stop the stream-consuming threads for this topic
	ExecutorService es = executorMap.get(topicName);
	es.shutdownNow();
	executorMap.remove(topicName);

	Properties prop = geneConsumerProp();
	ConsumerConnector newConsumerConn = kafka.consumer.Consumer
			.createJavaConsumerConnector(new ConsumerConfig(prop));
	consumerConnMap.put(topicName, newConsumerConn);

	addNewConsumer(topicName, topic.get(topicName));
}
 
Example #13
Source File: JavaKafkaConsumerHighAPIHbaseImpl.java    From dk-fitting with Apache License 2.0
/**
 * Creates the ConsumerConfig from the given ZooKeeper connect string and group id.
 *
 * @param zookeeper ZooKeeper connect string, e.g.
 *                  hadoop-senior01.ibeifeng.com:2181,hadoop-senior02.ibeifeng.com:2181/kafka
 * @param groupId   the group id this kafka consumer belongs to; consumers with the same group id are load-balanced
 * @return the Kafka connection configuration
 */
private ConsumerConfig createConsumerConfig(String zookeeper, String groupId) {
    // 1. Build the properties object
    Properties prop = new Properties();
    // 2. Add the relevant properties
    prop.put("group.id", groupId); // consumer group id
    prop.put("zookeeper.connect", zookeeper); // ZooKeeper connect string
    prop.put("zookeeper.session.timeout.ms", providerProp.getProperty("consumer.hbase.zookeeper.session.timeout.ms"));
    prop.put("session.timeout.ms", providerProp.getProperty("consumer.hbase.session.timeout.ms"));
    prop.put("enable.auto.commit", providerProp.getProperty("consumer.hbase.enable.auto.commit"));
    prop.put("auto.offset.reset", providerProp.getProperty("consumer.hbase.auto.offset.reset"));
    prop.put("offsets.storage", providerProp.getProperty("consumer.hbase.offsets.storage"));
    prop.put("dual.commit", providerProp.getProperty("consumer.hbase.dual.commit"));
    prop.put("auto.commit.interval.ms", providerProp.getProperty("consumer.hbase.auto.commit.interval.ms"));
    // 3. Build the ConsumerConfig object
    return new ConsumerConfig(prop);
}
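A typical call site for the helper above (host names and group id are placeholders):

ConsumerConfig config = createConsumerConfig(
        "hadoop-senior01.ibeifeng.com:2181,hadoop-senior02.ibeifeng.com:2181/kafka", "hbase-sync");
ConsumerConnector connector = Consumer.createJavaConsumerConnector(config);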
 
Example #14
Source File: PulsarKafkaConsumerTest.java    From pulsar with Apache License 2.0
@Test
public void testPulsarKafkaConsumerWithDefaultConfig() throws Exception {
    // https://kafka.apache.org/08/documentation.html#consumerconfigs
    Properties properties = new Properties();
    properties.put("zookeeper.connect", "http://localhost:8080/");
    properties.put("group.id", "group1");

    ConsumerConfig config = new ConsumerConfig(properties);
    ConsumerConnector connector = new ConsumerConnector(config);
    ConsumerBuilderImpl<byte[]> consumerBuilder = (ConsumerBuilderImpl<byte[]>) connector.getConsumerBuilder();
    Field confField = consumerBuilder.getClass().getDeclaredField("conf");
    confField.setAccessible(true);
    ConsumerConfigurationData conf = (ConsumerConfigurationData) confField.get(consumerBuilder);
    assertEquals(conf.getSubscriptionName(), "group1");
    assertEquals(conf.getReceiverQueueSize(), 1000);
}
 
Example #15
Source File: MessageResource.java    From dropwizard-kafka-http with Apache License 2.0
@GET
@Timed
public Response consume(
        @QueryParam("topic") String topic,
        @QueryParam("timeout") Integer timeout
) {
    if (Strings.isNullOrEmpty(topic))
        return Response.status(400)
                .entity(new String[]{"Undefined topic"})
                .build();

    Properties props = (Properties) consumerCfg.clone();
    if (timeout != null) props.put("consumer.timeout.ms", "" + timeout);

    ConsumerConfig config = new ConsumerConfig(props);
    ConsumerConnector connector = Consumer.createJavaConsumerConnector(config);

    Map<String, Integer> streamCounts = Collections.singletonMap(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams(streamCounts);
    KafkaStream<byte[], byte[]> stream = streams.get(topic).get(0);

    List<Message> messages = new ArrayList<>();
    try {
        for (MessageAndMetadata<byte[], byte[]> messageAndMetadata : stream)
            messages.add(new Message(messageAndMetadata));
    } catch (ConsumerTimeoutException ignore) {
    } finally {
        connector.commitOffsets();
        connector.shutdown();
    }

    return Response.ok(messages).build();
}
 
Example #16
Source File: KafkaComponent.java    From metron with Apache License 2.0
public ConsumerIterator<byte[], byte[]> getStreamIterator(String topic, String group, String consumerName) {
  // set up a high-level consumer for the topic
  Properties consumerProperties = TestUtils.createConsumerProperties(zookeeperConnectString, group, consumerName, -1);
  consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
  Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
  topicCountMap.put(topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
  ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
  return iterator;
}
 
Example #17
Source File: KafkaPublisherTest.java    From nifi with Apache License 2.0
private ConsumerIterator<byte[], byte[]> buildConsumer(String topic) {
    Properties props = new Properties();
    props.put("zookeeper.connect", "localhost:" + kafkaLocal.getZookeeperPort());
    props.put("group.id", "test");
    props.put("consumer.timeout.ms", "5000");
    props.put("auto.offset.reset", "smallest");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<>(1);
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
    ConsumerIterator<byte[], byte[]> iter = streams.get(0).iterator();
    return iter;
}
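Because consumer.timeout.ms is 5000, hasNext() throws ConsumerTimeoutException once the topic stays quiet for five seconds, which is how such a test knows it has read everything. A hedged usage sketch (the topic name is a placeholder):

List<String> received = new ArrayList<>();
ConsumerIterator<byte[], byte[]> iter = buildConsumer("test-topic");
try {
    while (iter.hasNext()) {
        received.add(new String(iter.next().message()));
    }
} catch (ConsumerTimeoutException e) {
    // quiet for 5 seconds: assume everything published has been consumed
}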
 
Example #18
Source File: PutKafkaTest.java    From nifi with Apache License 2.0
private ConsumerIterator<byte[], byte[]> buildConsumer(String topic) {
    Properties props = new Properties();
    props.put("zookeeper.connect", "0.0.0.0:" + kafkaLocal.getZookeeperPort());
    props.put("group.id", "test");
    props.put("consumer.timeout.ms", "5000");
    props.put("auto.offset.reset", "smallest");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<>(1);
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
    ConsumerIterator<byte[], byte[]> iter = streams.get(0).iterator();
    return iter;
}
 
Example #19
Source File: KafkaTestBase.java    From incubator-gobblin with Apache License 2.0
public KafkaTestBase(String topic) throws InterruptedException, RuntimeException {

    startServer();

    this.topic = topic;

    AdminUtils.createTopic(zkClient, topic, 1, 1, new Properties());

    List<KafkaServer> servers = new ArrayList<>();
    servers.add(kafkaServer);
    TestUtils.waitUntilMetadataIsPropagated(scala.collection.JavaConversions.asScalaBuffer(servers), topic, 0, 5000);

    Properties consumeProps = new Properties();
    consumeProps.put("zookeeper.connect", zkConnect);
    consumeProps.put("group.id", "testConsumer");
    consumeProps.put("zookeeper.session.timeout.ms", "10000");
    consumeProps.put("zookeeper.sync.time.ms", "10000");
    consumeProps.put("auto.commit.interval.ms", "10000");
    consumeProps.put("consumer.timeout.ms", "10000");

    consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));

    Map<String, Integer> topicCountMap = new HashMap<>();
    topicCountMap.put(this.topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
    stream = streams.get(0);

    iterator = stream.iterator();
}
 
Example #20
Source File: KafkaClientTest.java    From Krackle with Apache License 2.0
private ConsumerConnector getStdConsumer() {
	Properties props = new Properties();
	props.put("zookeeper.connect", "localhost:21818");
	props.put("group.id", "test");
	ConsumerConfig conf = new ConsumerConfig(props);
	return kafka.consumer.Consumer.createJavaConsumerConnector(conf);
}
 
Example #22
Source File: KafkaConsumer08.java    From datacollector with Apache License 2.0 5 votes vote down vote up
@Override
public void init() throws StageException {
  if(consumer == null) {
    Properties props = new Properties();
    configureKafkaProperties(props);
    LOG.debug("Creating Kafka Consumer with properties {}", props.toString());
    consumerConfig = new ConsumerConfig(props);
    createConsumer();
  }
}
 
Example #23
Source File: KafkaConsumer08.java    From datacollector with Apache License 2.0 5 votes vote down vote up
@Override
public void validate(List<Stage.ConfigIssue> issues, Stage.Context context) {
  validateKafkaTimestamp(issues);
  if (issues.isEmpty()) {
    Properties props = new Properties();
    configureKafkaProperties(props);
  LOG.debug("Creating Kafka Consumer with properties {}", props.toString());
    consumerConfig = new ConsumerConfig(props);
    try {
      createConsumer(issues, context);
    } catch (StageException ex) {
      issues.add(context.createConfigIssue(null, null, KafkaErrors.KAFKA_10, ex.toString()));
    }
  }
}
 
Example #23
Source File: KafkaTestConsumer.java    From attic-apex-malhar with Apache License 2.0
private ConsumerConfig createConsumerConfig()
{
  Properties props = new Properties();
  props.setProperty("zookeeper.connect", zkaddress);
  props.setProperty("group.id", "group1");
  props.put("auto.offset.reset", "smallest");
  return new ConsumerConfig(props);
}
 
Example #24
Source File: AlertKafkaPublisherTest.java    From eagle with Apache License 2.0
private static void consumeWithOutput(final List<String> outputMessages) {
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            Properties props = new Properties();
            props.put("group.id", "B");
            props.put("zookeeper.connect", "127.0.0.1:" + + TEST_KAFKA_ZOOKEEPER_PORT);
            props.put("zookeeper.session.timeout.ms", "4000");
            props.put("zookeeper.sync.time.ms", "2000");
            props.put("auto.commit.interval.ms", "1000");
            props.put("auto.offset.reset", "smallest");

            ConsumerConnector jcc = null;
            try {
                ConsumerConfig ccfg = new ConsumerConfig(props);
                jcc = Consumer.createJavaConsumerConnector(ccfg);
                Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
                topicCountMap.put(TEST_TOPIC_NAME, 1);
                Map<String, List<KafkaStream<byte[], byte[]>>> topicMap = jcc.createMessageStreams(topicCountMap);
                KafkaStream<byte[], byte[]> cstrm = topicMap.get(TEST_TOPIC_NAME).get(0);
                for (MessageAndMetadata<byte[], byte[]> mm : cstrm) {
                    String message = new String(mm.message());
                    outputMessages.add(message);

                    try {
                        Thread.sleep(5000);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // preserve interrupt status
                    }
                }
            } finally {
                if (jcc != null) {
                    jcc.shutdown();
                }
            }
        }
    });
    t.start();
}
 
Example #25
Source File: Kafka.java    From hangout with MIT License
protected void prepare() {
    //if null, utf-8 encoding will be used
    this.encoding = (String) this.config.get("encoding");
    if (this.encoding == null) {
        this.encoding = "UTF-8";
    }
    topics = (Map<String, Integer>) this.config.get("topic");
    topicPatterns = (Map<String, Integer>) this.config.get("topic_pattern");

    Properties props = new Properties();
    HashMap<String, String> consumerSettings = (HashMap<String, String>) this.config.get("consumer_settings");
    consumerSettings.entrySet().stream().forEach(entry -> props.put(entry.getKey(), entry.getValue()));
    consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
}
 
Example #26
Source File: ConsumerThreadPool.java    From spring-boot-kafka-video-consumer with MIT License
@PostConstruct
public void startConsuming() {
    ConsumerConfig consumerConfig = consumerConfigFactory.getConsumerConfig();
    consumer = createJavaConsumerConnector(consumerConfig);
    
    consume();
}
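consume() is defined elsewhere in the class; a hedged sketch of the usual shape of such a method (the topic name, stream count, and worker body are assumptions):

private void consume() {
    Map<String, List<KafkaStream<byte[], byte[]>>> streams =
            consumer.createMessageStreams(Collections.singletonMap("video-topic", 4));
    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (KafkaStream<byte[], byte[]> stream : streams.get("video-topic")) {
        pool.submit(() -> {
            for (MessageAndMetadata<byte[], byte[]> mm : stream) {
                // process mm.message(), e.g. hand a frame to the video pipeline
            }
        });
    }
}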
 
Example #27
Source File: ConsumerConfigFactory.java    From spring-boot-kafka-video-consumer with MIT License
@PostConstruct
private void createConsumerConfig() {
    Properties props = new Properties();
    props.put("zookeeper.connect", ZK_CONNECT);
    props.put("group.id", "Video-cg-0");
    props.put("zookeeper.session.timeout.ms", "400");
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    consumerConfig = new ConsumerConfig(props);
}
 
Example #28
Source File: KafkaSourceUtil.java    From flume-ng-extends-source with MIT License
public static ConsumerConnector getConsumer(Properties kafkaProps) {
  ConsumerConfig consumerConfig =
          new ConsumerConfig(kafkaProps);
  ConsumerConnector consumer =
          Consumer.createJavaConsumerConnector(consumerConfig);
  return consumer;
}
 
Example #29
Source File: KafkaConsumer.java    From sqoop-on-spark with Apache License 2.0
private static ConsumerConfig createConsumerConfig(String zkUrl,
                                                   String groupId) {
  Properties props = new Properties();
  props.put("zookeeper.connect", zkUrl);
  props.put("group.id", groupId);
  props.put("zookeeper.session.timeout.ms", "1000");
  props.put("zookeeper.sync.time.ms", "200");
  props.put("auto.commit.interval.ms", "1000");
  props.put("auto.offset.reset", "smallest");
  props.put("consumer.timeout.ms","1000");
  return new ConsumerConfig(props);
}
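And a typical call site for this helper (URL and group id are placeholders):

ConsumerConnector connector = Consumer.createJavaConsumerConnector(
        createConsumerConfig("localhost:2181", "sqoop-test-group"));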