Java Code Examples for org.apache.kafka.common.serialization.StringDeserializer

The following examples show how to use org.apache.kafka.common.serialization.StringDeserializer. The examples are extracted from open source projects; where the source is known, the project, source file, and license are noted above the example.
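
Before the project examples, a minimal, self-contained sketch of what StringDeserializer does on its own may help: it decodes a byte[] payload back into a String, using UTF-8 unless another encoding is configured. The class name StringDeserializerDemo, the topic name demo-topic, and the payload below are illustrative placeholders, not taken from any of the projects that follow.

import java.nio.charset.StandardCharsets;

import org.apache.kafka.common.serialization.StringDeserializer;

public class StringDeserializerDemo {

    public static void main(String[] args) {
        byte[] payload = "hello, kafka".getBytes(StandardCharsets.UTF_8);
        // Deserializer extends Closeable, so try-with-resources is usable here,
        // although StringDeserializer.close() is effectively a no-op.
        try (StringDeserializer deserializer = new StringDeserializer()) {
            // StringDeserializer ignores the topic argument and simply decodes
            // the raw bytes with its configured character encoding.
            String value = deserializer.deserialize("demo-topic", payload);
            System.out.println(value); // prints: hello, kafka
        }
    }
}

The examples below plug the same class into a KafkaConsumer either programmatically (new StringDeserializer()) or by class name through the key.deserializer and value.deserializer configuration entries.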
Example 1
private void testSelectWithFilter(String resultStream,
                                  String inputStreamName,
                                  DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {

  ksqlContext.sql(String.format("CREATE STREAM %s AS SELECT * FROM %s WHERE ORDERUNITS > 40;",
                                resultStream, inputStreamName));

  Map<String, GenericRow> results = testHarness.consumeData(resultStream,
                                                            dataProvider.schema(),
                                                            4,
                                                            new StringDeserializer(),
                                                            IntegrationTestHarness
                                                                .RESULTS_POLL_MAX_TIME_MS,
                                                            dataSourceSerDe);

  Assert.assertEquals(4, results.size());
}
 
Example 2
private KafkaConsumer<String, String> createKafkaConsumer(
    String bootstrapServers,
    String patchTopic,
    String patchGroupId,
    ObjectMapper objectMapper) {
  Properties properties = new Properties();
  properties.setProperty("bootstrap.servers", bootstrapServers);
  properties.setProperty("group.id", patchGroupId);
  properties.setProperty("auto.offset.reset", "earliest");
  properties.setProperty("enable.auto.commit", "false");
  KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(
      properties,
      new StringDeserializer(),
      new StringDeserializer());
  kafkaConsumer.subscribe(Lists.newArrayList(patchTopic));
  return kafkaConsumer;
}
 
Example 3
public static Pipeline build(String bootstrapServers) {
    Properties properties = new Properties();
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName());
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName());
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    Pipeline pipeline = Pipeline.create();

    pipeline
            .readFrom(KafkaSources.kafka(properties, Constants.TOPIC_NAME_PRECIOUS))
            .withoutTimestamps()
            .writeTo(Sinks.map(Constants.IMAP_NAME_PRECIOUS));

    return pipeline;
}
 
Example 4
Source Project: kafka_book_demo   Source File: ConsumerTTL.java    License: Apache License 2.0
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    props.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG,
            ConsumerInterceptorTTL.class.getName());

    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList(topic));

    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(record.partition() + ":" + record.offset() + ":" + record.value());
        }
    }
}
 
Example 5
public static void main(String[] args) {
    String brokerList = "192.168.0.101:9092";
    String topic = "topic.serialization";
    String groupId = "group.demo";
    Properties properties = new Properties();
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtostuffDeserializer.class.getName());
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

    KafkaConsumer<String, Company> consumer = new KafkaConsumer<String, Company>(properties);
    consumer.subscribe(Collections.singletonList(topic));

    while (true) {
        ConsumerRecords<String, Company> records = consumer.poll(Duration.ofMillis(1000));
        for (ConsumerRecord<String, Company> record : records) {
            System.out.println(String.format("%s-%s-%s-%s",
                    record.topic(), record.partition(), record.offset(), record.value()));
            // On successful deserialization, prints e.g.: topic.serialization-0-1-Company(name=whirly, address=中国)
        }
    }
}
 
Example 6
@Test
public void testBuildNoEnableAutoCommit() throws RemotingKafkaConfigurationException {
    Properties props = new ConsumerPropertiesBuilder()
            .withBootstrapServers("localhost:9092")
            .withAutoOffsetReset(AutoOffsetReset.EARLIEST)
            .withGroupID("test")
            .withKeyDeserializer(StringDeserializer.class)
            .withValueDeserializer(StringDeserializer.class)
            .build();
    assertEquals("localhost:9092", props.get(KafkaConfigs.BOOTSTRAP_SERVERS));
    assertEquals(true, props.get(KafkaConfigs.ENABLE_AUTO_COMMIT));
    assertEquals(AutoOffsetReset.EARLIEST.toString(), props.get(KafkaConfigs.AUTO_OFFSET_RESET));
    assertEquals("test", props.get(KafkaConfigs.GROUP_ID));
    assertEquals(StringDeserializer.class, props.get(KafkaConfigs.KEY_DESERIALIZER));
    assertEquals(StringDeserializer.class, props.get(KafkaConfigs.VALUE_DESERIALIZER));
}
 
Example 7
Source Project: ad   Source File: KafkaConfiguration.java    License: Apache License 2.0
@Bean
KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, String> containerFactory = new ConcurrentKafkaListenerContainerFactory<>();
    containerFactory.setConcurrency(concurrency);

    Map<String, Object> config = Maps.newHashMap();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    // In the original course implementation, the ad index data lives in a ConcurrentHashMap,
    // i.e. inside each index service instance's own JVM. So when each index instance listens
    // to the Kafka topic, every instance must belong to a different consumer group, i.e. each
    // instance needs its own distinct groupId, so that in a multi-instance deployment every
    // instance loads the complete index data.

    // In this implementation, however, the index data has been moved out into a Redis database,
    // so all instances should belong to the same consumer group and consume the Kafka topic
    // together, ensuring the index data is not consumed more than once.

    // In short: if the index data lives in each instance's own JVM, consider keeping the line
    // below (or writing an equivalent) so that each instance ends up in its own consumer group.
    // If the index data lives in a location shared by all search instances, drop the setting
    // below (or delete this class entirely).
    config.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
    DefaultKafkaConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<String, String>(config);
    containerFactory.setConsumerFactory(consumerFactory);
    return containerFactory;
}
 
Example 8
private Map<String, Object> consumerConfigs() {
    Map<String, Object> propsMap = new HashMap<>(16);
    propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
    String sId = groupId;
    if (groupId == null || groupId.trim().isEmpty()) {
        sId = StringsUtil.getUuid();
    }
    propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, sId);
    propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
    propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);

    // valid values: latest, none, earliest
    propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
    propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    return propsMap;
}
 
Example 9
@Test
public void testBuildNoAutoOffsetReset() throws RemotingKafkaConfigurationException {
    Properties props = new ConsumerPropertiesBuilder()
            .withBootstrapServers("localhost:9092")
            .withEnableAutoCommit(false)
            .withGroupID("test")
            .withKeyDeserializer(StringDeserializer.class)
            .withValueDeserializer(StringDeserializer.class)
            .build();
    assertEquals("localhost:9092", props.get(KafkaConfigs.BOOTSTRAP_SERVERS));
    assertEquals(false, props.get(KafkaConfigs.ENABLE_AUTO_COMMIT));
    assertEquals(null, props.get(KafkaConfigs.AUTO_OFFSET_RESET));
    assertEquals("test", props.get(KafkaConfigs.GROUP_ID));
    assertEquals(StringDeserializer.class, props.get(KafkaConfigs.KEY_DESERIALIZER));
    assertEquals(StringDeserializer.class, props.get(KafkaConfigs.VALUE_DESERIALIZER));
}
 
Example 10
Source Project: strimzi-kafka-oauth   Source File: ExampleConsumer.java    License: Apache License 2.0
/**
 * Build KafkaConsumer properties. The specified values are defaults that can be overridden
 * through runtime system properties or env variables.
 *
 * @return Configuration properties
 */
private static Properties buildConsumerConfig() {

    Properties p = new Properties();

    p.setProperty("security.protocol", "SASL_PLAINTEXT");
    p.setProperty("sasl.mechanism", "OAUTHBEARER");
    p.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required ;");
    p.setProperty("sasl.login.callback.handler.class", "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler");

    p.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    p.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    p.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

    p.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "a_consumer-group");
    p.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "10");
    p.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");

    return ConfigProperties.resolve(p);
}
 
Example 11
private static Properties buildConsumerConfig() {
    Properties p = new Properties();
    p.setProperty("security.protocol", "SASL_PLAINTEXT");
    p.setProperty("sasl.mechanism", "OAUTHBEARER");
    p.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required ;");
    p.setProperty("sasl.login.callback.handler.class", "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler");

    p.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092");
    p.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    p.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

    p.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "consumer-group");
    p.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "10");
    p.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");

    return p;
}
 
Example 12
static Properties buildConsumerConfig(String accessToken) {
    Properties p = new Properties();
    p.setProperty("security.protocol", "SASL_PLAINTEXT");
    p.setProperty("sasl.mechanism", "OAUTHBEARER");
    p.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " +
            " oauth.access.token=\"" + accessToken + "\";");
    p.setProperty("sasl.login.callback.handler.class", "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler");

    p.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092");
    p.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    p.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

    p.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "consumer-group");
    p.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "10");
    p.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");

    return p;
}
 
Example 13
Source Project: fasten   Source File: KafkaConnector.java    License: Apache License 2.0
/**
 * Returns Kafka properties.
 *
 * @param serverAddresses broker address
 * @param groupId         group id
 * @return Kafka Properties
 */
public static Properties kafkaConsumerProperties(List<String> serverAddresses, String groupId) {
    String deserializer = StringDeserializer.class.getName();
    Properties properties = new Properties();

    properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
            String.join(",", serverAddresses));
    properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    properties.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, groupId + "_client");
    properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deserializer);
    properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deserializer);
    properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    properties.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "5");

    // Gives more time to the consumer for processing the records so
    // that the broker will NOT kill the consumer.
    properties.setProperty(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "200000");
    properties.setProperty(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "700000");

    return properties;
}
 
Example 14
Source Project: joyqueue   Source File: SimpleKafkaConsumer.java    License: Apache License 2.0
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaConfigs.BOOTSTRAP);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, KafkaConfigs.GROUP_ID);
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, KafkaConfigs.GROUP_ID);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList("test_topic_0"));

    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(String.format("record, key: %s, value: %s, offset: %s",
                    record.key(), record.value(), record.offset()));
        }
    }
}
 
Example 15
Source Project: joyqueue   Source File: SimpleKafkaConsumer.java    License: Apache License 2.0
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:50088");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test_app");
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "test_app");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList("test_topic_0"));

    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(String.format("record, key: %s, value: %s, offset: %s", record.key(), record.value(), record.offset()));
        }
    }
}
 
Example 16
Source Project: kafka-encryption   Source File: SampleRawConsumer.java    License: Apache License 2.0
@Override
public void run() {

    Properties consumerProperties = new Properties();
    consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sampleraw");
    consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    try (KafkaConsumer<Long, String> consumer = new KafkaConsumer<Long, String>(
            consumerProperties,
            new LongDeserializer(),
            new StringDeserializer())) {

        consumer.subscribe(Collections.singleton("sampletopic"));
        while (true) {
            ConsumerRecords<Long, String> records = consumer.poll(1000L);
            records.forEach(
                    record -> System.out.println(
                        "-------------------------------------------------------------\n" +
                        "raw record: key=" + record.key() + ", offset=" + record.offset() + ", value=" + record.value() +
                        "\n-------------------------------------------------------------\n\n"
                    )
            );
        }
    }
}
 
Example 17
@Before
public void setUp() {
    Properties properties = StreamsTestUtils.getStreamsConfig("integrationTest",
            EMBEDDED_KAFKA.bootstrapServers(),
            STRING_SERDE_CLASSNAME,
            STRING_SERDE_CLASSNAME,
            new Properties());
    properties.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    
    streamsConfig = new StreamsConfig(properties);

    producerConfig = TestUtils.producerConfig(EMBEDDED_KAFKA.bootstrapServers(),
            StringSerializer.class,
            StringSerializer.class);

    consumerConfig = TestUtils.consumerConfig(EMBEDDED_KAFKA.bootstrapServers(),
            StringDeserializer.class,
            StringDeserializer.class);
}
 
Example 18
@Test
public void testBuildAllConfigs() throws RemotingKafkaConfigurationException {
    Properties props = new ConsumerPropertiesBuilder()
            .withBootstrapServers("localhost:9092")
            .withEnableAutoCommit(false)
            .withAutoOffsetReset(AutoOffsetReset.EARLIEST)
            .withGroupID("test")
            .withKeyDeserializer(StringDeserializer.class)
            .withValueDeserializer(StringDeserializer.class)
            .build();
    assertEquals("localhost:9092", props.get(KafkaConfigs.BOOTSTRAP_SERVERS));
    assertEquals(false, props.get(KafkaConfigs.ENABLE_AUTO_COMMIT));
    assertEquals(AutoOffsetReset.EARLIEST.toString(), props.get(KafkaConfigs.AUTO_OFFSET_RESET));
    assertEquals("test", props.get(KafkaConfigs.GROUP_ID));
    assertEquals(StringDeserializer.class, props.get(KafkaConfigs.KEY_DESERIALIZER));
    assertEquals(StringDeserializer.class, props.get(KafkaConfigs.VALUE_DESERIALIZER));
}
 
Example 19
Source Project: smallrye-reactive-messaging   Source File: KafkaUsage.java    License: Apache License 2.0
private void consumeDoubles(BooleanSupplier continuation, Runnable completion, Collection<String> topics,
        Consumer<ConsumerRecord<String, Double>> consumerFunction) {
    Deserializer<String> keyDes = new StringDeserializer();
    Deserializer<Double> valDes = new DoubleDeserializer();
    String randomId = UUID.randomUUID().toString();
    this.consume(randomId, randomId, OffsetResetStrategy.EARLIEST, keyDes, valDes, continuation, null,
            completion, topics, consumerFunction);
}
 
Example 20
private MapBasedConfig getKafkaSinkConfigForMyAppProcessingData() {
    String prefix = "mp.messaging.outgoing.kafka.";
    Map<String, Object> config = new HashMap<>();
    config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
    config.put(prefix + "value.serializer", StringSerializer.class.getName());
    config.put(prefix + "topic", "some-other-topic");

    prefix = "mp.messaging.incoming.source.";
    config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
    config.put(prefix + "value.deserializer", IntegerDeserializer.class.getName());
    config.put(prefix + "key.deserializer", StringDeserializer.class.getName());
    config.put(prefix + "topic", "some-topic");

    return new MapBasedConfig(config);
}
 
Example 21
Source Project: data-highway   Source File: RoadEndpointsIntegrationTest.java    License: Apache License 2.0
private static KafkaConsumer<String, String> createPatchConsumer() {
  Properties properties = new Properties();
  properties.setProperty("bootstrap.servers", kafkaCluster.bootstrapServers());
  properties.setProperty("group.id", UUID.randomUUID().toString());
  properties.setProperty("auto.offset.reset", "earliest");
  properties.setProperty("enable.auto.commit", "false");
  KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties, new StringDeserializer(),
      new StringDeserializer());
  kafkaConsumer.subscribe(Lists.newArrayList(patchTopic));
  return kafkaConsumer;
}
 
Example 22
@Override
public Properties getProperties() {
    Properties props = new Properties();
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
    props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
    props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    return props;
}
 
Example 23
Source Project: ChengFeng1.5   Source File: KafkaConfig.java    License: MIT License
@Bean
public Map<String, Object> consumerConfigs() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBrokerList());
    props.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaProperties.getCGroupId());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, PurchaseInfoDeSerializer.class);
    return props;
}
 
Example 24
Source Project: springBoot-study   Source File: KafkaConsumerTest.java    License: Apache License 2.0
private void init() {
    Properties props = new Properties();
    // Kafka bootstrap servers to consume from
    props.put("bootstrap.servers", "master:9092,slave1:9092,slave2:9092");
    // Consumer group name; different groups can each consume the same data
    props.put("group.id", GROUPID);
    // Whether offsets are committed automatically
    props.put("enable.auto.commit", "false");
    // Session timeout
    props.put("session.timeout.ms", "30000");
    // Maximum number of records returned per poll
    props.put("max.poll.records", 10);
    // auto.offset.reset semantics:
    // earliest - if a partition has a committed offset, resume from it; otherwise start from the beginning
    // latest   - if a partition has a committed offset, resume from it; otherwise consume only newly produced data
    // none     - resume from committed offsets if every partition has one; otherwise throw an exception
    props.put("auto.offset.reset", "earliest");
    // Deserializers
    props.put("key.deserializer", StringDeserializer.class.getName());
    props.put("value.deserializer", StringDeserializer.class.getName());
    this.consumer = new KafkaConsumer<String, String>(props);
    // Subscribe to the topic list
    this.consumer.subscribe(Arrays.asList(topic));

    System.out.println("Initialized!");
}
 
Example 25
Source Project: football-events   Source File: PlayerCommandConnectorTest.java    License: MIT License
@Test
public void test() throws Exception {
    String json = StreamUtils.copyToString(getClass().getResourceAsStream("player-inserted.json"),
            Charset.defaultCharset());
    tester.sendStringMessage(1L, json, "fb-connect.public.players");
    ProducerRecord<String, PlayerStartedCareer> event = tester.read(Topics.eventTopicName(PlayerStartedCareer.class),
            new StringDeserializer(), new JsonPojoSerde<>(PlayerStartedCareer.class));

    assertThat(event.key()).isEqualTo("1");
    assertThat(event.value().getPlayerId()).isEqualTo("1");
    assertThat(event.value().getName()).isEqualTo("Player One");
}
 
Example 26
Source Project: football-events   Source File: FootballEcosystem.java    License: MIT License
public <T extends Event> List<T> waitForEvents(Class<T> type, int expectedEventCount) {
    var consumer = new KafkaConsumer<String, T>(consumerProps, new StringDeserializer(),
            new JsonPojoSerde<T>(type));

    try {
        String topic = Topics.eventTopicName(type);
        consumer.subscribe(Collections.singletonList(topic));
        var found = new ArrayList<T>(expectedEventCount);
        long timeout = eventTimeout;
        long endTime = System.currentTimeMillis() + timeout;

        do {
            for (ConsumerRecord<String, T> record : consumer.poll(timeout)) {
                found.add(record.value());
            }
            timeout = endTime - System.currentTimeMillis();
        } while (found.size() < expectedEventCount && timeout > 0);

        if (found.size() < expectedEventCount) {
            throw new RuntimeException("The expected number of events in topic " + topic + " should be: "
                + expectedEventCount + ", but found: " + found.size());
        }
        if (found.size() > expectedEventCount) {
            logger.warn("Some redundant events have been found in topic {}: {}", topic, found);
        }
        return found;
    } finally {
        consumer.close();
    }
}
 
Example 27
Source Project: kafka_book_demo   Source File: KafkaConsumerGroupService.java    License: Apache License 2.0
public static KafkaConsumer<String, String> createNewConsumer(
        String brokerUrl, String groupId) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerUrl);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    return new KafkaConsumer<>(props);
}
 
Example 28
public static Properties initConfig() {
    Properties props = new Properties();
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
    return props;
}
 
Example 29
Source Project: smallrye-reactive-messaging   Source File: KafkaUsage.java    License: Apache License 2.0
private void consumeStrings(BooleanSupplier continuation, Runnable completion, Collection<String> topics,
        Consumer<ConsumerRecord<String, String>> consumerFunction) {
    Deserializer<String> keyDes = new StringDeserializer();
    String randomId = UUID.randomUUID().toString();
    this.consume(randomId, randomId, OffsetResetStrategy.EARLIEST, keyDes, keyDes, continuation, null,
            completion, topics, consumerFunction);
}