org.apache.kafka.common.serialization.IntegerDeserializer Java Examples

The following examples show how to use org.apache.kafka.common.serialization.IntegerDeserializer, the Kafka client deserializer that converts a record key or value back into a java.lang.Integer. Each example is drawn from an open-source project; the project and license are noted above the code.
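As a quick orientation (a minimal standalone sketch, not taken from any of the projects below), IntegerDeserializer reads a four-byte, big-endian payload and returns the corresponding java.lang.Integer; IntegerSerializer produces that encoding:

import java.nio.ByteBuffer;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;

public class IntegerDeserializerSketch {
    public static void main(String[] args) {
        // IntegerSerializer writes the int as four big-endian bytes,
        // the same byte order ByteBuffer uses by default.
        byte[] payload = new IntegerSerializer().serialize("any-topic", 42);
        System.out.println(ByteBuffer.wrap(payload).getInt()); // 42

        // IntegerDeserializer reverses the encoding; the topic name is ignored.
        Integer value = new IntegerDeserializer().deserialize("any-topic", payload);
        System.out.println(value); // 42
    }
}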
Example #1
Source File: ConsumerExample.java    From pulsar with Apache License 2.0
public static void main(String[] args) {
    String topic = "persistent://public/default/test";

    Properties props = new Properties();
    props.put("bootstrap.servers", "pulsar://localhost:6650");
    props.put("group.id", "my-subscription-name");
    props.put("enable.auto.commit", "false");
    props.put("key.deserializer", IntegerDeserializer.class.getName());
    props.put("value.deserializer", StringDeserializer.class.getName());

    @SuppressWarnings("resource")
    Consumer<Integer, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(topic));

    while (true) {
        ConsumerRecords<Integer, String> records = consumer.poll(100);
        records.forEach(record -> {
            log.info("Received record: {}", record);
        });

        // Commit last offset
        consumer.commitSync();
    }
}
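Note: consumer.poll(100) calls the poll(long) overload, which works against this older client API but is deprecated in Kafka clients 2.0 and later. A minimal adaptation of the loop for a 2.x client (assuming java.time.Duration is imported) would be:

while (true) {
    ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(100));
    records.forEach(record -> log.info("Received record: {}", record));

    // Commit last offset
    consumer.commitSync();
}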
 
Example #2
Source File: SparkDataSetProcessor.java    From spliceengine with GNU Affero General Public License v3.0
public <V> DataSet<ExecRow> readKafkaTopic(String topicName, OperationContext context) throws StandardException {
    Properties props = new Properties();
    String consumerGroupId = "spark-consumer-dss-sdsp";
    String bootstrapServers = SIDriver.driver().getConfiguration().getKafkaBootstrapServers();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroupId);
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, consumerGroupId+"-"+UUID.randomUUID());
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ExternalizableDeserializer.class.getName());

    KafkaConsumer<Integer, Externalizable> consumer = new KafkaConsumer<>(props);
    List<PartitionInfo> ps = consumer.partitionsFor(topicName);
    List<Integer> partitions = new ArrayList<>(ps.size());
    for (int i = 0; i < ps.size(); ++i) {
        partitions.add(i);
    }
    consumer.close();

    SparkDataSet rdd = new SparkDataSet(SpliceSpark.getContext().parallelize(partitions, partitions.size()));
    return rdd.flatMap(new KafkaReadFunction(context, topicName, bootstrapServers));
}
 
Example #3
Source File: DataLoaderConfig.java    From kafka-webview with MIT License
/**
 * Creates default message formats.
 */
private void createDefaultMessageFormats() {
    final Map<String, String> defaultFormats = new HashMap<>();
    defaultFormats.put("Short", ShortDeserializer.class.getName());
    defaultFormats.put("ByteArray", ByteArrayDeserializer.class.getName());
    defaultFormats.put("Bytes", BytesDeserializer.class.getName());
    defaultFormats.put("Double", DoubleDeserializer.class.getName());
    defaultFormats.put("Float", FloatDeserializer.class.getName());
    defaultFormats.put("Integer", IntegerDeserializer.class.getName());
    defaultFormats.put("Long", LongDeserializer.class.getName());
    defaultFormats.put("String", StringDeserializer.class.getName());
    defaultFormats.put("Bytes (Hex Encoded)", BytesToHexDeserializer.class.getName());

    // Create if needed.
    for (final Map.Entry<String, String> entry : defaultFormats.entrySet()) {
        MessageFormat messageFormat = messageFormatRepository.findByName(entry.getKey());
        if (messageFormat == null) {
            messageFormat = new MessageFormat();
        }
        messageFormat.setName(entry.getKey());
        messageFormat.setClasspath(entry.getValue());
        messageFormat.setJar("n/a");
        messageFormat.setDefaultFormat(true);
        messageFormatRepository.save(messageFormat);
    }
}
 
Example #4
Source File: KafkaIOTest.java    From beam with Apache License 2.0
/**
 * Creates a read transform over two topics with 10 partitions each. The numElements
 * records are assigned round-robin across all 20 partitions.
 */
private static KafkaIO.Read<Integer, Long> mkKafkaReadTransform(
    int numElements,
    int maxNumRecords,
    @Nullable SerializableFunction<KV<Integer, Long>, Instant> timestampFn) {

  List<String> topics = ImmutableList.of("topic_a", "topic_b");

  KafkaIO.Read<Integer, Long> reader =
      KafkaIO.<Integer, Long>read()
          .withBootstrapServers("myServer1:9092,myServer2:9092")
          .withTopics(topics)
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  topics, 10, numElements, OffsetResetStrategy.EARLIEST)) // 20 partitions
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(LongDeserializer.class)
          .withMaxNumRecords(maxNumRecords);

  if (timestampFn != null) {
    return reader.withTimestampFn(timestampFn);
  } else {
    return reader;
  }
}
 
Example #5
Source File: KafkaAvroTest.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, Pet> createConsumer() {
    String registry = System.getProperty("schema.url");

    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-avro");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);

    // Without this, you get GenericData.Record instead of `Pet`
    props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);

    KafkaConsumer<Integer, Pet> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-avro-producer"));
    return consumer;
}
 
Example #6
Source File: KafkaSourceTest.java    From smallrye-reactive-messaging with Apache License 2.0
private MapBasedConfig myKafkaSourceConfig(int partitions, String withConsumerRebalanceListener, String group) {
    String prefix = "mp.messaging.incoming.data.";
    Map<String, Object> config = new HashMap<>();
    config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
    if (group != null) {
        config.put(prefix + "group.id", group);
    }
    config.put(prefix + "value.deserializer", IntegerDeserializer.class.getName());
    config.put(prefix + "enable.auto.commit", "false");
    config.put(prefix + "auto.offset.reset", "earliest");
    config.put(prefix + "topic", "data");
    if (partitions > 0) {
        config.put(prefix + "partitions", Integer.toString(partitions));
        config.put(prefix + "topic", "data-" + partitions);
    }
    if (withConsumerRebalanceListener != null) {
        config.put(prefix + "consumer-rebalance-listener.name", withConsumerRebalanceListener);
    }

    return new MapBasedConfig(config);
}
 
Example #7
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceWithSingleTopic() {
  // same as testUnboundedSource, but with a single topic

  int numElements = 1000;
  String topic = "my_topic";

  KafkaIO.Read<Integer, Long> reader =
      KafkaIO.<Integer, Long>read()
          .withBootstrapServers("none")
          .withTopic("my_topic")
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic), 10, numElements, OffsetResetStrategy.EARLIEST))
          .withMaxNumRecords(numElements)
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(LongDeserializer.class);

  PCollection<Long> input = p.apply(reader.withoutMetadata()).apply(Values.create());

  addCountingAsserts(input, numElements);
  p.run();
}
 
Example #8
Source File: KafkaSourceTest.java    From smallrye-reactive-messaging with Apache License 2.0
@SuppressWarnings("rawtypes")
@Test
public void testSourceWithChannelName() {
    KafkaUsage usage = new KafkaUsage();
    String topic = UUID.randomUUID().toString();
    Map<String, Object> config = newCommonConfig();
    config.put("channel-name", topic);
    config.put("value.deserializer", IntegerDeserializer.class.getName());
    config.put("bootstrap.servers", SERVERS);
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(new MapBasedConfig(config));
    KafkaSource<String, Integer> source = new KafkaSource<>(vertx, UUID.randomUUID().toString(), ic,
            getConsumerRebalanceListeners());

    List<KafkaRecord> messages = new ArrayList<>();
    source.getStream().subscribe().with(messages::add);

    AtomicInteger counter = new AtomicInteger();
    new Thread(() -> usage.produceIntegers(10, null,
            () -> new ProducerRecord<>(topic, counter.getAndIncrement()))).start();

    await().atMost(2, TimeUnit.MINUTES).until(() -> messages.size() >= 10);
    assertThat(messages.stream().map(KafkaRecord::getPayload).collect(Collectors.toList()))
            .containsExactly(0, 1, 2, 3, 4,
                    5, 6, 7, 8, 9);
}
 
Example #9
Source File: KafkaFailureHandlerTest.java    From smallrye-reactive-messaging with Apache License 2.0
private MapBasedConfig getDeadLetterQueueWithCustomConfig() {
    String prefix = "mp.messaging.incoming.kafka.";
    Map<String, Object> config = new HashMap<>();
    config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
    config.put(prefix + "group.id", "my-group");
    config.put(prefix + "topic", "dead-letter-custom");
    config.put(prefix + "value.deserializer", IntegerDeserializer.class.getName());
    config.put(prefix + "enable.auto.commit", "false");
    config.put(prefix + "auto.offset.reset", "earliest");
    config.put(prefix + "failure-strategy", "dead-letter-queue");
    config.put(prefix + "dead-letter-queue.topic", "missed");
    config.put(prefix + "dead-letter-queue.key.serializer", IntegerSerializer.class.getName());
    config.put(prefix + "dead-letter-queue.value.serializer", IntegerSerializer.class.getName());

    return new MapBasedConfig(config);
}
 
Example #10
Source File: KafkaSourceTest.java    From smallrye-reactive-messaging with Apache License 2.0
@SuppressWarnings("unchecked")
@Test
public void testSource() {
    KafkaUsage usage = new KafkaUsage();
    String topic = UUID.randomUUID().toString();
    Map<String, Object> config = newCommonConfig();
    config.put("topic", topic);
    config.put("value.deserializer", IntegerDeserializer.class.getName());
    config.put("bootstrap.servers", SERVERS);
    config.put("channel-name", topic);
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(new MapBasedConfig(config));
    KafkaSource<String, Integer> source = new KafkaSource<>(vertx, UUID.randomUUID().toString(), ic,
            getConsumerRebalanceListeners());

    List<Message<?>> messages = new ArrayList<>();
    source.getStream().subscribe().with(messages::add);

    AtomicInteger counter = new AtomicInteger();
    new Thread(() -> usage.produceIntegers(10, null,
            () -> new ProducerRecord<>(topic, counter.getAndIncrement()))).start();

    await().atMost(2, TimeUnit.MINUTES).until(() -> messages.size() >= 10);
    assertThat(messages.stream().map(m -> ((KafkaRecord<String, Integer>) m).getPayload())
            .collect(Collectors.toList())).containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
}
 
Example #11
Source File: StreamUtilsTest.java    From kafka-graphs with Apache License 2.0
@Test
public void testCollectionToStream() throws Exception {
    Collection<KeyValue<Integer, Integer>> input = new ArrayList<>();
    for (Integer i : LEFT_INPUT) {
        input.add(new KeyValue<>(i, i));
    }
    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, Integer> stream = StreamUtils.streamFromCollection(
        builder, PRODUCER_CONFIG, LEFT_INPUT_TOPIC, 50, (short) 1,
        Serdes.Integer(), Serdes.Integer(),
        input);
    stream.to(OUTPUT_TOPIC);

    startStreams(builder, Serdes.Integer(), Serdes.Integer());

    Thread.sleep(1000);

    List<KeyValue<Integer, Integer>> records = consumeData(
        OUTPUT_TOPIC, IntegerDeserializer.class, IntegerDeserializer.class, 26, 10000L);
    for (KeyValue<Integer, Integer> record : records) {
        assertEquals(record.key, record.value);
    }

    streams.close();
}
 
Example #12
Source File: KafkaProducerInterceptorWrapper.java    From pulsar with Apache License 2.0
static Deserializer getDeserializer(Serializer serializer) {
    if (serializer instanceof StringSerializer) {
        return new StringDeserializer();
    } else if (serializer instanceof LongSerializer) {
        return new LongDeserializer();
    } else if (serializer instanceof IntegerSerializer) {
        return new IntegerDeserializer();
    } else if (serializer instanceof DoubleSerializer) {
        return new DoubleDeserializer();
    } else if (serializer instanceof BytesSerializer) {
        return new BytesDeserializer();
    } else if (serializer instanceof ByteBufferSerializer) {
        return new ByteBufferDeserializer();
    } else if (serializer instanceof ByteArraySerializer) {
        return new ByteArrayDeserializer();
    } else {
        throw new IllegalArgumentException(serializer.getClass().getName() + " is not a valid or supported subclass of org.apache.kafka.common.serialization.Serializer.");
    }
}
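The mapping above mirrors Kafka's built-in serializer/deserializer pairs. A hypothetical round-trip check (not part of the pulsar source; getDeserializer is package-private, so such a caller would need to live in the same package) might look like:

IntegerSerializer serializer = new IntegerSerializer();
Deserializer<?> deserializer = KafkaProducerInterceptorWrapper.getDeserializer(serializer);
byte[] bytes = serializer.serialize("topic", 7);
// getDeserializer returns an IntegerDeserializer here, so the value round-trips.
assert Integer.valueOf(7).equals(deserializer.deserialize("topic", bytes));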
 
Example #13
Source File: KafkaProducerInterceptorWrapperTest.java    From pulsar with Apache License 2.0
@DataProvider(name = "serializers")
public Object[][] serializers() {
    return new Object[][] {
        {
            new StringSerializer(), StringDeserializer.class
        },
        {
            new LongSerializer(), LongDeserializer.class
        },
        {
            new IntegerSerializer(), IntegerDeserializer.class,
        },
        {
            new DoubleSerializer(), DoubleDeserializer.class,
        },
        {
            new BytesSerializer(), BytesDeserializer.class
        },
        {
            new ByteBufferSerializer(), ByteBufferDeserializer.class
        },
        {
            new ByteArraySerializer(), ByteArrayDeserializer.class
        }
    };
}
 
Example #14
Source File: DefaultConfigTest.java    From smallrye-reactive-messaging with Apache License 2.0
private MapBasedConfig getKafkaSinkConfigForMyAppProcessingData() {
    String prefix = "mp.messaging.outgoing.kafka.";
    Map<String, Object> config = new HashMap<>();
    config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
    config.put(prefix + "topic", "some-other-topic");

    prefix = "mp.messaging.incoming.source.";
    config.put(prefix + "connector", KafkaConnector.CONNECTOR_NAME);
    config.put(prefix + "topic", "some-topic");
    config.put(prefix + "auto.offset.reset", "earliest");

    config.put("kafka.value.serializer", StringSerializer.class.getName());
    config.put("kafka.value.deserializer", IntegerDeserializer.class.getName());
    config.put("kafka.key.deserializer", StringDeserializer.class.getName());

    return new MapBasedConfig(config);
}
 
Example #15
Source File: KafkaBinaryLog.java    From modernmt with Apache License 2.0
public static Properties loadProperties(String filename, String[] hosts, int port) {
    InputStream stream = null;

    try {
        Properties properties = new Properties();
        stream = KafkaBinaryLog.class.getClassLoader().getResourceAsStream(filename);
        properties.load(stream);

        String[] servers = new String[hosts.length];
        for (int i = 0; i < servers.length; i++)
            servers[i] = hosts[i] + ':' + port;

        properties.put("bootstrap.servers", StringUtils.join(servers, ','));
        properties.put("key.serializer", IntegerSerializer.class.getName());
        properties.put("value.serializer", KafkaPacketSerializer.class.getName());
        properties.put("key.deserializer", IntegerDeserializer.class.getName());
        properties.put("value.deserializer", KafkaPacketDeserializer.class.getName());

        return properties;
    } catch (IOException e) {
        throw new Error("Unexpected exception", e);
    } finally {
        IOUtils.closeQuietly(stream);
    }
}
 
Example #16
Source File: KafkaUtils.java    From spliceengine with GNU Affero General Public License v3.0
public static long messageCount(String bootstrapServers, String topicName, int partition) {
    Properties props = new Properties();
    String consumerId = UUID.randomUUID().toString();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "spark-consumer-group-"+consumerId);
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "spark-consumer-"+consumerId);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ExternalizableDeserializer.class.getName());

    KafkaConsumer<Integer, Externalizable> consumer = new KafkaConsumer<>(props);

    TopicPartition topicPartition = new TopicPartition(topicName, partition);
    List<TopicPartition> partitionList = Arrays.asList(topicPartition);
    consumer.assign(partitionList);
    consumer.seekToEnd(partitionList);
    long nextOffset = consumer.position(topicPartition);

    consumer.seekToBeginning(partitionList);
    long firstOffset = consumer.position(topicPartition);

    consumer.close();

    return nextOffset - firstOffset;
}
 
Example #17
Source File: KafkaRepository.java    From kafka-service-broker with Apache License 2.0
private Map<String, Object> consumerProps() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, info.getHosts());
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "pivotal");
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    return props;
}
 
Example #18
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testReadAvroSpecificRecordsWithConfluentSchemaRegistry() {
  int numElements = 100;
  String topic = "my_topic";
  String schemaRegistryUrl = "mock://my-scope-name";
  String valueSchemaSubject = topic + "-value";

  List<KV<Integer, AvroGeneratedUser>> inputs = new ArrayList<>();
  for (int i = 0; i < numElements; i++) {
    inputs.add(KV.of(i, new AvroGeneratedUser("ValueName" + i, i, "color" + i)));
  }

  KafkaIO.Read<Integer, AvroGeneratedUser> reader =
      KafkaIO.<Integer, AvroGeneratedUser>read()
          .withBootstrapServers("localhost:9092")
          .withTopic(topic)
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(
              mockDeserializerProvider(schemaRegistryUrl, valueSchemaSubject, null))
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic),
                  1,
                  numElements,
                  OffsetResetStrategy.EARLIEST,
                  i -> ByteBuffer.wrap(new byte[4]).putInt(i).array(),
                  new ValueAvroSerializableFunction(topic, schemaRegistryUrl)))
          .withMaxNumRecords(numElements);

  PCollection<KV<Integer, AvroGeneratedUser>> input = p.apply(reader.withoutMetadata());

  PAssert.that(input).containsInAnyOrder(inputs);
  p.run();
}
 
Example #19
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceSplits() throws Exception {

  int numElements = 1000;
  int numSplits = 10;

  // Coders must be specified explicitly here due to the way the transform
  // is used in the test.
  UnboundedSource<KafkaRecord<Integer, Long>, ?> initial =
      mkKafkaReadTransform(numElements, numElements, null)
          .withKeyDeserializerAndCoder(IntegerDeserializer.class, BigEndianIntegerCoder.of())
          .withValueDeserializerAndCoder(LongDeserializer.class, BigEndianLongCoder.of())
          .makeSource();

  List<? extends UnboundedSource<KafkaRecord<Integer, Long>, ?>> splits =
      initial.split(numSplits, p.getOptions());
  assertEquals("Expected exact splitting", numSplits, splits.size());

  long elementsPerSplit = numElements / numSplits;
  assertEquals("Expected even splits", numElements, elementsPerSplit * numSplits);
  PCollectionList<Long> pcollections = PCollectionList.empty(p);
  for (int i = 0; i < splits.size(); ++i) {
    pcollections =
        pcollections.and(
            p.apply("split" + i, Read.from(splits.get(i)).withMaxNumRecords(elementsPerSplit))
                .apply("Remove Metadata " + i, ParDo.of(new RemoveKafkaMetadata<>()))
                .apply("collection " + i, Values.create()));
  }
  PCollection<Long> input = pcollections.apply(Flatten.pCollections());

  addCountingAsserts(input, numElements);
  p.run();
}
 
Example #20
Source File: AvroEndpoint.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, Pet> createConsumer(String registry) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-avro-consumer");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);
    props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
    KafkaConsumer<Integer, Pet> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-avro-consumer"));
    return consumer;
}
 
Example #21
Source File: KafkaAdaptorConsumer.java    From pulsar-java-tutorial with Apache License 2.0
public static void main(String[] args) {
    String topic = Utils.getTopicName(args);

    Properties props = new Properties();
    props.put("bootstrap.servers", SERVICE_URL);
    props.put("group.id", SUBSCRIPTION_NAME);
    props.put("enable.auto.commit", "false");
    props.put("key.deserializer", IntegerDeserializer.class.getName());
    props.put("value.deserializer", StringDeserializer.class.getName());

    Consumer<Integer, String> consumer = new KafkaConsumer<>(props);

    new ConsumerLoop(consumer, Collections.singleton(topic)).run();
}
 
Example #22
Source File: ConsumerAvroExample.java    From pulsar with Apache License 2.0
public static void main(String[] args) {
    String topic = "persistent://public/default/test-avro";

    Properties props = new Properties();
    props.put("bootstrap.servers", "pulsar://localhost:6650");
    props.put("group.id", "my-subscription-name");
    props.put("enable.auto.commit", "false");
    props.put("key.deserializer", IntegerDeserializer.class.getName());
    props.put("value.deserializer", StringDeserializer.class.getName());

    AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
    AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());

    Bar bar = new Bar();
    bar.setField1(true);

    Foo foo = new Foo();
    foo.setField1("field1");
    foo.setField2("field2");
    foo.setField3(3);

    @SuppressWarnings("resource")
    Consumer<Foo, Bar> consumer = new KafkaConsumer<>(props, fooSchema, barSchema);
    consumer.subscribe(Arrays.asList(topic));

    while (true) {
        ConsumerRecords<Foo, Bar> records = consumer.poll(100);
        records.forEach(record -> {
            log.info("Received record: {}", record);
        });

        // Commit last offset
        consumer.commitSync();
    }
}
 
Example #23
Source File: EphemeralKafkaClusterTest.java    From kafka-junit with Apache License 2.0
@Test
public void testStartAndStop() throws Exception {
    try (KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(cluster.consumerConfig(false), new IntegerDeserializer(), new StringDeserializer());
         KafkaProducer<Integer, String> producer = new KafkaProducer<>(cluster.producerConfig(), new IntegerSerializer(), new StringSerializer())) {
        cluster.createTopics(TEST_TOPIC);

        producer.send(new ProducerRecord<>(TEST_TOPIC, "value"));
        producer.flush();

        consumer.subscribe(Collections.singleton(TEST_TOPIC));
        ConsumerRecords<Integer, String> poll = consumer.poll(TEN_SECONDS);
        assertThat(poll.count()).isEqualTo(1);
        assertThat(poll.iterator().next().value()).isEqualTo("value");
    }
}
 
Example #24
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceWithExceptionInKafkaFetch() {
  // Similar to testUnboundedSource, but with an injected exception inside the Kafka consumer poll.

  // The reader should throw an IOException:
  thrown.expectCause(isA(IOException.class));
  thrown.expectCause(hasMessage(containsString("Exception while reading from Kafka")));
  // The original exception is from MockConsumer.poll():
  thrown.expectCause(hasCause(isA(KafkaException.class)));
  thrown.expectCause(hasCause(hasMessage(containsString("Injected error in consumer.poll()"))));

  int numElements = 1000;
  String topic = "my_topic";

  KafkaIO.Read<Integer, Long> reader =
      KafkaIO.<Integer, Long>read()
          .withBootstrapServers("none")
          .withTopic("my_topic")
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic), 10, numElements, OffsetResetStrategy.EARLIEST))
          .withMaxNumRecords(2 * numElements) // Try to read more messages than available.
          .withConsumerConfigUpdates(ImmutableMap.of("inject.error.at.eof", true))
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(LongDeserializer.class);

  PCollection<Long> input = p.apply(reader.withoutMetadata()).apply(Values.create());

  addCountingAsserts(input, numElements);
  p.run();
}
 
Example #25
Source File: KafkaProducerTest.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, String> createConsumer() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test"));
    return consumer;
}
 
Example #26
Source File: KafkaConsumerManager.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, String> createConsumer() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-consumer"));
    return consumer;
}
 
Example #27
Source File: SslKafkaEndpoint.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, String> createConsumer() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19093");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    addSSL(props);
    KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-ssl-consumer"));
    return consumer;
}
 
Example #28
Source File: SaslKafkaEndpoint.java    From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, String> createConsumer() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19094");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    addJAAS(props);
    KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-sasl-consumer"));
    return consumer;
}