org.apache.kafka.common.serialization.LongDeserializer Java Examples

The following examples show how to use org.apache.kafka.common.serialization.LongDeserializer. They are taken from open source projects; the source file, project, and license are noted above each example.
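Before the examples, a quick orientation: LongDeserializer converts the 8-byte big-endian payload produced by LongSerializer back into a Long. It is typically handed to a consumer either by class name via the key.deserializer / value.deserializer configuration or as an instance passed to the KafkaConsumer constructor, as the examples below show. The following minimal sketch illustrates the round trip outside of a broker; the class name, the topic string "demo-topic", and the value 42L are arbitrary placeholders, not taken from any of the projects below.

import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.LongSerializer;

public class LongRoundTripSketch {

    public static void main(String[] args) {
        LongSerializer serializer = new LongSerializer();
        LongDeserializer deserializer = new LongDeserializer();

        // serialize() encodes the long as 8 big-endian bytes; the topic argument
        // is part of the Serializer contract but does not affect the encoding.
        byte[] payload = serializer.serialize("demo-topic", 42L);

        // deserialize() turns the 8 bytes back into a Long (a null payload yields null).
        Long restored = deserializer.deserialize("demo-topic", payload);

        System.out.println(restored); // prints 42
    }
}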
Example #1
Source File: MessageConsumerFactory.java    From alcor with Apache License 2.0
public Consumer Create() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaAddress);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, IKafkaConfiguration.CONSUMER_GROUP_ID);

    // Key is set as long
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());

    Deserializer deserializer = getDeserializer();
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deserializer.getClass().getName());

    props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, IKafkaConfiguration.MAX_POLL_RECORDS);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, IKafkaConfiguration.OFFSET_RESET_EARLIER);

    Consumer<Long, String> consumer = new KafkaConsumer<>(props);
    return consumer;
}
 
Example #2
Source File: KafkaStream.java    From moa with GNU General Public License v3.0
/**
 * Creates the configuration for the Kafka consumer.
 */
protected Map<String, Object> createConsumerConfiguration() {
  Map<String, Object> config = new HashMap<>();

  config.put("key.deserializer", LongDeserializer.class);
  config.put("value.deserializer", ObjectDeserializer.class);
  config.put("bootstrap.servers", broker());
  config.put("fetch.min.bytes", 1);
  config.put("group.id", uniqueGroupIDString());
  config.put("max.partition.fetch.bytes", 1 << 20); // 1MB
  config.put("allow.auto.create.topics", false);
  config.put("auto.offset.reset", "earliest");
  config.put("enable.auto.commit", true);
  config.put("fetch.max.bytes", 1 << 24); // 16MB
  config.put("isolation.level", "read_committed");
  config.put("client.id", this.getClass().getName());

  return config;
}
 
Example #3
Source File: KafkaProducerInterceptorWrapperTest.java    From pulsar with Apache License 2.0
@DataProvider(name = "serializers")
public Object[][] serializers() {
    return new Object[][] {
        {
            new StringSerializer(), StringDeserializer.class
        },
        {
            new LongSerializer(), LongDeserializer.class
        },
        {
            new IntegerSerializer(), IntegerDeserializer.class,
        },
        {
            new DoubleSerializer(), DoubleDeserializer.class,
        },
        {
            new BytesSerializer(), BytesDeserializer.class
        },
        {
            new ByteBufferSerializer(), ByteBufferDeserializer.class
        },
        {
            new ByteArraySerializer(), ByteArrayDeserializer.class
        }
    };
}
 
Example #4
Source File: KafkaProducerInterceptorWrapper.java    From pulsar with Apache License 2.0
static Deserializer getDeserializer(Serializer serializer) {
    if (serializer instanceof StringSerializer) {
        return new StringDeserializer();
    } else if (serializer instanceof LongSerializer) {
        return new LongDeserializer();
    } else if (serializer instanceof IntegerSerializer) {
        return new IntegerDeserializer();
    } else if (serializer instanceof DoubleSerializer) {
        return new DoubleDeserializer();
    } else if (serializer instanceof BytesSerializer) {
        return new BytesDeserializer();
    } else if (serializer instanceof ByteBufferSerializer) {
        return new ByteBufferDeserializer();
    } else if (serializer instanceof ByteArraySerializer) {
        return new ByteArrayDeserializer();
    } else {
        throw new IllegalArgumentException(serializer.getClass().getName() + " is not a valid or supported subclass of org.apache.kafka.common.serialization.Serializer.");
    }
}
 
Example #5
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceWithExplicitPartitions() {
  int numElements = 1000;

  List<String> topics = ImmutableList.of("test");

  KafkaIO.Read<byte[], Long> reader =
      KafkaIO.<byte[], Long>read()
          .withBootstrapServers("none")
          .withTopicPartitions(ImmutableList.of(new TopicPartition("test", 5)))
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  topics, 10, numElements, OffsetResetStrategy.EARLIEST)) // 10 partitions
          .withKeyDeserializer(ByteArrayDeserializer.class)
          .withValueDeserializer(LongDeserializer.class)
          .withMaxNumRecords(numElements / 10);

  PCollection<Long> input = p.apply(reader.withoutMetadata()).apply(Values.create());

  // assert that every element is a multiple of 5.
  PAssert.that(input).satisfies(new AssertMultipleOf(5));

  PAssert.thatSingleton(input.apply(Count.globally())).isEqualTo(numElements / 10L);

  p.run();
}
 
Example #6
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceWithSingleTopic() {
  // same as testUnboundedSource, but with single topic

  int numElements = 1000;
  String topic = "my_topic";

  KafkaIO.Read<Integer, Long> reader =
      KafkaIO.<Integer, Long>read()
          .withBootstrapServers("none")
          .withTopic("my_topic")
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic), 10, numElements, OffsetResetStrategy.EARLIEST))
          .withMaxNumRecords(numElements)
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(LongDeserializer.class);

  PCollection<Long> input = p.apply(reader.withoutMetadata()).apply(Values.create());

  addCountingAsserts(input, numElements);
  p.run();
}
 
Example #7
Source File: KafkaIOTest.java    From beam with Apache License 2.0
/**
 * Creates a consumer with two topics, with 10 partitions each. numElements are assigned
 * (round-robin) to all 20 partitions.
 */
private static KafkaIO.Read<Integer, Long> mkKafkaReadTransform(
    int numElements,
    int maxNumRecords,
    @Nullable SerializableFunction<KV<Integer, Long>, Instant> timestampFn) {

  List<String> topics = ImmutableList.of("topic_a", "topic_b");

  KafkaIO.Read<Integer, Long> reader =
      KafkaIO.<Integer, Long>read()
          .withBootstrapServers("myServer1:9092,myServer2:9092")
          .withTopics(topics)
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  topics, 10, numElements, OffsetResetStrategy.EARLIEST)) // 20 partitions
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(LongDeserializer.class)
          .withMaxNumRecords(maxNumRecords);

  if (timestampFn != null) {
    return reader.withTimestampFn(timestampFn);
  } else {
    return reader;
  }
}
 
Example #8
Source File: LocalDeserializerProviderTest.java    From beam with Apache License 2.0
@Test
public void testInferKeyCoder() {
  CoderRegistry registry = CoderRegistry.createDefault();
  assertTrue(
      LocalDeserializerProvider.of(LongDeserializer.class).getCoder(registry).getValueCoder()
          instanceof VarLongCoder);
  assertTrue(
      LocalDeserializerProvider.of(StringDeserializer.class).getCoder(registry).getValueCoder()
          instanceof StringUtf8Coder);
  assertTrue(
      LocalDeserializerProvider.of(InstantDeserializer.class).getCoder(registry).getValueCoder()
          instanceof InstantCoder);
  assertTrue(
      LocalDeserializerProvider.of(DeserializerWithInterfaces.class)
              .getCoder(registry)
              .getValueCoder()
          instanceof VarLongCoder);
}
 
Example #9
Source File: NexmarkLauncher.java    From beam with Apache License 2.0
/** Return source of events from Kafka. */
private PCollection<Event> sourceEventsFromKafka(Pipeline p, final Instant now) {
  checkArgument((options.getBootstrapServers() != null), "Missing --bootstrapServers");
  NexmarkUtils.console("Reading events from Kafka Topic %s", options.getKafkaTopic());

  KafkaIO.Read<Long, byte[]> read =
      KafkaIO.<Long, byte[]>read()
          .withBootstrapServers(options.getBootstrapServers())
          .withTopic(options.getKafkaTopic())
          .withKeyDeserializer(LongDeserializer.class)
          .withValueDeserializer(ByteArrayDeserializer.class)
          .withStartReadTime(now)
          .withMaxNumRecords(
              options.getNumEvents() != null ? options.getNumEvents() : Long.MAX_VALUE);

  return p.apply(queryName + ".ReadKafkaEvents", read.withoutMetadata())
      .apply(queryName + ".KafkaToEvents", ParDo.of(BYTEARRAY_TO_EVENT));
}
 
Example #10
Source File: ClickStreamEnrichmentDriver.java    From registry with Apache License 2.0
private void consumeUserActivity() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
    props.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), SCHEMA_REGISTRY_URL);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "user-activity-reader");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

    try (KafkaConsumer<Long, UserActivity> consumer = new KafkaConsumer<>(props)) {
        consumer.subscribe(Collections.singleton(USER_ACTIVITY_TOPIC));
        while (true) {
            final ConsumerRecords<Long, UserActivity> consumerRecords = consumer.poll(Duration.ofSeconds(1));
            consumerRecords.forEach(System.out::println);
        }
    }
}
 
Example #11
Source File: StreamToTableJoinFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void testStreamToTableBiFunction() {
	SpringApplication app = new SpringApplication(BiFunctionCountClicksPerRegionApplication.class);
	app.setWebApplicationType(WebApplicationType.NONE);

	Consumer<String, Long> consumer;
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-2",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
	DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

	runTest(app, consumer);
}
 
Example #12
Source File: StreamToTableJoinFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
public void testStreamToTable() {
	SpringApplication app = new SpringApplication(CountClicksPerRegionApplication.class);
	app.setWebApplicationType(WebApplicationType.NONE);

	Consumer<String, Long> consumer;
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-1",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
	consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
	DefaultKafkaConsumerFactory<String, Long> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "output-topic-1");

	runTest(app, consumer);
}
 
Example #13
Source File: DataLoaderConfig.java    From kafka-webview with MIT License
/**
 * Creates default message formats.
 */
private void createDefaultMessageFormats() {
    final Map<String, String> defaultFormats = new HashMap<>();
    defaultFormats.put("Short", ShortDeserializer.class.getName());
    defaultFormats.put("ByteArray", ByteArrayDeserializer.class.getName());
    defaultFormats.put("Bytes", BytesDeserializer.class.getName());
    defaultFormats.put("Double", DoubleDeserializer.class.getName());
    defaultFormats.put("Float", FloatDeserializer.class.getName());
    defaultFormats.put("Integer", IntegerDeserializer.class.getName());
    defaultFormats.put("Long", LongDeserializer.class.getName());
    defaultFormats.put("String", StringDeserializer.class.getName());
    defaultFormats.put("Bytes (Hex Encoded)", BytesToHexDeserializer.class.getName());

    // Create if needed.
    for (final Map.Entry<String, String> entry : defaultFormats.entrySet()) {
        MessageFormat messageFormat = messageFormatRepository.findByName(entry.getKey());
        if (messageFormat == null) {
            messageFormat = new MessageFormat();
        }
        messageFormat.setName(entry.getKey());
        messageFormat.setClasspath(entry.getValue());
        messageFormat.setJar("n/a");
        messageFormat.setDefaultFormat(true);
        messageFormatRepository.save(messageFormat);
    }
}
 
Example #14
Source File: SampleRawConsumer.java    From kafka-encryption with Apache License 2.0
@Override
public void run() {

    Properties consumerProperties = new Properties();
    consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sampleraw");
    consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    try (KafkaConsumer<Long, String> consumer = new KafkaConsumer<Long, String>(
            consumerProperties,
            new LongDeserializer(),
            new StringDeserializer())) {

        consumer.subscribe(Collections.singleton("sampletopic"));
        for (; true; ) {
            ConsumerRecords<Long, String> records = consumer.poll(1000L);
            records.forEach(
                    record -> System.out.println(
                        "-------------------------------------------------------------\n" +
                        "raw record: key=" + record.key() + ", offset=" + record.offset() + ", value=" + record.value() +
                        "\n-------------------------------------------------------------\n\n"
                    )
            );
        }
    }
}
 
Example #15
Source File: SampleRawConsumer.java    From kafka-encryption with Apache License 2.0
@Override
public void run() {

    Properties consumerProperties = new Properties();
    consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sampleraw");
    consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    try (KafkaConsumer<Long, String> consumer = new KafkaConsumer<Long, String>(
            consumerProperties,
            new LongDeserializer(),
            new StringDeserializer())) {

        consumer.subscribe(Collections.singleton("sampletopic"));
        for (; true; ) {
            ConsumerRecords<Long, String> records = consumer.poll(1000L);
            records.forEach(
                    record -> System.out.println(
                    "-------------------------------------------------------------\n" +
                    "raw record: key=" + record.key() + ", offset=" + record.offset() + ", value=" + record.value() +
                    "\n-------------------------------------------------------------\n\n")
            );
        }
    }
}
 
Example #16
Source File: EventKafkaConsumer.java    From dapeng-soa with Apache License 2.0
public void init() {
    logger.info(new StringBuffer("[KafkaConsumer] [init] ")
            .append("kafkaConnect(").append(kafkaConnect)
            .append(") groupId(").append(groupId)
            .append(") topic(").append(topic).append(")").toString());

    KafkaConfigBuilder.ConsumerConfiguration builder = KafkaConfigBuilder.defaultConsumer();
    Properties properties = new Properties();
    properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);


    final Properties props = builder.bootstrapServers(kafkaConnect)
            .group(groupId)
            .withKeyDeserializer(LongDeserializer.class)
            .withValueDeserializer(ByteArrayDeserializer.class)
            .withOffsetCommitted("false")
            .build();

    consumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(props);
}
 
Example #17
Source File: KafkaConsumerMetricsTest.java    From micrometer with Apache License 2.0
private Consumer<Long, String> createConsumer() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "MicrometerTestConsumer");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

    Consumer<Long, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList(TOPIC));
    consumerCount++;
    return consumer;
}
 
Example #18
Source File: SampleDecryptingConsumer.java    From kafka-encryption with Apache License 2.0
@Override
public void run() {

    // tag::consume[]
    // The key is embedded in each message

    Decryptor decryptor = new DefaultDecryptor(keyProvider, cryptoAlgorithm);

    // Construct decrypting deserializer
    CryptoDeserializerFactory cryptoDeserializerFactory = new CryptoDeserializerFactory(decryptor);

    Properties consumerProperties = new Properties();
    consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "samplecrypted");
    consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    try (KafkaConsumer<Long, String> consumer = new KafkaConsumer<Long, String>(
            consumerProperties,
            new LongDeserializer(),
            cryptoDeserializerFactory.buildFrom(new StringDeserializer()))) {

        consumer.subscribe(Collections.singleton("sampletopic"));
        for (; true; ) {
            ConsumerRecords<Long, String> records = consumer.poll(1000L);
            records.forEach(
                    record -> System.out.println(
                                "-------------------------------------------------------------\n" +
                                "decrypted record: key=" + record.key() + ", offset=" + record.offset() + ", value=" + record.value() +
                                "\n-------------------------------------------------------------\n\n")
            );
        }
    }
    // end::consume[]
}
 
Example #19
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnreachableKafkaBrokers() {
  // Expect an exception when the Kafka brokers are not reachable on the workers.
  // We specify partitions explicitly so that splitting does not involve server interaction.
  // Set request timeout to 10ms so that test does not take long.

  thrown.expect(Exception.class);
  thrown.expectMessage("Reader-0: Timeout while initializing partition 'test-0'");

  int numElements = 1000;
  PCollection<Long> input =
      p.apply(
              KafkaIO.<Integer, Long>read()
                  .withBootstrapServers("8.8.8.8:9092") // Google public DNS ip.
                  .withTopicPartitions(ImmutableList.of(new TopicPartition("test", 0)))
                  .withKeyDeserializer(IntegerDeserializer.class)
                  .withValueDeserializer(LongDeserializer.class)
                  .withConsumerConfigUpdates(
                      ImmutableMap.of(
                          ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG,
                          5,
                          ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG,
                          8,
                          ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG,
                          8,
                          "default.api.timeout.ms",
                          10))
                  .withMaxNumRecords(10)
                  .withoutMetadata())
          .apply(Values.create());

  addCountingAsserts(input, numElements);
  p.run();
}
 
Example #20
Source File: KafkaBinderTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testConsumerCustomDeserializer() throws Exception {
	Binding<?> binding = null;
	try {
		KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
		Map<String, String> propertiesToOverride = configurationProperties
				.getConfiguration();
		propertiesToOverride.put("key.deserializer",
				"org.apache.kafka.common.serialization.StringDeserializer");
		propertiesToOverride.put("value.deserializer",
				"org.apache.kafka.common.serialization.LongDeserializer");
		configurationProperties.setConfiguration(propertiesToOverride);
		String testTopicName = "existing" + System.currentTimeMillis();
		configurationProperties.setAutoCreateTopics(false);
		Binder binder = getBinder(configurationProperties);

		ExtendedConsumerProperties<KafkaConsumerProperties> consumerProperties = createConsumerProperties();
		DirectChannel input = createBindableChannel("input",
				createConsumerBindingProperties(consumerProperties));

		binding = binder.bindConsumer(testTopicName, "test", input,
				consumerProperties);
		DirectFieldAccessor consumerAccessor = new DirectFieldAccessor(
				getKafkaConsumer(binding));
		assertThat(consumerAccessor
				.getPropertyValue("keyDeserializer") instanceof StringDeserializer)
						.isTrue();
		assertThat(consumerAccessor
				.getPropertyValue("valueDeserializer") instanceof LongDeserializer)
						.isTrue();
	}
	finally {
		if (binding != null) {
			binding.unbind();
		}
	}
}
 
Example #21
Source File: SampleDecryptingConsumer.java    From kafka-encryption with Apache License 2.0
@Override
public void run() {

    // tag::consume[]
    // The key is embedded in each message
    PerRecordKeyProvider keyProvider = new PerRecordKeyProvider(masterKeyEncryption);

    // The payload is encrypted using AES
    AesGcmNoPaddingCryptoAlgorithm cryptoAlgorithm = new AesGcmNoPaddingCryptoAlgorithm();
    Decryptor decryptor = new DefaultDecryptor(keyProvider, cryptoAlgorithm);

    // Construct decrypting deserializer
    CryptoDeserializerFactory cryptoDeserializerFactory = new CryptoDeserializerFactory(decryptor);

    Properties consumerProperties = new Properties();
    consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "samplecrypted");
    consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    try (KafkaConsumer<Long, String> consumer = new KafkaConsumer<Long, String>(
            consumerProperties,
            new LongDeserializer(),
            cryptoDeserializerFactory.buildFrom(new StringDeserializer()))) {

        consumer.subscribe(Collections.singleton("sampletopic"));
        for (; true; ) {
            ConsumerRecords<Long, String> records = consumer.poll(1000L);
            records.forEach(
                    record -> System.out.println(
                    "-------------------------------------------------------------\n" +
                    "decrypted record: key=" + record.key() + ", offset=" + record.offset() + ", value=" + record.value() +
                    "\n-------------------------------------------------------------\n\n"
                    )
            );
        }
    }
    // end::consume[]
}
 
Example #22
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceWithExceptionInKafkaFetch() {
  // Similar to testUnboundedSource, but with an injected exception inside Kafka consumer poll.

  // The reader should throw an IOException:
  thrown.expectCause(isA(IOException.class));
  thrown.expectCause(hasMessage(containsString("Exception while reading from Kafka")));
  // The original exception is from MockConsumer.poll():
  thrown.expectCause(hasCause(isA(KafkaException.class)));
  thrown.expectCause(hasCause(hasMessage(containsString("Injected error in consumer.poll()"))));

  int numElements = 1000;
  String topic = "my_topic";

  KafkaIO.Read<Integer, Long> reader =
      KafkaIO.<Integer, Long>read()
          .withBootstrapServers("none")
          .withTopic("my_topic")
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic), 10, numElements, OffsetResetStrategy.EARLIEST))
          .withMaxNumRecords(2 * numElements) // Try to read more messages than available.
          .withConsumerConfigUpdates(ImmutableMap.of("inject.error.at.eof", true))
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(LongDeserializer.class);

  PCollection<Long> input = p.apply(reader.withoutMetadata()).apply(Values.create());

  addCountingAsserts(input, numElements);
  p.run();
}
 
Example #24
Source File: KafkaIOTest.java    From beam with Apache License 2.0 5 votes vote down vote up
@Test
public void testUnboundedSourceSplits() throws Exception {

  int numElements = 1000;
  int numSplits = 10;

  // Coders must be specified explicitly here due to the way the transform
  // is used in the test.
  UnboundedSource<KafkaRecord<Integer, Long>, ?> initial =
      mkKafkaReadTransform(numElements, null)
          .withKeyDeserializerAndCoder(IntegerDeserializer.class, BigEndianIntegerCoder.of())
          .withValueDeserializerAndCoder(LongDeserializer.class, BigEndianLongCoder.of())
          .makeSource();

  List<? extends UnboundedSource<KafkaRecord<Integer, Long>, ?>> splits =
      initial.split(numSplits, p.getOptions());
  assertEquals("Expected exact splitting", numSplits, splits.size());

  long elementsPerSplit = numElements / numSplits;
  assertEquals("Expected even splits", numElements, elementsPerSplit * numSplits);
  PCollectionList<Long> pcollections = PCollectionList.empty(p);
  for (int i = 0; i < splits.size(); ++i) {
    pcollections =
        pcollections.and(
            p.apply("split" + i, Read.from(splits.get(i)).withMaxNumRecords(elementsPerSplit))
                .apply("Remove Metadata " + i, ParDo.of(new RemoveKafkaMetadata<>()))
                .apply("collection " + i, Values.create()));
  }
  PCollection<Long> input = pcollections.apply(Flatten.pCollections());

  addCountingAsserts(input, numElements);
  p.run();
}
 
Example #24
Source File: KafkaStreamsBinderPojoInputAndPrimitiveTypeOutputTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@BeforeClass
public static void setUp() throws Exception {
	Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group-id",
			"false", embeddedKafka);
	consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
	consumerProps.put("value.deserializer", LongDeserializer.class);
	DefaultKafkaConsumerFactory<Integer, Long> cf = new DefaultKafkaConsumerFactory<>(
			consumerProps);
	consumer = cf.createConsumer();
	embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "counts-id");
}
 
Example #25
Source File: KafkaPluginIT.java    From glowroot with Apache License 2.0
private static Consumer<Long, String> createConsumer() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "consumerGroup1");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            LongDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    Consumer<Long, String> consumer = new KafkaConsumer<Long, String>(props);
    consumer.subscribe(Collections.singletonList("demo"));
    return consumer;
}
 
Example #26
Source File: RunningAverageTest.java    From kafka-tutorials with Apache License 2.0
@Test
public void validateAverageRating() {

  TestInputTopic<Long, Rating> inputTopic = testDriver.createInputTopic(RATINGS_TOPIC_NAME,
                                                                        new LongSerializer(),
                                                                        ratingSpecificAvroSerde.serializer());

  inputTopic.pipeKeyValueList(asList(
      new KeyValue<>(LETHAL_WEAPON_RATING_8.getMovieId(), LETHAL_WEAPON_RATING_8),
      new KeyValue<>(LETHAL_WEAPON_RATING_10.getMovieId(), LETHAL_WEAPON_RATING_10)
  ));

  final TestOutputTopic<Long, Double> outputTopic = testDriver.createOutputTopic(AVERAGE_RATINGS_TOPIC_NAME,
                                                                                 new LongDeserializer(),
                                                                                 new DoubleDeserializer());

  final List<KeyValue<Long, Double>> keyValues = outputTopic.readKeyValuesToList();
  // We sent two records to the input topic
  // and expect the second record in the output topic to contain the correct (averaged) result
  final KeyValue<Long, Double> longDoubleKeyValue = keyValues.get(1);
  System.out.println("longDoubleKeyValue = " + longDoubleKeyValue);
  assertThat(longDoubleKeyValue,
             equalTo(new KeyValue<>(362L, 9.0)));

  final KeyValueStore<Long, Double>
      keyValueStore =
      testDriver.getKeyValueStore("average-ratings");
  final Double expected = keyValueStore.get(362L);
  Assert.assertEquals("Message", expected, 9.0, 0.0);
}
 
Example #27
Source File: KafkaAutoConfigurationTest.java    From extension-kafka with Apache License 2.0
@Test
void testConsumerPropertiesAreAdjustedAsExpected() {
    this.contextRunner.withUserConfiguration(TestConfiguration.class)
                      .withPropertyValues(
                              "axon.kafka.default-topic=testTopic",
                              // Overrides 'axon.kafka.bootstrap-servers'
                              "axon.kafka.bootstrap-servers=foo:1234",
                              "axon.kafka.properties.foo=bar",
                              "axon.kafka.default-topic=testTopic",
                              "axon.kafka.properties.baz=qux",
                              "axon.kafka.properties.foo.bar.baz=qux.fiz.buz",
                              "axon.kafka.ssl.key-password=p1",
                              "axon.kafka.ssl.keystore-location=classpath:ksLoc",
                              "axon.kafka.ssl.keystore-password=p2",
                              "axon.kafka.ssl.truststore-location=classpath:tsLoc",
                              "axon.kafka.ssl.truststore-password=p3",
                              "axon.kafka.consumer.auto-commit-interval=123",
                              "axon.kafka.consumer.max-poll-records=42",
                              "axon.kafka.consumer.auto-offset-reset=earliest",
                              "axon.kafka.consumer.client-id=some-client-id",
                              "axon.kafka.consumer.enable-auto-commit=false",
                              "axon.kafka.consumer.fetch-max-wait=456",
                              "axon.kafka.consumer.properties.fiz.buz=fix.fox",
                              "axon.kafka.consumer.fetch-min-size=789",
                              "axon.kafka.consumer.group-id=bar",
                              "axon.kafka.consumer.heartbeat-interval=234",
                              "axon.kafka.consumer.key-deserializer = org.apache.kafka.common.serialization.LongDeserializer",
                              "axon.kafka.consumer.value-deserializer = org.apache.kafka.common.serialization.IntegerDeserializer"
                      ).run(context -> {
        // Required bean assertions
        assertNotNull(context.getBeanNamesForType(KafkaMessageConverter.class));
        assertNotNull(context.getBeanNamesForType(ConsumerFactory.class));
        assertNotNull(context.getBeanNamesForType(Fetcher.class));
        assertNotNull(context.getBeanNamesForType(StreamableKafkaMessageSource.class));

        // Consumer assertions
        DefaultConsumerFactory<?, ?> consumerFactory = context.getBean(DefaultConsumerFactory.class);
        Map<String, Object> configs = consumerFactory.configurationProperties();

        assertEquals(
                Collections.singletonList("foo:1234"),
                configs.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)
        ); // Assert override
        assertEquals("p1", configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG));
        assertTrue(
                ((String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG)).contains(File.separator + "ksLoc")
        );
        assertEquals("p2", configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG));
        assertTrue(
                ((String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG)).contains(File.separator + "tsLoc")
        );
        assertEquals("p3", configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG));
        assertEquals("some-client-id", configs.get(ConsumerConfig.CLIENT_ID_CONFIG));
        assertEquals(Boolean.FALSE, configs.get(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG));
        assertEquals(123, configs.get(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG));
        assertEquals("earliest", configs.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG));
        assertEquals(456, configs.get(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG));
        assertEquals(789, configs.get(ConsumerConfig.FETCH_MIN_BYTES_CONFIG));
        assertEquals(234, configs.get(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG));
        assertEquals(LongDeserializer.class, configs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
        assertEquals(IntegerDeserializer.class, configs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG));
        assertEquals(42, configs.get(ConsumerConfig.MAX_POLL_RECORDS_CONFIG));
        assertEquals("bar", configs.get("foo"));
        assertEquals("qux", configs.get("baz"));
        assertEquals("qux.fiz.buz", configs.get("foo.bar.baz"));
        assertEquals("fix.fox", configs.get("fiz.buz"));
    });
}
 
Example #28
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
@Ignore // TODO : BEAM-4086 : enable once flakiness is fixed.
public void testUnboundedSourceWithoutBoundedWrapper() {
  // This is the same as testUnboundedSource() without the BoundedSource wrapper.
  // Most of the tests in this file set 'maxNumRecords' on the source, which wraps
  // the unbounded source in a bounded source. As a result, the test pipelines run as
  // bounded/batch pipelines under the direct runner.
  // This test runs without such a wrapper, and depends on the watermark
  // progressing to infinity to end the test (see TimestampPolicyWithEndOfSource above).

  final int numElements = 1000;
  final int numPartitions = 10;
  String topic = "testUnboundedSourceWithoutBoundedWrapper";

  KafkaIO.Read<byte[], Long> reader =
      KafkaIO.<byte[], Long>read()
          .withBootstrapServers(topic)
          .withTopic(topic)
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic),
                  numPartitions,
                  numElements,
                  OffsetResetStrategy.EARLIEST))
          .withKeyDeserializer(ByteArrayDeserializer.class)
          .withValueDeserializer(LongDeserializer.class)
          .withTimestampPolicyFactory(
              new TimestampPolicyWithEndOfSource<>(numElements / numPartitions - 1));

  p.apply("readFromKafka", reader.withoutMetadata())
      .apply(Values.create())
      .apply(Window.into(FixedWindows.of(Duration.standardDays(100))));

  PipelineResult result = p.run();

  MetricName elementsRead = SourceMetrics.elementsRead().getName();

  MetricQueryResults metrics =
      result
          .metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(MetricNameFilter.inNamespace(elementsRead.getNamespace()))
                  .build());

  assertThat(
      metrics.getCounters(),
      hasItem(
          attemptedMetricsResult(
              elementsRead.getNamespace(),
              elementsRead.getName(),
              "readFromKafka",
              (long) numElements)));
}
 
Example #29
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceCheckpointMarkWithEmptyPartitions() throws Exception {
  // Similar to testUnboundedSourceCheckpointMark(), but verifies that the source resumes
  // properly from empty partitions, without missing messages added since the checkpoint.

  // Initialize consumer with fewer elements than number of partitions so that some are empty.
  int initialNumElements = 5;
  UnboundedSource<KafkaRecord<Integer, Long>, KafkaCheckpointMark> source =
      mkKafkaReadTransform(initialNumElements, new ValueAsTimestampFn())
          .makeSource()
          .split(1, PipelineOptionsFactory.create())
          .get(0);

  UnboundedReader<KafkaRecord<Integer, Long>> reader = source.createReader(null, null);

  for (int l = 0; l < initialNumElements; ++l) {
    advanceOnce(reader, l > 0);
  }

  // Checkpoint and restart, and confirm that the source continues correctly.
  KafkaCheckpointMark mark =
      CoderUtils.clone(
          source.getCheckpointMarkCoder(), (KafkaCheckpointMark) reader.getCheckpointMark());

  // Create another source with MockConsumer with OffsetResetStrategy.LATEST. This ensures that
  // the reader needs to explicitly seek to the first offset for partitions that were empty.

  int numElements = 100; // all the 20 partitions will have elements
  List<String> topics = ImmutableList.of("topic_a", "topic_b");

  source =
      KafkaIO.<Integer, Long>read()
          .withBootstrapServers("none")
          .withTopics(topics)
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(topics, 10, numElements, OffsetResetStrategy.LATEST))
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(LongDeserializer.class)
          .withMaxNumRecords(numElements)
          .withTimestampFn(new ValueAsTimestampFn())
          .makeSource()
          .split(1, PipelineOptionsFactory.create())
          .get(0);

  reader = source.createReader(null, mark);

  // Verify in any order. As the partitions are unevenly read, the returned records are not in a
  // simple order. Note that testUnboundedSourceCheckpointMark() verifies round-robin order.

  List<Long> expected = new ArrayList<>();
  List<Long> actual = new ArrayList<>();
  for (long i = initialNumElements; i < numElements; i++) {
    advanceOnce(reader, i > initialNumElements);
    expected.add(i);
    actual.add(reader.getCurrent().getKV().getValue());
  }
  assertThat(actual, IsIterableContainingInAnyOrder.containsInAnyOrder(expected.toArray()));
}