org.apache.kafka.common.serialization.ByteArrayDeserializer Java Examples

The following examples show how to use org.apache.kafka.common.serialization.ByteArrayDeserializer. Each example is taken from an open source project; the source file, project, and license are noted above each snippet.
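Before the project examples, a minimal standalone sketch of the class itself: ByteArrayDeserializer is the identity deserializer, returning the record's raw bytes unchanged (null in, null out). The topic name below is a placeholder.

import org.apache.kafka.common.serialization.ByteArrayDeserializer;

public class ByteArrayDeserializerSketch {
    public static void main(String[] args) {
        ByteArrayDeserializer deserializer = new ByteArrayDeserializer();
        byte[] payload = {1, 2, 3};
        // Pass-through: the deserializer returns the input array as-is.
        byte[] result = deserializer.deserialize("any-topic", payload);
        System.out.println(result == payload); // true
    }
}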
Example #1
Source File: AmqpSinkBridgeEndpointMockTest.java    From strimzi-kafka-bridge with Apache License 2.0
@Test
public <K, V> void filters_nonIntegerPartitionFilter() throws Exception {
    String topic = "my_topic";
    Vertx vertx = Vertx.vertx();
    AmqpSinkBridgeEndpoint<K, V> endpoint = (AmqpSinkBridgeEndpoint) new AmqpSinkBridgeEndpoint<>(vertx, BridgeConfig.fromMap(config),
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());
    endpoint.open();
    ProtonSender mockSender = mockSender(ProtonQoS.AT_MOST_ONCE, topic + "/group.id/blah");
    // Call handle()
    Map<Symbol, Object> filter = new HashMap<>();
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_PARTITION_FILTER), "not an integer");
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_OFFSET_FILTER), 10L);
    ((Source) mockSender.getRemoteSource()).setFilter(filter);
    endpoint.handle(new AmqpEndpoint(mockSender));

    assertDetach(mockSender,
            AmqpBridge.AMQP_ERROR_WRONG_PARTITION_FILTER,
            "Wrong partition filter");
}
 
Example #2
Source File: AmqpSinkBridgeEndpointMockTest.java    From strimzi-kafka-bridge with Apache License 2.0
@Test
public <K, V> void filters_negativeLongOffsetFilter() throws Exception {
    String topic = "my_topic";
    Vertx vertx = Vertx.vertx();
    AmqpSinkBridgeEndpoint<K, V> endpoint = (AmqpSinkBridgeEndpoint) new AmqpSinkBridgeEndpoint<>(vertx, BridgeConfig.fromMap(config),
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());
    endpoint.open();
    ProtonSender mockSender = mockSender(ProtonQoS.AT_MOST_ONCE, topic + "/group.id/blah");
    // Call handle()
    Map<Symbol, Object> filter = new HashMap<>();
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_PARTITION_FILTER), 0);
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_OFFSET_FILTER), -10L);
    ((Source) mockSender.getRemoteSource()).setFilter(filter);
    endpoint.handle(new AmqpEndpoint(mockSender));

    assertDetach(mockSender,
            AmqpBridge.AMQP_ERROR_WRONG_FILTER,
            "Wrong filter");
}
 
Example #3
Source File: AmqpSinkBridgeEndpointMockTest.java    From strimzi-kafka-bridge with Apache License 2.0
@Test
public <K, V> void config_ConverterNoDefaultConstructor() throws AmqpErrorConditionException {
    Vertx vertx = Vertx.vertx();
    BridgeConfig config = BridgeConfig.fromMap(AmqpSinkBridgeEndpointMockTest.config);
    config.getAmqpConfig().setMessageConverter(NoNullaryCtor.class.getName());
    AmqpSinkBridgeEndpoint<K, V> endpoint = (AmqpSinkBridgeEndpoint) new AmqpSinkBridgeEndpoint<>(vertx, config,
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());
    endpoint.open();
    ProtonSender mockSender = mockSender(ProtonQoS.AT_MOST_ONCE, "");
    // Call handle()
    endpoint.handle(new AmqpEndpoint(mockSender));

    assertDetach(mockSender,
            AmqpBridge.AMQP_ERROR_CONFIGURATION,
            "configured message converter class could not be instantiated: io.strimzi.kafka.bridge.amqp.AmqpSinkBridgeEndpointMockTest$NoNullaryCtor");
}
 
Example #4
Source File: FlinkKafkaConsumer.java    From flink with Apache License 2.0
/**
 * Makes sure that the ByteArrayDeserializer is registered in the Kafka properties.
 *
 * @param props The Kafka properties to register the deserializer in.
 */
private static void setDeserializer(Properties props) {
	final String deSerName = ByteArrayDeserializer.class.getName();

	Object keyDeSer = props.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
	Object valDeSer = props.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);

	if (keyDeSer != null && !keyDeSer.equals(deSerName)) {
		LOG.warn("Ignoring configured key DeSerializer ({})", ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
	}
	if (valDeSer != null && !valDeSer.equals(deSerName)) {
		LOG.warn("Ignoring configured value DeSerializer ({})", ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);
	}

	props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deSerName);
	props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deSerName);
}
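Flink decodes records itself downstream of the raw consumer, so the client must hand back plain byte arrays; any user-supplied deserializer setting is overridden with a warning. A minimal sketch of the resulting property state, assuming only kafka-clients on the classpath:

import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class DeserializerOverrideSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // A user-supplied setting that setDeserializer(props) would warn about:
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // What setDeserializer(props) leaves behind: both entries forced to raw bytes.
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

        // Prints org.apache.kafka.common.serialization.ByteArrayDeserializer
        System.out.println(props.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG));
    }
}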
 
Example #5
Source File: AmqpSinkBridgeEndpointMockTest.java    From strimzi-kafka-bridge with Apache License 2.0
@Test
public <K, V> void config_ConverterDefaultConstructorThrows() throws AmqpErrorConditionException {
    Vertx vertx = Vertx.vertx();
    BridgeConfig config = BridgeConfig.fromMap(AmqpSinkBridgeEndpointMockTest.config);
    config.getAmqpConfig().setMessageConverter(CtorThrows.class.getName());
    AmqpSinkBridgeEndpoint<K, V> endpoint = (AmqpSinkBridgeEndpoint) new AmqpSinkBridgeEndpoint<>(vertx, config,
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());
    endpoint.open();
    ProtonSender mockSender = mockSender(ProtonQoS.AT_MOST_ONCE, "");
    // Call handle()
    endpoint.handle(new AmqpEndpoint(mockSender));

    assertDetach(mockSender,
            AmqpBridge.AMQP_ERROR_CONFIGURATION,
            "configured message converter class could not be instantiated: io.strimzi.kafka.bridge.amqp.AmqpSinkBridgeEndpointMockTest$CtorThrows");
}
 
Example #6
Source File: SerdeRegistry.java    From micronaut-kafka with Apache License 2.0
/**
 * Picks the most appropriate {@link Deserializer} for the given argument.
 *
 * @param argument The argument
 * @param <T> The generic type
 * @return The {@link Deserializer}
 */
@SuppressWarnings("unchecked")
default <T> Deserializer<T> pickDeserializer(Argument<T> argument) {
    Class<T> type = argument.getType();

    // Reactive and async return types wrap the real payload type, so unwrap it first.
    if (Publishers.isConvertibleToPublisher(type) || Future.class.isAssignableFrom(type)) {
        Optional<Argument<?>> typeArg = argument.getFirstTypeVariable();

        if (typeArg.isPresent()) {
            type = (Class<T>) typeArg.get().getType();
        } else {
            // No payload type available: fall back to raw bytes.
            return (Deserializer<T>) new ByteArrayDeserializer();
        }
    }

    return getDeserializer(type);
}
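A hypothetical call site, assuming an injected SerdeRegistry and Micronaut's Argument API; the variable names are illustrative:

// serdeRegistry is assumed to be an injected SerdeRegistry instance.
Deserializer<String> deserializer = serdeRegistry.pickDeserializer(Argument.of(String.class));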
 
Example #7
Source File: AmqpSinkBridgeEndpointMockTest.java    From strimzi-kafka-bridge with Apache License 2.0
@Test
public <K, V> void filters_nonLongOffsetFilter() throws Exception {
    String topic = "my_topic";
    Vertx vertx = Vertx.vertx();
    AmqpSinkBridgeEndpoint<K, V> endpoint = (AmqpSinkBridgeEndpoint) new AmqpSinkBridgeEndpoint<>(vertx, BridgeConfig.fromMap(config),
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());
    endpoint.open();
    ProtonSender mockSender = mockSender(ProtonQoS.AT_MOST_ONCE, topic + "/group.id/blah");
    // Call handle()
    Map<Symbol, Object> filter = new HashMap<>();
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_PARTITION_FILTER), 0);
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_OFFSET_FILTER), "not a long");
    ((Source) mockSender.getRemoteSource()).setFilter(filter);
    endpoint.handle(new AmqpEndpoint(mockSender));

    assertDetach(mockSender,
            // TODO really?
            AmqpBridge.AMQP_ERROR_WRONG_OFFSET_FILTER,
            "Wrong offset filter");
}
 
Example #8
Source File: AmqpBridge.java    From strimzi-kafka-bridge with Apache License 2.0
/**
 * Handler for a link attached by a remote receiver.
 *
 * @param connection the connection to which the sender link belongs
 * @param sender the sender link, created by the underlying Proton library,
 *               used for communicating with the remote receiver
 */
private void processOpenSender(ProtonConnection connection, ProtonSender sender) {

    log.info("Remote receiver attached {}", sender.getName());

    // create and add a new sink to the map
    // TODO: the AMQP client should be able to specify the format during link attachment
    SinkBridgeEndpoint<?, ?> sink = new AmqpSinkBridgeEndpoint<>(this.vertx, this.bridgeConfig,
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());

    sink.closeHandler(s -> {
        this.endpoints.get(connection).getSinks().remove(s);
    });
    sink.open();
    this.endpoints.get(connection).getSinks().add(sink);

    sink.handle(new AmqpEndpoint(sender));
}
 
Example #9
Source File: BeamKafkaTable.java    From beam with Apache License 2.0
KafkaIO.Read<byte[], byte[]> createKafkaRead() {
  KafkaIO.Read<byte[], byte[]> kafkaRead;
  if (topics != null) {
    kafkaRead =
        KafkaIO.<byte[], byte[]>read()
            .withBootstrapServers(bootstrapServers)
            .withTopics(topics)
            .withConsumerConfigUpdates(configUpdates)
            .withKeyDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of())
            .withValueDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of());
  } else if (topicPartitions != null) {
    kafkaRead =
        KafkaIO.<byte[], byte[]>read()
            .withBootstrapServers(bootstrapServers)
            .withTopicPartitions(topicPartitions)
            .withConsumerConfigUpdates(configUpdates)
            .withKeyDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of())
            .withValueDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of());
  } else {
    throw new InvalidTableException("One of topics or topicPartitions must be configured.");
  }
  return kafkaRead;
}
 
Example #10
Source File: FlinkKafkaConsumer09.java    From Flink-CEPplus with Apache License 2.0
/**
 * Makes sure that the ByteArrayDeserializer is registered in the Kafka properties.
 *
 * @param props The Kafka properties to register the deserializer in.
 */
private static void setDeserializer(Properties props) {
	final String deSerName = ByteArrayDeserializer.class.getName();

	Object keyDeSer = props.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
	Object valDeSer = props.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);

	if (keyDeSer != null && !keyDeSer.equals(deSerName)) {
		LOG.warn("Ignoring configured key DeSerializer ({})", ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
	}
	if (valDeSer != null && !valDeSer.equals(deSerName)) {
		LOG.warn("Ignoring configured value DeSerializer ({})", ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);
	}

	props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deSerName);
	props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deSerName);
}
 
Example #11
Source File: KafkaStore.java    From data-highway with Apache License 2.0
private KafkaBasedLog<byte[], byte[]> createKafkaLog(
    String bootstrapServers,
    String topic,
    Time time,
    Map<String, Object> additionalProducerProps,
    Map<String, Object> additionalConsumerProps) {
  Map<String, Object> producerProps = new HashMap<>();
  producerProps.putAll(additionalProducerProps);
  producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
  producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
  producerProps.put(ProducerConfig.RETRIES_CONFIG, 1);

  Map<String, Object> consumerProps = new HashMap<>();
  consumerProps.putAll(additionalConsumerProps);
  consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
  consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

  return new KafkaBasedLog<>(topic, producerProps, consumerProps, this::consume, time, null);
}
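Both property maps register raw byte-array (de)serializers, so the KafkaBasedLog exchanges opaque byte[] keys and values and leaves any encoding or decoding to the surrounding store.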
 
Example #12
Source File: AmqpSinkBridgeEndpointMockTest.java    From strimzi-kafka-bridge with Apache License 2.0
@Test
public <K, V> void filters_offsetFilterButNoPartitionFilter() throws Exception {
    String topic = "my_topic";
    Vertx vertx = Vertx.vertx();
    AmqpSinkBridgeEndpoint<K, V> endpoint = (AmqpSinkBridgeEndpoint) new AmqpSinkBridgeEndpoint<>(vertx, BridgeConfig.fromMap(config),
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());
    endpoint.open();
    ProtonSender mockSender = mockSender(ProtonQoS.AT_MOST_ONCE, topic + "/group.id/blah");
    // Call handle()
    Map<Symbol, Object> filter = new HashMap<>();
    //filter.put(Symbol.getSymbol(Bridge.AMQP_PARTITION_FILTER), 0);
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_OFFSET_FILTER), 10L);
    ((Source) mockSender.getRemoteSource()).setFilter(filter);
    endpoint.handle(new AmqpEndpoint(mockSender));

    assertDetach(mockSender,
            AmqpBridge.AMQP_ERROR_NO_PARTITION_FILTER,
            "No partition filter specified");
}
 
Example #13
Source File: FlinkKafkaConsumer09.java    From flink with Apache License 2.0
/**
 * Makes sure that the ByteArrayDeserializer is registered in the Kafka properties.
 *
 * @param props The Kafka properties to register the deserializer in.
 */
private static void setDeserializer(Properties props) {
	final String deSerName = ByteArrayDeserializer.class.getName();

	Object keyDeSer = props.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
	Object valDeSer = props.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);

	if (keyDeSer != null && !keyDeSer.equals(deSerName)) {
		LOG.warn("Ignoring configured key DeSerializer ({})", ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
	}
	if (valDeSer != null && !valDeSer.equals(deSerName)) {
		LOG.warn("Ignoring configured value DeSerializer ({})", ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);
	}

	props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deSerName);
	props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deSerName);
}
 
Example #14
Source File: KafkaIOTest.java    From beam with Apache License 2.0
@Test
public void testUnboundedSourceWithExplicitPartitions() {
  int numElements = 1000;

  List<String> topics = ImmutableList.of("test");

  KafkaIO.Read<byte[], Long> reader =
      KafkaIO.<byte[], Long>read()
          .withBootstrapServers("none")
          .withTopicPartitions(ImmutableList.of(new TopicPartition("test", 5)))
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  topics, 10, numElements, OffsetResetStrategy.EARLIEST)) // 10 partitions
          .withKeyDeserializer(ByteArrayDeserializer.class)
          .withValueDeserializer(LongDeserializer.class)
          .withMaxNumRecords(numElements / 10);

  PCollection<Long> input = p.apply(reader.withoutMetadata()).apply(Values.create());

  // assert that every element is a multiple of 5.
  PAssert.that(input).satisfies(new AssertMultipleOf(5));

  PAssert.thatSingleton(input.apply(Count.globally())).isEqualTo(numElements / 10L);

  p.run();
}
 
Example #15
Source File: KafkaUnit.java    From SkaETL with Apache License 2.0
public <K, V> List<Message<K, V>> readMessages(final String topicName, final int maxPoll, final MessageExtractor<K, V> messageExtractor) {
    final Properties props = new Properties();
    props.put("bootstrap.servers", brokerString);
    props.put("group.id", "test");
    props.put("enable.auto.commit", "true");
    props.put("auto.commit.interval.ms", "1000");
    props.put("session.timeout.ms", "30000");
    props.put("key.deserializer", ByteArrayDeserializer.class.getName());
    props.put("value.deserializer", ByteArrayDeserializer.class.getName());
    props.put("max.poll.records", String.valueOf(maxPoll));
    try (final KafkaConsumer<byte[], byte[]> kafkaConsumer = new KafkaConsumer<>(props)) {
        kafkaConsumer.subscribe(Collections.singletonList(topicName));
        kafkaConsumer.poll(0); // dummy poll
        kafkaConsumer.seekToBeginning(Collections.singletonList(new TopicPartition(topicName, 0)));
        final ConsumerRecords<byte[], byte[]> records = kafkaConsumer.poll(10000);
        final List<Message<K, V>> messages = new ArrayList<>();
        for (ConsumerRecord<byte[], byte[]> record : records) {
            messages.add(messageExtractor.extract(record));
        }
        return messages;
    }
}
 
Example #16
Source File: KafkaProducerInterceptorWrapper.java    From pulsar with Apache License 2.0
static Deserializer getDeserializer(Serializer serializer) {
    if (serializer instanceof StringSerializer) {
        return new StringDeserializer();
    } else if (serializer instanceof LongSerializer) {
        return new LongDeserializer();
    } else if (serializer instanceof IntegerSerializer) {
        return new IntegerDeserializer();
    } else if (serializer instanceof DoubleSerializer) {
        return new DoubleDeserializer();
    } else if (serializer instanceof BytesSerializer) {
        return new BytesDeserializer();
    } else if (serializer instanceof ByteBufferSerializer) {
        return new ByteBufferDeserializer();
    } else if (serializer instanceof ByteArraySerializer) {
        return new ByteArrayDeserializer();
    } else {
        throw new IllegalArgumentException(serializer.getClass().getName() + " is not a valid or supported subclass of org.apache.kafka.common.serialization.Serializer.");
    }
}
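A hypothetical call site for this mapping; getDeserializer has package-private visibility, so the call below is assumed to live in the same package as the wrapper:

// Mirrors the producer side: a ByteArraySerializer maps to a ByteArrayDeserializer.
Deserializer deserializer = KafkaProducerInterceptorWrapper.getDeserializer(new ByteArraySerializer());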
 
Example #17
Source File: ConsumeKafka_0_10.java    From localization_nifi with Apache License 2.0
protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
    final int maxLeases = context.getMaxConcurrentTasks();
    final long maxUncommittedTime = context.getProperty(MAX_UNCOMMITTED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);
    final byte[] demarcator = context.getProperty(ConsumeKafka_0_10.MESSAGE_DEMARCATOR).isSet()
            ? context.getProperty(ConsumeKafka_0_10.MESSAGE_DEMARCATOR).evaluateAttributeExpressions().getValue().getBytes(StandardCharsets.UTF_8)
            : null;
    final Map<String, Object> props = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ConsumerConfig.class, props);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    final String topicListing = context.getProperty(ConsumeKafka_0_10.TOPICS).evaluateAttributeExpressions().getValue();
    final List<String> topics = new ArrayList<>();
    for (final String topic : topicListing.split(",", 100)) {
        final String trimmedName = topic.trim();
        if (!trimmedName.isEmpty()) {
            topics.add(trimmedName);
        }
    }
    final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).getValue();

    return new ConsumerPool(maxLeases, demarcator, props, topics, maxUncommittedTime, keyEncoding, securityProtocol, bootstrapServers, log);
}
 
Example #18
Source File: ConsumeKafkaTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateCustomValidatorSettings() throws Exception {
    ConsumeKafka_0_10 consumeKafka = new ConsumeKafka_0_10();
    TestRunner runner = TestRunners.newTestRunner(consumeKafka);
    runner.setProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS, "okeydokey:1234");
    runner.setProperty(ConsumeKafka_0_10.TOPICS, "foo");
    runner.setProperty(ConsumeKafka_0_10.GROUP_ID, "foo");
    runner.setProperty(ConsumeKafka_0_10.AUTO_OFFSET_RESET, ConsumeKafka_0_10.OFFSET_EARLIEST);
    runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    runner.assertValid();
    runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "Foo");
    runner.assertNotValid();
    runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    runner.assertValid();
    runner.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    runner.assertValid();
    runner.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    runner.assertNotValid();
}
 
Example #19
Source File: TestUtils.java    From uReplicator with Apache License 2.0
private static Consumer<byte[], byte[]> createConsumer(String bootstrapServer) {
  final Properties consumerProps = new Properties();
  consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
  consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG,
      "KafkaExampleConsumer");
  consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
      ByteArrayDeserializer.class.getName());
  consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
      ByteArrayDeserializer.class.getName());
  consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  // Create the consumer using props. ByteArrayDeserializer yields byte[]
  // (not Byte[]), so the consumer must be typed accordingly.
  final Consumer<byte[], byte[]> consumer =
      new KafkaConsumer<>(consumerProps);
  // Subscription is left to the caller.
  return consumer;
}
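A sketch of how the returned consumer might be used; the broker address and topic name are placeholders, and poll(Duration) assumes kafka-clients 2.0 or later:

try (Consumer<byte[], byte[]> consumer = createConsumer("localhost:9092")) {
    consumer.subscribe(Collections.singletonList("my-topic"));
    ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofSeconds(1));
    for (ConsumerRecord<byte[], byte[]> record : records) {
        // Values arrive as raw byte arrays, courtesy of ByteArrayDeserializer.
        int size = record.value() == null ? 0 : record.value().length;
        System.out.printf("offset=%d, %d value bytes%n", record.offset(), size);
    }
}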
 
Example #20
Source File: ConsumeKafka.java    From localization_nifi with Apache License 2.0
protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
    final int maxLeases = context.getMaxConcurrentTasks();
    final long maxUncommittedTime = context.getProperty(MAX_UNCOMMITTED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);
    final byte[] demarcator = context.getProperty(ConsumeKafka.MESSAGE_DEMARCATOR).isSet()
            ? context.getProperty(ConsumeKafka.MESSAGE_DEMARCATOR).evaluateAttributeExpressions().getValue().getBytes(StandardCharsets.UTF_8)
            : null;

    final Map<String, Object> props = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ConsumerConfig.class, props);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    final String topicListing = context.getProperty(ConsumeKafka.TOPICS).evaluateAttributeExpressions().getValue();
    final List<String> topics = new ArrayList<>();
    for (final String topic : topicListing.split(",", 100)) {
        final String trimmedName = topic.trim();
        if (!trimmedName.isEmpty()) {
            topics.add(trimmedName);
        }
    }
    final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).getValue();

    return new ConsumerPool(maxLeases, demarcator, props, topics, maxUncommittedTime, keyEncoding, securityProtocol, bootstrapServers, log);
}
 
Example #21
Source File: ConsumeKafkaTest.java    From localization_nifi with Apache License 2.0
@Test
public void validateCustomValidatorSettings() throws Exception {
    ConsumeKafka consumeKafka = new ConsumeKafka();
    TestRunner runner = TestRunners.newTestRunner(consumeKafka);
    runner.setProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS, "okeydokey:1234");
    runner.setProperty(ConsumeKafka.TOPICS, "foo");
    runner.setProperty(ConsumeKafka.GROUP_ID, "foo");
    runner.setProperty(ConsumeKafka.AUTO_OFFSET_RESET, ConsumeKafka.OFFSET_EARLIEST);
    runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    runner.assertValid();
    runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "Foo");
    runner.assertNotValid();
    runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    runner.assertValid();
    runner.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    runner.assertValid();
    runner.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    runner.assertNotValid();
}
 
Example #22
Source File: EventKafkaConsumer.java    From dapeng-soa with Apache License 2.0
public void init() {
    logger.info(new StringBuffer("[KafkaConsumer] [init] ")
            .append("kafkaConnect(").append(kafkaConnect)
            .append(") groupId(").append(groupId)
            .append(") topic(").append(topic).append(")").toString());

    KafkaConfigBuilder.ConsumerConfiguration builder = KafkaConfigBuilder.defaultConsumer();


    final Properties props = builder.bootstrapServers(kafkaConnect)
            .group(groupId)
            .withKeyDeserializer(LongDeserializer.class)
            .withValueDeserializer(ByteArrayDeserializer.class)
            .withOffsetCommitted("false")
            .build();

    consumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(props);
}
 
Example #23
Source File: KafkaBenchmarkDriver.java    From openmessaging-benchmark with Apache License 2.0
@Override
public void initialize(File configurationFile, StatsLogger statsLogger) throws IOException {
    config = mapper.readValue(configurationFile, Config.class);

    Properties commonProperties = new Properties();
    commonProperties.load(new StringReader(config.commonConfig));

    producerProperties = new Properties();
    commonProperties.forEach((key, value) -> producerProperties.put(key, value));
    producerProperties.load(new StringReader(config.producerConfig));
    producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());

    consumerProperties = new Properties();
    commonProperties.forEach((key, value) -> consumerProperties.put(key, value));
    consumerProperties.load(new StringReader(config.consumerConfig));
    consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

    topicProperties = new Properties();
    topicProperties.load(new StringReader(config.topicConfig));

    admin = AdminClient.create(commonProperties);

    producer = new KafkaProducer<>(producerProperties);
}
 
Example #24
Source File: NexmarkLauncher.java    From beam with Apache License 2.0
/** Return source of events from Kafka. */
private PCollection<Event> sourceEventsFromKafka(Pipeline p, final Instant now) {
  checkArgument((options.getBootstrapServers() != null), "Missing --bootstrapServers");
  NexmarkUtils.console("Reading events from Kafka Topic %s", options.getKafkaTopic());

  KafkaIO.Read<Long, byte[]> read =
      KafkaIO.<Long, byte[]>read()
          .withBootstrapServers(options.getBootstrapServers())
          .withTopic(options.getKafkaTopic())
          .withKeyDeserializer(LongDeserializer.class)
          .withValueDeserializer(ByteArrayDeserializer.class)
          .withStartReadTime(now)
          .withMaxNumRecords(
              options.getNumEvents() != null ? options.getNumEvents() : Long.MAX_VALUE);

  return p.apply(queryName + ".ReadKafkaEvents", read.withoutMetadata())
      .apply(queryName + ".KafkaToEvents", ParDo.of(BYTEARRAY_TO_EVENT));
}
 
Example #25
Source File: AmqpSinkBridgeEndpointMockTest.java    From strimzi-kafka-bridge with Apache License 2.0
@Test
public <K, V> void filters_negativeIntegerPartitionFilter() throws Exception {
    String topic = "my_topic";
    Vertx vertx = Vertx.vertx();
    AmqpSinkBridgeEndpoint<K, V> endpoint = (AmqpSinkBridgeEndpoint) new AmqpSinkBridgeEndpoint<>(vertx, BridgeConfig.fromMap(config),
            EmbeddedFormat.JSON, new StringDeserializer(), new ByteArrayDeserializer());
    endpoint.open();
    ProtonSender mockSender = mockSender(ProtonQoS.AT_MOST_ONCE, topic + "/group.id/blah");
    // Call handle()
    Map<Symbol, Object> filter = new HashMap<>();
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_PARTITION_FILTER), -1);
    filter.put(Symbol.getSymbol(AmqpBridge.AMQP_OFFSET_FILTER), 10L);
    ((Source) mockSender.getRemoteSource()).setFilter(filter);
    endpoint.handle(new AmqpEndpoint(mockSender));

    ArgumentCaptor<ErrorCondition> errorCap = ArgumentCaptor.forClass(ErrorCondition.class);
    verify(mockSender).setCondition(errorCap.capture());
    verify(mockSender).close();

    assertDetach(mockSender,
            AmqpBridge.AMQP_ERROR_WRONG_FILTER,
            "Wrong filter");
}
 
Example #26
Source File: DataLoaderConfig.java    From kafka-webview with MIT License
/**
 * Creates default message formats.
 */
private void createDefaultMessageFormats() {
    final Map<String, String> defaultFormats = new HashMap<>();
    defaultFormats.put("Short", ShortDeserializer.class.getName());
    defaultFormats.put("ByteArray", ByteArrayDeserializer.class.getName());
    defaultFormats.put("Bytes", BytesDeserializer.class.getName());
    defaultFormats.put("Double", DoubleDeserializer.class.getName());
    defaultFormats.put("Float", FloatDeserializer.class.getName());
    defaultFormats.put("Integer", IntegerDeserializer.class.getName());
    defaultFormats.put("Long", LongDeserializer.class.getName());
    defaultFormats.put("String", StringDeserializer.class.getName());
    defaultFormats.put("Bytes (Hex Encoded)", BytesToHexDeserializer.class.getName());

    // Create if needed.
    for (final Map.Entry<String, String> entry : defaultFormats.entrySet()) {
        MessageFormat messageFormat = messageFormatRepository.findByName(entry.getKey());
        if (messageFormat == null) {
            messageFormat = new MessageFormat();
        }
        messageFormat.setName(entry.getKey());
        messageFormat.setClasspath(entry.getValue());
        messageFormat.setJar("n/a");
        messageFormat.setDefaultFormat(true);
        messageFormatRepository.save(messageFormat);
    }
}
 
Example #27
Source File: Consumer.java    From ja-micro with Apache License 2.0
Consumer(Topic topic, String consumerGroupId, Properties props, PartitionProcessorFactory processorFactory) {
    this.topic = topic;
    this.consumerGroupId = consumerGroupId;

    // Mandatory settings, not changeable
    props.put("group.id", consumerGroupId);
    props.put("key.deserializer", StringDeserializer.class.getName());
    props.put("value.deserializer", ByteArrayDeserializer.class.getName());

    kafka = new KafkaConsumer<>(props);
    partitions = new AssignedPartitions(processorFactory);

    // Start the consumer loop on its own executor thread.
    consumerLoopExecutor.execute(new ConsumerLoop());
}
 
Example #28
Source File: KafkaProducerInterceptorWrapperTest.java    From pulsar with Apache License 2.0
@DataProvider(name = "serializers")
public Object[][] serializers() {
    return new Object[][] {
        {
            new StringSerializer(), StringDeserializer.class
        },
        {
            new LongSerializer(), LongDeserializer.class
        },
        {
            new IntegerSerializer(), IntegerDeserializer.class,
        },
        {
            new DoubleSerializer(), DoubleDeserializer.class,
        },
        {
            new BytesSerializer(), BytesDeserializer.class
        },
        {
            new ByteBufferSerializer(), ByteBufferDeserializer.class
        },
        {
            new ByteArraySerializer(), ByteArrayDeserializer.class
        }
    };
}
 
Example #29
Source File: KafkaSampleStore.java    From cruise-control with BSD 2-Clause "Simplified" License
protected KafkaConsumer<byte[], byte[]> createConsumer(Map<String, ?> config) {
  Properties consumerProps = new Properties();
  consumerProps.putAll(config);
  long randomToken = RANDOM.nextLong();
  String bootstrapServers = config.get(MonitorConfig.BOOTSTRAP_SERVERS_CONFIG).toString();
  // Trim the brackets in List's String representation.
  if (bootstrapServers.length() > 2) {
    bootstrapServers = bootstrapServers.substring(1, bootstrapServers.length() - 1);
  }
  consumerProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "KafkaCruiseControlSampleStore" + randomToken);
  consumerProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, CONSUMER_CLIENT_ID + randomToken);
  consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  consumerProps.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
  consumerProps.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, Integer.toString(Integer.MAX_VALUE));
  consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
  consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
  consumerProps.setProperty(ConsumerConfig.RECONNECT_BACKOFF_MS_CONFIG,
                                           config.get(MonitorConfig.RECONNECT_BACKOFF_MS_CONFIG).toString());
  return new KafkaConsumer<>(consumerProps);
}
 
Example #30
Source File: KafkaIntegrationTest.java    From extension-kafka with Apache License 2.0
@BeforeEach
void setUp() {
    producerFactory = ProducerConfigUtil.ackProducerFactory(kafkaBroker, ByteArraySerializer.class);
    publisher = KafkaPublisher.<String, byte[]>builder()
            .producerFactory(producerFactory)
            .topic("integration")
            .build();
    KafkaEventPublisher<String, byte[]> sender =
            KafkaEventPublisher.<String, byte[]>builder().kafkaPublisher(publisher).build();
    configurer.eventProcessing(
            eventProcessingConfigurer -> eventProcessingConfigurer.registerEventHandler(c -> sender)
    );

    consumerFactory = new DefaultConsumerFactory<>(minimal(kafkaBroker, ByteArrayDeserializer.class));

    fetcher = AsyncFetcher.<String, byte[], KafkaEventMessage>builder()
            .pollTimeout(300)
            .build();

    eventBus = SimpleEventBus.builder().build();
    configurer.configureEventBus(configuration -> eventBus);

    configurer.start();
}