org.apache.kafka.streams.processor.TimestampExtractor Java Examples

The following examples show how to use org.apache.kafka.streams.processor.TimestampExtractor. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: AbstractKafkaStreamsBinderProcessor.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the {@link Consumed} used when wiring a Kafka Streams input binding.
 *
 * <p>Applies the key/value serdes and offset-reset policy, and — if the consumer
 * properties name a {@code TimestampExtractor} bean — resolves that bean from the
 * application context and attaches it.
 *
 * @param kafkaStreamsConsumerProperties binding-level consumer properties (may name an extractor bean)
 * @param keySerde serde for record keys
 * @param valueSerde serde for record values
 * @param autoOffsetReset offset reset policy to apply
 * @return the fully configured {@code Consumed}
 */
private <K, V> Consumed<K, V> getConsumed(KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties,
		Serde<K> keySerde, Serde<V> valueSerde, Topology.AutoOffsetReset autoOffsetReset) {
	TimestampExtractor timestampExtractor = null;
	if (!StringUtils.isEmpty(kafkaStreamsConsumerProperties.getTimestampExtractorBeanName())) {
		timestampExtractor = applicationContext.getBean(kafkaStreamsConsumerProperties.getTimestampExtractorBeanName(),
				TimestampExtractor.class);
	}
	Consumed<K, V> consumed = Consumed.with(keySerde, valueSerde)
			.withOffsetResetPolicy(autoOffsetReset);
	if (timestampExtractor != null) {
		// Capture the fluent result instead of discarding it: relying on
		// withTimestampExtractor() mutating the receiver is an implementation
		// detail of Consumed, not part of its contract.
		consumed = consumed.withTimestampExtractor(timestampExtractor);
	}
	return consumed;
}
 
Example #2
Source File: AlarmMessageLogger.java    From phoebus with Eclipse Public License 1.0 4 votes vote down vote up
@Override
public void run() {
    logger.info("Starting the alarm messages stream consumer for " + topic);

    // Streams configuration: start from the shared property file, then derive a
    // per-topic application id and default the bootstrap servers if absent.
    Properties props = new Properties();
    props.putAll(PropertiesHelper.getProperties());
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-"+topic+"-alarm-messages");

    if (!props.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    }


    // NOTE(review): Integer.parseInt throws (and date_span_value == null yields an
    // NPE) when the property is missing — confirm the property file always defines
    // date_span_units / date_span_value, or add a guarded default here.
    final String indexDateSpanUnits = props.getProperty("date_span_units");
    final Integer indexDateSpanValue = Integer.parseInt(props.getProperty("date_span_value"));

    try {
        stateIndexNameHelper = new IndexNameHelper(topic + STATE_INDEX_FORMAT, indexDateSpanUnits, indexDateSpanValue);
        configIndexNameHelper = new IndexNameHelper(topic + CONFIG_INDEX_FORMAT, indexDateSpanUnits, indexDateSpanValue);
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Time based index creation failed.", ex);
    }

    // Attach a message time stamp: use the Kafka record's own timestamp rather
    // than the default extractor.
    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, AlarmMessage> alarms = builder.stream(topic,
            Consumed.with(Serdes.String(), alarmMessageSerde).withTimestampExtractor(new TimestampExtractor() {

                @Override
                public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) {
                    return record.timestamp();
                }
            }));

    // Drop tombstone/null payloads before any per-record processing.
    alarms = alarms.filter((k, v) -> {
        return v != null;
    });

    alarms = alarms.map((key, value) -> {
        // Bug fix: the original concatenation bound "+" tighter than "!= null"
        // ("..." + key != null ? ... ), so the condition was always true and the
        // message collapsed to just the key. Parenthesize each ternary so the
        // intended "key value" text is actually logged.
        logger.config("Processing alarm message with key : " + (key != null ? key : "null")
                + " " + (value != null ? value.toString() : "null"));
        value.setKey(key);
        return new KeyValue<String, AlarmMessage>(key, value);
    });

    // Split the stream by key prefix: [0] = state updates, [1] = config updates;
    // the final always-false predicate discards everything else.
    @SuppressWarnings("unchecked")
    KStream<String, AlarmMessage>[] alarmBranches = alarms.branch((k,v) -> k.startsWith("state"),
                                                                  (k,v) -> k.startsWith("config"),
                                                                  (k,v) -> false
                                                                 );

    processAlarmStateStream(alarmBranches[0], props);
    processAlarmConfigurationStream(alarmBranches[1], props);

    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    final CountDownLatch latch = new CountDownLatch(1);

    // Attach shutdown handler to catch control-c and close the topology cleanly.
    // NOTE(review): close(long, TimeUnit) is deprecated in newer Kafka clients in
    // favor of close(Duration) — switch when the client version allows.
    Runtime.getRuntime().addShutdownHook(new Thread("streams-"+topic+"-alarm-messages-shutdown-hook") {
        @Override
        public void run() {
            streams.close(10, TimeUnit.SECONDS);
            System.out.println("\nShutting streams Done.");
            latch.countDown();
        }
    });

    try {
        streams.start();
        latch.await();
    } catch (Throwable e) {
        // Log before exiting so startup failures are not silently swallowed.
        logger.log(Level.SEVERE, "Alarm message stream failed.", e);
        System.exit(1);
    }
    System.exit(0);
}
 
Example #3
Source File: StreamToGlobalKTableFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies that each of the three input bindings of {@code forTimeExtractorTest}
 * is configured with a {@code timestampExtractorBeanName} that resolves to an
 * actual {@link TimestampExtractor} bean in the application context.
 */
@Test
public void testTimeExtractor() throws Exception {
	SpringApplication app = new SpringApplication(OrderEnricherApplication.class);
	app.setWebApplicationType(WebApplicationType.NONE);

	try (ConfigurableApplicationContext context = app.run(
			"--server.port=0",
			"--spring.jmx.enabled=false",
			"--spring.cloud.stream.function.definition=forTimeExtractorTest",
			"--spring.cloud.stream.bindings.forTimeExtractorTest-in-0.destination=orders",
			"--spring.cloud.stream.bindings.forTimeExtractorTest-in-1.destination=customers",
			"--spring.cloud.stream.bindings.forTimeExtractorTest-in-2.destination=products",
			"--spring.cloud.stream.bindings.forTimeExtractorTest-out-0.destination=enriched-order",
			"--spring.cloud.stream.kafka.streams.bindings.forTimeExtractorTest-in-0.consumer.timestampExtractorBeanName" +
					"=timestampExtractor",
			"--spring.cloud.stream.kafka.streams.bindings.forTimeExtractorTest-in-1.consumer.timestampExtractorBeanName" +
					"=timestampExtractor",
			"--spring.cloud.stream.kafka.streams.bindings.forTimeExtractorTest-in-2.consumer.timestampExtractorBeanName" +
					"=timestampExtractor",
			"--spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde" +
					"=org.apache.kafka.common.serialization.Serdes$StringSerde",
			"--spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde" +
					"=org.apache.kafka.common.serialization.Serdes$StringSerde",
			"--spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms=10000",
			"--spring.cloud.stream.kafka.streams.bindings.order.consumer.applicationId=" +
					"testTimeExtractor-abc",
			"--spring.cloud.stream.kafka.streams.binder.brokers=" + embeddedKafka.getBrokersAsString())) {

		final KafkaStreamsExtendedBindingProperties kafkaStreamsExtendedBindingProperties =
				context.getBean(KafkaStreamsExtendedBindingProperties.class);

		final Map<String, KafkaStreamsBindingProperties> bindings = kafkaStreamsExtendedBindingProperties.getBindings();

		// The three inputs are wired identically; check each in turn instead of
		// repeating the same four-line stanza three times.
		for (int i = 0; i < 3; i++) {
			final KafkaStreamsBindingProperties bindingProperties = bindings.get("forTimeExtractorTest-in-" + i);
			final String timestampExtractorBeanName = bindingProperties.getConsumer().getTimestampExtractorBeanName();
			final TimestampExtractor timestampExtractor = context.getBean(timestampExtractorBeanName, TimestampExtractor.class);
			assertThat(timestampExtractor).isNotNull();
		}
	}
}
 
Example #4
Source File: StreamToGlobalKTableFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 4 votes vote down vote up
/**
 * Registers the {@link TimestampExtractor} bean referenced by the
 * {@code timestampExtractorBeanName} consumer properties in the tests.
 */
@Bean
public TimestampExtractor timestampExtractor() {
	// Kafka's wall-clock extractor: stamps records with processing time.
	final WallclockTimestampExtractor wallclockExtractor = new WallclockTimestampExtractor();
	return wallclockExtractor;
}