Java Code Examples for org.apache.kafka.streams.kstream.KStream#process()

The following examples show how to use org.apache.kafka.streams.kstream.KStream#process(). Each example is an excerpt from an open-source project; the source file, project, and license are listed above the code.
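
Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all share. It is not taken from any of the projects below: the topic name, store name, and application id are illustrative placeholders, and it uses the classic Processor/ProcessorContext API (deprecated in recent Kafka Streams releases in favor of the typed api.Processor) that the examples on this page also target. KStream#process() is a terminal operation: the supplied Processor consumes each record for its side effects, optionally reading and writing the state stores whose names are passed in the call.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.Stores;

public class KStreamProcessSketch {

    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();

        // Register a state store so the Processor can request it by name in init().
        builder.addStateStore(Stores.keyValueStoreBuilder(
                Stores.inMemoryKeyValueStore("counts"),
                Serdes.String(), Serdes.Long()));

        KStream<String, String> input =
                builder.stream("input-topic", Consumed.with(Serdes.String(), Serdes.String()));

        // process() terminates the stream: the Processor sees every record and, here,
        // counts occurrences per key in the "counts" store.
        input.process(() -> new Processor<String, String>() {

            private KeyValueStore<String, Long> counts;

            @Override
            @SuppressWarnings("unchecked")
            public void init(ProcessorContext context) {
                counts = (KeyValueStore<String, Long>) context.getStateStore("counts");
            }

            @Override
            public void process(String key, String value) {
                Long current = counts.get(key);
                counts.put(key, current == null ? 1L : current + 1);
            }

            @Override
            public void close() {
                // Nothing to release; the store lifecycle is managed by Kafka Streams.
            }
        }, "counts");

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kstream-process-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
    }
}

Because process() returns void, nothing can be chained after it; where a downstream stream is still needed, the examples below use transform()/transformValues() instead (see Examples 8 and 13).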
Example 1
Source File: KafkaStreamsStateStoreIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener("input")
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000, retentionMs = 300000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void process(KStream<Object, Product> input) {

	input.process(() -> new Processor<Object, Product>() {

		@Override
		public void init(ProcessorContext processorContext) {
			state = (WindowStore) processorContext.getStateStore("mystate");
		}

		@Override
		public void process(Object s, Product product) {
			processed = true;
		}

		@Override
		public void close() {
			if (state != null) {
				state.close();
			}
		}
	}, "mystate");
}
 
Example 2
Source File: KafkaStreamsStateStoreIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener("input3")
@SuppressWarnings({"unchecked" })
public void process(KStream<Object, Product> input) {

	input.process(() -> new Processor<Object, Product>() {

		@Override
		public void init(ProcessorContext processorContext) {
			state = (WindowStore) processorContext.getStateStore("mystate");
		}

		@Override
		public void process(Object s, Product product) {
			processed = true;
		}

		@Override
		public void close() {
			if (state != null) {
				state.close();
			}
		}
	}, "mystate");
}
 
Example 3
Source File: ProcessStreamService.java    From SkaETL with Apache License 2.0
public void createStreamEs(String inputTopic, ParameterOutput parameterOutput) {

        StreamsBuilder builder = new StreamsBuilder();

        KStream<String, JsonNode> streamToES = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
        streamToES.process(() -> applicationContext.getBean(JsonNodeToElasticSearchProcessor.class, parameterOutput.getElasticsearchRetentionLevel(), parameterOutput.getIndexShape()));

        KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.ES_PROCESS, getBootstrapServer()));
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
        addStreams(streams);
    }
 
Example 4
Source File: ErrorImporter.java    From SkaETL with Apache License 2.0
public void activate() {
    log.info("Activating error importer");
    StreamsBuilder builder = new StreamsBuilder();
    final Serde<ErrorData> errorDataSerde = Serdes.serdeFrom(new GenericSerializer<>(), new GenericDeserializer<>(ErrorData.class));

    KStream<String, ErrorData> streamToES = builder.stream(kafkaConfiguration.getErrorTopic(), Consumed.with(Serdes.String(), errorDataSerde));

    streamToES.process(() -> elasticsearchProcessor);

    errorStream = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(INPUT_PROCESS_ERROR, kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(errorStream::close));

    errorStream.start();
}
 
Example 5
Source File: RetryImporter.java    From SkaETL with Apache License 2.0
public void activate() {
    log.info("Activating retry importer");
    StreamsBuilder builder = new StreamsBuilder();
    final Serde<ValidateData> validateDataSerdes = Serdes.serdeFrom(new ValidateDataSerializer(), new ValidateDataDeserializer());

    KStream<String, ValidateData> streamToES = builder.stream(kafkaConfiguration.getRetryTopic(), Consumed.with(Serdes.String(), validateDataSerdes));
    streamToES.process(() -> elasticSearchProcessor);

    retryStream = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(INPUT_PROCESS_RETRY, kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(retryStream::close));
    retryStream.start();
}
 
Example 6
Source File: KafkaStreamsStateStoreIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener
@KafkaStreamsStateStore(name = "mystate", type = KafkaStreamsStateStoreProperties.StoreType.WINDOW, lengthMs = 300000, retentionMs = 300000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void process(@Input("input1")KStream<Object, Product> input, @Input("input2")KStream<Object, Product> input2) {

	input.process(() -> new Processor<Object, Product>() {

		@Override
		public void init(ProcessorContext processorContext) {
			state = (WindowStore) processorContext.getStateStore("mystate");
		}

		@Override
		public void process(Object s, Product product) {
			processed = true;
		}

		@Override
		public void close() {
			if (state != null) {
				state.close();
			}
		}
	}, "mystate");

	// Simple use of input2: we are not using it for anything other than triggering some test behavior.
	input2.foreach((key, value) -> { });
}
 
Example 7
Source File: CommandProcessor.java    From cqrs-manager-for-distributed-reactive-services with Apache License 2.0
public void start() {
    KStreamBuilder builder = new KStreamBuilder();

    Serde<UUID> keySerde = new FressianSerde();
    Serde<Map> valSerde = new FressianSerde();

    KStream<UUID, Map> commands = builder.stream(keySerde, valSerde, commandsTopic);
    KStream<UUID, Map> customerEvents = commands
            .filter((id, command) -> command.get(new Keyword("action")).equals(new Keyword("create-customer")))
            .map((id, command) -> {
                logger.debug("Command received");
                Map userEvent = new HashMap(command);
                userEvent.put(new Keyword("action"), new Keyword("customer-created"));
                userEvent.put(new Keyword("parent"), id);
                Map userValue = (Map) userEvent.get(new Keyword("data"));
                userValue.put(new Keyword("id"), UUID.randomUUID());
                return new KeyValue<>(UUID.randomUUID(), userEvent);
    }).through(keySerde, valSerde, eventsTopic);

    KStream<UUID, Map> customers = customerEvents
            .map((id, event) -> {
                Map customer = (Map) event.get(new Keyword("data"));
                UUID customerId = (UUID) customer.get(new Keyword("id"));
                return new KeyValue<UUID, Map>(customerId, customer);
            });

    customers.through(keySerde, valSerde, customersTopic);

    StateStoreSupplier store = Stores.create("Customers")
            .withKeys(keySerde)
            .withValues(valSerde)
            .persistent()
            .build();
    builder.addStateStore(store);

    customers.process(customerStore, "Customers");

    this.kafkaStreams = new KafkaStreams(builder, kafkaStreamsConfig);
    this.kafkaStreams.start();
}
 
Example 8
Source File: StreamsTopologyProvider.java    From apicurio-registry with Apache License 2.0
@Override
public Topology get() {
    StreamsBuilder builder = new StreamsBuilder();

    // Simple defaults
    ImmutableMap<String, String> configuration = ImmutableMap.of(
        TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT,
        TopicConfig.MIN_COMPACTION_LAG_MS_CONFIG, "0",
        TopicConfig.SEGMENT_BYTES_CONFIG, String.valueOf(64 * 1024 * 1024)
    );

    // Input topic -- storage topic
    // This is where we handle "http" requests
    // Key is artifactId -- which is also used for KeyValue store key
    KStream<String, Str.StorageValue> storageRequest = builder.stream(
        properties.getStorageTopic(),
        Consumed.with(Serdes.String(), ProtoSerde.parsedWith(Str.StorageValue.parser()))
    );

    // Data structure holds all artifact information
    // Global rules are Data as well, with constant artifactId (GLOBAL_RULES variable)
    String storageStoreName = properties.getStorageStoreName();
    StoreBuilder<KeyValueStore<String /* artifactId */, Str.Data>> storageStoreBuilder =
        Stores
            .keyValueStoreBuilder(
                Stores.inMemoryKeyValueStore(storageStoreName),
                Serdes.String(), ProtoSerde.parsedWith(Str.Data.parser())
            )
            .withCachingEnabled()
            .withLoggingEnabled(configuration);

    builder.addStateStore(storageStoreBuilder);

    // We transform <artifactId, Data> into simple mapping <globalId, <artifactId, version>>
    KStream<Long, Str.TupleValue> globalRequest =
        storageRequest.transform(
            () -> new StorageTransformer(properties, dataDispatcher, factory),
            storageStoreName
        ).through(
            properties.getGlobalIdTopic(),
            Produced.with(Serdes.Long(), ProtoSerde.parsedWith(Str.TupleValue.parser()))
        );

    String globalIdStoreName = properties.getGlobalIdStoreName();
    StoreBuilder<KeyValueStore<Long /* globalId */, Str.TupleValue>> globalIdStoreBuilder =
        Stores
            .keyValueStoreBuilder(
                Stores.inMemoryKeyValueStore(globalIdStoreName),
                Serdes.Long(), ProtoSerde.parsedWith(Str.TupleValue.parser())
            )
            .withCachingEnabled()
            .withLoggingEnabled(configuration);

    builder.addStateStore(globalIdStoreBuilder);

    // Just handle globalId mapping -- put or delete
    globalRequest.process(() -> new GlobalIdProcessor(globalIdStoreName), globalIdStoreName);

    return builder.build(properties.getProperties());
}
 
Example 9
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void toElasticsearch(KStream<String, JsonNode> result, ParameterOutput parameterOutput, Class<? extends AbstractElasticsearchProcessor> toElasticsearchProcessorClass) {
    result.process(() -> applicationContext.getBean(toElasticsearchProcessorClass, parameterOutput.getElasticsearchRetentionLevel()));
}
 
Example 10
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void toSystemOut(KStream<String, JsonNode> result) {
    result.process(() -> new LoggingProcessor<>());
}
 
Example 11
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void toSlack(KStream<String, JsonNode> result, ParameterOutput parameterOutput) {
    result.process(() -> new JsonNodeSlackProcessor(parameterOutput.getWebHookURL(), parameterOutput.getTemplate()));
}
 
Example 12
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void toSnmp(KStream<String, JsonNode> result, ParameterOutput parameterOutput) {
    result.process(() -> new JsonNodeSnmpProcessor(applicationContext.getBean(SnmpService.class)));
}
 
Example 13
Source File: PregelComputation.java    From kafka-graphs with Apache License 2.0
public void prepare(StreamsBuilder builder, Properties streamsConfig) {
    Properties producerConfig = ClientUtils.producerConfig(
        bootstrapServers, serialized.keySerde().serializer().getClass(), KryoSerializer.class,
        streamsConfig != null ? streamsConfig : new Properties()
    );
    producerConfig.setProperty(ProducerConfig.CLIENT_ID_CONFIG, applicationId + "-producer");
    this.producer = new KafkaProducer<>(producerConfig);

    final StoreBuilder<KeyValueStore<Integer, Map<K, Map<K, List<Message>>>>> workSetStoreBuilder =
        Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(localworkSetStoreName),
            Serdes.Integer(), new KryoSerde<>()
        );
    builder.addStateStore(workSetStoreBuilder);

    final StoreBuilder<KeyValueStore<K, Tuple4<Integer, VV, Integer, VV>>> solutionSetStoreBuilder =
        Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(localSolutionSetStoreName),
            serialized.keySerde(), new KryoSerde<>()
        );
    builder.addStateStore(solutionSetStoreBuilder);

    this.vertices = builder
        .table(
            verticesTopic,
            Materialized.<K, VV, KeyValueStore<Bytes, byte[]>>as(verticesStoreName)
                .withKeySerde(serialized.keySerde()).withValueSerde(serialized.vertexValueSerde())
        );

    this.edgesGroupedBySource = builder
        .table(
            edgesGroupedBySourceTopic,
            Materialized.<K, Map<K, EV>, KeyValueStore<Bytes, byte[]>>as(edgesStoreName)
                .withKeySerde(serialized.keySerde()).withValueSerde(new KryoSerde<>())
        );

    this.solutionSet = builder
        .table(solutionSetTopic, Consumed.<K, Tuple4<Integer, VV, Integer, VV>>with(serialized.keySerde(), new KryoSerde<>()))
        .mapValues(v -> v._4, Materialized.as(solutionSetStore));

    // Initialize solution set
    this.vertices
        .toStream()
        .mapValues(v -> new Tuple4<>(-1, v, 0, v))
        .to(solutionSetTopic, Produced.with(serialized.keySerde(), new KryoSerde<>()));

    // Initialize workset
    this.vertices
        .toStream()
        .peek((k, v) -> {
            try {
                int partition = PregelComputation.vertexToPartition(k, serialized.keySerde().serializer(), numPartitions);
                ZKUtils.addChild(curator, applicationId, new PregelState(State.CREATED, 0, Stage.SEND), childPath(partition));
            } catch (Exception e) {
                throw toRuntimeException(e);
            }

        })
        .mapValues((k, v) -> new Tuple3<>(0, k, initialMessage.map(Collections::singletonList).orElse(Collections.emptyList())))
        .peek((k, v) -> log.trace("workset 0 before topic: (" + k + ", " + v + ")"))
        .<K, Tuple3<Integer, K, List<Message>>>to(workSetTopic, Produced.with(serialized.keySerde(), new KryoSerde<>()));

    this.workSet = builder
        .stream(workSetTopic, Consumed.with(serialized.keySerde(), new KryoSerde<Tuple3<Integer, K, List<Message>>>()))
        .peek((k, v) -> log.trace("workset 1 after topic: (" + k + ", " + v + ")"));

    KStream<K, Tuple2<Integer, Map<K, List<Message>>>> syncedWorkSet = workSet
        .transform(BarrierSync::new, localworkSetStoreName)
        .peek((k, v) -> log.trace("workset 2 after join: (" + k + ", " + v + ")"));

    KStream<K, Tuple3<Integer, Tuple4<Integer, VV, Integer, VV>, Map<K, List<Message>>>> superstepComputation =
        syncedWorkSet
            .transformValues(VertexComputeUdf::new, localSolutionSetStoreName, vertices.queryableStoreName(),
                edgesGroupedBySource.queryableStoreName());

    // Compute the solution set delta
    KStream<K, Tuple4<Integer, VV, Integer, VV>> solutionSetDelta = superstepComputation
        .flatMapValues(v -> v._2 != null ? Collections.singletonList(v._2) : Collections.emptyList())
        .peek((k, v) -> log.trace("solution set: (" + k + ", " + v + ")"));

    solutionSetDelta
        .to(solutionSetTopic, Produced.with(serialized.keySerde(), new KryoSerde<>()));

    // Compute the inbox of each vertex for the next step (new workset)
    KStream<K, Tuple2<Integer, Map<K, List<Message>>>> newworkSet = superstepComputation
        .mapValues(v -> new Tuple2<>(v._1, v._3))
        .peek((k, v) -> log.trace("workset new: (" + k + ", " + v + ")"));

    newworkSet.process(() -> new SendMessages(producer));
}
 
Example 14
Source File: ReferentialImporter.java    From SkaETL with Apache License 2.0
private void toEmail(KStream<String, JsonNode> result, ParameterOutput parameterOutput) {
    String email = parameterOutput.getEmail();
    String template = parameterOutput.getTemplate();

    result.process(() -> new JsonNodeEmailProcessor(email, template, applicationContext.getBean(EmailService.class)));
}