Java Code Examples for org.apache.kafka.common.serialization.Serdes

The following examples show how to use org.apache.kafka.common.serialization.Serdes. They are extracted from open source projects; where available, the source project, source file, and license are noted above each example.
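As a quick orientation before the examples (a minimal standalone sketch, not taken from any of the projects below): Serdes is a factory of Serde&lt;T&gt; instances, each of which bundles a matching Serializer&lt;T&gt; and Deserializer&lt;T&gt;. The snippet round-trips a value through the built-in Long serde; the topic name passed to serialize/deserialize is a placeholder, since the built-in serdes ignore it.

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

public class SerdeRoundTrip {
    public static void main(String[] args) {
        // Serdes also provides String(), Integer(), ByteArray(), UUID(), and more.
        try (Serde<Long> longSerde = Serdes.Long()) {
            byte[] bytes = longSerde.serializer().serialize("any-topic", 42L);
            Long roundTripped = longSerde.deserializer().deserialize("any-topic", bytes);
            System.out.println(roundTripped); // prints 42
        }
    }
}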
Example 1
Source Project: fluent-kafka-streams-tests   Source File: NameJoinGlobalKTable.java    License: MIT License
public Topology getTopologyWithIntermediateTopic() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> inputStream =
            builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long()));

    builder.stream(NAME_INPUT, Consumed.with(Serdes.Long(), Serdes.String()))
            .mapValues(name -> name.toUpperCase())
            .to(INTERMEDIATE_TOPIC);

    final GlobalKTable<Long, String> joinTable = builder.globalTable(INTERMEDIATE_TOPIC);

    inputStream
            .join(joinTable,
                    (id, valueId) -> valueId,
                    (id, name) -> name)
            .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String()));

    return builder.build();
}
 
Example 2
@Produces
@ApplicationScoped
public ReadOnlyKeyValueStore<Long, Str.TupleValue> globalIdKeyValueStore(
    KafkaStreams streams,
    HostInfo storageLocalHost,
    StreamsProperties properties
) {
    return new DistributedReadOnlyKeyValueStore<>(
        streams,
        storageLocalHost,
        properties.getGlobalIdStoreName(),
        Serdes.Long(), ProtoSerde.parsedWith(Str.TupleValue.parser()),
        new DefaultGrpcChannelProvider(),
        true,
        (filter, over, id, tuple) -> true
    );
}
 
Example 3
Source Project: football-events   Source File: PlayerCommandConnector.java    License: MIT License
public void build(StreamsBuilder builder) {
    KStream<byte[], JsonNode> playerSourceStream = builder.stream(
            CONNECT_PLAYERS_TOPIC, Consumed.with(Serdes.ByteArray(), new JsonNodeSerde()))
            .filter((id, json) -> creationOrSnapshot(json));

    playerSourceStream.foreach(this::debug);

    KStream<String, PlayerStartedCareer> playerReadyStream = playerSourceStream
            .map((id, json) -> {
                PlayerStartedCareer event = createEvent(json);
                return KeyValue.pair(event.getAggId(), event);
            });

    playerReadyStream.to(PLAYER_STARTED_CAREER_TOPIC, Produced.with(
            Serdes.String(), new JsonPojoSerde<>(PlayerStartedCareer.class)));
}
 
Example 4
static Properties getStreamConfiguration(String bootstrapServers, String applicationId) {
	final Properties streamsConfiguration = new Properties();
	// Give the Streams application a unique name. The name must be unique
	// in the Kafka cluster against which the application is run.
	streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
	// Where to find Kafka broker(s).
	streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

	// Specify default (de)serializers for record keys and for record values.
	streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
	streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

	// For illustrative purposes we disable record caches
	streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
	return streamsConfiguration;
}
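For context, here is a sketch of how this configuration might be wired up (hypothetical usage, not part of the original example; the bootstrap servers and application id are placeholder values, and the topology is assumed to be defined on the builder):

Properties config = getStreamConfiguration("localhost:9092", "my-streams-app"); // placeholder values
StreamsBuilder builder = new StreamsBuilder();
// ... define the topology on the builder ...
KafkaStreams streams = new KafkaStreams(builder.build(), config);
streams.start();
Runtime.getRuntime().addShutdownHook(new Thread(streams::close));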
 
Example 5
Source Project: SkaETL   Source File: ProcessStreamService.java    License: Apache License 2.0
public void createStreamEmail(String inputTopic, ParameterOutput parameterOutput) {
    String email = parameterOutput.getEmail();
    if (email != null) {
        String template = parameterOutput.getTemplate();
        StreamsBuilder builder = new StreamsBuilder();

        if (template != null)
            builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new JsonNodeEmailProcessor(email, template, emailService));
        else
            builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde())).process(() -> new JsonNodeEmailProcessor(email, emailService));

        KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.EMAIL_PROCESS, getBootstrapServer()));
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
        addStreams(streams);
    } else {
        log.error("destination email is null; this should not happen");
    }
}
 
Example 6
Source Project: fluent-kafka-streams-tests   Source File: WordCount.java    License: MIT License
public Topology getTopology() {
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(this.inputTopic);

    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
            .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
            .groupBy((key, word) -> word)
            .count(Materialized.as("count"));

    wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde));
    return builder.build();
}
 
Example 7
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "KTable-aggregations");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "KTable-aggregations-id");
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "KTable-aggregations-client");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "30000");
    props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "10000");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "1");
    props.put(ConsumerConfig.METADATA_MAX_AGE_CONFIG, "10000");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
    return props;
}
 
Example 8
@Test
public void testGroupByKey() {
  String selectQuery = "SELECT col0, col1 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream,
      ksqlStream.getKeyField(), new ArrayList<>(),
      SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());

  Expression keyExpression = new DereferenceExpression(
      new QualifiedNameReference(QualifiedName.of("TEST1")), "COL0");
  KsqlTopicSerDe ksqlTopicSerDe = new KsqlJsonTopicSerDe();
  Serde<GenericRow> rowSerde = ksqlTopicSerDe.getGenericRowSerde(
      initialSchemaKStream.getSchema(), null, false, null);
  List<Expression> groupByExpressions = Arrays.asList(keyExpression);
  SchemaKGroupedStream groupedSchemaKStream = initialSchemaKStream.groupBy(
      Serdes.String(), rowSerde, groupByExpressions);

  Assert.assertEquals(groupedSchemaKStream.getKeyField().name(), "COL0");
}
 
Example 9
Source Project: kafka-graphs   Source File: StreamUtilsTest.java    License: Apache License 2.0
@Test
public void testCollectionToStream() throws Exception {
    Collection<KeyValue<Integer, Integer>> input = new ArrayList<>();
    for (Integer i : LEFT_INPUT) {
        input.add(new KeyValue<>(i, i));
    }
    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, Integer> stream = StreamUtils.streamFromCollection(
        builder, PRODUCER_CONFIG, LEFT_INPUT_TOPIC, 50, (short) 1,
        Serdes.Integer(), Serdes.Integer(),
        input);
    stream.to(OUTPUT_TOPIC);

    startStreams(builder, Serdes.Integer(), Serdes.Integer());

    Thread.sleep(1000);

    List<KeyValue<Integer, Integer>> records = consumeData(
        OUTPUT_TOPIC, IntegerDeserializer.class, IntegerDeserializer.class, 26, 10000L);
    for (KeyValue<Integer, Integer> record : records) {
        assertEquals(record.key, record.value);
    }

    streams.close();
}
 
Example 10
Source Project: kiqr   Source File: MainVerticle.java    License: Apache License 2.0
@Override
public void start(Future<Void> startFuture) throws Exception {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kiqr");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
    props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.LongSerde.class);
    props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, "0");

    KStreamBuilder builder = new KStreamBuilder();
    KTable<String, Long> table = builder.table(Serdes.String(), Serdes.Long(), "visits", "visitStore");
    KTable<Windowed<String>, Long> windowedCount = table.toStream().groupByKey().count(TimeWindows.of(60), "visitCount");

    vertx.deployVerticle(RestKiqrServerVerticle.Builder.serverBuilder(builder, props).withPort(2901).build(), res -> {
        if (res.succeeded()) {
            startFuture.complete();
        } else {
            startFuture.fail(res.cause());
        }
    });
}
 
Example 11
Source Project: spring_io_2019   Source File: ScsApplication.java    License: Apache License 2.0
@StreamListener
@SendTo(Bindings.RATED_MOVIES)
KStream<Long, RatedMovie> rateMoviesFor(@Input(Bindings.AVG_TABLE) KTable<Long, Double> ratings,
                                        @Input(Bindings.MOVIES) KTable<Long, Movie> movies) {

  ValueJoiner<Movie, Double, RatedMovie> joiner = (movie, rating) ->
      new RatedMovie(movie.getMovieId(), movie.getReleaseYear(), movie.getTitle(), rating);

  return movies
      .join(ratings, joiner, Materialized
          .<Long, RatedMovie, KeyValueStore<Bytes, byte[]>>as(Bindings.RATED_MOVIES_STORE)
          .withKeySerde(Serdes.Long())
          .withValueSerde(new JsonSerde<>(RatedMovie.class)))
      .toStream();
}
 
Example 12
Source Project: kafka-tutorials   Source File: AggregatingCount.java    License: Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      // Set key to title and value to ticket value
      .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
      // Group by title
      .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
      // Apply COUNT method
      .count()
      // Write to stream specified by outputTopic
      .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

  return builder.build();
}
 
Example 13
private boolean isSerdeFromStandardDefaults(Serde<?> serde) {
	if (serde != null) {
		if (Number.class.isAssignableFrom(serde.getClass())) {
			return true;
		}
		else if (Serdes.ByteArray().getClass().isAssignableFrom(serde.getClass())) {
			return true;
		}
		else if (Serdes.String().getClass().isAssignableFrom(serde.getClass())) {
			return true;
		}
		else if (Serdes.UUID().getClass().isAssignableFrom(serde.getClass())) {
			return true;
		}
	}
	return false;
}
 
Example 14
@SuppressWarnings("unchecked")
public SchemaKTable aggregate(
    final Initializer initializer,
    final UdafAggregator aggregator,
    final WindowExpression windowExpression,
    final Serde<GenericRow> topicValueSerDe) {
  final KTable aggKtable;
  if (windowExpression != null) {
    final Materialized<String, GenericRow, ?> materialized
        = Materialized.<String, GenericRow, WindowStore<Bytes, byte[]>>with(
            Serdes.String(), topicValueSerDe);

    final KsqlWindowExpression ksqlWindowExpression = windowExpression.getKsqlWindowExpression();
    aggKtable = ksqlWindowExpression.applyAggregate(
        kgroupedStream,
        initializer,
        aggregator,
        materialized
    );
  } else {
    aggKtable = kgroupedStream.aggregate(
        initializer,
        aggregator,
        Materialized.with(Serdes.String(), topicValueSerDe)
    );
  }
  return new SchemaKTable(
      schema,
      aggKtable,
      keyField,
      sourceSchemaKStreams,
      windowExpression != null,
      SchemaKStream.Type.AGGREGATE,
      functionRegistry,
      schemaRegistryClient
  );

}
 
Example 15
Source Project: kafka-tutorials   Source File: RunningAverage.java    License: Apache License 2.0
private Topology buildTopology(StreamsBuilder bldr,
                               Properties envProps) {

  final String ratingTopicName = envProps.getProperty("input.ratings.topic.name");
  final String avgRatingsTopicName = envProps.getProperty("output.rating-averages.topic.name");

  KStream<Long, Rating> ratingStream = bldr.stream(ratingTopicName,
                                                   Consumed.with(Serdes.Long(), getRatingSerde(envProps)));

  getRatingAverageTable(ratingStream, avgRatingsTopicName, getCountAndSumSerde(envProps));

  // finish the topology
  return bldr.build();
}
 
Example 16
Source Project: kafka-tutorials   Source File: FilterEvents.java    License: Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<Publication> publicationSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), publicationSerde))
      .filter((name, publication) -> "George R. R. Martin".equals(publication.getName()))
      .to(outputTopic, Produced.with(Serdes.String(), publicationSerde));

  return builder.build();
}
 
Example 17
@StreamListener( "input" )
public void process( KStream<Object, byte[]> input ) {
    log.debug( "process : enter" );

    input
            .map( (key, value) -> {

                try {

                    DomainEvent domainEvent = mapper.readValue( value, DomainEvent.class );
                    log.debug( "process : domainEvent=" + domainEvent );

                    return new KeyValue<>( domainEvent.getBoardUuid().toString(), domainEvent );

                } catch( IOException e ) {
                    log.error( "process : error converting json to DomainEvent", e );
                }

                return null;
            })
            .groupBy( (s, domainEvent) -> s, Serialized.with( Serdes.String(), domainEventSerde ) )
            .aggregate(
                    Board::new,
                    (key, domainEvent, board) -> board.handleEvent( domainEvent ),
                    Materialized.<String, Board, KeyValueStore<Bytes, byte[]>>as( BOARD_EVENTS_SNAPSHOTS )
                        .withKeySerde( Serdes.String() )
                        .withValueSerde( boardSerde )
            );

    log.debug( "process : exit" );
}
 
Example 18
Source Project: simplesource   Source File: JsonCommandSerdes.java    License: Apache License 2.0
public JsonCommandSerdes(
        final GenericMapper<K, JsonElement> keyMapper,
        final GenericMapper<C, JsonElement> commandMapper) {

    super(keyMapper, commandMapper);
    serde = Serdes.String();

    final GsonBuilder gsonBuilder = new GsonBuilder();
    gsonBuilder.registerTypeAdapter(CommandRequest.class, new CommandRequestAdapter());
    gsonBuilder.registerTypeAdapter(CommandId.class, new CommandIdAdapter());
    gsonBuilder.registerTypeAdapter(CommandResponse.class, new CommandResponseAdapter());
    gson = gsonBuilder.create();
    parser = new JsonParser();

    ak = GenericSerde.of(serde,
            k -> keyMapper.toGeneric(k).toString(),
            s -> keyMapper.fromGeneric(parser.parse(s)));
    cr = GenericSerde.of(serde,
            gson::toJson,
            s -> gson.fromJson(s, new TypeToken<CommandRequest<K, C>>() {
            }.getType()));
    crk = GenericSerde.of(serde,
            gson::toJson,
            s -> gson.fromJson(s, new TypeToken<CommandId>() {
            }.getType()));
    cr2 = GenericSerde.of(serde,
            gson::toJson,
            s -> gson.fromJson(s, new TypeToken<CommandResponse>() {
            }.getType()));
}
 
Example 19
Source Project: fluent-kafka-streams-tests   Source File: WordCountTest.java    License: MIT License
@Test
public void shouldReturnCorrectIteratorExplicitTable() {
    this.testTopology.input().add("bla")
            .add("blub")
            .add("bla")
            .add("foo");
    final List<String> expected = List.of("bla", "blub", "foo");

    assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()).iterator())
            .extracting(ProducerRecord::key)
            .containsAll(expected);
}
 
Example 20
Source Project: SkaETL   Source File: ProcessStreamService.java    License: Apache License 2.0
public void createStreamEs(String inputTopic, ParameterOutput parameterOutput) {
    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, JsonNode> streamToES = builder.stream(inputTopic, Consumed.with(Serdes.String(), GenericSerdes.jsonNodeSerde()));
    streamToES.process(() -> applicationContext.getBean(JsonNodeToElasticSearchProcessor.class, parameterOutput.getElasticsearchRetentionLevel(), parameterOutput.getIndexShape()));

    KafkaStreams streams = new KafkaStreams(builder.build(), KafkaUtils.createKStreamProperties(getProcessConsumer().getIdProcess() + ProcessConstants.ES_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
    addStreams(streams);
}
 
Example 21
Source Project: fluent-kafka-streams-tests   Source File: WordCountTest.java    License: MIT License
@Test
void shouldReturnSingleInputAndOutputStream() {
    this.testTopology.input().add("bla");

    this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
            .expectNextRecord().hasKey("bla").hasValue(1L)
            .expectNoMoreRecord();
}
 
Example 22
public static void main(String[] args) {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());

    TopologyBuilder builder = new TopologyBuilder();

    JsonSerializer<StockTransactionSummary> stockTxnSummarySerializer = new JsonSerializer<>();
    JsonDeserializer<StockTransactionSummary> stockTxnSummaryDeserializer = new JsonDeserializer<>(StockTransactionSummary.class);
    JsonDeserializer<StockTransaction> stockTxnDeserializer = new JsonDeserializer<>(StockTransaction.class);
    JsonSerializer<StockTransaction> stockTxnJsonSerializer = new JsonSerializer<>();
    StringSerializer stringSerializer = new StringSerializer();
    StringDeserializer stringDeserializer = new StringDeserializer();

    Serde<StockTransactionSummary> stockTransactionSummarySerde = Serdes.serdeFrom(stockTxnSummarySerializer, stockTxnSummaryDeserializer);

    builder.addSource("stocks-source", stringDeserializer, stockTxnDeserializer, "stocks")
           .addProcessor("summary", StockSummaryProcessor::new, "stocks-source")
           .addStateStore(Stores.create("stock-transactions").withStringKeys()
                   .withValues(stockTransactionSummarySerde).inMemory().maxEntries(100).build(), "summary")
           .addSink("sink", "stocks-out", stringSerializer, stockTxnJsonSerializer, "stocks-source")
           .addSink("sink-2", "transaction-summary", stringSerializer, stockTxnSummarySerializer, "summary");

    System.out.println("Starting StockSummaryStatefulProcessor Example");
    KafkaStreams streaming = new KafkaStreams(builder, streamingConfig);
    streaming.start();
    System.out.println("StockSummaryStatefulProcessor Example now started");
}
 
Example 23
Source Project: SkaETL   Source File: GrokRepository.java    License: Apache License 2.0
public GrokRepository(KafkaAdminService kafkaAdminService, KafkaConfiguration kafkaConfiguration) {
    super("grok-referential",
            Serdes.serdeFrom(new GenericSerializer<>(), new GenericDeserializer<>(GrokData.class)),
            grokRawData -> grokRawData.getKey(),
            kafkaAdminService,
            kafkaConfiguration);
}
 
Example 24
Source Project: fluent-kafka-streams-tests   Source File: WordCountTest.java    License: MIT License
@Test
void shouldReturnCorrectIteratorExplicitTable() {
    this.testTopology.input().add("bla")
            .add("blub")
            .add("bla")
            .add("foo");
    final List<String> expected = List.of("bla", "blub", "foo");

    assertThat(this.testTopology.tableOutput().withSerde(Serdes.String(), Serdes.Long()).iterator())
            .extracting(ProducerRecord::key)
            .containsAll(expected);
}
 
Example 25
@Bean
public KStream<String, Long> statusCountStreamProcessor(StreamsBuilder streamsBuilder) {
	KStream<Integer, VehicleLocation> stream = streamsBuilder.stream("gpslocation",	// Read from topic
			Consumed.with(Serdes.Integer(), new JsonSerde<>(VehicleLocation.class)));	// using Integer and JSON serdes
	return stream.map((k, v) -> {					// transform the key to Online/Offline based on status
			String online = v.isOnline() ? "Online" : "Offline";
			return new KeyValue<>(online, v);
		})
		.groupByKey(Serialized.with(			// group by the newly mapped key from the previous step
			      Serdes.String(),
			      new JsonSerde<>(VehicleLocation.class))
			  )
		.count(Materialized.as("statusCount"))	// materialize this value to a state store
		.toStream();
}
 
Example 26
Source Project: fluent-kafka-streams-tests   Source File: WordCountTest.java    License: MIT License
@Test
void shouldFailForUnmachtedKey() {
    this.testTopology.input().add("bla")
            .add("blub")
            .add("bla");

    assertThatThrownBy(() ->
            this.testTopology.streamOutput().withSerde(Serdes.String(), Serdes.Long())
                    .expectNextRecord().hasKey("blub"))
            .hasMessage("Record key does not match");
}
 
Example 27
/**
 * Constructor to use when the application wants to specify the type
 * of the Serde used for the inner object.
 *
 * @param serde specify an explicit Serde
 * @param collectionsClass type of the Collection class
 */
public CollectionSerde(Serde<E> serde, Class<?> collectionsClass) {
	this.collectionClass = collectionsClass;
	this.inner =
			Serdes.serdeFrom(
					new CollectionSerializer<>(serde.serializer()),
					new CollectionDeserializer<>(serde.deserializer(), collectionsClass));
}
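As a usage sketch (hypothetical, assuming CollectionSerde implements Serde&lt;Collection&lt;E&gt;&gt; as the constructor above suggests), one might build a serde for lists of strings on top of the built-in String serde:

// Hypothetical usage: serde for ArrayList<String>, delegating element (de)serialization to Serdes.String().
Serde<Collection<String>> listSerde = new CollectionSerde<>(Serdes.String(), ArrayList.class);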
 
Example 28
Source Project: brave   Source File: ITKafkaStreamsTracing.java    License: Apache License 2.0
@Test
public void should_create_spans_from_stream_with_tracing_mark_as_not_filtered_predicate_false() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
    .transformValues(
      kafkaStreamsTracing.markAsNotFiltered("filterNot-2", (key, value) -> false))
    .filterNot((k, v) -> Objects.isNull(v))
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);

  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));

  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "false");

  // the filter transformer returns true so record is not dropped

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
 
Example 29
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());
    Serde<String> stringSerde = Serdes.String();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

    StreamsBuilder builder = new StreamsBuilder();

    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.02;

    KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    builder.addStateStore(storeBuilder);

    builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
            .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
            .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));

    // Uncomment this line and comment out the line above for writing to a topic
    //.to(stringSerde, stockPerformanceSerde, "stock-performance");

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
    System.out.println("Stock Analysis KStream/Process API App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
 
Example 30
Source Project: tutorials   Source File: KafkaStreamsLiveTest.java    License: MIT License
@Test
@Ignore("it needs to have kafka broker running on local")
public void shouldTestKafkaStreams() throws InterruptedException {
    // given
    String inputTopic = "inputTopic";

    Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());

    // when
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> textLines = builder.stream(inputTopic);
    Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);

    KTable<String, Long> wordCounts = textLines.flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase()))).groupBy((key, word) -> word).count();

    wordCounts.toStream().foreach((word, count) -> System.out.println("word: " + word + " -> " + count));

    String outputTopic = "outputTopic";
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    wordCounts.toStream().to(outputTopic, Produced.with(stringSerde, longSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
    streams.start();

    // then
    Thread.sleep(30000);
    streams.close();
}