Java Code Examples for org.apache.kafka.common.serialization.Serdes#serdeFrom()

The following examples show how to use org.apache.kafka.common.serialization.Serdes#serdeFrom(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the link above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: JsonSerdeRegistry.java    From micronaut-kafka with Apache License 2.0 6 votes vote down vote up
/**
 * Resolves a {@link Serde} for the given type: Kafka's built-in serdes for
 * Java basic types, a cached {@link JsonSerde} bean for everything else.
 *
 * @param type the class to serialize/deserialize
 * @param <T>  target type of the returned serde
 * @return a serde for {@code type}
 */
@SuppressWarnings("unchecked")
@Override
public <T> Serde<T> getSerde(Class<T> type) {
    if (ClassUtils.isJavaBasicType(type)) {
        // Use the serde matching the requested type. The previous code passed
        // String.class here, which produced a Serde<String> for every basic
        // type and would throw ClassCastException for e.g. Integer payloads.
        return (Serde<T>) Serdes.serdeFrom(type);
    } else {
        JsonSerde jsonSerde = serdes.get(type);
        if (jsonSerde == null) {
            // NOTE(review): get/put is not atomic — if `serdes` is a plain map,
            // concurrent callers may create duplicate beans; likely benign but
            // worth confirming against the field's declared type.
            jsonSerde = beanContext.createBean(JsonSerde.class, type);
            serdes.put(type, jsonSerde);
        }
        return jsonSerde;
    }
}
 
Example 2
Source File: WikipediaStreamDemo.java    From hello-kafka-streams with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the wikipedia-streams topology: raw JSON events in, parsed messages
 * re-keyed through "wikipedia-parsed", and a running per-user edit count
 * printed to stdout.
 */
private static KafkaStreams createWikipediaStreamsInstance(String bootstrapServers) {
    // Compose a JsonNode serde from its serializer/deserializer halves.
    final Serializer<JsonNode> nodeSerializer = new JsonSerializer();
    final Deserializer<JsonNode> nodeDeserializer = new JsonDeserializer();
    final Serde<JsonNode> nodeSerde = Serdes.serdeFrom(nodeSerializer, nodeDeserializer);

    final Properties streamsProps = new Properties();
    streamsProps.put(StreamsConfig.APPLICATION_ID_CONFIG, "wikipedia-streams");
    streamsProps.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

    final KStreamBuilder topology = new KStreamBuilder();

    // Raw IRC payloads arrive as JSON on "wikipedia-raw".
    KStream<JsonNode, JsonNode> rawEvents = topology.stream(nodeSerde, nodeSerde, "wikipedia-raw");

    // Parse, drop failures, and pipe through the intermediate topic.
    KStream<String, WikipediaMessage> parsedEvents = rawEvents
            .map(WikipediaMessage::parceIRC)
            .filter(WikipediaMessage::filterNonNull)
            .through(Serdes.String(), new JsonPOJOSerde<>(WikipediaMessage.class), "wikipedia-parsed");

    // Count only EDIT events, keyed by user.
    KTable<String, Long> editsPerUser = parsedEvents
            .filter((key, value) -> value.type == WikipediaMessage.Type.EDIT)
            .countByKey(Serdes.String(), "wikipedia-edits-by-user");

    // Print every updated count.
    editsPerUser.toStream().process(() -> new AbstractProcessor<String, Long>() {
        @Override
        public void process(String user, Long numEdits) {
            System.out.println("USER: " + user + " num.edits: " + numEdits);
        }
    });

    return new KafkaStreams(topology, streamsProps);
}
 
Example 3
Source File: CollectionSerde.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a serde for a collection whose elements are handled by the
 * supplied inner-element serde.
 *
 * @param serde specify an explicit Serde for the element type
 * @param collectionsClass concrete Collection class to materialize
 */
public CollectionSerde(Serde<E> serde, Class<?> collectionsClass) {
	// Wrap the element (de)serializers so whole collections round-trip.
	this.inner = Serdes.serdeFrom(
			new CollectionSerializer<>(serde.serializer()),
			new CollectionDeserializer<>(serde.deserializer(), collectionsClass));
	this.collectionClass = collectionsClass;
}
 
Example 4
Source File: TransformatorDescriptionRepository.java    From SkaETL with Apache License 2.0 5 votes vote down vote up
// Repository of transformator descriptions, keyed by the description's name.
public TransformatorDescriptionRepository(KafkaAdminService kafkaAdminService, KafkaConfiguration kafkaConfiguration) {
    // Base class receives: topic/store name, a generic JSON serde for
    // TransformatorDescription, a key extractor, and the Kafka beans.
    super("transformator-description",
            Serdes.serdeFrom(new GenericSerializer<>(), new GenericDeserializer<>(TransformatorDescription.class)),
            transformatorDescription -> transformatorDescription.getName(),
            kafkaAdminService,
            kafkaConfiguration);
}
 
Example 5
Source File: ValidatorDescriptionRepository.java    From SkaETL with Apache License 2.0 5 votes vote down vote up
// Repository of validator descriptions, keyed by the description's name.
public ValidatorDescriptionRepository(KafkaAdminService kafkaAdminService, KafkaConfiguration kafkaConfiguration) {
    // Base class receives: topic/store name, a generic JSON serde for
    // ValidatorDescription, a key extractor, and the Kafka beans.
    super("validator-description",
            Serdes.serdeFrom(new GenericSerializer<>(), new GenericDeserializer<>(ValidatorDescription.class)),
            validatorDescription -> validatorDescription.getName(),
            kafkaAdminService,
            kafkaConfiguration);
}
 
Example 6
Source File: CogroupingStreams.java    From kafka-tutorials with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a serde backed by Confluent's Avro (de)serializers for primitive
 * values, configured with the schema-registry URL from {@code envProps}.
 *
 * @param envProps properties holding {@code schema.registry.url}
 * @param isKey    whether the serde is used for record keys
 * @param <T>      caller-chosen type; the cast is unchecked
 */
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final Map<String, String> registryConfig = new HashMap<>();
    registryConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));

    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    serializer.configure(registryConfig, isKey);

    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    deserializer.configure(registryConfig, isKey);

    return (Serde<T>) Serdes.serdeFrom(serializer, deserializer);
}
 
Example 7
Source File: DynamicOutputTopic.java    From kafka-tutorials with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a primitive-value Avro serde from Confluent's
 * KafkaAvroSerializer/KafkaAvroDeserializer pair.
 *
 * @param envProps source of the {@code schema.registry.url} property
 * @param isKey    true when the serde will be used for record keys
 * @param <T>      caller-chosen type; the cast is unchecked
 */
@SuppressWarnings("unchecked")
static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) {
    final KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer();
    final KafkaAvroDeserializer avroDeserializer = new KafkaAvroDeserializer();

    final Map<String, String> serdeConfig = new HashMap<>();
    serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            envProps.getProperty("schema.registry.url"));

    avroSerializer.configure(serdeConfig, isKey);
    avroDeserializer.configure(serdeConfig, isKey);

    return (Serde<T>) Serdes.serdeFrom(avroSerializer, avroDeserializer);
}
 
Example 8
Source File: ErrorImporter.java    From SkaETL with Apache License 2.0 5 votes vote down vote up
/**
 * Starts a Kafka Streams app that consumes ErrorData records from the error
 * topic and hands each one to the Elasticsearch processor. Registers a JVM
 * shutdown hook that closes the stream.
 */
public void activate() {
    log.info("Activating error importer");

    final Serde<ErrorData> errorSerde =
            Serdes.serdeFrom(new GenericSerializer<>(), new GenericDeserializer<>(ErrorData.class));

    final StreamsBuilder topology = new StreamsBuilder();
    topology.stream(kafkaConfiguration.getErrorTopic(), Consumed.with(Serdes.String(), errorSerde))
            .process(() -> elasticsearchProcessor);

    errorStream = new KafkaStreams(topology.build(),
            KafkaUtils.createKStreamProperties(INPUT_PROCESS_ERROR, kafkaConfiguration.getBootstrapServers()));
    Runtime.getRuntime().addShutdownHook(new Thread(errorStream::close));

    errorStream.start();
}
 
Example 9
Source File: StocksKafkaStreamsDriver.java    From kafka-streams with Apache License 2.0 5 votes vote down vote up
/**
 * Wires up and starts the stock-transaction topology: re-keys transactions by
 * symbol, pipes them through "stocks-out", aggregates 10-second windows into
 * collectors, and writes the windowed summaries to "transaction-summary".
 */
public static void main(String[] args) {
    final StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    // Serdes for plain-string keys.
    final StringSerializer keySerializer = new StringSerializer();
    final StringDeserializer keyDeserializer = new StringDeserializer();
    final Serde<String> stringSerde = Serdes.serdeFrom(keySerializer, keyDeserializer);

    // JSON serdes for transactions and their windowed collectors.
    final Serde<StockTransaction> txnSerde = Serdes.serdeFrom(
            new JsonSerializer<StockTransaction>(),
            new JsonDeserializer<>(StockTransaction.class));
    final Serde<StockTransactionCollector> collectorSerde = Serdes.serdeFrom(
            new JsonSerializer<StockTransactionCollector>(),
            new JsonDeserializer<>(StockTransactionCollector.class));

    // Windowed-string serde for the summary topic's keys.
    final Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(
            new WindowedSerializer<>(keySerializer),
            new WindowedDeserializer<>(keyDeserializer));

    final KStreamBuilder builder = new KStreamBuilder();

    final KStream<String, StockTransaction> transactions =
            builder.stream(stringSerde, txnSerde, "stocks");

    transactions.map((k, v) -> new KeyValue<>(v.getSymbol(), v))
                .through(stringSerde, txnSerde, "stocks-out")
                .groupBy((k, v) -> k, stringSerde, txnSerde)
                .aggregate(StockTransactionCollector::new,
                        (k, v, collector) -> collector.add(v),
                        TimeWindows.of(10000),
                        collectorSerde, "stock-summaries")
                .to(windowedSerde, collectorSerde, "transaction-summary");

    System.out.println("Starting StockStreams Example");
    KafkaStreams kafkaStreams = new KafkaStreams(builder, streamsConfig);
    kafkaStreams.start();
    System.out.println("Now started StockStreams Example");
}
 
Example 10
Source File: SimulateStreamService.java    From SkaETL with Apache License 2.0 5 votes vote down vote up
/**
 * Runs a stream that consumes SimulateData from the given topic and hands
 * every record to a LoggingProcessor; closes the stream on JVM shutdown.
 *
 * @param topicToConsume topic to read simulated records from
 */
public void createStreamSystemOut(String topicToConsume) {
    final Serde<SimulateData> simulateSerde =
            Serdes.serdeFrom(new SimulateDataSerializer(), new SimulateDataDeserializer());

    final StreamsBuilder topology = new StreamsBuilder();
    topology.stream(topicToConsume, Consumed.with(Serdes.String(), simulateSerde))
            .process(() -> new LoggingProcessor<>());

    final KafkaStreams streams =
            new KafkaStreams(topology.build(), createKStreamProperties(SYSOUT_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
}
 
Example 11
Source File: KsqlJsonTopicSerDe.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a JSON serde for GenericRow rows bound to the given schema; the
 * serializer and deserializer are both configured with the target POJO class.
 */
@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
                                            boolean isInternal,
                                            SchemaRegistryClient schemaRegistryClient) {
  final Map<String, Object> configs = new HashMap<>();
  configs.put("JsonPOJOClass", GenericRow.class);

  final Serializer<GenericRow> rowSerializer = new KsqlJsonSerializer(schema);
  final Deserializer<GenericRow> rowDeserializer = new KsqlJsonDeserializer(schema);

  // isKey = false: these serdes always handle record values.
  rowSerializer.configure(configs, false);
  rowDeserializer.configure(configs, false);

  return Serdes.serdeFrom(rowSerializer, rowDeserializer);
}
 
Example 12
Source File: TwitterStreamsAnalyzer.java    From kafka-streams with Apache License 2.0 5 votes vote down vote up
/**
 * Classifies tweets by language and fans them out to per-language topics.
 * Trains the classifier from a bundled CSV before starting the streams app.
 */
public void run()  {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<Tweet> tweetSerde = Serdes.serdeFrom(
            new JsonSerializer<Tweet>(),
            new JsonDeserializer<>(Tweet.class));

    KStreamBuilder builder = new KStreamBuilder();

    Classifier classifier = new Classifier();
    classifier.train(new File("src/main/resources/kafkaStreamsTwitterTrainingData_clean.csv"));

    // Key each tweet by its detected language, or "unknown" for blank text.
    KeyValueMapper<String, Tweet, String> languageToKey = (k, v) ->
       StringUtils.isNotBlank(v.getText()) ? classifier.classify(v.getText()):"unknown";

    Predicate<String, Tweet> isEnglish = (k, v) -> k.equals("english");
    Predicate<String, Tweet> isFrench =  (k, v) -> k.equals("french");
    Predicate<String, Tweet> isSpanish = (k, v) -> k.equals("spanish");

    KStream<String, Tweet> tweets = builder.stream(Serdes.String(), tweetSerde, "twitterData");

    // Branch index order matches the predicate order above.
    KStream<String, Tweet>[] byLanguage = tweets.selectKey(languageToKey).branch(isEnglish, isFrench, isSpanish);

    byLanguage[0].to(Serdes.String(), tweetSerde, "english");
    byLanguage[1].to(Serdes.String(), tweetSerde, "french");
    byLanguage[2].to(Serdes.String(), tweetSerde, "spanish");

    kafkaStreams = new KafkaStreams(builder, streamsConfig);
    System.out.println("Starting twitter analysis streams");
    kafkaStreams.start();
    System.out.println("Started");
}
 
Example 13
Source File: MetricsSerdes.java    From SkaETL with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a serde for AggregateFunction values from the project's generic
 * JSON serializer/deserializer pair.
 *
 * @return a new Serde for AggregateFunction
 */
public static Serde<AggregateFunction> aggFunctionSerdes() {
    // Type the deserializer too: the original used raw GenericDeserializer,
    // which compiles with an unchecked warning and loses type safety.
    return Serdes.serdeFrom(new GenericSerializer<AggregateFunction>(), new GenericDeserializer<>(AggregateFunction.class));
}
 
Example 14
Source File: GenericSerdes.java    From SkaETL with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a serde for Jackson {@code JsonNode} values from the project's
 * JsonNode serializer/deserializer pair.
 *
 * @return a new Serde for JsonNode
 */
public static Serde<JsonNode> jsonNodeSerde() {
    return Serdes.serdeFrom(new JsonNodeSerialializer(), new JsonNodeDeserializer());
}
 
Example 15
Source File: StockPerformanceInteractiveQueryApplication.java    From kafka-streams-in-action with Apache License 2.0 4 votes vote down vote up
/**
 * Entry point for the interactive-query stock analysis app. Expects two
 * args (host, port) for the embedded query server, builds three aggregations
 * over the stock-transactions topic, and starts both the Kafka Streams
 * instance and the HTTP query server.
 */
public static void main(String[] args) {

        // Require host and port for the interactive query server.
        if(args.length < 2){
            LOG.error("Need to specify host, port");
            System.exit(1);
        }

        String host = args[0];
        int port = Integer.parseInt(args[1]);
        final HostInfo hostInfo = new HostInfo(host, port);

        Properties properties = getProperties();
        // Advertise this instance's endpoint so other instances can route
        // interactive queries to it.
        properties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, host+":"+port);

        StreamsConfig streamsConfig = new StreamsConfig(properties);
        Serde<String> stringSerde = Serdes.String();
        Serde<Long> longSerde = Serdes.Long();
        Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
        // Windowed-key serde built from the plain string (de)serializers.
        WindowedSerializer<String> windowedSerializer = new WindowedSerializer<>(stringSerde.serializer());
        WindowedDeserializer<String> windowedDeserializer = new WindowedDeserializer<>(stringSerde.deserializer());
        Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(windowedSerializer, windowedDeserializer);
        Serde<CustomerTransactions> customerTransactionsSerde = StreamsSerdes.CustomerTransactionsSerde();

        // Running total of shares per key within a window.
        Aggregator<String, StockTransaction, Integer> sharesAggregator = (k, v, i) -> v.getShares() + i;

        StreamsBuilder builder = new StreamsBuilder();

        // data is already coming in keyed
        KStream<String, StockTransaction> stockTransactionKStream = builder.stream(MockDataProducer.STOCK_TRANSACTIONS_TOPIC, Consumed.with(stringSerde, stockTransactionSerde)
                .withOffsetResetPolicy(Topology.AutoOffsetReset.LATEST));


        // Aggregation 1: transaction counts per sector, backed by the
        // queryable store "TransactionsBySector".
        stockTransactionKStream.map((k,v) -> KeyValue.pair(v.getSector(), v))
                .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
                .count(Materialized.as("TransactionsBySector"))
                .toStream()
                .peek((k,v) -> LOG.info("Transaction count for {} {}", k, v))
                .to("sector-transaction-counts", Produced.with(stringSerde, longSerde));
        
        // Aggregation 2: per-customer session windows (60 min inactivity gap,
        // retained 120 min), merged into CustomerTransactions aggregates.
        stockTransactionKStream.map((k,v) -> KeyValue.pair(v.getCustomerId(), v))
                .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
                .windowedBy(SessionWindows.with(TimeUnit.MINUTES.toMillis(60)).until(TimeUnit.MINUTES.toMillis(120)))
                .aggregate(CustomerTransactions::new,(k, v, ct) -> ct.update(v),
                        (k, ct, other)-> ct.merge(other),
                        Materialized.<String, CustomerTransactions, SessionStore<Bytes, byte[]>>as("CustomerPurchaseSessions")
                                .withKeySerde(stringSerde).withValueSerde(customerTransactionsSerde))
                .toStream()
                .peek((k,v) -> LOG.info("Session info for {} {}", k, v))
                .to("session-transactions", Produced.with(windowedSerde, customerTransactionsSerde));


        // Aggregation 3: shares traded per key in 10-second tumbling windows.
        stockTransactionKStream.groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
                .windowedBy(TimeWindows.of(10000))
                .aggregate(() -> 0, sharesAggregator,
                        Materialized.<String, Integer, WindowStore<Bytes, byte[]>>as("NumberSharesPerPeriod")
                                .withKeySerde(stringSerde)
                                .withValueSerde(Serdes.Integer()))
                .toStream().peek((k,v)->LOG.info("key is {} value is {}", k, v))
                .to("transaction-count", Produced.with(windowedSerde,Serdes.Integer()));


        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        InteractiveQueryServer queryServer = new InteractiveQueryServer(kafkaStreams, hostInfo);
        StateRestoreHttpReporter restoreReporter = new StateRestoreHttpReporter(queryServer);

        queryServer.init();

        kafkaStreams.setGlobalStateRestoreListener(restoreReporter);

        // Only accept queries while the streams instance is RUNNING.
        kafkaStreams.setStateListener(((newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                LOG.info("Setting the query server to ready");
                queryServer.setReady(true);
            } else if (newState != KafkaStreams.State.RUNNING) {
                LOG.info("State not RUNNING, disabling the query server");
                queryServer.setReady(false);
            }
        }));

        // Fatal stream errors tear down both the streams app and the server.
        kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
            LOG.error("Thread {} had a fatal error {}", t, e, e);
            shutdown(kafkaStreams, queryServer);
        });


        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            shutdown(kafkaStreams, queryServer);
        }));

        LOG.info("Stock Analysis KStream Interactive Query App Started");
        // cleanUp() wipes local state before starting — intended for demos;
        // forces a full restore on every run.
        kafkaStreams.cleanUp();
        kafkaStreams.start();
    }
 
Example 16
Source File: MetricsSerdes.java    From SkaETL with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a serde for Keys values from the project's generic JSON
 * serializer/deserializer pair.
 *
 * @return a new Serde for Keys
 */
public static Serde<Keys> keysSerde() {
    // Type the deserializer too: the original used raw GenericDeserializer,
    // which compiles with an unchecked warning and loses type safety.
    return Serdes.serdeFrom(new GenericSerializer<Keys>(), new GenericDeserializer<>(Keys.class));
}
 
Example 17
Source File: CryptoSerdeFactory.java    From kafka-encryption with Apache License 2.0 4 votes vote down vote up
/**
 * Wraps the given serde so values are encrypted on serialize and decrypted
 * on deserialize; {@code keyRefHolder} supplies a per-thread key reference
 * to the serializer.
 */
private <T> Serde<T> buildFrom(Serde<T> rawSerde, ThreadLocal<byte[]> keyRefHolder) {
    final CryptoSerializer<T> encrypting =
            new CryptoSerializer<>(rawSerde.serializer(), encryptor, keyRefHolder);
    final CryptoDeserializer<T> decrypting =
            new CryptoDeserializer<>(rawSerde.deserializer(), decryptor);
    return Serdes.serdeFrom(encrypting, decrypting);
}
 
Example 18
Source File: ValueAvroSerde.java    From kafka-connect-couchbase with Apache License 2.0 4 votes vote down vote up
/**
 * Creates the serde from schema-registry-backed Avro halves; {@code props}
 * is passed to the deserializer only.
 */
public ValueAvroSerde(SchemaRegistryClient client, Map<String, ?> props) {
  inner = Serdes.serdeFrom(new ValueAvroSerializer(client), new ValueAvroDeserializer(client, props));
}
 
Example 19
Source File: JsonSerde.java    From fluent-kafka-streams-tests with MIT License 4 votes vote down vote up
/**
 * Creates a JSON serde whose deserializer targets the given class.
 *
 * @param clazz concrete type produced on deserialization
 */
public JsonSerde(final Class<T> clazz) {
    this.inner = Serdes.serdeFrom(new JsonSerializer<>(), new JsonDeserializer<>(clazz));
}
 
Example 20
Source File: KafkaSchemaValueSerde.java    From kareldb with Apache License 2.0 4 votes vote down vote up
// Composes the inner serde from the schema-value serializer/deserializer pair.
public KafkaSchemaValueSerde() {
    inner = Serdes.serdeFrom(new KafkaSchemaValueSerializer(), new KafkaSchemaValueDeserializer());
}