org.apache.kafka.streams.StreamsConfig Java Examples

The following examples show how to use org.apache.kafka.streams.StreamsConfig. They are drawn from a range of open-source projects; the originating project, source file, and license are noted above each example.
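At its core, StreamsConfig is a set of well-known configuration keys: an application must supply at least application.id and bootstrap.servers, and the assembled Properties object is passed to the KafkaStreams constructor. As a minimal orientation sketch (the application id, broker address, and topic names below are placeholders, not taken from any example on this page):

Properties props = new Properties();
// application.id is mandatory and also serves as the consumer group id
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "my-streams-app");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

StreamsBuilder builder = new StreamsBuilder();
builder.stream("input-topic").to("output-topic"); // trivial pass-through topology

KafkaStreams streams = new KafkaStreams(builder.build(), props);
streams.start();
Runtime.getRuntime().addShutdownHook(new Thread(streams::close));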
Example #1
Source File: StreamingWordCount.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(final String[] args) {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "StreamingWordCount");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.STATE_DIR_CONFIG, "state-store");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    logger.info("Start Reading Messages");
    StreamsBuilder streamBuilder = new StreamsBuilder();
    KStream<String, String> KS0 = streamBuilder.stream("streaming-word-count");

    KStream<String, String> KS1 = KS0.flatMapValues(value ->
        Arrays.asList(value.toLowerCase().split(" ")));

    KGroupedStream<String, String> KGS2 = KS1.groupBy((key, value) -> value);

    KTable<String, Long> KTS3 = KGS2.count();

    KTS3.toStream().peek(
        (k, v) -> logger.info("Key = " + k + " Value = " + v.toString())
    );

    KafkaStreams streams = new KafkaStreams(streamBuilder.build(), props);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
 
Example #2
Source File: HelloStreams.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Application entry point
 *
 * @param args topicName (Name of the Kafka topic to read)
 */

public static void main(String[] args) {

    // the topic to read, supplied as the first command-line argument (see @param above)
    final String topicName = args[0];

    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "HelloStreams");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, String> kStream = builder.stream(topicName);
    kStream.foreach((k, v) -> System.out.println("Key = " + k + " Value = " + v));
    //kStream.peek((k, v) -> System.out.println("Key = " + k + " Value = " + v));
    Topology topology = builder.build();

    KafkaStreams streams = new KafkaStreams(topology, props);

    logger.info("Starting the stream");
    streams.start();

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        logger.info("Stopping Stream");
        streams.close();
    }));
}
 
Example #3
Source File: KsqlRestConfigTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testGetKsqlConfigProperties() {
  Map<String, Object> inputProperties = getBaseProperties();
  inputProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  inputProperties.put(KsqlConfig.KSQL_SERVICE_ID_CONFIG, "test");

  KsqlRestConfig config = new KsqlRestConfig(inputProperties);

  Map<String, Object> ksqlConfigProperties = config.getKsqlConfigProperties();
  Map<String, Object> expectedKsqlConfigProperties = new HashMap<>();
  expectedKsqlConfigProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
  expectedKsqlConfigProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "ksql_config_test");
  expectedKsqlConfigProperties.put(RestConfig.LISTENERS_CONFIG, "http://localhost:8088");
  expectedKsqlConfigProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  expectedKsqlConfigProperties.put(KsqlConfig.KSQL_SERVICE_ID_CONFIG, "test");
  assertThat(ksqlConfigProperties, equalTo(expectedKsqlConfigProperties));
}
 
Example #4
Source File: KsqlRestConfigTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void testOriginalsReplicability() {
  final String COMMIT_INTERVAL_MS = "10";

  Map<String, Object> inputProperties = getBaseProperties();
  inputProperties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, COMMIT_INTERVAL_MS);
  KsqlRestConfig config = new KsqlRestConfig(inputProperties);

  final Map<String, Object> originals1 = config.getOriginals();
  final Map<String, Object> originals2 = config.getOriginals();

  assertEquals(originals1, originals2);
  assertNotSame(originals1, originals2);
  assertEquals(COMMIT_INTERVAL_MS, originals1.get(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG));
  assertEquals(COMMIT_INTERVAL_MS, originals2.get(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG));
}
 
Example #5
Source File: GlobalKTableExample.java    From kafka-streams-in-action with Apache License 2.0
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "Global_Ktable_example");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "Global_Ktable_example_group_id");
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "Global_Ktable_example_client_id");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "30000");
    props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "10000");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "1");
    props.put(ConsumerConfig.METADATA_MAX_AGE_CONFIG, "10000");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, StockTransactionTimestampExtractor.class);
    return props;
}
 
Example #6
Source File: StandaloneExecutor.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
public static StandaloneExecutor create(final Properties properties, final String queriesFile) {
  final KsqlConfig ksqlConfig = new KsqlConfig(properties);
  Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
  if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
    streamsProperties.put(
        StreamsConfig.APPLICATION_ID_CONFIG, KsqlConfig.KSQL_SERVICE_ID_DEFAULT);
  }

  final KsqlEngine ksqlEngine = new KsqlEngine(
      ksqlConfig,
      new KafkaTopicClientImpl(
          AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps())));

  return new StandaloneExecutor(
      ksqlEngine,
      queriesFile);
}
 
Example #7
Source File: CampaignPerformanceApp.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static void main(String[] args) {
    Properties properties = new Properties();
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG,
        AppConfigs.applicationID);
    properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
        AppConfigs.bootstrapServers);
    properties.put(StreamsConfig.STATE_DIR_CONFIG,
        AppConfigs.stateStoreLocation);

    StreamsBuilder builder = new StreamsBuilder();
    AppTopology.withBuilder(builder);

    builder.stream(
        AppConfigs.outputTopic,
        Consumed.with(AppSerdes.String(), AppSerdes.CampaignPerformance())
    ).foreach((k, v) -> logger.info("outside = " + v));

    Topology topology = builder.build();

    KafkaStreams streams = new KafkaStreams(topology, properties);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
 
Example #8
Source File: PhysicalPlanBuilderTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldAddMetricsInterceptors() throws Exception {
  buildPhysicalPlan(simpleSelectFilter);

  List<TestKafkaStreamsBuilder.Call> calls = testKafkaStreamsBuilder.getCalls();
  Assert.assertEquals(1, calls.size());
  StreamsConfig config = calls.get(0).config;

  Object val = config.originals().get(
      StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG));
  Assert.assertThat(val, instanceOf(List.class));
  List<String> consumerInterceptors = (List<String>) val;
  Assert.assertEquals(1, consumerInterceptors.size());
  Assert.assertEquals(ConsumerCollector.class, Class.forName(consumerInterceptors.get(0)));

  val = config.originals().get(
      StreamsConfig.producerPrefix(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG));
  Assert.assertThat(val, instanceOf(List.class));
  List<String> producerInterceptors = (List<String>) val;
  Assert.assertEquals(1, producerInterceptors.size());
  Assert.assertEquals(ProducerCollector.class, Class.forName(producerInterceptors.get(0)));
}
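
The consumerPrefix and producerPrefix helpers used above simply namespace a plain client key so that Kafka Streams routes it to its embedded consumer or producer. A quick illustration of the resulting keys (the interceptor class name is hypothetical):

// "consumer." + "interceptor.classes" -> "consumer.interceptor.classes"
String consumerKey = StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG);
// "producer." + "interceptor.classes" -> "producer.interceptor.classes"
String producerKey = StreamsConfig.producerPrefix(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG);

Properties props = new Properties();
props.put(consumerKey, "com.example.MyConsumerInterceptor"); // hypothetical interceptor class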
 
Example #9
Source File: Ksql.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
private static Properties loadProperties(final Optional<String> propertiesFile) {
  final Properties properties = new Properties();
  propertiesFile.ifPresent(file -> {
    try (final FileInputStream input = new FileInputStream(file)) {
      properties.load(input);
      if (properties.containsKey(KsqlConfig.KSQL_SERVICE_ID_CONFIG)) {
        properties.put(
            StreamsConfig.APPLICATION_ID_CONFIG,
            properties.getProperty(KsqlConfig.KSQL_SERVICE_ID_CONFIG)
        );
      }
    } catch (final IOException e) {
      throw new KsqlException("failed to load properties file: " + file, e);
    }
  });
  return properties;
}
 
Example #10
Source File: StockPerformanceInteractiveQueryApplication.java    From kafka-streams-in-action with Apache License 2.0
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.CLIENT_ID_CONFIG, "ks-interactive-stock-analysis-client");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "ks-interactive-stock-analysis-group");
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ks-interactive-stock-analysis-appid");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    props.put(StreamsConfig.topicPrefix("retention.bytes"), 1024 * 1024);
    props.put(StreamsConfig.topicPrefix("retention.ms"), 3600000);
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
    props.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, DeserializerErrorHandler.class);
    props.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG),
            Collections.singletonList(bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor.class));
    return props;
}
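
The topicPrefix calls above work the same way: StreamsConfig.topicPrefix("retention.ms") yields the key "topic.retention.ms", and properties under the "topic." prefix are applied to the internal changelog and repartition topics that Streams creates. Equivalently, with the constants from org.apache.kafka.common.config.TopicConfig:

// "topic." + "retention.bytes" -> "topic.retention.bytes", etc.
props.put(StreamsConfig.topicPrefix(TopicConfig.RETENTION_BYTES_CONFIG), 1024 * 1024);
props.put(StreamsConfig.topicPrefix(TopicConfig.RETENTION_MS_CONFIG), 3600000);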
 
Example #11
Source File: WordCountStream.java    From micronaut-kafka with Apache License 2.0
@Singleton
@Named(STREAM_WORD_COUNT)
KStream<String, String> wordCountStream(ConfiguredStreamBuilder builder) { // <3>
    // set default serdes
    Properties props = builder.getConfiguration();
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    KStream<String, String> source = builder
            .stream(INPUT);

    KTable<String, Long> groupedByWord = source
            .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .groupBy((key, word) -> word, Grouped.with(Serdes.String(), Serdes.String()))
            //Store the result in a store for lookup later
            .count(Materialized.as(WORD_COUNT_STORE)); // <4>

    groupedByWord
            //convert to stream
            .toStream()
            //send to output using specific serdes
            .to(OUTPUT, Produced.with(Serdes.String(), Serdes.Long()));

    return source;
}
 
Example #12
Source File: KStreamVsKTableExample.java    From kafka-streams-in-action with Apache License 2.0
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "KStreamVSKTable_app");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "KStreamVSKTable_group");
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "KStreamVSKTable_client");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "30000");
    props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "15000");
    //props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG,"0");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "1");
    props.put(ConsumerConfig.METADATA_MAX_AGE_CONFIG, "10000");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StreamsSerdes.StockTickerSerde().getClass().getName());
    props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
    return props;
}
 
Example #13
Source File: KafkaStreamsYellingIntegrationTest.java    From kafka-streams-in-action with Apache License 2.0
@Before
public void setUp() {
    Properties properties = StreamsTestUtils.getStreamsConfig("integrationTest",
            EMBEDDED_KAFKA.bootstrapServers(),
            STRING_SERDE_CLASSNAME,
            STRING_SERDE_CLASSNAME,
            new Properties());
    properties.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    
    streamsConfig = new StreamsConfig(properties);

    producerConfig = TestUtils.producerConfig(EMBEDDED_KAFKA.bootstrapServers(),
            StringSerializer.class,
            StringSerializer.class);

    consumerConfig = TestUtils.consumerConfig(EMBEDDED_KAFKA.bootstrapServers(),
            StringDeserializer.class,
            StringDeserializer.class);
}
 
Example #14
Source File: KafkaStreamsStarter.java    From football-events with MIT License
public KafkaStreams start() {
    Properties props = new Properties();
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapAddress);
    props.put(StreamsConfig.CLIENT_ID_CONFIG, applicationId);
    props.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 0);
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, "exactly_once");
    props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1); //commit asap

    final KafkaStreams kafkaStreams = new KafkaStreams(topology, props);

    Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close));
    kafkaStreams.setUncaughtExceptionHandler((thread, exception) -> logger.error(thread.toString(), exception));

    // wait for Kafka and for the football topics to be created, to avoid an endless REBALANCING problem
    waitForKafkaAndTopics();
    startStreams(kafkaStreams);

    logger.debug("Started Kafka Streams, Kafka bootstrap: {}", kafkaBootstrapAddress);
    return kafkaStreams;
}
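
Note that the "exactly_once" literal above corresponds to the StreamsConfig.EXACTLY_ONCE constant, so the processing-guarantee line can also be written without a raw string (newer client versions add further exactly-once variants):

props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);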
 
Example #15
Source File: RunningAverage.java    From kafka-tutorials with Apache License 2.0
protected Properties buildStreamsProperties(Properties envProps) {
  Properties config = new Properties();
  config.putAll(envProps);

  config.put(APPLICATION_ID_CONFIG, envProps.getProperty("application.id"));
  config.put(BOOTSTRAP_SERVERS_CONFIG, envProps.getProperty("bootstrap.servers"));
  config.put(DEFAULT_KEY_SERDE_CLASS_CONFIG, Long().getClass());
  config.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Double().getClass());
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url"));

  config.put(REPLICATION_FACTOR_CONFIG, envProps.getProperty("default.topic.replication.factor"));
  config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, envProps.getProperty("offset.reset.policy"));

  config.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);

  return config;
}
 
Example #16
Source File: PhysicalPlanBuilderTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldAddMetricsInterceptorsToExistingList() throws Exception {
  // Initialize override properties with lists for producer/consumer interceptors
  Map<String, Object> overrideProperties = new HashMap<>();
  List<String> consumerInterceptors = new LinkedList<>();
  consumerInterceptors.add(DummyConsumerInterceptor.class.getName());
  overrideProperties.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG),
      consumerInterceptors);
  List<String> producerInterceptors = new LinkedList<>();
  producerInterceptors.add(DummyProducerInterceptor.class.getName());
  overrideProperties.put(StreamsConfig.producerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG),
      producerInterceptors);
  physicalPlanBuilder = buildPhysicalPlanBuilder(overrideProperties);

  buildPhysicalPlan(simpleSelectFilter);

  List<TestKafkaStreamsBuilder.Call> calls = testKafkaStreamsBuilder.getCalls();
  Assert.assertEquals(1, calls.size());
  StreamsConfig config = calls.get(0).config;

  Object val = config.originals().get(
      StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG));
  Assert.assertThat(val, instanceOf(List.class));
  consumerInterceptors = (List<String>) val;
  Assert.assertEquals(2, consumerInterceptors.size());
  Assert.assertEquals(DummyConsumerInterceptor.class.getName(), consumerInterceptors.get(0));
  Assert.assertEquals(ConsumerCollector.class, Class.forName(consumerInterceptors.get(1)));

  val = config.originals().get(
      StreamsConfig.producerPrefix(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG));
  Assert.assertThat(val, instanceOf(List.class));
  producerInterceptors = (List<String>) val;
  Assert.assertEquals(2, producerInterceptors.size());
  Assert.assertEquals(DummyProducerInterceptor.class.getName(), producerInterceptors.get(0));
  Assert.assertEquals(ProducerCollector.class, Class.forName(producerInterceptors.get(1)));
}
 
Example #17
Source File: StreamsTester.java    From football-events with MIT License
public StreamsTester(String bootstrapServer, String applicationId) {
    try {
        kafkaTempDir = Files.createTempDirectory("kafka_streams_" + getClass().getSimpleName());
    } catch (IOException e) {
        throw new RuntimeException("Unable to create Kafka temp dir", e);
    }
    streamsProps = new Properties();
    streamsProps.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
    streamsProps.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsProps.put(StreamsConfig.STATE_DIR_CONFIG, kafkaTempDir.toString());
}
 
Example #18
Source File: ZMartKafkaStreamsAdvancedReqsMetricsApp.java    From kafka-streams-in-action with Apache License 2.0
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.CLIENT_ID_CONFIG, "zmart-metrics-client-id");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "zmart-metrics-group-id");
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "zmart-metrics-application-id");
    props.put(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, "DEBUG");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.producerPrefix(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG),
            Collections.singletonList(ZMartProducerInterceptor.class));
    return props;
}
 
Example #19
Source File: NamingChangelogAndRepartitionTopics.java    From kafka-tutorials with Apache License 2.0
public Properties buildStreamsProperties(Properties envProps) {
  Properties props = new Properties();

  props.put(StreamsConfig.APPLICATION_ID_CONFIG, envProps.getProperty("application.id"));
  props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, envProps.getProperty("bootstrap.servers"));
  props.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
  props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);

  return props;
}
 
Example #20
Source File: JoinIntTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Before
public void before() throws Exception {
  testHarness = new IntegrationTestHarness();
  testHarness.start();
  Map<String, Object> ksqlStreamConfigProps = new HashMap<>();
  ksqlStreamConfigProps.putAll(testHarness.ksqlConfig.getKsqlStreamConfigProps());
  // turn caching off to improve join consistency
  ksqlStreamConfigProps.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
  ksqlStreamConfigProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
  ksqlContext = KsqlContext.create(new KsqlConfig(ksqlStreamConfigProps),
                                   testHarness.schemaRegistryClient);

  // Set up the test data
  testHarness.createTopic(itemTableTopicJson);
  testHarness.createTopic(itemTableTopicAvro);
  itemDataProvider = new ItemDataProvider();

  testHarness.publishTestData(itemTableTopicJson,
                              itemDataProvider,
                              now - 500);
  testHarness.publishTestData(itemTableTopicAvro,
                              itemDataProvider,
                              now - 500,
                              DataSource.DataSourceSerDe.AVRO);

  testHarness.createTopic(orderStreamTopicJson);
  testHarness.createTopic(orderStreamTopicAvro);
  orderDataProvider = new OrderDataProvider();
  testHarness.publishTestData(orderStreamTopicJson,
                              orderDataProvider,
                              now);
  testHarness.publishTestData(orderStreamTopicAvro,
                              orderDataProvider,
                              now,
                              DataSource.DataSourceSerDe.AVRO);
  createStreams();
}
 
Example #21
Source File: KafkaStreamsHealth.java    From micronaut-kafka with Apache License 2.0
/**
 * Derive the application.id from the stream. The following order is used to resolve a name for the stream:
 * <p>
 * application.id -> client.id -> threadName -> kafkaStream.toString()
 *
 * @param kafkaStreams The kafka stream
 * @return Application id
 */
private String getApplicationId(final KafkaStreams kafkaStreams) {
    try {
        ConfiguredStreamBuilder configuredStreamBuilder = kafkaStreamsFactory.getStreams().get(kafkaStreams);
        if (configuredStreamBuilder != null) {
            Properties configuration = configuredStreamBuilder.getConfiguration();
            return (String) configuration.getOrDefault(StreamsConfig.APPLICATION_ID_CONFIG, configuration.getProperty(StreamsConfig.CLIENT_ID_CONFIG));
        } else {
            return getDefaultStreamName(kafkaStreams);
        }
    } catch (Exception e) {
        return getDefaultStreamName(kafkaStreams);
    }
}
 
Example #22
Source File: ZMartKafkaStreamsAddStateApp.java    From kafka-streams-in-action with Apache License 2.0
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.CLIENT_ID_CONFIG, "AddingStateConsumer");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "AddingStateGroupId");
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "AddingStateAppId");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
    return props;
}
 
Example #23
Source File: KafkaStreamsProcessor.java    From quarkus with Apache License 2.0
private void registerDefaultSerdes(BuildProducer<ReflectiveClassBuildItem> reflectiveClasses,
        Properties kafkaStreamsProperties) {
    String defaultKeySerdeClass = kafkaStreamsProperties.getProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG);
    String defaultValueSerdeClass = kafkaStreamsProperties.getProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG);

    if (defaultKeySerdeClass != null) {
        registerClassName(reflectiveClasses, defaultKeySerdeClass);
    }
    if (defaultValueSerdeClass != null) {
        registerClassName(reflectiveClasses, defaultValueSerdeClass);
    }
    if (!allDefaultSerdesAreDefinedInProperties(defaultKeySerdeClass, defaultValueSerdeClass)) {
        registerDefaultSerde(reflectiveClasses);
    }
}
 
Example #24
Source File: KsqlConfig.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
public KsqlConfig(Map<?, ?> props) {
  super(CONFIG_DEF, props);

  ksqlConfigProps = new HashMap<>();
  ksqlStreamConfigProps = new HashMap<>();

  ksqlConfigProps.putAll(this.values());

  ksqlStreamConfigProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, KsqlConstants
      .defaultAutoOffsetRestConfig);
  ksqlStreamConfigProps.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, KsqlConstants
      .defaultCommitIntervalMsConfig);
  ksqlStreamConfigProps.put(
      StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, KsqlConstants
          .defaultCacheMaxBytesBufferingConfig);
  ksqlStreamConfigProps.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, KsqlConstants
      .defaultNumberOfStreamsThreads);

  final Object fail = originals().get(FAIL_ON_DESERIALIZATION_ERROR_CONFIG);
  if (fail == null || !Boolean.parseBoolean(fail.toString())) {
    ksqlStreamConfigProps.put(
        StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
        LogMetricAndContinueExceptionHandler.class
    );
  }

  applyStreamsConfig(originals());
}
 
Example #25
Source File: WindowFinalResult.java    From kafka-tutorials with Apache License 2.0
public static Properties buildProperties(Config config) {
    Properties properties = new Properties();

    properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("bootstrap.servers"));
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, config.getString("application.id"));
    properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StringSerde.class);
    properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, StringSerde.class);
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    properties.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);

    return properties;
}
 
Example #26
Source File: StockCountsStreamsConnectIntegrationApplication.java    From kafka-streams-in-action with Apache License 2.0
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.CLIENT_ID_CONFIG, "ks-connect-stock-analysis-client");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "ks-connect-stock-analysis-group");
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ks-connect-stock-analysis-appid");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
    props.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, DeserializerErrorHandler.class);
    props.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG),
            Collections.singletonList(bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor.class));
    return props;
}
 
Example #27
Source File: StreamsIngest.java    From kafka-tutorials with Apache License 2.0
public Properties buildStreamsProperties(Properties envProps) {
  Properties props = new Properties();

  props.put(StreamsConfig.APPLICATION_ID_CONFIG, envProps.getProperty("application.id"));
  props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, envProps.getProperty("bootstrap.servers"));
  props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
  props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
  props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url"));
  props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);

  return props;
}
 
Example #28
Source File: LongColumnTimestampExtractionPolicy.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public void applyTo(final KsqlConfig config, final Map<String, Object> newStreamProperties) {
  newStreamProperties.put(
      KsqlConfig.KSQL_TIMESTAMP_COLUMN_INDEX,
      config.get(KsqlConfig.KSQL_TIMESTAMP_COLUMN_INDEX)
  );
  newStreamProperties.put(
      StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG,
      LongTimestampExtractor.class);
}
 
Example #29
Source File: KsqlConfigTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSetInitialValuesCorrectly() {
  Map<String, Object> initialProps = new HashMap<>();
  initialProps.put(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY, 10);
  initialProps.put(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY, (short) 3);
  initialProps.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 800);
  initialProps.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 5);

  KsqlConfig ksqlConfig = new KsqlConfig(initialProps);

  assertThat(ksqlConfig.get(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY), equalTo(10));
  assertThat(ksqlConfig.get(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY), equalTo((short) 3));
}
 
Example #30
Source File: OptimizationStream.java    From micronaut-kafka with Apache License 2.0
@Singleton
@Named(STREAM_OPTIMIZATION_OFF)
KStream<String, String> optimizationOff(
        @Named(STREAM_OPTIMIZATION_OFF) ConfiguredStreamBuilder builder) {
    // set default serdes
    Properties props = builder.getConfiguration();
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    KTable<String, String> table = builder
            .table(OPTIMIZATION_OFF_INPUT, Materialized.as(OPTIMIZATION_OFF_STORE));

    return table.toStream();
}