org.apache.kafka.streams.kstream.GlobalKTable Java Examples

The following examples show how to use org.apache.kafka.streams.kstream.GlobalKTable. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
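Before the project-specific examples, here is a minimal, self-contained sketch of the core pattern most of them share: materialize a topic as a GlobalKTable and join a KStream against it. The topic names and String serdes below are illustrative assumptions, not taken from any of the projects listed here.

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

public class GlobalKTableJoinSketch {

    public Topology build() {
        StreamsBuilder builder = new StreamsBuilder();

        // Orders keyed by order id; the value is the customer id to look up.
        KStream<String, String> orders = builder.stream(
                "orders", Consumed.with(Serdes.String(), Serdes.String()));

        // Customer names, fully replicated to every application instance.
        GlobalKTable<String, String> customers = builder.globalTable(
                "customers", Consumed.with(Serdes.String(), Serdes.String()));

        // KStream-GlobalKTable join: map each stream record to the table key,
        // then combine the stream value and the table value into the result.
        orders.join(customers,
                (orderId, customerId) -> customerId,          // table key for the lookup
                (customerId, customerName) -> customerName)   // joined value
                .to("orders-with-names", Produced.with(Serdes.String(), Serdes.String()));

        return builder.build();
    }
}

Unlike a KStream-KTable join, this join does not require co-partitioned topics, because the global table is fully replicated to every instance of the application.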
Example #1
Source File: AbstractKafkaRepository.java    From SkaETL with Apache License 2.0 (8 votes)
public AbstractKafkaRepository(String name, Serde<V> valueSerde, Function<V,String> keyFunction, KafkaAdminService kafkaAdminService, KafkaConfiguration kafkaConfiguration) {
    this.repositoryName = name + "-db";
    this.keyFunction = keyFunction;
    this.producer = KafkaUtils.kafkaProducer(kafkaConfiguration.getBootstrapServers(), StringSerializer.class, JsonNodeSerialializer.class);
    kafkaAdminService.createTopic(kafkaAdminService.buildTopicInfo(repositoryName,TopicConfig.CLEANUP_POLICY_COMPACT));

    Properties props = KafkaUtils.createKStreamProperties(repositoryName + "-stream"+ UUID.randomUUID().toString(), kafkaConfiguration.getBootstrapServers());
    StreamsBuilder builder = new StreamsBuilder();

    final GlobalKTable<String, V> globalKTable = builder.globalTable(repositoryName, materialize(valueSerde));

    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.start();
    producer.flush();
    keyValueStore = streams.store(getStoreName(), QueryableStoreTypes.keyValueStore());

    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));

}
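The constructor above only wires the topology; the GlobalKTable exists to materialize the compacted topic into the queryable keyValueStore. A read path for such a repository might look like the sketch below; the method names are illustrative and not part of the SkaETL source.

// Hypothetical accessors, assuming keyValueStore is the ReadOnlyKeyValueStore<String, V>
// obtained from streams.store(...) in the constructor above.
public V findByKey(String key) {
    // Point lookup against the store backing the GlobalKTable.
    return keyValueStore.get(key);
}

public List<V> findAll() {
    List<V> values = new ArrayList<>();
    // KeyValueIterator must be closed; try-with-resources takes care of that.
    try (KeyValueIterator<String, V> iterator = keyValueStore.all()) {
        iterator.forEachRemaining(keyValue -> values.add(keyValue.value));
    }
    return values;
}

Writes would presumably go through the injected producer to the compacted topic, keyed by keyFunction, so that the global table (and therefore the store) picks them up.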
 
Example #2
Source File: EmailService.java    From qcon-microservices with Apache License 2.0 (8 votes)
private KafkaStreams processStreams(final String bootstrapServers, final String stateDir) throws IOException {

        final StreamsBuilder builder = new StreamsBuilder();

        //Create the streams/tables for the join
        final KStream<String, Order> orders = builder.stream(ORDERS.name(),
                Consumed.with(ORDERS.keySerde(), ORDERS.valueSerde()));
        final GlobalKTable<String, Customer> customers = builder.globalTable(CUSTOMERS.name(),
                Consumed.with(CUSTOMERS.keySerde(), CUSTOMERS.valueSerde()));
        // Join the stream to the table, then send an email for each result.
        // A KStream-GlobalKTable join takes three arguments: the table, a mapping
        // from the stream's (key, value) to the table key to join on, and the join
        // function, which combines the stream and table values into the result.
        orders.join(customers,
                (orderID, order) -> order.getCustomerId(),
                (order, customer) -> new EmailTuple(order, customer))
                // Now send an email for each tuple.
                .peek((key, emailTuple) -> emailer.sendEmail(emailTuple));

        return new KafkaStreams(builder.build(), configStreams(bootstrapServers, stateDir, SERVICE_APP_ID));
    }
 
Example #3
Source File: GlobalKTableBoundElementFactory.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (8 votes)
@Override
public GlobalKTable createInput(String name) {
	BindingProperties bindingProperties = this.bindingServiceProperties.getBindingProperties(name);
	ConsumerProperties consumerProperties = bindingProperties.getConsumer();
	if (consumerProperties == null) {
		consumerProperties = this.bindingServiceProperties.getConsumerProperties(name);
		consumerProperties.setUseNativeDecoding(true);
	}
	else {
		if (!encodingDecodingBindAdviceHandler.isDecodingSettingProvided()) {
			consumerProperties.setUseNativeDecoding(true);
		}
	}
	// Always set multiplex to true in the kafka streams binder
	consumerProperties.setMultiplex(true);

	// @checkstyle:off
	GlobalKTableBoundElementFactory.GlobalKTableWrapperHandler wrapper = new GlobalKTableBoundElementFactory.GlobalKTableWrapperHandler();
	// @checkstyle:on
	ProxyFactory proxyFactory = new ProxyFactory(
			GlobalKTableBoundElementFactory.GlobalKTableWrapper.class,
			GlobalKTable.class);
	proxyFactory.addAdvice(wrapper);

	return (GlobalKTable) proxyFactory.getProxy();
}
 
Example #4
Source File: NameJoinGlobalKTable.java    From fluent-kafka-streams-tests with MIT License (7 votes)
public Topology getTopologyWithIntermediateTopic() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> inputStream =
            builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long()));

    builder.stream(NAME_INPUT, Consumed.with(Serdes.Long(), Serdes.String()))
            .mapValues(name -> name.toUpperCase())
            .to(INTERMEDIATE_TOPIC);

    final GlobalKTable<Long, String> joinTable = builder.globalTable(INTERMEDIATE_TOPIC);

    inputStream
            .join(joinTable,
                    (id, valueId) -> valueId,
                    (id, name) -> name)
            .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String()));

    return builder.build();
}
 
Example #5
Source File: StreamToGlobalKTableFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (7 votes)
@Bean
public Function<KStream<Long, Order>,
		Function<GlobalKTable<Long, Customer>,
				Function<GlobalKTable<Long, Product>, KStream<Long, EnrichedOrder>>>> process() {

	return orderStream -> (
			customers -> (
					products -> (
							orderStream.join(customers,
									(orderId, order) -> order.getCustomerId(),
									(order, customer) -> new CustomerOrder(customer, order))
									.join(products,
											(orderId, customerOrder) -> customerOrder
													.productId(),
											(customerOrder, product) -> {
												EnrichedOrder enrichedOrder = new EnrichedOrder();
												enrichedOrder.setProduct(product);
												enrichedOrder.setCustomer(customerOrder.customer);
												enrichedOrder.setOrder(customerOrder.order);
												return enrichedOrder;
											})
					)
			)
	);
}
 
Example #6
Source File: KafkaStreamsStreamListenerSetupMethodOrchestrator.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (7 votes)
@SuppressWarnings("unchecked")
private boolean isDeclarativeInput(String targetBeanName,
								MethodParameter methodParameter) {
	if (!methodParameter.getParameterType().isAssignableFrom(Object.class)
			&& this.applicationContext.containsBean(targetBeanName)) {
		Class<?> targetBeanClass = this.applicationContext.getType(targetBeanName);
		if (targetBeanClass != null) {
			boolean supports = KafkaStreamsBinderUtils.supportsKStream(methodParameter, targetBeanClass);
			if (!supports) {
				supports = KTable.class.isAssignableFrom(targetBeanClass)
						&& KTable.class.isAssignableFrom(methodParameter.getParameterType());
				if (!supports) {
					supports = GlobalKTable.class.isAssignableFrom(targetBeanClass)
							&& GlobalKTable.class.isAssignableFrom(methodParameter.getParameterType());
				}
			}
			return supports;
		}
	}
	return false;
}
 
Example #7
Source File: GlobalKTableBoundElementFactory.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (7 votes)
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
	if (methodInvocation.getMethod().getDeclaringClass()
			.equals(GlobalKTable.class)) {
		Assert.notNull(this.delegate,
				"Trying to prepareConsumerBinding " + methodInvocation.getMethod()
						+ "  but no delegate has been set.");
		return methodInvocation.getMethod().invoke(this.delegate,
				methodInvocation.getArguments());
	}
	else if (methodInvocation.getMethod().getDeclaringClass()
			.equals(GlobalKTableBoundElementFactory.GlobalKTableWrapper.class)) {
		return methodInvocation.getMethod().invoke(this,
				methodInvocation.getArguments());
	}
	else {
		throw new IllegalStateException(
				"Only GlobalKTable method invocations are permitted");
	}
}
 
Example #8
Source File: NameJoinGlobalKTable.java    From fluent-kafka-streams-tests with MIT License (6 votes)
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> inputStream =
            builder.stream(INPUT_TOPIC, Consumed.with(Serdes.Long(), Serdes.Long()));

    final GlobalKTable<Long, String> joinTable = builder.globalTable(NAME_INPUT);

    inputStream
            .join(joinTable,
                    (id, valueId) -> valueId,
                    (id, name) -> name)
            .to(OUTPUT_TOPIC, Produced.with(Serdes.Long(), Serdes.String()));

    return builder.build();
}
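Since this file comes from fluent-kafka-streams-tests, the natural companion is a test of the topology above. The sketch below uses Kafka Streams' own TopologyTestDriver rather than the fluent test DSL; it assumes NAME_INPUT, INPUT_TOPIC and OUTPUT_TOPIC resolve to plain topic-name strings and that NameJoinGlobalKTable can be instantiated directly.

@Test
void shouldJoinNameFromGlobalKTable() {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "name-join-test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
    // getTopology() builds the global table without an explicit Consumed,
    // so it falls back to the default serdes configured here.
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Long().getClass());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

    try (TopologyTestDriver driver =
                 new TopologyTestDriver(new NameJoinGlobalKTable().getTopology(), props)) {
        final TestInputTopic<Long, String> names = driver.createInputTopic(
                NAME_INPUT, new LongSerializer(), new StringSerializer());
        final TestInputTopic<Long, Long> input = driver.createInputTopic(
                INPUT_TOPIC, new LongSerializer(), new LongSerializer());
        final TestOutputTopic<Long, String> output = driver.createOutputTopic(
                OUTPUT_TOPIC, new LongDeserializer(), new StringDeserializer());

        names.pipeInput(1L, "alice");   // populate the GlobalKTable side first
        input.pipeInput(42L, 1L);       // stream record whose value is the table key

        assertEquals(new KeyValue<>(42L, "alice"), output.readKeyValue());
    }
}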
 
Example #9
Source File: StreamToGlobalKTableJoinIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (6 votes)
@StreamListener
@SendTo("output")
public KStream<Long, EnrichedOrder> process(
		@Input("input") KStream<Long, Order> ordersStream,
		@Input("input-x") GlobalKTable<Long, Customer> customers,
		@Input("input-y") GlobalKTable<Long, Product> products) {

	KStream<Long, CustomerOrder> customerOrdersStream = ordersStream.join(
			customers, (orderId, order) -> order.getCustomerId(),
			(order, customer) -> new CustomerOrder(customer, order));

	return customerOrdersStream.join(products,
			(orderId, customerOrder) -> customerOrder.productId(),
			(customerOrder, product) -> {
				EnrichedOrder enrichedOrder = new EnrichedOrder();
				enrichedOrder.setProduct(product);
				enrichedOrder.setCustomer(customerOrder.customer);
				enrichedOrder.setOrder(customerOrder.order);
				return enrichedOrder;
			});
}
 
Example #10
Source File: AbstractKafkaStreamsBinderProcessor.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
private GlobalKTable<?, ?> getGlobalKTable(KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties,
		StreamsBuilder streamsBuilder,
		Serde<?> keySerde, Serde<?> valueSerde, String materializedAs,
		String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
	final Consumed<?, ?> consumed = getConsumed(kafkaStreamsConsumerProperties, keySerde, valueSerde, autoOffsetReset);
	return materializedAs != null
			? materializedAsGlobalKTable(streamsBuilder, bindingDestination,
			materializedAs, keySerde, valueSerde, autoOffsetReset, kafkaStreamsConsumerProperties)
			: streamsBuilder.globalTable(bindingDestination,
			consumed);
}
 
Example #11
Source File: KafkaStreamsGlobalKTableJoin.java    From spring-cloud-stream-samples with Apache License 2.0 (5 votes)
@Bean
public BiFunction<KStream<String, Long>, GlobalKTable<String, String>, KStream<String, Long>> process() {

	return (userClicksStream, userRegionsTable) -> userClicksStream
			.leftJoin(userRegionsTable,
					(name,value) -> name,
					(clicks, region) -> new RegionWithClicks(region == null ? "UNKNOWN" : region, clicks)
					)
			.map((user, regionWithClicks) -> new KeyValue<>(regionWithClicks.getRegion(), regionWithClicks.getClicks()))
			.groupByKey(Grouped.with(Serdes.String(), Serdes.Long()))
			.reduce((firstClicks, secondClicks) -> firstClicks + secondClicks)
			.toStream();
}
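The RegionWithClicks value type is not shown in the snippet above. A minimal version consistent with how it is used might look like this; the constructor argument order and accessor names are inferred from the call sites, so treat them as assumptions rather than the original class.

// Simple value holder pairing a region name with a click count.
public class RegionWithClicks {

    private final String region;
    private final long clicks;

    public RegionWithClicks(String region, long clicks) {
        this.region = region;
        this.clicks = clicks;
    }

    public String getRegion() {
        return region;
    }

    public long getClicks() {
        return clicks;
    }
}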
 
Example #12
Source File: StreamToGlobalKTableFunctionTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
@Bean
public Function<KStream<Long, Order>,
		Function<KTable<Long, Customer>,
				Function<GlobalKTable<Long, Product>, KStream<Long, Order>>>> forTimeExtractorTest() {
	return orderStream ->
			customers ->
				products -> orderStream;
}
 
Example #13
Source File: TopologyProducer.java    From quarkus-quickstarts with Apache License 2.0 (5 votes)
@Produces
public Topology buildTopology() {
    StreamsBuilder builder = new StreamsBuilder();

    JsonbSerde<WeatherStation> weatherStationSerde = new JsonbSerde<>(WeatherStation.class);
    JsonbSerde<Aggregation> aggregationSerde = new JsonbSerde<>(Aggregation.class);

    KeyValueBytesStoreSupplier storeSupplier = Stores.persistentKeyValueStore(WEATHER_STATIONS_STORE);

    GlobalKTable<Integer, WeatherStation> stations = builder.globalTable(
            WEATHER_STATIONS_TOPIC,
            Consumed.with(Serdes.Integer(), weatherStationSerde));

    builder.stream(
            TEMPERATURE_VALUES_TOPIC,
            Consumed.with(Serdes.Integer(), Serdes.String()))
            .join(
                    stations,
                    (stationId, timestampAndValue) -> stationId,
                    (timestampAndValue, station) -> {
                        String[] parts = timestampAndValue.split(";");
                        return new TemperatureMeasurement(station.id, station.name, Instant.parse(parts[0]),
                                Double.valueOf(parts[1]));
                    })
            .groupByKey()
            .aggregate(
                    Aggregation::new,
                    (stationId, value, aggregation) -> aggregation.updateFrom(value),
                    Materialized.<Integer, Aggregation> as(storeSupplier)
                            .withKeySerde(Serdes.Integer())
                            .withValueSerde(aggregationSerde))
            .toStream()
            .to(
                    TEMPERATURES_AGGREGATED_TOPIC,
                    Produced.with(Serdes.Integer(), aggregationSerde));

    return builder.build();
}
 
Example #14
Source File: KafkaStreamsStreamListenerSetupMethodOrchestrator.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
private boolean methodParameterSupports(Method method) {
	boolean supports = false;
	for (int i = 0; i < method.getParameterCount(); i++) {
		MethodParameter methodParameter = MethodParameter.forExecutable(method, i);
		Class<?> parameterType = methodParameter.getParameterType();
		if (parameterType.equals(KStream.class) || parameterType.equals(KTable.class)
				|| parameterType.equals(GlobalKTable.class)) {
			supports = true;
		}
	}
	return supports;
}
 
Example #15
Source File: FunctionDetectorCondition.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
private static List<String> pruneFunctionBeansForKafkaStreams(List<String> strings,
																	ConditionContext context) {
	final List<String> prunedList = new ArrayList<>();

	for (String key : strings) {
		final Class<?> classObj = ClassUtils.resolveClassName(((AnnotatedBeanDefinition)
						context.getBeanFactory().getBeanDefinition(key))
						.getMetadata().getClassName(),
				ClassUtils.getDefaultClassLoader());
		try {
			Method[] methods = classObj.getMethods();
			Optional<Method> kafkaStreamMethod = Arrays.stream(methods).filter(m -> m.getName().equals(key)).findFirst();
			if (kafkaStreamMethod.isPresent()) {
				Method method = kafkaStreamMethod.get();
				ResolvableType resolvableType = ResolvableType.forMethodReturnType(method, classObj);
				final Class<?> rawClass = resolvableType.getGeneric(0).getRawClass();
				if (rawClass == KStream.class || rawClass == KTable.class || rawClass == GlobalKTable.class) {
					prunedList.add(key);
				}
			}
		}
		catch (Exception e) {
			LOG.error("Function not found: " + key, e);
		}
	}
	return prunedList;
}
 
Example #16
Source File: AbstractKafkaStreamsBinderProcessor.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
private <K, V> GlobalKTable<K, V> materializedAsGlobalKTable(
		StreamsBuilder streamsBuilder, String destination, String storeName,
		Serde<K> k, Serde<V> v, Topology.AutoOffsetReset autoOffsetReset, KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties) {
	final Consumed<K, V> consumed = getConsumed(kafkaStreamsConsumerProperties, k, v, autoOffsetReset);
	return streamsBuilder.globalTable(
			this.bindingServiceProperties.getBindingDestination(destination),
			consumed,
			getMaterialized(storeName, k, v));
}
 
Example #17
Source File: GlobalKTableBinder.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
@Override
protected Binding<GlobalKTable<Object, Object>> doBindProducer(String name,
		GlobalKTable<Object, Object> outboundBindTarget,
		ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) {
	throw new UnsupportedOperationException(
			"No producer level binding is allowed for GlobalKTable");
}
 
Example #18
Source File: GlobalKTableBinder.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
@Override
@SuppressWarnings("unchecked")
protected Binding<GlobalKTable<Object, Object>> doBindConsumer(String name,
		String group, GlobalKTable<Object, Object> inputTarget,
		ExtendedConsumerProperties<KafkaStreamsConsumerProperties> properties) {
	if (!StringUtils.hasText(group)) {
		group = properties.getExtension().getApplicationId();
	}
	KafkaStreamsBinderUtils.prepareConsumerBinding(name, group,
			getApplicationContext(), this.kafkaTopicProvisioner,
			this.binderConfigurationProperties, properties);
	return new DefaultBinding<>(name, group, inputTarget, null);
}
 
Example #19
Source File: KafkaStreamsFunctionBeanPostProcessor.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (5 votes)
private void extractResolvableTypes(String key) {
	final Class<?> classObj = ClassUtils.resolveClassName(((AnnotatedBeanDefinition)
					this.beanFactory.getBeanDefinition(key))
					.getMetadata().getClassName(),
			ClassUtils.getDefaultClassLoader());
	try {
		Method[] methods = classObj.getMethods();
		Optional<Method> kafkaStreamMethod = Arrays.stream(methods).filter(m -> m.getName().equals(key)).findFirst();
		if (kafkaStreamMethod.isPresent()) {
			Method method = kafkaStreamMethod.get();
			ResolvableType resolvableType = ResolvableType.forMethodReturnType(method, classObj);
			final Class<?> rawClass = resolvableType.getGeneric(0).getRawClass();
			if (rawClass == KStream.class || rawClass == KTable.class || rawClass == GlobalKTable.class) {
				if (onlySingleFunction) {
					resolvableTypeMap.put(key, resolvableType);
				}
				else {
					final String definition = streamFunctionProperties.getDefinition();
					if (definition == null) {
						throw new IllegalStateException("Multiple functions found, but function definition property is not set.");
					}
					else if (definition.contains(key)) {
						resolvableTypeMap.put(key, resolvableType);
					}
				}
			}
		}
	}
	catch (Exception e) {
		LOG.error("Function activation issues while mapping the function: " + key, e);
	}
}
 
Example #20
Source File: GlobalKTableBoundElementFactory.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (4 votes)
public void wrap(GlobalKTable<Object, Object> delegate) {
	Assert.notNull(delegate, "delegate cannot be null");
	if (this.delegate == null) {
		this.delegate = delegate;
	}
}
 
Example #21
Source File: StreamToGlobalKTableJoinIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (4 votes)
@Input("input-x")
GlobalKTable<?, ?> inputX();
 
Example #22
Source File: StreamToGlobalKTableJoinIntegrationTests.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (4 votes)
@Input("input-y")
GlobalKTable<?, ?> inputY();
 
Example #23
Source File: GlobalKTableBoundElementFactory.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (4 votes)
@Override
public GlobalKTable createOutput(String name) {
	throw new UnsupportedOperationException(
			"Outbound operations are not allowed on target type GlobalKTable");
}
 
Example #24
Source File: GlobalKTableBoundElementFactory.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (4 votes)
GlobalKTableBoundElementFactory(BindingServiceProperties bindingServiceProperties,
								EncodingDecodingBindAdviceHandler encodingDecodingBindAdviceHandler) {
	super(GlobalKTable.class);
	this.bindingServiceProperties = bindingServiceProperties;
	this.encodingDecodingBindAdviceHandler = encodingDecodingBindAdviceHandler;
}
 
Example #25
Source File: KafkaStreamsBinderBootstrapTest.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (4 votes)
@Input("input-3")
GlobalKTable<?, ?> inputStream();
 
Example #26
Source File: KeyValueSerdeResolver.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (4 votes)
private boolean isResolvalbeKafkaStreamsType(ResolvableType resolvableType) {
	return resolvableType.getRawClass() != null && (KStream.class.isAssignableFrom(resolvableType.getRawClass()) || KTable.class.isAssignableFrom(resolvableType.getRawClass()) ||
			GlobalKTable.class.isAssignableFrom(resolvableType.getRawClass()));
}
 
Example #27
Source File: GlobalKTableExample.java    From kafka-streams-in-action with Apache License 2.0 (2 votes)
public static void main(String[] args) throws Exception {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<String> stringSerde = Serdes.String();
        Serde<StockTransaction> transactionSerde = StreamsSerdes.StockTransactionSerde();
        Serde<TransactionSummary> transactionSummarySerde = StreamsSerdes.TransactionSummarySerde();


        StreamsBuilder builder = new StreamsBuilder();
        long twentySeconds = 1000 * 20;

        KeyValueMapper<Windowed<TransactionSummary>, Long, KeyValue<String, TransactionSummary>> transactionMapper = (window, count) -> {
            TransactionSummary transactionSummary = window.key();
            String newKey = transactionSummary.getIndustry();
            transactionSummary.setSummaryCount(count);
            return KeyValue.pair(newKey, transactionSummary);
        };

        KStream<String, TransactionSummary> countStream =
                builder.stream( STOCK_TRANSACTIONS_TOPIC, Consumed.with(stringSerde, transactionSerde).withOffsetResetPolicy(LATEST))
                        .groupBy((noKey, transaction) -> TransactionSummary.from(transaction), Serialized.with(transactionSummarySerde, transactionSerde))
                        .windowedBy(SessionWindows.with(twentySeconds)).count()
                        .toStream().map(transactionMapper);

        GlobalKTable<String, String> publicCompanies = builder.globalTable(COMPANIES.topicName());
        GlobalKTable<String, String> clients = builder.globalTable(CLIENTS.topicName());


        countStream.leftJoin(publicCompanies, (key, txn) -> txn.getStockTicker(), TransactionSummary::withCompanyName)
                .leftJoin(clients, (key, txn) -> txn.getCustomerId(), TransactionSummary::withCustomerName)
                .print(Printed.<String, TransactionSummary>toSysOut().withLabel("Resolved Transaction Summaries"));

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        kafkaStreams.cleanUp();


        kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
            LOG.error("had exception ", e);
        });

        CustomDateGenerator dateGenerator = CustomDateGenerator.withTimestampsIncreasingBy(Duration.ofMillis(750));

        DataGenerator.setTimestampGenerator(dateGenerator::get);

        MockDataProducer.produceStockTransactions(2, 5, 3, true);

        LOG.info("Starting GlobalKTable Example");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        Thread.sleep(65000);
        LOG.info("Shutting down the GlobalKTable Example Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
 
Example #28
Source File: KafkaStreamsBinderBootstrapTest.java    From spring-cloud-stream-binder-kafka with Apache License 2.0 (2 votes)
@StreamListener
public void handleY(@Input("input-3") GlobalKTable<Object, String> stream) {

}
 
Example #29
Source File: GlobalKTableBoundElementFactory.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
void wrap(GlobalKTable<Object, Object> delegate);