Java Code Examples for org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner

The following examples show how to use org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: flink   Source File: FlinkKafkaProducerBaseTest.java    License: Apache License 2.0 6 votes vote down vote up
@SuppressWarnings("unchecked")
DummyFlinkKafkaProducer(Properties producerConfig, KeyedSerializationSchema<T> schema, FlinkKafkaPartitioner partitioner) {

	super(DUMMY_TOPIC, schema, producerConfig, partitioner);

	this.mockProducer = mock(KafkaProducer.class);
	when(mockProducer.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(new Answer<Object>() {
		@Override
		public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
			pendingCallbacks.add(invocationOnMock.getArgument(1));
			return null;
		}
	});

	this.pendingCallbacks = new ArrayList<>();
	this.flushLatch = new MultiShotLatch();
}
 
Example 2
@Override
protected KafkaTableSinkBase getExpectedKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// The factory under test is expected to build a Kafka 0.8 sink with exactly these settings.
	return new Kafka08TableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 3
Source Project: flink   Source File: Kafka010TableSourceSinkFactoryTest.java    License: Apache License 2.0 6 votes vote down vote up
@Override
protected KafkaTableSinkBase getExpectedKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// Expected result for the Kafka 0.10 factory: a sink built from the given settings verbatim.
	return new Kafka010TableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 4
@Override
protected KafkaTableSinkBase getExpectedKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// Expected result for the Kafka 0.11 factory: a sink built from the given settings verbatim.
	return new Kafka011TableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 5
@Override
protected KafkaTableSinkBase getExpectedKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// The matching sink the factory should have produced, configured identically.
	return new Kafka010TableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 6
@SuppressWarnings("unchecked")
private Optional<FlinkKafkaPartitioner<Row>> getFlinkKafkaPartitioner(DescriptorProperties descriptorProperties) {
	// Map the declarative sink-partitioner property onto a concrete partitioner instance.
	return descriptorProperties
		.getOptionalString(CONNECTOR_SINK_PARTITIONER)
		.flatMap((String partitionerString) -> {
			if (CONNECTOR_SINK_PARTITIONER_VALUE_FIXED.equals(partitionerString)) {
				return Optional.of(new FlinkFixedPartitioner<>());
			}
			if (CONNECTOR_SINK_PARTITIONER_VALUE_ROUND_ROBIN.equals(partitionerString)) {
				// Absent partitioner: leave partitioning to the Kafka producer itself.
				return Optional.empty();
			}
			if (CONNECTOR_SINK_PARTITIONER_VALUE_CUSTOM.equals(partitionerString)) {
				// Load and reflectively instantiate the user-supplied partitioner class.
				final Class<? extends FlinkKafkaPartitioner> partitionerClass =
					descriptorProperties.getClass(CONNECTOR_SINK_PARTITIONER_CLASS, FlinkKafkaPartitioner.class);
				return Optional.of((FlinkKafkaPartitioner<Row>) InstantiationUtil.instantiate(partitionerClass));
			}
			throw new TableException("Unsupported sink partitioner. Validator should have checked that.");
		});
}
 
Example 7
Source Project: Flink-CEPplus   Source File: FlinkKafkaProducerBaseTest.java    License: Apache License 2.0 6 votes vote down vote up
@SuppressWarnings("unchecked")
DummyFlinkKafkaProducer(Properties producerConfig, KeyedSerializationSchema<T> schema, FlinkKafkaPartitioner partitioner) {

	super(DUMMY_TOPIC, schema, producerConfig, partitioner);

	this.mockProducer = mock(KafkaProducer.class);
	when(mockProducer.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(new Answer<Object>() {
		@Override
		public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
			pendingCallbacks.add(invocationOnMock.getArgument(1));
			return null;
		}
	});

	this.pendingCallbacks = new ArrayList<>();
	this.flushLatch = new MultiShotLatch();
}
 
Example 8
Source Project: flink   Source File: Kafka08TableSourceSinkFactoryTest.java    License: Apache License 2.0 6 votes vote down vote up
@Override
protected KafkaTableSinkBase getExpectedKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// Reference sink for the Kafka 0.8 factory test, built straight from the inputs.
	return new Kafka08TableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 9
@Override
protected KafkaTableSinkBase createKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// Version-specific hook: produce a Kafka 0.9 sink for the given configuration.
	return new Kafka09TableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 10
Source Project: Flink-CEPplus   Source File: Kafka09TableSink.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a Kafka 0.9 table sink; all configuration is stored by the common base class.
 */
public Kafka09TableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	super(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 11
Source Project: Flink-CEPplus   Source File: Kafka09TableSink.java    License: Apache License 2.0 5 votes vote down vote up
@Override
protected FlinkKafkaProducerBase<Row> createKafkaProducer(
		String topic,
		Properties properties,
		SerializationSchema<Row> serializationSchema,
		Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// An empty partitioner becomes null, which lets the producer fall back to its default.
	return new FlinkKafkaProducer09<>(topic, serializationSchema, properties, partitioner.orElse(null));
}
 
Example 12
Source Project: alchemy   Source File: KafkaSinkDescriptor.java    License: Apache License 2.0 5 votes vote down vote up
@Override KafkaTableSinkBase newTableSink(TableSchema schema, String topic, Properties properties,
    Optional<FlinkKafkaPartitioner<Row>> partitioner, SerializationSchema<Row> serializationSchema) {
    // Delegate straight to the universal Kafka table sink with the supplied configuration.
    return new KafkaTableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 13
Source Project: Flink-CEPplus   Source File: KafkaTestEnvironmentImpl.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public <T> StreamSink<T> getProducerSink(
		String topic,
		KeyedSerializationSchema<T> serSchema,
		Properties props,
		FlinkKafkaPartitioner<T> partitioner) {
	// Flush-on-checkpoint ensures in-flight records are written out before a checkpoint completes.
	final FlinkKafkaProducer09<T> producer =
		new FlinkKafkaProducer09<>(topic, serSchema, props, partitioner);
	producer.setFlushOnCheckpoint(true);
	return new StreamSink<>(producer);
}
 
Example 14
Source Project: Flink-CEPplus   Source File: Kafka08TableSink.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a Kafka 0.8 table sink; the base class holds all configuration.
 */
public Kafka08TableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	super(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 15
Source Project: flink   Source File: Kafka011TableSink.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a Kafka 0.11 table sink; all state is managed by the common base class.
 */
public Kafka011TableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	super(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 16
Source Project: flink   Source File: KafkaTestEnvironmentImpl.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public <T> StreamSink<T> getProducerSink(String topic, KeyedSerializationSchema<T> serSchema, Properties props, FlinkKafkaPartitioner<T> partitioner) {
	// A null partitioner is wrapped as Optional.empty(), deferring to default partitioning.
	final FlinkKafkaProducer<T> producer = new FlinkKafkaProducer<T>(
		topic, serSchema, props, Optional.ofNullable(partitioner),
		producerSemantic, FlinkKafkaProducer.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE);
	return new StreamSink<>(producer);
}
 
Example 17
Source Project: flink   Source File: FlinkKafkaProducerBaseTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Tests that partitions list is determinate and correctly provided to custom partitioner.
 */
@SuppressWarnings("unchecked")
@Test
public void testPartitionerInvokedWithDeterminatePartitionList() throws Exception {
	FlinkKafkaPartitioner<String> mockPartitioner = mock(FlinkKafkaPartitioner.class);

	// Single-subtask runtime context (subtask 0 of 1) so open() is invoked with (0, 1).
	RuntimeContext mockRuntimeContext = mock(StreamingRuntimeContext.class);
	when(mockRuntimeContext.getIndexOfThisSubtask()).thenReturn(0);
	when(mockRuntimeContext.getNumberOfParallelSubtasks()).thenReturn(1);

	// out-of-order list of 4 partitions
	List<PartitionInfo> mockPartitionsList = new ArrayList<>(4);
	mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 3, null, null, null));
	mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 1, null, null, null));
	mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 0, null, null, null));
	mockPartitionsList.add(new PartitionInfo(DummyFlinkKafkaProducer.DUMMY_TOPIC, 2, null, null, null));

	final DummyFlinkKafkaProducer<String> producer = new DummyFlinkKafkaProducer<>(
		FakeStandardProducerConfig.get(), new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()), mockPartitioner);
	producer.setRuntimeContext(mockRuntimeContext);

	// The mocked Kafka client reports the shuffled partition list above.
	final KafkaProducer mockProducer = producer.getMockKafkaProducer();
	when(mockProducer.partitionsFor(anyString())).thenReturn(mockPartitionsList);
	when(mockProducer.metrics()).thenReturn(null);

	producer.open(new Configuration());
	verify(mockPartitioner, times(1)).open(0, 1);

	producer.invoke("foobar", SinkContextUtil.forTimestamp(0));
	// The partitioner must see the partition ids sorted ascending ({0, 1, 2, 3}),
	// regardless of the shuffled order (3, 1, 0, 2) the Kafka client returned them in.
	verify(mockPartitioner, times(1)).partition(
		"foobar", null, "foobar".getBytes(), DummyFlinkKafkaProducer.DUMMY_TOPIC, new int[] {0, 1, 2, 3});
}
 
Example 18
Source Project: Flink-CEPplus   Source File: KafkaTableSink.java    License: Apache License 2.0 5 votes vote down vote up
@Override
protected SinkFunction<Row> createKafkaProducer(
	String topic,
	Properties properties,
	SerializationSchema<Row> serializationSchema,
	Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// The producer requires a keyed schema, so wrap the plain serialization schema.
	return new FlinkKafkaProducer<>(
		topic, new KeyedSerializationSchemaWrapper<>(serializationSchema), properties, partitioner);
}
 
Example 19
Source Project: Flink-CEPplus   Source File: KafkaTableSourceSinkFactory.java    License: Apache License 2.0 5 votes vote down vote up
@Override
protected KafkaTableSinkBase createKafkaTableSink(
	TableSchema schema,
	String topic,
	Properties properties,
	Optional<FlinkKafkaPartitioner<Row>> partitioner,
	SerializationSchema<Row> serializationSchema) {
	// Build the universal Kafka table sink from the validated configuration.
	return new KafkaTableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 20
Source Project: Flink-CEPplus   Source File: KafkaTestEnvironmentImpl.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public <T> DataStreamSink<T> produceIntoKafka(DataStream<T> stream, String topic, KeyedSerializationSchema<T> serSchema, Properties props, FlinkKafkaPartitioner<T> partitioner) {
	// Attach a universal Kafka producer sink; null partitioner means default partitioning.
	final FlinkKafkaProducer<T> producer = new FlinkKafkaProducer<T>(
		topic, serSchema, props, Optional.ofNullable(partitioner),
		producerSemantic, FlinkKafkaProducer.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE);
	return stream.addSink(producer);
}
 
Example 21
@Override
protected KafkaTableSinkBase getExpectedKafkaTableSink(
	TableSchema schema,
	String topic,
	Properties properties,
	Optional<FlinkKafkaPartitioner<Row>> partitioner,
	SerializationSchema<Row> serializationSchema) {
	// The sink the factory is expected to produce for this configuration.
	return new KafkaTableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 22
Source Project: Flink-CEPplus   Source File: Kafka011TableSink.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a Kafka 0.11 table sink; configuration handling lives entirely in the base class.
 */
public Kafka011TableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	super(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 23
Source Project: flink   Source File: KafkaTableSinkBase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Stores the sink configuration, rejecting null arguments eagerly so that
 * misconfiguration surfaces at construction time rather than at runtime.
 */
protected KafkaTableSinkBase(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	this.schema = Preconditions.checkNotNull(
		schema, "Schema must not be null.");
	this.topic = Preconditions.checkNotNull(
		topic, "Topic must not be null.");
	this.properties = Preconditions.checkNotNull(
		properties, "Properties must not be null.");
	this.partitioner = Preconditions.checkNotNull(
		partitioner, "Partitioner must not be null.");
	this.serializationSchema = Preconditions.checkNotNull(
		serializationSchema, "Serialization schema must not be null.");
}
 
Example 24
@Override
protected KafkaTableSinkBase createKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// Version-specific hook: produce a Kafka 0.11 sink for the given configuration.
	return new Kafka011TableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 25
Source Project: Flink-CEPplus   Source File: KafkaTestEnvironmentImpl.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public <T> StreamSink<T> getProducerSink(String topic, KeyedSerializationSchema<T> serSchema, Properties props, FlinkKafkaPartitioner<T> partitioner) {
	// Wrap a 0.11 producer; a null partitioner defers to default partitioning.
	final FlinkKafkaProducer011<T> producer = new FlinkKafkaProducer011<>(
		topic, serSchema, props, Optional.ofNullable(partitioner),
		producerSemantic, FlinkKafkaProducer011.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE);
	return new StreamSink<>(producer);
}
 
Example 26
Source Project: Flink-CEPplus   Source File: KafkaTestEnvironmentImpl.java    License: Apache License 2.0 5 votes vote down vote up
@Override
public <T> DataStreamSink<T> produceIntoKafka(DataStream<T> stream, String topic, KeyedSerializationSchema<T> serSchema, Properties props, FlinkKafkaPartitioner<T> partitioner) {
	// Attach a 0.11 producer sink; null partitioner means default partitioning.
	final FlinkKafkaProducer011<T> producer = new FlinkKafkaProducer011<>(
		topic, serSchema, props, Optional.ofNullable(partitioner),
		producerSemantic, FlinkKafkaProducer011.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE);
	return stream.addSink(producer);
}
 
Example 27
Source Project: flink   Source File: KafkaTableSourceSinkFactory.java    License: Apache License 2.0 5 votes vote down vote up
@Override
protected KafkaTableSinkBase createKafkaTableSink(
	TableSchema schema,
	String topic,
	Properties properties,
	Optional<FlinkKafkaPartitioner<Row>> partitioner,
	SerializationSchema<Row> serializationSchema) {
	// Build the universal Kafka sink directly from the validated inputs.
	return new KafkaTableSink(schema, topic, properties, partitioner, serializationSchema);
}
 
Example 28
Source Project: flink   Source File: Kafka010TableSink.java    License: Apache License 2.0 5 votes vote down vote up
@Override
protected FlinkKafkaProducerBase<Row> createKafkaProducer(
		String topic,
		Properties properties,
		SerializationSchema<Row> serializationSchema,
		Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// An empty partitioner becomes null, letting the 0.10 producer use its default.
	return new FlinkKafkaProducer010<>(topic, serializationSchema, properties, partitioner.orElse(null));
}
 
Example 29
Source Project: Flink-CEPplus   Source File: Kafka010TableSink.java    License: Apache License 2.0 5 votes vote down vote up
@Override
protected FlinkKafkaProducerBase<Row> createKafkaProducer(
		String topic,
		Properties properties,
		SerializationSchema<Row> serializationSchema,
		Optional<FlinkKafkaPartitioner<Row>> partitioner) {
	// Unwrap the Optional to the nullable form the 0.10 producer constructor expects.
	return new FlinkKafkaProducer010<>(topic, serializationSchema, properties, partitioner.orElse(null));
}
 
Example 30
@Override
protected KafkaTableSinkBase createKafkaTableSink(
		TableSchema schema,
		String topic,
		Properties properties,
		Optional<FlinkKafkaPartitioner<Row>> partitioner,
		SerializationSchema<Row> serializationSchema) {
	// Version-specific hook: produce a Kafka 0.10 sink for the given configuration.
	return new Kafka010TableSink(schema, topic, properties, partitioner, serializationSchema);
}