Java Code Examples for org.apache.flink.streaming.api.operators.StreamSource

The following examples show how to use org.apache.flink.streaming.api.operators.StreamSource. They are extracted from open source projects; where available, the source project, source file, and license are noted above each example.
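StreamSource is the stream operator that wraps a user-defined SourceFunction and drives its run loop. As a quick orientation before the examples, here is a minimal, hedged sketch of the pattern most of them follow; the anonymous source function is illustrative and not taken from any of the projects below.

// Minimal sketch (assumed setup): wrap a SourceFunction in a StreamSource and
// set it up with Flink's operator test harness. Opening the harness initializes
// the wrapped function; emitting elements additionally requires driving
// StreamSource#run, as several of the examples below do.
SourceFunction<String> fn = new SourceFunction<String>() {
	@Override
	public void run(SourceContext<String> ctx) throws Exception {
		ctx.collect("hello"); // emit one element, then return (finite source)
	}

	@Override
	public void cancel() {
	}
};

StreamSource<String, ?> operator = new StreamSource<>(fn);
AbstractStreamOperatorTestHarness<String> harness =
		new AbstractStreamOperatorTestHarness<>(operator, 1, 1, 0);
harness.setup();
harness.open();
// ... drive the source and assert on harness.getOutput() ...
harness.close();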
Example 1
private static <T> void setupSourceOperator(
		StreamSource<T, ?> operator,
		ExecutionConfig executionConfig,
		Environment env,
		ProcessingTimeService timeProvider) {

	StreamConfig cfg = new StreamConfig(new Configuration());
	cfg.setStateBackend(new MemoryStateBackend());

	cfg.setTimeCharacteristic(TimeCharacteristic.EventTime);
	cfg.setOperatorID(new OperatorID());

	try {
		MockStreamTask mockTask = new MockStreamTaskBuilder(env)
			.setConfig(cfg)
			.setExecutionConfig(executionConfig)
			.setProcessingTimeService(timeProvider)
			.build();

		operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
	} catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 2
@Test
public void testNoMaxWatermarkOnImmediateCancel() throws Exception {

	final List<StreamElement> output = new ArrayList<>();

	// regular stream source operator
	final StreamSource<String, InfiniteSource<String>> operator =
			new StreamSource<>(new InfiniteSource<String>());

	setupSourceOperator(operator, TimeCharacteristic.EventTime, 0);
	operator.cancel();

	// run and exit
	operator.run(new Object(), mock(StreamStatusMaintainer.class), new CollectorOutput<String>(output));

	assertTrue(output.isEmpty());
}
 
Example 3
Source Project: flink   Source File: SourceTaskTerminationTest.java    License: Apache License 2.0
private StreamTaskTestHarness<Long> getSourceStreamTaskTestHarness() {
	final StreamTaskTestHarness<Long> testHarness = new StreamTaskTestHarness<>(
			SourceStreamTask::new,
			BasicTypeInfo.LONG_TYPE_INFO);

	final LockStepSourceWithOneWmPerElement source = new LockStepSourceWithOneWmPerElement();

	testHarness.setupOutputForSingletonOperatorChain();
	testHarness.getExecutionConfig().setLatencyTrackingInterval(-1);

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamSource<Long, ?> sourceOperator = new StreamSource<>(source);
	streamConfig.setStreamOperator(sourceOperator);
	streamConfig.setOperatorID(new OperatorID());
	return testHarness;
}
 
Example 4
Source Project: flink   Source File: StreamSourceOperatorWatermarksTest.java    License: Apache License 2.0
@Test
public void testMaxWatermarkIsForwardedLastForFiniteSource() throws Exception {
	StreamSource<String, ?> sourceOperator = new StreamSource<>(new FiniteSource(true));
	StreamTaskTestHarness<String> testHarness = setupSourceStreamTask(sourceOperator, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.invoke();
	testHarness.waitForTaskCompletion();

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	expectedOutput.add(new StreamRecord<>("Hello"));
	expectedOutput.add(Watermark.MAX_WATERMARK);

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example 5
Source Project: flink   Source File: StreamExecutionEnvironment.java    License: Apache License 2.0
/**
 * Adds a data source with a custom type information, thus opening a
 * {@link DataStream}. Only in very special cases does the user need to
 * supply type information explicitly. Otherwise use
 * {@link #addSource(org.apache.flink.streaming.api.functions.source.SourceFunction)}.
 *
 * @param function
 * 		the user defined function
 * @param sourceName
 * 		Name of the data source
 * @param <OUT>
 * 		type of the returned stream
 * @param typeInfo
 * 		the user defined type information for the stream
 * @return the data stream constructed
 */
@SuppressWarnings("unchecked")
public <OUT> DataStreamSource<OUT> addSource(SourceFunction<OUT> function, String sourceName, TypeInformation<OUT> typeInfo) {

	if (function instanceof ResultTypeQueryable) {
		typeInfo = ((ResultTypeQueryable<OUT>) function).getProducedType();
	}
	if (typeInfo == null) {
		try {
			typeInfo = TypeExtractor.createTypeInfo(
					SourceFunction.class,
					function.getClass(), 0, null, null);
		} catch (final InvalidTypesException e) {
			typeInfo = (TypeInformation<OUT>) new MissingTypeInfo(sourceName, e);
		}
	}

	boolean isParallel = function instanceof ParallelSourceFunction;

	clean(function);

	final StreamSource<OUT, ?> sourceOperator = new StreamSource<>(function);
	return new DataStreamSource<>(this, typeInfo, sourceOperator, isParallel, sourceName);
}
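For orientation, a call site for this overload might look like the sketch below. The source function variable is hypothetical, and BasicTypeInfo.STRING_TYPE_INFO stands in for whatever explicit type information the caller passes; note that in this version a function implementing ResultTypeQueryable still overrides the explicit argument.

// Hypothetical call site for addSource(function, sourceName, typeInfo).
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> stream = env.addSource(
		mySourceFunction,                 // assumed: some SourceFunction<String>
		"my-source",
		BasicTypeInfo.STRING_TYPE_INFO);  // explicit type information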
 
Example 6
Source Project: flink   Source File: SourceStreamTaskTest.java    License: Apache License 2.0
/**
 * If exceptions during task finishing were not swallowed, this test would fail with an exception.
 */
@Test
public void finishingIgnoresExceptions() throws Exception {
	final StreamTaskTestHarness<String> testHarness = new StreamTaskTestHarness<>(
			SourceStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO);

	final CompletableFuture<Void> operatorRunningWaitingFuture = new CompletableFuture<>();
	ExceptionThrowingSource.setIsInRunLoopFuture(operatorRunningWaitingFuture);

	testHarness.setupOutputForSingletonOperatorChain();
	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setStreamOperator(new StreamSource<>(new ExceptionThrowingSource()));
	streamConfig.setOperatorID(new OperatorID());

	testHarness.invoke();
	operatorRunningWaitingFuture.get();
	testHarness.getTask().finishTask();

	testHarness.waitForTaskCompletion();
}
 
Example 7
Source Project: flink   Source File: StreamSourceOperatorWatermarksTest.java    License: Apache License 2.0
@Test
public void testNoMaxWatermarkOnImmediateCancel() throws Exception {
	StreamSource<String, ?> sourceOperator = new StreamSource<>(new InfiniteSource<>());
	StreamTaskTestHarness<String> testHarness = setupSourceStreamTask(
		sourceOperator, BasicTypeInfo.STRING_TYPE_INFO, true);

	testHarness.invoke();
	try {
		testHarness.waitForTaskCompletion();
		fail("should throw an exception");
	} catch (Throwable t) {
		if (!ExceptionUtils.findThrowable(t, CancelTaskException.class).isPresent()) {
			throw t;
		}
	}
	assertTrue(testHarness.getOutput().isEmpty());
}
 
Example 8
Source Project: flink   Source File: StreamSourceOperatorWatermarksTest.java    License: Apache License 2.0
@Test
public void testEmitMaxWatermarkForFiniteSource() throws Exception {

	// regular stream source operator
	StreamSource<String, FiniteSource<String>> operator =
			new StreamSource<>(new FiniteSource<String>());

	final List<StreamElement> output = new ArrayList<>();

	setupSourceOperator(operator, TimeCharacteristic.EventTime, 0);
	OperatorChain<?, ?> operatorChain = createOperatorChain(operator);
	try {
		operator.run(new Object(), mock(StreamStatusMaintainer.class), new CollectorOutput<String>(output), operatorChain);
	} finally {
		operatorChain.releaseOutputs();
	}

	assertEquals(1, output.size());
	assertEquals(Watermark.MAX_WATERMARK, output.get(0));
}
 
Example 9
Source Project: flink   Source File: StreamSourceOperatorWatermarksTest.java    License: Apache License 2.0
@Test
public void testNoMaxWatermarkOnImmediateCancel() throws Exception {

	final List<StreamElement> output = new ArrayList<>();

	// regular stream source operator
	final StreamSource<String, InfiniteSource<String>> operator =
			new StreamSource<>(new InfiniteSource<String>());

	setupSourceOperator(operator, TimeCharacteristic.EventTime, 0);
	operator.cancel();

	// run and exit
	OperatorChain<?, ?> operatorChain = createOperatorChain(operator);
	try {
		operator.run(new Object(), mock(StreamStatusMaintainer.class), new CollectorOutput<String>(output), operatorChain);
	} finally {
		operatorChain.releaseOutputs();
	}

	assertTrue(output.isEmpty());
}
 
Example 10
Source Project: flink   Source File: BatchExecutorTest.java    License: Apache License 2.0
public BatchExecutorTest() {
	batchExecutor = new BatchExecutor(LocalStreamEnvironment.getExecutionEnvironment());

	final Transformation testTransform = new LegacySourceTransformation<>(
		"MockTransform",
		new StreamSource<>(new SourceFunction<String>() {
			@Override
			public void run(SourceContext<String> ctx) {
			}

			@Override
			public void cancel() {
			}
		}),
		BasicTypeInfo.STRING_TYPE_INFO,
		1);
	Pipeline pipeline = batchExecutor.createPipeline(
		Collections.singletonList(testTransform), new TableConfig(), "Test Job");
	streamGraph = (StreamGraph) pipeline;
}
 
Example 11
/**
 * Tests that restoring from a savepoint taken before Flink 1.3 fails if partition discovery is enabled.
 */
@Test
public void testRestoreFailsWithNonEmptyPreFlink13StatesIfDiscoveryEnabled() throws Exception {
	assumeTrue(testMigrateVersion == MigrationVersion.v1_3 || testMigrateVersion == MigrationVersion.v1_2);

	final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

	final DummyFlinkKafkaConsumer<String> consumerFunction =
		new DummyFlinkKafkaConsumer<>(TOPICS, partitions, 1000L); // discovery enabled

	StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
		new StreamSource<>(consumerFunction);

	final AbstractStreamOperatorTestHarness<String> testHarness =
		new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();

	// restore state from binary snapshot file; should fail since discovery is enabled
	try {
		testHarness.initializeState(
			OperatorSnapshotUtil.getResourceFilename(
				"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));

		fail("Restore from savepoints from version before Flink 1.3.x should have failed if discovery is enabled.");
	} catch (Exception e) {
		Assert.assertTrue(e instanceof IllegalArgumentException);
	}
}
 
Example 12
Source Project: Flink-CEPplus   Source File: FlinkKafkaConsumerBaseTest.java    License: Apache License 2.0
private static <T> AbstractStreamOperatorTestHarness<T> createTestHarness(
	SourceFunction<T> source, int numSubtasks, int subtaskIndex) throws Exception {

	AbstractStreamOperatorTestHarness<T> testHarness =
		new AbstractStreamOperatorTestHarness<>(
			new StreamSource<>(source), maxParallelism, numSubtasks, subtaskIndex);

	testHarness.setTimeCharacteristic(TimeCharacteristic.EventTime);

	return testHarness;
}
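A typical call of this helper inside a test might look like the following sketch, assuming the test defines a SourceFunction<String> named consumer; the lifecycle calls mirror the harness usage elsewhere in this section.

// Sketch: drive the harness created by the helper above.
AbstractStreamOperatorTestHarness<String> harness = createTestHarness(consumer, 2, 0);
harness.setup();
harness.open();
// ... exercise the source and assert on harness.getOutput() ...
harness.close();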
 
Example 13
Source Project: flink   Source File: FlinkKafkaConsumerBaseTest.java    License: Apache License 2.0
private static <T> AbstractStreamOperatorTestHarness<T> createTestHarness(
	SourceFunction<T> source, int numSubtasks, int subtaskIndex) throws Exception {

	AbstractStreamOperatorTestHarness<T> testHarness =
		new AbstractStreamOperatorTestHarness<>(
			new StreamSource<>(source), maxParallelism, numSubtasks, subtaskIndex);

	testHarness.setTimeCharacteristic(TimeCharacteristic.EventTime);

	return testHarness;
}
 
Example 14
Source Project: flink   Source File: RMQSourceTest.java    License: Apache License 2.0
@Test
public void testOpen() throws Exception {
	MockDeserializationSchema<String> deserializationSchema = new MockDeserializationSchema<>();

	RMQSource<String> consumer = new RMQTestSource(deserializationSchema);
	AbstractStreamOperatorTestHarness<String> testHarness = new AbstractStreamOperatorTestHarness<>(
		new StreamSource<>(consumer), 1, 1, 0
	);

	testHarness.open();
	assertThat("Open method was not called", deserializationSchema.isOpenCalled(), is(true));
}
 
Example 15
Source Project: flink   Source File: StreamSourceOperatorWatermarksTest.java    License: Apache License 2.0
@Test
public void testEmitMaxWatermarkForFiniteSource() throws Exception {
	StreamSource<String, ?> sourceOperator = new StreamSource<>(new FiniteSource());
	StreamTaskTestHarness<String> testHarness = setupSourceStreamTask(sourceOperator, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.invoke();
	testHarness.waitForTaskCompletion();

	assertEquals(1, testHarness.getOutput().size());
	assertEquals(Watermark.MAX_WATERMARK, testHarness.getOutput().peek());
}
 
Example 16
Source Project: Flink-CEPplus   Source File: DataStreamSource.java    License: Apache License 2.0
public DataStreamSource(StreamExecutionEnvironment environment,
		TypeInformation<T> outTypeInfo, StreamSource<T, ?> operator,
		boolean isParallel, String sourceName) {
	super(environment, new SourceTransformation<>(sourceName, operator, outTypeInfo, environment.getParallelism()));

	this.isParallel = isParallel;
	if (!isParallel) {
		setParallelism(1);
	}
}
 
Example 17
Source Project: Flink-CEPplus   Source File: StreamExecutionEnvironment.java    License: Apache License 2.0
/**
 * Adds a data source with a custom type information, thus opening a
 * {@link DataStream}. Only in very special cases does the user need to
 * supply type information explicitly. Otherwise use
 * {@link #addSource(org.apache.flink.streaming.api.functions.source.SourceFunction)}.
 *
 * @param function
 * 		the user defined function
 * @param sourceName
 * 		Name of the data source
 * @param <OUT>
 * 		type of the returned stream
 * @param typeInfo
 * 		the user defined type information for the stream
 * @return the data stream constructed
 */
@SuppressWarnings("unchecked")
public <OUT> DataStreamSource<OUT> addSource(SourceFunction<OUT> function, String sourceName, TypeInformation<OUT> typeInfo) {

	if (typeInfo == null) {
		if (function instanceof ResultTypeQueryable) {
			typeInfo = ((ResultTypeQueryable<OUT>) function).getProducedType();
		} else {
			try {
				typeInfo = TypeExtractor.createTypeInfo(
						SourceFunction.class,
						function.getClass(), 0, null, null);
			} catch (final InvalidTypesException e) {
				typeInfo = (TypeInformation<OUT>) new MissingTypeInfo(sourceName, e);
			}
		}
	}

	boolean isParallel = function instanceof ParallelSourceFunction;

	clean(function);
	StreamSource<OUT, ?> sourceOperator;
	if (function instanceof StoppableFunction) {
		sourceOperator = new StoppableStreamSource<>(cast2StoppableSourceFunction(function));
	} else {
		sourceOperator = new StreamSource<>(function);
	}

	return new DataStreamSource<>(this, typeInfo, sourceOperator, isParallel, sourceName);
}
 
Example 18
Source Project: Flink-CEPplus   Source File: SourceTransformation.java    License: Apache License 2.0
/**
 * Creates a new {@code SourceTransformation} from the given operator.
 *
 * @param name The name of the {@code SourceTransformation}; this will be shown in visualizations and the log
 * @param operator The {@code StreamSource} that is the operator of this Transformation
 * @param outputType The type of the elements produced by this {@code SourceTransformation}
 * @param parallelism The parallelism of this {@code SourceTransformation}
 */
public SourceTransformation(
		String name,
		StreamSource<T, ?> operator,
		TypeInformation<T> outputType,
		int parallelism) {
	super(name, outputType, parallelism);
	this.operator = operator;
}
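In user code this constructor is normally reached indirectly through StreamExecutionEnvironment#addSource, but a direct construction sketch (with a hypothetical source function) would look like this:

// Illustrative only; addSource() performs this wiring internally.
StreamSource<String, ?> operator = new StreamSource<>(mySourceFunction); // assumed: SourceFunction<String>
SourceTransformation<String> transformation =
		new SourceTransformation<>("my-source", operator, BasicTypeInfo.STRING_TYPE_INFO, 4);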
 
Example 19
Source Project: Flink-CEPplus   Source File: StreamGraph.java    License: Apache License 2.0
public <IN, OUT> void addOperator(
		Integer vertexID,
		String slotSharingGroup,
		@Nullable String coLocationGroup,
		StreamOperator<OUT> operatorObject,
		TypeInformation<IN> inTypeInfo,
		TypeInformation<OUT> outTypeInfo,
		String operatorName) {

	if (operatorObject instanceof StoppableStreamSource) {
		addNode(vertexID, slotSharingGroup, coLocationGroup, StoppableSourceStreamTask.class, operatorObject, operatorName);
	} else if (operatorObject instanceof StreamSource) {
		addNode(vertexID, slotSharingGroup, coLocationGroup, SourceStreamTask.class, operatorObject, operatorName);
	} else {
		addNode(vertexID, slotSharingGroup, coLocationGroup, OneInputStreamTask.class, operatorObject, operatorName);
	}

	TypeSerializer<IN> inSerializer = inTypeInfo != null && !(inTypeInfo instanceof MissingTypeInfo) ? inTypeInfo.createSerializer(executionConfig) : null;

	TypeSerializer<OUT> outSerializer = outTypeInfo != null && !(outTypeInfo instanceof MissingTypeInfo) ? outTypeInfo.createSerializer(executionConfig) : null;

	setSerializers(vertexID, inSerializer, null, outSerializer);

	if (operatorObject instanceof OutputTypeConfigurable && outTypeInfo != null) {
		@SuppressWarnings("unchecked")
		OutputTypeConfigurable<OUT> outputTypeConfigurable = (OutputTypeConfigurable<OUT>) operatorObject;
		// sets the output type, which must be known at StreamGraph creation time
		outputTypeConfigurable.setOutputType(outTypeInfo, executionConfig);
	}

	if (operatorObject instanceof InputTypeConfigurable) {
		InputTypeConfigurable inputTypeConfigurable = (InputTypeConfigurable) operatorObject;
		inputTypeConfigurable.setInputType(inTypeInfo, executionConfig);
	}

	if (LOG.isDebugEnabled()) {
		LOG.debug("Vertex: {}", vertexID);
	}
}
 
Example 20
Source Project: flink   Source File: SourceStreamTaskTest.java    License: Apache License 2.0
private void testInterruptionExceptionNotSwallowed(InterruptedSource.ExceptionGenerator exceptionGenerator)
		throws Exception {
	final StreamTaskTestHarness<String> testHarness = new StreamTaskTestHarness<>(
		SourceStreamTask::new,
		BasicTypeInfo.STRING_TYPE_INFO);

	CancelLockingSource.reset();
	testHarness
		.setupOperatorChain(
			new OperatorID(),
			new StreamSource<>(new InterruptedSource(exceptionGenerator)))
		.chain(
			new OperatorID(),
			new TestBoundedOneInputStreamOperator("Operator1"),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
		.finish();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.invoke();
	try {
		testHarness.waitForTaskCompletion();
	} catch (Exception e) {
		if (!ExceptionUtils.findThrowable(e, InterruptedException.class).isPresent()) {
			throw e;
		}
	}
}
 
Example 21
private void testRestoreWithInterrupt(int mode) throws Exception {

	IN_RESTORE_LATCH.reset();
	Configuration taskConfig = new Configuration();
	StreamConfig cfg = new StreamConfig(taskConfig);
	cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
	switch (mode) {
		case OPERATOR_MANAGED:
		case OPERATOR_RAW:
		case KEYED_MANAGED:
		case KEYED_RAW:
			cfg.setStateKeySerializer(IntSerializer.INSTANCE);
			cfg.setStreamOperator(new StreamSource<>(new TestSource(mode)));
			break;
		default:
			throw new IllegalArgumentException();
	}

	StreamStateHandle lockingHandle = new InterruptLockingStateHandle();

	Task task = createTask(cfg, taskConfig, lockingHandle, mode);

	// start the task and wait until it is in "restore"
	task.startTaskThread();
	IN_RESTORE_LATCH.await();

	// trigger cancellation and signal to continue
	task.cancelExecution();

	task.getExecutingThread().join(30000);

	if (task.getExecutionState() == ExecutionState.CANCELING) {
		fail("Task is stuck and not canceling");
	}

	assertEquals(ExecutionState.CANCELED, task.getExecutionState());
	assertNull(task.getFailureCause());
}
 
Example 22
@SuppressWarnings("unchecked")
private static <T> void setupSourceOperator(StreamSource<T, ?> operator,
											TimeCharacteristic timeChar,
											long watermarkInterval,
											final ProcessingTimeService timeProvider) throws Exception {

	ExecutionConfig executionConfig = new ExecutionConfig();
	executionConfig.setAutoWatermarkInterval(watermarkInterval);

	StreamConfig cfg = new StreamConfig(new Configuration());
	cfg.setStateBackend(new MemoryStateBackend());

	cfg.setTimeCharacteristic(timeChar);
	cfg.setOperatorID(new OperatorID());

	Environment env = new DummyEnvironment("MockTwoInputTask", 1, 0);

	StreamStatusMaintainer streamStatusMaintainer = mock(StreamStatusMaintainer.class);
	when(streamStatusMaintainer.getStreamStatus()).thenReturn(StreamStatus.ACTIVE);

	MockStreamTask mockTask = new MockStreamTaskBuilder(env)
		.setConfig(cfg)
		.setExecutionConfig(executionConfig)
		.setStreamStatusMaintainer(streamStatusMaintainer)
		.setProcessingTimeService(timeProvider)
		.build();

	operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
}
 
Example 23
Source Project: flink   Source File: FlinkKafkaConsumerBaseMigrationTest.java    License: Apache License 2.0
/**
 * Test restoring from a legacy empty state, when no partitions could be found for the topics.
 */
@Test
public void testRestoreFromEmptyStateNoPartitions() throws Exception {
	final DummyFlinkKafkaConsumer<String> consumerFunction =
			new DummyFlinkKafkaConsumer<>(
				Collections.singletonList("dummy-topic"),
				Collections.<KafkaTopicPartition>emptyList(),
				FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);

	StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator = new StreamSource<>(consumerFunction);

	final AbstractStreamOperatorTestHarness<String> testHarness =
			new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();

	// restore state from binary snapshot file
	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"kafka-consumer-migration-test-flink" + testMigrateVersion + "-empty-state-snapshot"));

	testHarness.open();

	// assert that no partitions were found and the subscribed partition set is empty
	assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets() != null);
	assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty());

	// assert that no state was restored
	assertTrue(consumerFunction.getRestoredState().isEmpty());

	consumerOperator.close();
	consumerOperator.cancel();
}
 
Example 24
Source Project: flink   Source File: FlinkKafkaConsumerBaseMigrationTest.java    License: Apache License 2.0
/**
 * Test restoring from a non-empty state taken using a previous Flink version, when some partitions could be
 * found for topics.
 */
@Test
public void testRestore() throws Exception {
	final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

	final DummyFlinkKafkaConsumer<String> consumerFunction =
		new DummyFlinkKafkaConsumer<>(TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);

	StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
			new StreamSource<>(consumerFunction);

	final AbstractStreamOperatorTestHarness<String> testHarness =
			new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();

	// restore state from binary snapshot file
	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	// assert that partitions were found and that they are identical to the expected list
	assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets() != null);
	assertTrue(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty());

	// on restore, subscribedPartitionsToStartOffsets should be identical to the restored state
	assertEquals(PARTITION_STATE, consumerFunction.getSubscribedPartitionsToStartOffsets());

	// assert that state is correctly restored from legacy checkpoint
	assertTrue(consumerFunction.getRestoredState() != null);
	assertEquals(PARTITION_STATE, consumerFunction.getRestoredState());

	consumerOperator.close();
	consumerOperator.cancel();
}
 
Example 25
Source Project: flink   Source File: DataStreamSource.java    License: Apache License 2.0
public DataStreamSource(StreamExecutionEnvironment environment,
		TypeInformation<T> outTypeInfo, StreamSource<T, ?> operator,
		boolean isParallel, String sourceName) {
	super(environment, new SourceTransformation<>(sourceName, operator, outTypeInfo, environment.getParallelism()));

	this.isParallel = isParallel;
	if (!isParallel) {
		setParallelism(1);
	}
}