Java Code Examples for org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#close()

The following examples show how to use org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#close(). The examples are drawn from open-source projects; the Source File line above each example names the originating project, file, and license.
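
As a quick orientation, every example below follows the same harness lifecycle: construct the harness around an operator, open it, push elements, assert on the captured output, and close it. The following is a minimal sketch of that lifecycle; MyOperator stands in for a hypothetical one-input operator, while the harness calls themselves are the real test API.

	OneInputStreamOperatorTestHarness<String, String> harness =
			new OneInputStreamOperatorTestHarness<>(new MyOperator());

	harness.setup();                                      // optional: initialize the mock runtime environment
	harness.open();                                       // invokes the operator's open()
	harness.processElement(new StreamRecord<>("a", 1L));  // feed one element with timestamp 1
	// ... assert against harness.getOutput() ...
	harness.close();                                      // invokes the operator's close() and releases resources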
Example 1
Source File: MiniBatchDeduplicateKeepFirstRowFunctionTest.java    From flink with Apache License 2.0
@Test
public void testKeepFirstRowWithGenerateUpdateBefore() throws Exception {
	MiniBatchDeduplicateKeepFirstRowFunction func = new MiniBatchDeduplicateKeepFirstRowFunction(typeSerializer, minTime.toMilliseconds());
	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(func);
	testHarness.open();
	testHarness.processElement(insertRecord("book", 1L, 12));
	testHarness.processElement(insertRecord("book", 2L, 11));

	// output is empty because the bundle has not been triggered yet.
	Assert.assertTrue(testHarness.getOutput().isEmpty());

	testHarness.processElement(insertRecord("book", 1L, 13));

	// keep-first-row deduplication does not send retraction messages
	List<Object> expectedOutput = new ArrayList<>();
	expectedOutput.add(insertRecord("book", 1L, 12));
	expectedOutput.add(insertRecord("book", 2L, 11));
	assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, testHarness.getOutput());
	testHarness.close();
}
 
Example 2
Source File: MiniBatchDeduplicateKeepLastRowFunctionTest.java    From flink with Apache License 2.0
@Test
public void testWithoutGenerateRetraction() throws Exception {
	MiniBatchDeduplicateKeepLastRowFunction func = createFunction(false);
	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(func);
	testHarness.open();
	testHarness.processElement(record("book", 1L, 10));
	testHarness.processElement(record("book", 2L, 11));
	// output is empty because the bundle has not been triggered yet.
	Assert.assertTrue(testHarness.getOutput().isEmpty());

	testHarness.processElement(record("book", 1L, 13));

	List<Object> expectedOutput = new ArrayList<>();
	expectedOutput.add(record("book", 2L, 11));
	expectedOutput.add(record("book", 1L, 13));
	assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(record("book", 1L, 12));
	testHarness.processElement(record("book", 2L, 11));
	testHarness.processElement(record("book", 3L, 11));

	expectedOutput.add(record("book", 1L, 12));
	expectedOutput.add(record("book", 2L, 11));
	expectedOutput.add(record("book", 3L, 11));
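	// as the first assertion shows, the bundle trigger fires on every third element, so the
	// three records above have already been flushed by the time the harness is closed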
	testHarness.close();
	assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, testHarness.getOutput());
}
 
Example 3
Source File: BucketingSinkTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testRolloverInterval() throws Exception {
	final File outDir = tempFolder.newFolder();

	OneInputStreamOperatorTestHarness<String, Object> testHarness = createRescalingTestSinkWithRollover(outDir, 1, 0, 1000L, 100L);
	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(0L);

	testHarness.processElement(new StreamRecord<>("test1", 1L));
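	// checkLocalFs asserts the number of part files per state; the argument order here
	// reads as (in-progress, pending, committed, valid-length)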
	checkLocalFs(outDir, 1, 0, 0, 0);

	// invoke rollover based on rollover interval
	testHarness.setProcessingTime(101L);
	testHarness.processElement(new StreamRecord<>("test1", 2L));
	checkLocalFs(outDir, 1, 1, 0, 0);

	testHarness.snapshot(0, 0);
	testHarness.notifyOfCompletedCheckpoint(0);
	checkLocalFs(outDir, 1, 0, 1, 0);

	// move the in-progress file to pending
	testHarness.setProcessingTime(3000L);
	testHarness.snapshot(1, 1);
	checkLocalFs(outDir, 0, 1, 1, 0);

	// move the pending file to "committed"
	testHarness.notifyOfCompletedCheckpoint(1);
	testHarness.close();

	checkLocalFs(outDir, 0, 0, 2, 0);
}
 
Example 4
Source File: MiniBatchDeduplicateKeepLastRowFunctionTest.java    From flink with Apache License 2.0
@Test
public void testWithGenerateRetraction() throws Exception {
	MiniBatchDeduplicateKeepLastRowFunction func = createFunction(true);
	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(func);
	testHarness.open();
	testHarness.processElement(record("book", 1L, 10));
	testHarness.processElement(record("book", 2L, 11));
	// output is empty because the bundle has not been triggered yet.
	Assert.assertTrue(testHarness.getOutput().isEmpty());

	testHarness.processElement(record("book", 1L, 13));

	List<Object> expectedOutput = new ArrayList<>();
	expectedOutput.add(record("book", 2L, 11));
	expectedOutput.add(record("book", 1L, 13));
	assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(record("book", 1L, 12));
	testHarness.processElement(record("book", 2L, 11));
	testHarness.processElement(record("book", 3L, 11));

	// this will send retract messages downstream
	expectedOutput.add(retractRecord("book", 1L, 13));
	expectedOutput.add(record("book", 1L, 12));
	expectedOutput.add(retractRecord("book", 2L, 11));
	expectedOutput.add(record("book", 2L, 11));
	expectedOutput.add(record("book", 3L, 11));
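	// the three elements above complete another bundle, so the retract/insert pairs are
	// already emitted; close() just shuts the operator down before the final assert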
	testHarness.close();
	assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, testHarness.getOutput());
}
 
Example 5
Source File: KeyedProcessOperatorTest.java    From flink with Apache License 2.0
/**
 * This also verifies that the timestamps of side-emitted records are correct.
 */
@Test
public void testSideOutput() throws Exception {
	KeyedProcessOperator<Integer, Integer, String> operator = new KeyedProcessOperator<>(new SideOutputProcessFunction());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(
			operator, new IdentityKeySelector<>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(42, 17L /* timestamp */));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new StreamRecord<>("IN:42", 17L /* timestamp */));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	ConcurrentLinkedQueue<StreamRecord<Integer>> expectedIntSideOutput = new ConcurrentLinkedQueue<>();
	expectedIntSideOutput.add(new StreamRecord<>(42, 17L /* timestamp */));
	ConcurrentLinkedQueue<StreamRecord<Integer>> intSideOutput =
		testHarness.getSideOutput(SideOutputProcessFunction.INTEGER_OUTPUT_TAG);
	TestHarnessUtil.assertOutputEquals(
		"Side output was not correct.",
		expectedIntSideOutput,
		intSideOutput);

	ConcurrentLinkedQueue<StreamRecord<Long>> expectedLongSideOutput = new ConcurrentLinkedQueue<>();
	expectedLongSideOutput.add(new StreamRecord<>(42L, 17L /* timestamp */));
	ConcurrentLinkedQueue<StreamRecord<Long>> longSideOutput =
		testHarness.getSideOutput(SideOutputProcessFunction.LONG_OUTPUT_TAG);
	TestHarnessUtil.assertOutputEquals(
		"Side output was not correct.",
		expectedLongSideOutput,
		longSideOutput);

	testHarness.close();
}
 
Example 6
Source File: BucketingSinkTest.java    From flink with Apache License 2.0
/**
 * This tests {@link StringWriter} with
 * non-bucketing output.
 */
@Test
public void testNonRollingStringWriter() throws Exception {
	final String outPath = hdfsURI + "/string-non-rolling-out";

	final int numElements = 20;

	BucketingSink<String> sink = new BucketingSink<String>(outPath)
		.setBucketer(new BasePathBucketer<String>())
		.setPartPrefix(PART_PREFIX)
		.setPendingPrefix("")
		.setPendingSuffix("");

	OneInputStreamOperatorTestHarness<String, Object> testHarness = createTestSink(sink, 1, 0);

	testHarness.setProcessingTime(0L);

	testHarness.setup();
	testHarness.open();

	for (int i = 0; i < numElements; i++) {
		testHarness.processElement(new StreamRecord<>("message #" + Integer.toString(i)));
	}

	testHarness.close();

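	// read back the single part file; its name follows the <partPrefix>-<subtaskIndex>-<partCounter> pattern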
	FSDataInputStream inStream = dfs.open(new Path(outPath + "/" + PART_PREFIX + "-0-0"));

	BufferedReader br = new BufferedReader(new InputStreamReader(inStream));

	for (int i = 0; i < numElements; i++) {
		String line = br.readLine();
		Assert.assertEquals("message #" + i, line);
	}

	inStream.close();
}
 
Example 7
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
public void testCleanupTimerWithEmptyListStateForSessionWindows() throws Exception {
	final int gapSize = 3;
	final long lateness = 10;

	ListStateDescriptor<Tuple2<String, Integer>> windowStateDesc =
		new ListStateDescriptor<>("window-contents", STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator =
		new WindowOperator<>(
			EventTimeSessionWindows.withGap(Time.seconds(gapSize)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			windowStateDesc,
			new InternalIterableWindowFunction<>(new PassThroughFunction()),
			EventTimeTrigger.create(),
			lateness,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		createTestHarness(operator);

	testHarness.open();

	ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
	testHarness.processWatermark(new Watermark(4998));

	expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
	expected.add(new Watermark(4998));

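	// advancing the watermark far past the window's cleanup time (max timestamp 3999 plus
	// the 10ms allowed lateness) fires the cleanup timer for the empty list state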
	testHarness.processWatermark(new Watermark(14600));
	expected.add(new Watermark(14600));

	ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
	testHarness.close();
}
 
Example 8
Source File: WindowOperatorMigrationTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeApplyProcessingTimeWindowsSnapshot() throws Exception {
	final int windowSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>()),
			ProcessingTimeTrigger.create(),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(10);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));

	testHarness.setProcessingTime(3010);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key3", 1)));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 1), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 2999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());

	// do snapshot and save to file
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
	OperatorSnapshotUtil.writeStateHandle(
		snapshot,
		"src/test/resources/win-op-migration-test-apply-processing-time-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
 
Example 9
Source File: WindowOperatorMigrationTest.java    From flink with Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeReducingEventTimeWindowsSnapshot() throws Exception {
	final int windowSize = 3;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
			new SumReducer<>(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
			EventTimeTrigger.create(),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));

	testHarness.processWatermark(new Watermark(999));
	expectedOutput.add(new Watermark(999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());

	testHarness.processWatermark(new Watermark(1999));
	expectedOutput.add(new Watermark(1999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());

	// do snapshot and save to file
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
	OperatorSnapshotUtil.writeStateHandle(
		snapshot,
		"src/test/resources/win-op-migration-test-reduce-event-time-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
 
Example 10
Source File: WindowOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testProcessingTimeTumblingWindows() throws Throwable {
	final int windowSize = 3;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
			new SumReducer(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
			ProcessingTimeTrigger.create(),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.setProcessingTime(3);

	// timestamp is ignored in processing time
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));

	testHarness.setProcessingTime(5000);

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 2999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));

	testHarness.setProcessingTime(7000);

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 5999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.close();
}
 
Example 11
Source File: BucketingSinkTest.java    From flink with Apache License 2.0
@Test
public void testScalingUp() throws Exception {
	final File outDir = tempFolder.newFolder();

	OneInputStreamOperatorTestHarness<String, Object> testHarness1 = createRescalingTestSink(outDir, 2, 0, 100);
	testHarness1.setup();
	testHarness1.open();

	OneInputStreamOperatorTestHarness<String, Object> testHarness2 = createRescalingTestSink(outDir, 2, 0, 100);
	testHarness2.setup();
	testHarness2.open();

	testHarness1.processElement(new StreamRecord<>("test1", 1L));
	testHarness1.processElement(new StreamRecord<>("test2", 1L));

	checkLocalFs(outDir, 2, 0, 0, 0);

	testHarness2.processElement(new StreamRecord<>("test3", 1L));
	testHarness2.processElement(new StreamRecord<>("test4", 1L));
	testHarness2.processElement(new StreamRecord<>("test5", 1L));

	checkLocalFs(outDir, 5, 0, 0, 0);

	// intentionally snapshot them in reverse order so that the states get shuffled
	OperatorSubtaskState mergedSnapshot = AbstractStreamOperatorTestHarness.repackageState(
		testHarness2.snapshot(0, 0),
		testHarness1.snapshot(0, 0)
	);

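	// the trailing arguments to repartitionOperatorState read as (maxParallelism,
	// oldParallelism, newParallelism, subtaskIndex); here the state is spread from 2 to 3 subtasks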
	OperatorSubtaskState initState1 = AbstractStreamOperatorTestHarness.repartitionOperatorState(
		mergedSnapshot, maxParallelism, 2, 3, 0);

	testHarness1 = createRescalingTestSink(outDir, 3, 0, 100);
	testHarness1.setup();
	testHarness1.initializeState(initState1);
	testHarness1.open();

	checkLocalFs(outDir, 2, 0, 3, 3);

	OperatorSubtaskState initState2 = AbstractStreamOperatorTestHarness.repartitionOperatorState(
		mergedSnapshot, maxParallelism, 2, 3, 1);

	testHarness2 = createRescalingTestSink(outDir, 3, 1, 100);
	testHarness2.setup();
	testHarness2.initializeState(initState2);
	testHarness2.open();

	checkLocalFs(outDir, 0, 0, 5, 5);

	OperatorSubtaskState initState3 = AbstractStreamOperatorTestHarness.repartitionOperatorState(
		mergedSnapshot, maxParallelism, 2, 3, 2);

	OneInputStreamOperatorTestHarness<String, Object> testHarness3 = createRescalingTestSink(outDir, 3, 2, 100);
	testHarness3.setup();
	testHarness3.initializeState(initState3);
	testHarness3.open();

	checkLocalFs(outDir, 0, 0, 5, 5);

	testHarness1.processElement(new StreamRecord<>("test6", 0));
	testHarness2.processElement(new StreamRecord<>("test6", 0));
	testHarness3.processElement(new StreamRecord<>("test6", 0));

	checkLocalFs(outDir, 3, 0, 5, 5);

	testHarness1.snapshot(1, 0);
	testHarness2.snapshot(1, 0);
	testHarness3.snapshot(1, 0);

	testHarness1.close();
	testHarness2.close();
	testHarness3.close();

	checkLocalFs(outDir, 0, 3, 5, 5);
}
 
Example 12
Source File: WindowOperatorMigrationTest.java    From flink with Apache License 2.0
@Test
public void testRestoreReducingProcessingTimeWindows() throws Exception {
	final int windowSize = 3;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
			new SumReducer<>(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
			ProcessingTimeTrigger.create(),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();

	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"win-op-migration-test-reduce-processing-time-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	testHarness.setProcessingTime(3020);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3)));

	testHarness.setProcessingTime(6000);

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), 5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key3", 1), 5999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());
	testHarness.close();
}
 
Example 13
Source File: MapBundleOperatorTest.java    From flink with Apache License 2.0
@Test
public void testSimple() throws Exception {
	@SuppressWarnings("unchecked")
	TestMapBundleFunction func = new TestMapBundleFunction();
	CountBundleTrigger<Tuple2<String, String>> trigger = new CountBundleTrigger<>(3);
	KeySelector<Tuple2<String, String>, String> keySelector =
			(KeySelector<Tuple2<String, String>, String>) value -> value.f0;

	OneInputStreamOperatorTestHarness<Tuple2<String, String>, String> op =
			new OneInputStreamOperatorTestHarness<>(
					new MapBundleOperator<>(func, trigger, keySelector));
	op.open();
	synchronized (op.getCheckpointLock()) {
		StreamRecord<Tuple2<String, String>> input = new StreamRecord<>(null);

		input.replace(new Tuple2<>("k1", "v1"));
		op.processElement(input);

		input.replace(new Tuple2<>("k1", "v2"));
		op.processElement(input);

		assertEquals(0, func.getFinishCount());

		input.replace(new Tuple2<>("k2", "v3"));
		op.processElement(input);

		assertEquals(1, func.getFinishCount());
		assertThat(Arrays.asList("k1=v1,v2", "k2=v3"), is(func.getOutputs()));

		input.replace(new Tuple2<>("k3", "v4"));
		op.processElement(input);

		input.replace(new Tuple2<>("k4", "v5"));
		op.processElement(input);

		assertEquals(1, func.getFinishCount());

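		// close() finishes the in-flight bundle (k3 and k4), which the asserts below verify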
		op.close();
		assertEquals(2, func.getFinishCount());
		assertThat(Arrays.asList("k3=v4", "k4=v5"), is(func.getOutputs()));
	}
}
 
Example 14
Source File: AsyncWaitOperatorTest.java    From flink with Apache License 2.0
/**
 * Delays the async invocation for a while to check whether endInput waits for all in-flight elements to finish.
 */
@Test
public void testEndInput() throws Exception {
	final AsyncWaitOperator<Integer, Integer> operator = new AsyncWaitOperator<>(
		new DelayedAsyncFunction(10),
		-1,
		2,
		AsyncDataStream.OutputMode.ORDERED);

	final OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
		new OneInputStreamOperatorTestHarness<>(operator, IntSerializer.INSTANCE);

	final long initialTime = 0L;
	final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	expectedOutput.add(new StreamRecord<>(2, initialTime + 1));
	expectedOutput.add(new StreamRecord<>(4, initialTime + 2));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<>(6, initialTime + 3));

	testHarness.open();

	try {
		synchronized (testHarness.getCheckpointLock()) {
			testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
			testHarness.processElement(new StreamRecord<>(2, initialTime + 2));
			testHarness.processWatermark(new Watermark(initialTime + 2));
			testHarness.processElement(new StreamRecord<>(3, initialTime + 3));
		}

		// wait until all async collectors in the buffer have been emitted.
		synchronized (testHarness.getCheckpointLock()) {
			testHarness.endInput();
		}

		TestHarnessUtil.assertOutputEquals("Output with watermark was not correct.", expectedOutput, testHarness.getOutput());
	} finally {
		synchronized (testHarness.getCheckpointLock()) {
			testHarness.close();
		}
	}
}
 
Example 15
Source File: FlinkKafkaProducerITCase.java    From flink with Apache License 2.0
/**
 * This test checks whether FlinkKafkaProducer correctly aborts lingering transactions after a failure.
 * If such transactions were left lingering, consumers would be unable to read committed records
 * that were created after the lingering transaction.
 */
@Test
public void testFailBeforeNotifyAndResumeWorkAfterwards() throws Exception {
	String topic = "flink-kafka-producer-fail-before-notify";

	OneInputStreamOperatorTestHarness<Integer, Object> testHarness1 = createTestHarness(topic);
	checkProducerLeak();
	testHarness1.setup();
	testHarness1.open();
	testHarness1.processElement(42, 0);
	testHarness1.snapshot(0, 1);
	testHarness1.processElement(43, 2);
	OperatorSubtaskState snapshot1 = testHarness1.snapshot(1, 3);

	testHarness1.processElement(44, 4);
	testHarness1.snapshot(2, 5);
	testHarness1.processElement(45, 6);

	// do not close the previous testHarness, to make sure that closing does not clean anything up (after a
	// failure there might be no close at all)
	OneInputStreamOperatorTestHarness<Integer, Object> testHarness2 = createTestHarness(topic);
	testHarness2.setup();
	// restore from snapshot1, transactions with records 44 and 45 should be aborted
	testHarness2.initializeState(snapshot1);
	testHarness2.open();

	// write and commit more records, after potentially lingering transactions
	testHarness2.processElement(46, 7);
	testHarness2.snapshot(4, 8);
	testHarness2.processElement(47, 9);
	testHarness2.notifyOfCompletedCheckpoint(4);

	// now we should have:
	// - records 42 and 43 in committed transactions
	// - aborted transactions with records 44 and 45
	// - committed transaction with record 46
	// - pending transaction with record 47
	assertExactlyOnceForTopic(createProperties(), topic, 0, Arrays.asList(42, 43, 46));

	try {
		testHarness1.close();
	} catch (Exception e) {
		// The only acceptable exception is ProducerFencedException because testHarness2 uses the same
		// transactional ID.
		if (!(e.getCause() instanceof ProducerFencedException)) {
			fail("Received unexpected exception " + e);
		}
	}
	testHarness2.close();
	deleteTestTopic(topic);
	checkProducerLeak();
}
 
Example 16
Source File: EvictingWindowOperatorTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests TimeEvictor evictBefore behavior.
 */
@Test
public void testTimeEvictorEvictBefore() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);
	final int triggerCount = 2;
	final int windowSize = 4;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
		(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
		new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new EvictingWindowOperator<>(
		TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
		new TimeWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>(closeCalled)),
		CountTrigger.of(triggerCount),
		TimeEvictor.of(Time.seconds(2)),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 5999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2001));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1001));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 3999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 6500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1002));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 7999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

	testHarness.close();

	Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
 
Example 17
Source File: EvictingWindowOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testTumblingWindowWithApply() throws Exception {
	AtomicInteger closeCalled = new AtomicInteger(0);

	final int windowSize = 4;

	@SuppressWarnings({"unchecked", "rawtypes"})
	TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
			(TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
			new ListStateDescriptor<>("window-contents", streamRecordSerializer);

	EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new EvictingWindowOperator<>(
		TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
		new TimeWindow.Serializer(),
		new TupleKeySelector(),
		BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
		stateDesc,
		new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>(closeCalled)),
		EventTimeTrigger.create(),
		CountEvictor.of(windowSize),
		0,
		null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

	long initialTime = 0L;

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 100));

	testHarness.processWatermark(new Watermark(1999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 1997));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 1998));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 2310)); // not late, but brings key1 above the evictor's count of 4
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 2310));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2310));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2310));

	testHarness.processWatermark(new Watermark(3999)); // fires the windows; the count evictor keeps only the last 4 elements per key

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	expectedOutput.add(new Watermark(1999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), 3999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 3999));
	expectedOutput.add(new Watermark(3999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(),
		new EvictingWindowOperatorTest.ResultSortComparator());
	testHarness.close();
}
 
Example 18
Source File: ContinuousFileProcessingTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testReaderSnapshotRestore() throws Exception {
	String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";

	TimestampedFileInputSplit split1 =
		new TimestampedFileInputSplit(0, 3, new Path("test/test1"), 0, 100, null);

	TimestampedFileInputSplit split2 =
		new TimestampedFileInputSplit(10, 2, new Path("test/test2"), 101, 200, null);

	TimestampedFileInputSplit split3 =
		new TimestampedFileInputSplit(10, 1, new Path("test/test2"), 0, 100, null);

	TimestampedFileInputSplit split4 =
		new TimestampedFileInputSplit(11, 0, new Path("test/test3"), 0, 100, null);

	final OneShotLatch latch = new OneShotLatch();

	BlockingFileInputFormat format = new BlockingFileInputFormat(latch, new Path(testBasePath));
	TypeInformation<FileInputSplit> typeInfo = TypeExtractor.getInputFormatTypes(format);

	ContinuousFileReaderOperator<FileInputSplit> initReader = new ContinuousFileReaderOperator<>(format);
	initReader.setOutputType(typeInfo, new ExecutionConfig());

	OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, FileInputSplit> initTestInstance =
		new OneInputStreamOperatorTestHarness<>(initReader);
	initTestInstance.setTimeCharacteristic(TimeCharacteristic.EventTime);
	initTestInstance.open();

	// create some state in the reader
	initTestInstance.processElement(new StreamRecord<>(split1));
	initTestInstance.processElement(new StreamRecord<>(split2));
	initTestInstance.processElement(new StreamRecord<>(split3));
	initTestInstance.processElement(new StreamRecord<>(split4));

	// take a snapshot of the operator's state. This will be used
	// to initialize another reader and compare the results of the
	// two operators.

	final OperatorSubtaskState snapshot;
	synchronized (initTestInstance.getCheckpointLock()) {
		snapshot = initTestInstance.snapshot(0L, 0L);
	}

	ContinuousFileReaderOperator<FileInputSplit> restoredReader = new ContinuousFileReaderOperator<>(
		new BlockingFileInputFormat(latch, new Path(testBasePath)));
	restoredReader.setOutputType(typeInfo, new ExecutionConfig());

	OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, FileInputSplit> restoredTestInstance  =
		new OneInputStreamOperatorTestHarness<>(restoredReader);
	restoredTestInstance.setTimeCharacteristic(TimeCharacteristic.EventTime);

	restoredTestInstance.initializeState(snapshot);
	restoredTestInstance.open();

	// now let computation start
	latch.trigger();

	// ... and wait for the operators to close gracefully

	synchronized (initTestInstance.getCheckpointLock()) {
		initTestInstance.close();
	}

	synchronized (restoredTestInstance.getCheckpointLock()) {
		restoredTestInstance.close();
	}

	FileInputSplit fsSplit1 = createSplitFromTimestampedSplit(split1);
	FileInputSplit fsSplit2 = createSplitFromTimestampedSplit(split2);
	FileInputSplit fsSplit3 = createSplitFromTimestampedSplit(split3);
	FileInputSplit fsSplit4 = createSplitFromTimestampedSplit(split4);

	// check that the outputs contain the expected splits and that
	// both operators produced identical results.

	Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit1)));
	Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit2)));
	Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit3)));
	Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit4)));

	Assert.assertArrayEquals(
		initTestInstance.getOutput().toArray(),
		restoredTestInstance.getOutput().toArray()
	);
}
 
Example 19
Source File: KeyedProcessOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testEventTimeTimers() throws Exception {

	final int expectedKey = 17;

	KeyedProcessOperator<Integer, Integer, Integer> operator =
			new KeyedProcessOperator<>(new TriggeringFlatMapFunction(TimeDomain.EVENT_TIME, expectedKey));

	OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processWatermark(new Watermark(0));

	testHarness.processElement(new StreamRecord<>(expectedKey, 42L));

	testHarness.processWatermark(new Watermark(5));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new Watermark(0L));
	expectedOutput.add(new StreamRecord<>(expectedKey, 42L));
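	// 1777 is the value the test's TriggeringFlatMapFunction emits from onTimer when its
	// event-time timer fires at time 5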
	expectedOutput.add(new StreamRecord<>(1777, 5L));
	expectedOutput.add(new Watermark(5L));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}