Java Code Examples for org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#initializeState()

The following examples show how to use org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#initializeState(). They are drawn from open-source projects; the source file and originating project are listed above each example.
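
Before the examples, here is a minimal sketch of the snapshot-and-restore cycle that initializeState() takes part in. The identity-map operator, values and timestamps are hypothetical placeholders, not taken from any example below; what carries over is the lifecycle ordering all the examples rely on: setup(), then initializeState(), then open(). The classes used (OneInputStreamOperatorTestHarness, StreamMap, StreamRecord, OperatorSubtaskState, MapFunction) come from the same Flink packages as in the examples.

@Test
public void minimalSnapshotAndRestore() throws Exception {
	MapFunction<String, String> identity = value -> value;

	// run the operator once and take a snapshot
	OneInputStreamOperatorTestHarness<String, String> harness =
			new OneInputStreamOperatorTestHarness<>(new StreamMap<>(identity));
	harness.setup();
	harness.open();
	harness.processElement(new StreamRecord<>("a", 0L));
	OperatorSubtaskState snapshot = harness.snapshot(0L, 1L);
	harness.close();

	// restore the snapshot into a fresh harness: initializeState()
	// must be called after setup() and before open()
	harness = new OneInputStreamOperatorTestHarness<>(new StreamMap<>(identity));
	harness.setup();
	harness.initializeState(snapshot);
	harness.open();
	harness.close();
}
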
Example 1
Source File: FlinkKafkaProducerITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testRecoverCommittedTransaction() throws Exception {
	String topic = "flink-kafka-producer-recover-committed-transaction";

	OneInputStreamOperatorTestHarness<Integer, Object> testHarness = createTestHarness(topic);

	testHarness.setup();
	testHarness.open(); // producerA - start transaction (txn) 0
	testHarness.processElement(42, 0); // producerA - write 42 in txn 0
	OperatorSubtaskState checkpoint0 = testHarness.snapshot(0, 1); // producerA - pre commit txn 0, producerB - start txn 1
	testHarness.processElement(43, 2); // producerB - write 43 in txn 1
	testHarness.notifyOfCompletedCheckpoint(0); // producerA - commit txn 0 and return to the pool
	testHarness.snapshot(1, 3); // producerB - pre commit txn 1, producerA - start txn 2
	testHarness.processElement(44, 4); // producerA - write 44 in txn 2
	testHarness.close(); // producerA - abort txn 2

	testHarness = createTestHarness(topic);
	testHarness.initializeState(checkpoint0); // recover state 0 - producerA recover and commit txn 0
	testHarness.close();

	assertExactlyOnceForTopic(createProperties(), topic, 0, Arrays.asList(42));

	deleteTestTopic(topic);
	checkProducerLeak();
}
 
Example 2
Source File: PojoSerializerUpgradeTest.java    From Flink-CEPplus with Apache License 2.0
private OperatorSubtaskState runOperator(
		Configuration taskConfiguration,
		ExecutionConfig executionConfig,
		OneInputStreamOperator<Long, Long> operator,
		KeySelector<Long, Long> keySelector,
		boolean isKeyedState,
		StateBackend stateBackend,
		ClassLoader classLoader,
		OperatorSubtaskState operatorSubtaskState,
		Iterable<Long> input) throws Exception {

	try (final MockEnvironment environment =
			new MockEnvironmentBuilder()
				.setTaskName("test task")
				.setMemorySize(32 * 1024)
				.setInputSplitProvider(new MockInputSplitProvider())
				.setBufferSize(256)
				.setTaskConfiguration(taskConfiguration)
				.setExecutionConfig(executionConfig)
				.setMaxParallelism(16)
				.setUserCodeClassLoader(classLoader)
				.build()) {

		OneInputStreamOperatorTestHarness<Long, Long> harness = null;
		try {
			if (isKeyedState) {
				harness = new KeyedOneInputStreamOperatorTestHarness<>(
					operator,
					keySelector,
					BasicTypeInfo.LONG_TYPE_INFO,
					environment);
			} else {
				harness = new OneInputStreamOperatorTestHarness<>(operator, LongSerializer.INSTANCE, environment);
			}

			harness.setStateBackend(stateBackend);

			harness.setup();
			harness.initializeState(operatorSubtaskState);
			harness.open();

			long timestamp = 0L;

			for (Long value : input) {
				harness.processElement(value, timestamp++);
			}

			long checkpointId = 1L;
			long checkpointTimestamp = timestamp + 1L;

			return harness.snapshot(checkpointId, checkpointTimestamp);
		} finally {
			IOUtils.closeQuietly(harness);
		}
	}
}
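
A typical call sequence for this helper (a hedged sketch; the classloaders, inputs and other arguments are placeholders) runs the operator once without restored state to produce a snapshot, then feeds that snapshot back in, e.g. under a different classloader, to exercise the serializer-upgrade path:

// first run: no restored state, produces the snapshot
OperatorSubtaskState snapshot = runOperator(
		taskConfiguration, executionConfig, operator, keySelector,
		true, stateBackend, classLoaderA, null, Arrays.asList(1L, 2L, 3L));

// second run: restore the snapshot, e.g. with the evolved POJO's classloader
runOperator(
		taskConfiguration, executionConfig, operator, keySelector,
		true, stateBackend, classLoaderB, snapshot, Arrays.asList(4L, 5L));
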
 
Example 3
Source File: WindowOperatorMigrationTest.java    From flink with Apache License 2.0
/**
 * This checks that we can restore from a virgin {@code WindowOperator} that has never seen
 * any elements.
 */
@Test
public void testRestoreSessionWindowsWithCountTriggerInMintCondition() throws Exception {

	final int sessionSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
			EventTimeSessionWindows.withGap(Time.seconds(sessionSize)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<String>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new SessionWindowFunction()),
			PurgingTrigger.of(CountTrigger.of(4)),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();

	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"win-op-migration-test-session-with-stateful-trigger-mint-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 6000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 6500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 7000));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-10", 0L, 6500L), 6499));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());

	// add an element that merges the two "key1" sessions, they should now have count 6, and therefore fire
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 10), 4500));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-22", 10L, 10000L), 9999L));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());

	testHarness.close();
}
 
Example 4
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testReduceSessionWindows() throws Exception {
	closeCalled.set(0);

	final int sessionSize = 3;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>(
			"window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
			EventTimeSessionWindows.withGap(Time.seconds(sessionSize)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
			EventTimeTrigger.create(),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
			createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
	testHarness.close();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));

	testHarness.processWatermark(new Watermark(12000));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
	expectedOutput.add(new Watermark(12000));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));

	testHarness.processWatermark(new Watermark(17999));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
	expectedOutput.add(new Watermark(17999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());

	testHarness.close();
}
 
Example 5
Source File: KafkaMigrationTestBase.java    From flink with Apache License 2.0
protected void initializeState(OneInputStreamOperatorTestHarness testHarness) throws Exception {
	testHarness.setup();
	testHarness.initializeState(getOperatorSnapshotPath());
	testHarness.open();
}
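
The snapshot file restored here is generated ahead of time by an older Flink version. A minimal sketch of how such a file could be written, assuming Flink's OperatorSnapshotUtil test utility and a hypothetical output path:

OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
// persist the snapshot so a migration test on a newer version can restore from it
OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/kafka-migration-snapshot");
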
 
Example 6
Source File: FlinkKafkaProducerITCase.java    From flink with Apache License 2.0
/**
 * This test checks whether FlinkKafkaProducer correctly aborts lingering transactions after a failure.
 * If such transactions were left lingering, consumers would be unable to read committed records
 * that were created after the lingering transaction.
 */
@Test
public void testFailBeforeNotifyAndResumeWorkAfterwards() throws Exception {
	String topic = "flink-kafka-producer-fail-before-notify";

	OneInputStreamOperatorTestHarness<Integer, Object> testHarness1 = createTestHarness(topic);
	checkProducerLeak();
	testHarness1.setup();
	testHarness1.open();
	testHarness1.processElement(42, 0);
	testHarness1.snapshot(0, 1);
	testHarness1.processElement(43, 2);
	OperatorSubtaskState snapshot1 = testHarness1.snapshot(1, 3);

	testHarness1.processElement(44, 4);
	testHarness1.snapshot(2, 5);
	testHarness1.processElement(45, 6);

	// do not close the previous testHarness, to make sure that closing does not clean anything up
	// (in case of a failure there might not be any close at all)
	OneInputStreamOperatorTestHarness<Integer, Object> testHarness2 = createTestHarness(topic);
	testHarness2.setup();
	// restore from snapshot1, transactions with records 44 and 45 should be aborted
	testHarness2.initializeState(snapshot1);
	testHarness2.open();

	// write and commit more records, after potentially lingering transactions
	testHarness2.processElement(46, 7);
	testHarness2.snapshot(4, 8);
	testHarness2.processElement(47, 9);
	testHarness2.notifyOfCompletedCheckpoint(4);

	// now we should have:
	// - records 42 and 43 in committed transactions
	// - aborted transactions with records 44 and 45
	// - committed transaction with record 46
	// - pending transaction with record 47
	assertExactlyOnceForTopic(createProperties(), topic, 0, Arrays.asList(42, 43, 46));

	try {
		testHarness1.close();
	} catch (Exception e) {
		// The only acceptable exception is ProducerFencedException because testHarness2 uses the same
		// transactional ID.
		if (!(e.getCause() instanceof ProducerFencedException)) {
			fail("Received unexpected exception " + e);
		}
	}
	testHarness2.close();
	deleteTestTopic(topic);
	checkProducerLeak();
}
 
Example 7
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
/**
 * This tests whether merging works correctly with the CountTrigger.
 */
@Test
public void testSessionWindowsWithCountTrigger() throws Exception {
	closeCalled.set(0);

	final int sessionSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
			EventTimeSessionWindows.withGap(Time.seconds(sessionSize)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new SessionWindowFunction()),
			PurgingTrigger.of(CountTrigger.of(4)),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
			createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
	testHarness.close();

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-10", 0L, 6500L), 6499));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
	expectedOutput.clear();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 6000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 6500));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 7000));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());

	// add an element that merges the two "key1" sessions, they should now have count 6, and therefore fire
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 10), 4500));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-22", 10L, 10000L), 9999L));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());

	testHarness.close();
}
 
Example 8
Source File: CEPOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCEPOperatorCleanupEventTime() throws Exception {

	Event startEvent1 = new Event(42, "start", 1.0);
	Event startEvent2 = new Event(42, "start", 2.0);
	SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
	SubEvent middleEvent2 = new SubEvent(42, "foo2", 1.0, 10.0);
	SubEvent middleEvent3 = new SubEvent(42, "foo3", 1.0, 10.0);
	Event endEvent1 = new Event(42, "end", 1.0);
	Event endEvent2 = new Event(42, "end", 2.0);

	Event startEventK2 = new Event(43, "start", 1.0);

	CepOperator<Event, Integer, Map<String, List<Event>>> operator = getKeyedCepOperator(false);
	OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);

	try {
		harness.open();

		harness.processWatermark(new Watermark(Long.MIN_VALUE));

		harness.processElement(new StreamRecord<>(new Event(42, "foobar", 1.0), 2L));
		harness.processElement(new StreamRecord<Event>(middleEvent1, 2L));
		harness.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3L));
		harness.processElement(new StreamRecord<>(startEvent1, 1L));
		harness.processElement(new StreamRecord<>(startEventK2, 1L));

		// there must be 2 keys 42, 43 registered for the watermark callback
		// all the seen elements must be in the priority queues but no NFA yet.

		assertEquals(2L, harness.numEventTimeTimers());
		assertEquals(4L, operator.getPQSize(42));
		assertEquals(1L, operator.getPQSize(43));
		assertFalse(operator.hasNonEmptySharedBuffer(42));
		assertFalse(operator.hasNonEmptySharedBuffer(43));

		harness.processWatermark(new Watermark(2L));

		verifyWatermark(harness.getOutput().poll(), Long.MIN_VALUE);
		verifyWatermark(harness.getOutput().poll(), 2L);

		// still the 2 keys
		// one element in PQ for 42 (the barfoo) as it arrived early
		// for 43 the element entered the NFA and the PQ is empty

		assertEquals(2L, harness.numEventTimeTimers());
		assertTrue(operator.hasNonEmptySharedBuffer(42));
		assertEquals(1L, operator.getPQSize(42));
		assertTrue(operator.hasNonEmptySharedBuffer(43));
		assertFalse(operator.hasNonEmptyPQ(43));

		harness.processElement(new StreamRecord<>(startEvent2, 4L));
		harness.processElement(new StreamRecord<Event>(middleEvent2, 5L));

		OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
		harness.close();

		CepOperator<Event, Integer, Map<String, List<Event>>> operator2 = getKeyedCepOperator(false);
		harness = CepOperatorTestUtilities.getCepTestHarness(operator2);
		harness.setup();
		harness.initializeState(snapshot);
		harness.open();

		harness.processElement(new StreamRecord<>(endEvent1, 6L));
		harness.processWatermark(11L);
		harness.processWatermark(12L);

		// now we have 1 key, because key 43 expired and was removed;
		// key 42 is still there due to startEvent2
		assertEquals(1L, harness.numEventTimeTimers());
		assertTrue(operator2.hasNonEmptySharedBuffer(42));
		assertFalse(operator2.hasNonEmptyPQ(42));
		assertFalse(operator2.hasNonEmptySharedBuffer(43));
		assertFalse(operator2.hasNonEmptyPQ(43));

		verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
		verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent2, endEvent1);
		verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent1);
		verifyWatermark(harness.getOutput().poll(), 11L);
		verifyWatermark(harness.getOutput().poll(), 12L);

		// this is a late event, because timestamp(12) = last watermark(12)
		harness.processElement(new StreamRecord<Event>(middleEvent3, 12L));
		harness.processElement(new StreamRecord<>(endEvent2, 13L));
		harness.processWatermark(20L);
		harness.processWatermark(21L);

		assertFalse(operator2.hasNonEmptySharedBuffer(42));
		assertFalse(operator2.hasNonEmptyPQ(42));
		assertEquals(0L, harness.numEventTimeTimers());

		assertEquals(3, harness.getOutput().size());
		verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent2);

		verifyWatermark(harness.getOutput().poll(), 20L);
		verifyWatermark(harness.getOutput().poll(), 21L);
	} finally {
		harness.close();
	}
}
 
Example 9
Source File: WindowOperatorMigrationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testRestoreReducingEventTimeWindows() throws Exception {
	final int windowSize = 3;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
			new SumReducer<>(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
			EventTimeTrigger.create(),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();

	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"win-op-migration-test-reduce-event-time-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	testHarness.processWatermark(new Watermark(2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
	expectedOutput.add(new Watermark(2999));

	testHarness.processWatermark(new Watermark(3999));
	expectedOutput.add(new Watermark(3999));

	testHarness.processWatermark(new Watermark(4999));
	expectedOutput.add(new Watermark(4999));

	testHarness.processWatermark(new Watermark(5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 5999));
	expectedOutput.add(new Watermark(5999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());
	testHarness.close();
}
 
Example 10
Source File: RowTimeSortOperatorTest.java    From flink with Apache License 2.0
@Test
public void testSortOnTwoFields() throws Exception {
	BaseRowTypeInfo inputRowType = new BaseRowTypeInfo(
			new IntType(),
			new BigIntType(),
			new VarCharType(VarCharType.MAX_LENGTH),
			new IntType());

	// Note: rowTimeIdx must be 0 in a production environment; the value is 1 here just to simplify the testing
	int rowTimeIdx = 1;
	GeneratedRecordComparator gComparator = new GeneratedRecordComparator("", "", new Object[0]) {

		private static final long serialVersionUID = -6067266199060901331L;

		@Override
		public RecordComparator newInstance(ClassLoader classLoader) {

			return IntRecordComparator.INSTANCE;
		}
	};

	BaseRowHarnessAssertor assertor = new BaseRowHarnessAssertor(inputRowType.getFieldTypes());

	RowTimeSortOperator operator = createSortOperator(inputRowType, rowTimeIdx, gComparator);
	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(operator);
	testHarness.open();
	testHarness.processElement(record(3, 3L, "Hello world", 3));
	testHarness.processElement(record(2, 2L, "Hello", 2));
	testHarness.processElement(record(6, 2L, "Luke Skywalker", 6));
	testHarness.processElement(record(5, 3L, "I am fine.", 5));
	testHarness.processElement(record(7, 1L, "Comment#1", 7));
	testHarness.processElement(record(9, 4L, "Comment#3", 9));
	testHarness.processElement(record(10, 4L, "Comment#4", 10));
	testHarness.processElement(record(8, 4L, "Comment#2", 8));
	testHarness.processElement(record(1, 1L, "Hi", 2));
	testHarness.processElement(record(1, 1L, "Hi", 1));
	testHarness.processElement(record(4, 3L, "Helloworld, how are you?", 4));
	testHarness.processElement(record(4, 5L, "Hello, how are you?", 4));
	testHarness.processWatermark(new Watermark(4L));

	List<Object> expectedOutput = new ArrayList<>();
	expectedOutput.add(record(1, 1L, "Hi", 2));
	expectedOutput.add(record(1, 1L, "Hi", 1));
	expectedOutput.add(record(7, 1L, "Comment#1", 7));
	expectedOutput.add(record(2, 2L, "Hello", 2));
	expectedOutput.add(record(6, 2L, "Luke Skywalker", 6));
	expectedOutput.add(record(3, 3L, "Hello world", 3));
	expectedOutput.add(record(4, 3L, "Helloworld, how are you?", 4));
	expectedOutput.add(record(5, 3L, "I am fine.", 5));
	expectedOutput.add(record(8, 4L, "Comment#2", 8));
	expectedOutput.add(record(9, 4L, "Comment#3", 9));
	expectedOutput.add(record(10, 4L, "Comment#4", 10));
	expectedOutput.add(new Watermark(4L));

	// do a snapshot; the data can later be recovered from this state
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
	assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
	testHarness.close();

	expectedOutput.clear();

	operator = createSortOperator(inputRowType, rowTimeIdx, gComparator);
	testHarness = createTestHarness(operator);
	testHarness.initializeState(snapshot);
	testHarness.open();
	// late data will be dropped
	testHarness.processElement(record(5, 3L, "I am fine.", 6));
	testHarness.processWatermark(new Watermark(5L));

	expectedOutput.add(record(4, 5L, "Hello, how are you?", 4));
	expectedOutput.add(new Watermark(5L));

	assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());

	// these watermarks have no effect
	testHarness.processWatermark(new Watermark(11L));
	testHarness.processWatermark(new Watermark(12L));
	expectedOutput.add(new Watermark(11L));
	expectedOutput.add(new Watermark(12L));

	assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
}
 
Example 11
Source File: BucketingSinkTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testScalingUp() throws Exception {
	final File outDir = tempFolder.newFolder();

	OneInputStreamOperatorTestHarness<String, Object> testHarness1 = createRescalingTestSink(outDir, 2, 0, 100);
	testHarness1.setup();
	testHarness1.open();

	OneInputStreamOperatorTestHarness<String, Object> testHarness2 = createRescalingTestSink(outDir, 2, 0, 100);
	testHarness2.setup();
	testHarness2.open();

	testHarness1.processElement(new StreamRecord<>("test1", 1L));
	testHarness1.processElement(new StreamRecord<>("test2", 1L));

	checkLocalFs(outDir, 2, 0, 0, 0);

	testHarness2.processElement(new StreamRecord<>("test3", 1L));
	testHarness2.processElement(new StreamRecord<>("test4", 1L));
	testHarness2.processElement(new StreamRecord<>("test5", 1L));

	checkLocalFs(outDir, 5, 0, 0, 0);

	// we intentionally snapshot them in reverse order so that the states are shuffled
	OperatorSubtaskState mergedSnapshot = AbstractStreamOperatorTestHarness.repackageState(
		testHarness2.snapshot(0, 0),
		testHarness1.snapshot(0, 0)
	);

	OperatorSubtaskState initState1 = AbstractStreamOperatorTestHarness.repartitionOperatorState(
		mergedSnapshot, maxParallelism, 2, 3, 0);

	testHarness1 = createRescalingTestSink(outDir, 3, 0, 100);
	testHarness1.setup();
	testHarness1.initializeState(initState1);
	testHarness1.open();

	checkLocalFs(outDir, 2, 0, 3, 3);

	OperatorSubtaskState initState2 = AbstractStreamOperatorTestHarness.repartitionOperatorState(
		mergedSnapshot, maxParallelism, 2, 3, 1);

	testHarness2 = createRescalingTestSink(outDir, 3, 1, 100);
	testHarness2.setup();
	testHarness2.initializeState(initState2);
	testHarness2.open();

	checkLocalFs(outDir, 0, 0, 5, 5);

	OperatorSubtaskState initState3 = AbstractStreamOperatorTestHarness.repartitionOperatorState(
		mergedSnapshot, maxParallelism, 2, 3, 2);

	OneInputStreamOperatorTestHarness<String, Object> testHarness3 = createRescalingTestSink(outDir, 3, 2, 100);
	testHarness3.setup();
	testHarness3.initializeState(initState3);
	testHarness3.open();

	checkLocalFs(outDir, 0, 0, 5, 5);

	testHarness1.processElement(new StreamRecord<>("test6", 0));
	testHarness2.processElement(new StreamRecord<>("test6", 0));
	testHarness3.processElement(new StreamRecord<>("test6", 0));

	checkLocalFs(outDir, 3, 0, 5, 5);

	testHarness1.snapshot(1, 0);
	testHarness2.snapshot(1, 0);
	testHarness3.snapshot(1, 0);

	testHarness1.close();
	testHarness2.close();
	testHarness3.close();

	checkLocalFs(outDir, 0, 3, 5, 5);
}
 
Example 12
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
/**
 * This tests a custom Session window assigner that assigns some elements to "point windows",
 * windows that have the same timestamp for start and end.
 *
 * <p>In this test, elements that have 33 as the second tuple field will be put into a point
 * window.
 */
@Test
@SuppressWarnings("unchecked")
public void testPointSessions() throws Exception {
	closeCalled.set(0);

	WindowOperator operator = WindowOperatorBuilder
			.builder()
			.withInputFields(inputFieldTypes)
			.assigner(new PointSessionWindowAssigner(3000))
			.withEventTime(2)
			.aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(insertRecord("key2", 1, 0L));
	testHarness.processElement(insertRecord("key2", 33, 1000L));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
	testHarness.close();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.processElement(insertRecord("key2", 33, 2500L));

	testHarness.processElement(insertRecord("key1", 1, 10L));
	testHarness.processElement(insertRecord("key1", 2, 1000L));
	testHarness.processElement(insertRecord("key1", 33, 2500L));

	testHarness.processWatermark(new Watermark(12000));

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 36L, 3L, 10L, 4000L, 3999L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 67L, 3L, 0L, 3000L, 2999L)));
	expectedOutput.add(new Watermark(12000));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();

	// together with the close before the restore, close() has now been called twice
	assertEquals("Close was not called.", 2, closeCalled.get());
}
 
Example 13
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
private void testTumblingEventTimeWindows(OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator) throws Exception {
	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));

	testHarness.processWatermark(new Watermark(999));
	expectedOutput.add(new Watermark(999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.processWatermark(new Watermark(1999));
	expectedOutput.add(new Watermark(1999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
	testHarness.close();

	testHarness = createTestHarness(operator);
	expectedOutput.clear();
	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.processWatermark(new Watermark(2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
	expectedOutput.add(new Watermark(2999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.processWatermark(new Watermark(3999));
	expectedOutput.add(new Watermark(3999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.processWatermark(new Watermark(4999));
	expectedOutput.add(new Watermark(4999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.processWatermark(new Watermark(5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 5999));
	expectedOutput.add(new Watermark(5999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	// those don't have any effect...
	testHarness.processWatermark(new Watermark(6999));
	testHarness.processWatermark(new Watermark(7999));
	expectedOutput.add(new Watermark(6999));
	expectedOutput.add(new Watermark(7999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.close();
}
 
Example 14
Source File: WindowOperatorMigrationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testRestoreApplyProcessingTimeWindows() throws Exception {
	final int windowSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>()),
			ProcessingTimeTrigger.create(),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();

	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"win-op-migration-test-apply-processing-time-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	testHarness.setProcessingTime(3020);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3)));

	testHarness.setProcessingTime(6000);

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), 5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key3", 1), 5999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());
	testHarness.close();
}
 
Example 15
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testEventTimeSessionWindows() throws Exception {
	closeCalled.set(0);

	WindowOperator operator = WindowOperatorBuilder
			.builder()
			.withInputFields(inputFieldTypes)
			.session(Duration.ofSeconds(3))
			.withEventTime(2)
			.aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(insertRecord("key2", 1, 0L));
	testHarness.processElement(insertRecord("key2", 2, 1000L));
	testHarness.processElement(insertRecord("key2", 3, 2500L));

	testHarness.processElement(insertRecord("key1", 1, 10L));
	testHarness.processElement(insertRecord("key1", 2, 1000L));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshotV2 = testHarness.snapshot(0L, 0);
	testHarness.close();
	expectedOutput.clear();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshotV2);
	testHarness.open();

	assertEquals(0L, operator.getWatermarkLatency().getValue());

	testHarness.processElement(insertRecord("key1", 3, 2500L));

	testHarness.processElement(insertRecord("key2", 4, 5501L));
	testHarness.processElement(insertRecord("key2", 5, 6000L));
	testHarness.processElement(insertRecord("key2", 5, 6000L));
	testHarness.processElement(insertRecord("key2", 6, 6050L));

	testHarness.processWatermark(new Watermark(12000));

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 6L, 3L, 10L, 5500L, 5499L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 6L, 3L, 0L, 5500L, 5499L)));

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 20L, 4L, 5501L, 9050L, 9049L)));
	expectedOutput.add(new Watermark(12000));

	// add late data
	testHarness.processElement(insertRecord("key1", 3, 4000L));
	testHarness.processElement(insertRecord("key2", 10, 15000L));
	testHarness.processElement(insertRecord("key2", 20, 15000L));

	testHarness.processWatermark(new Watermark(17999));

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 30L, 2L, 15000L, 18000L, 17999L)));
	expectedOutput.add(new Watermark(17999));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.setProcessingTime(18000);
	assertEquals(1L, operator.getWatermarkLatency().getValue());

	testHarness.close();

	// together with the close before the restore, close() has now been called twice
	assertEquals("Close was not called.", 2, closeCalled.get());
	assertEquals(1, operator.getNumLateRecordsDropped().getCount());
}
 
Example 16
Source File: ContinuousFileProcessingRescalingTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testReaderScalingDown() throws Exception {
	// simulates the scenario of scaling down from 2 to 1 instances

	final OneShotLatch waitingLatch = new OneShotLatch();

	// create the first instance and let it process the first split till element 5
	final OneShotLatch triggerLatch1 = new OneShotLatch();
	BlockingFileInputFormat format1 = new BlockingFileInputFormat(
		triggerLatch1, waitingLatch, new Path("test"), 20, 5);
	FileInputSplit[] splits = format1.createInputSplits(2);

	OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, String> testHarness1 = getTestHarness(format1, 2, 0);
	testHarness1.open();
	testHarness1.processElement(new StreamRecord<>(getTimestampedSplit(0, splits[0])));

	// wait until it arrives at element 5
	if (!triggerLatch1.isTriggered()) {
		triggerLatch1.await();
	}

	// create the second instance and let it process the second split till element 15
	final OneShotLatch triggerLatch2 = new OneShotLatch();
	BlockingFileInputFormat format2 = new BlockingFileInputFormat(
		triggerLatch2, waitingLatch, new Path("test"), 20, 15);

	OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, String> testHarness2 = getTestHarness(format2, 2, 1);
	testHarness2.open();
	testHarness2.processElement(new StreamRecord<>(getTimestampedSplit(0, splits[1])));

	// wait until it arrives at element 15
	if (!triggerLatch2.isTriggered()) {
		triggerLatch2.await();
	}

	// 1) clear the outputs of the two previous instances so that
	// we can compare their newly produced outputs with the merged one
	testHarness1.getOutput().clear();
	testHarness2.getOutput().clear();

	// 2) take the snapshots from the previous instances and merge them
	// into a new one which will be then used to initialize a third instance
	OperatorSubtaskState mergedState = AbstractStreamOperatorTestHarness.
		repackageState(
			testHarness2.snapshot(0, 0),
			testHarness1.snapshot(0, 0)
		);

	// 3) and repartition to get the initialized state when scaling down.
	OperatorSubtaskState initState =
		AbstractStreamOperatorTestHarness.repartitionOperatorState(mergedState, maxParallelism, 2, 1, 0);

	// create the third instance
	final OneShotLatch wLatch = new OneShotLatch();
	final OneShotLatch tLatch = new OneShotLatch();

	BlockingFileInputFormat format = new BlockingFileInputFormat(wLatch, tLatch, new Path("test"), 20, 5);
	OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, String> testHarness = getTestHarness(format, 1, 0);

	// initialize the state of the new operator with the state constructed by
	// combining the partial states of the instances above.
	testHarness.initializeState(initState);
	testHarness.open();

	// now restart the waiting operators
	wLatch.trigger();
	tLatch.trigger();
	waitingLatch.trigger();

	// and wait for the processing to finish
	synchronized (testHarness1.getCheckpointLock()) {
		testHarness1.close();
	}
	synchronized (testHarness2.getCheckpointLock()) {
		testHarness2.close();
	}
	synchronized (testHarness.getCheckpointLock()) {
		testHarness.close();
	}

	Queue<Object> expectedResult = new ArrayDeque<>();
	putElementsInQ(expectedResult, testHarness1.getOutput());
	putElementsInQ(expectedResult, testHarness2.getOutput());

	Queue<Object> actualResult = new ArrayDeque<>();
	putElementsInQ(actualResult, testHarness.getOutput());

	Assert.assertEquals(20, actualResult.size());
	Assert.assertArrayEquals(expectedResult.toArray(), actualResult.toArray());
}
 
Example 17
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testCountTrigger() throws Exception {
	closeCalled.set(0);

	final int windowSize = 4;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
			new SumReducer(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new WindowOperator<>(
			GlobalWindows.create(),
			new GlobalWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>()),
			PurgingTrigger.of(CountTrigger.of(windowSize)),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// The global window actually ignores these timestamps...

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);

	testHarness.close();

	ConcurrentLinkedQueue<Object> outputBeforeClose = testHarness.getOutput();

	stateDesc = new ReducingStateDescriptor<>("window-contents",
			new SumReducer(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	operator = new WindowOperator<>(
			GlobalWindows.create(),
			new GlobalWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>()),
			PurgingTrigger.of(CountTrigger.of(windowSize)),
			0,
			null /* late data output tag */);

	testHarness = createTestHarness(operator);

	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, Iterables.concat(outputBeforeClose, testHarness.getOutput()), new Tuple2ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, Iterables.concat(outputBeforeClose, testHarness.getOutput()), new Tuple2ResultSortComparator());

	testHarness.close();
}
 
Example 18
Source File: WindowOperatorMigrationTest.java    From flink with Apache License 2.0
@Test
public void testRestoreKryoSerializedKeysWindows() throws Exception {
	final int windowSize = 3;

	TypeInformation<Tuple2<NonPojoType, Integer>> inputType = new TypeHint<Tuple2<NonPojoType, Integer>>() {}.getTypeInfo();

	ReducingStateDescriptor<Tuple2<NonPojoType, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
		new SumReducer<>(),
		inputType.createSerializer(new ExecutionConfig()));

	TypeSerializer<NonPojoType> keySerializer = TypeInformation.of(NonPojoType.class).createSerializer(new ExecutionConfig());
	assertTrue(keySerializer instanceof KryoSerializer);

	WindowOperator<NonPojoType, Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>, TimeWindow> operator = new WindowOperator<>(
		TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
		new TimeWindow.Serializer(),
		new TupleKeySelector<>(),
		keySerializer,
		stateDesc,
		new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<>()),
		EventTimeTrigger.create(),
		0,
		null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), TypeInformation.of(NonPojoType.class));

	testHarness.setup();

	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"win-op-migration-test-kryo-serialized-key-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	testHarness.processWatermark(new Watermark(2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key1"), 3), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key2"), 3), 2999));
	expectedOutput.add(new Watermark(2999));

	testHarness.processWatermark(new Watermark(3999));
	expectedOutput.add(new Watermark(3999));

	testHarness.processWatermark(new Watermark(4999));
	expectedOutput.add(new Watermark(4999));

	testHarness.processWatermark(new Watermark(5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key2"), 2), 5999));
	expectedOutput.add(new Watermark(5999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());
	testHarness.close();
}
 
Example 19
Source File: WrappingFunctionSnapshotRestoreTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSnapshotAndRestoreWrappedCheckpointedFunction() throws Exception {

	StreamMap<Integer, Integer> operator = new StreamMap<>(
			new WrappingTestFun(new WrappingTestFun(new InnerTestFun())));

	OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
			new OneInputStreamOperatorTestHarness<>(operator);

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(5, 12L));

	// snapshot and restore from scratch
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);

	testHarness.close();

	InnerTestFun innerTestFun = new InnerTestFun();
	operator = new StreamMap<>(new WrappingTestFun(new WrappingTestFun(innerTestFun)));

	testHarness = new OneInputStreamOperatorTestHarness<>(operator);

	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	Assert.assertTrue(innerTestFun.wasRestored);
	testHarness.close();
}
 
Example 20
Source File: LegacyKeyedProcessOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSnapshotAndRestore() throws Exception {

	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
			new LegacyKeyedProcessOperator<>(new BothTriggeringFlatMapFunction());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(5, 12L));

	// snapshot and restore from scratch
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);

	testHarness.close();

	operator = new LegacyKeyedProcessOperator<>(new BothTriggeringFlatMapFunction());

	testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.setProcessingTime(5);
	testHarness.processWatermark(new Watermark(6));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new StreamRecord<>("PROC:1777"));
	expectedOutput.add(new StreamRecord<>("EVENT:1777", 6L));
	expectedOutput.add(new Watermark(6));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}