Java Code Examples for org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#processWatermark()

The following examples show how to use org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#processWatermark(). The source file, originating project, and license are noted above each example.
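Before the examples, here is a minimal, self-contained sketch of the pattern they all share: open a test harness around an operator, feed it elements and watermarks via processElement() and processWatermark(), and compare the harness output against an expected queue. The StreamMap operator and the lambda map function below are illustrative choices for this sketch, not taken from any one example.

import java.util.concurrent.ConcurrentLinkedQueue;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.operators.StreamMap;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.TestHarnessUtil;

public class ProcessWatermarkSketch {

	public static void main(String[] args) throws Exception {
		// Wrap a simple map operator in the one-input test harness.
		StreamMap<Integer, String> operator =
				new StreamMap<>((MapFunction<Integer, String>) value -> "value=" + value);
		OneInputStreamOperatorTestHarness<Integer, String> harness =
				new OneInputStreamOperatorTestHarness<>(operator);
		harness.open();

		// Feed elements and a watermark in the order a real stream would deliver them.
		harness.processElement(new StreamRecord<>(1, 1L));
		harness.processWatermark(new Watermark(1L));
		harness.processElement(new StreamRecord<>(2, 2L));

		// The watermark is forwarded and appears in the output between the two records.
		ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
		expected.add(new StreamRecord<>("value=1", 1L));
		expected.add(new Watermark(1L));
		expected.add(new StreamRecord<>("value=2", 2L));
		TestHarnessUtil.assertOutputEquals("Output was not correct.", expected, harness.getOutput());

		harness.close();
	}
}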
Example 1
Source File: StreamMapTest.java    From flink with Apache License 2.0
@Test
public void testMap() throws Exception {
	StreamMap<Integer, String> operator = new StreamMap<Integer, String>(new Map());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness = new OneInputStreamOperatorTestHarness<Integer, String>(operator);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<Integer>(1, initialTime + 1));
	testHarness.processElement(new StreamRecord<Integer>(2, initialTime + 2));
	testHarness.processWatermark(new Watermark(initialTime + 2));
	testHarness.processElement(new StreamRecord<Integer>(3, initialTime + 3));

	expectedOutput.add(new StreamRecord<String>("+2", initialTime + 1));
	expectedOutput.add(new StreamRecord<String>("+3", initialTime + 2));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<String>("+4", initialTime + 3));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example 2
Source File: StreamProjectTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testProject() throws Exception {

	TypeInformation<Tuple5<Integer, String, Integer, String, Integer>> inType = TypeExtractor
			.getForObject(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4));

	int[] fields = new int[]{4, 4, 3};

	TupleSerializer<Tuple3<Integer, Integer, String>> serializer =
			new TupleTypeInfo<Tuple3<Integer, Integer, String>>(StreamProjection.extractFieldTypes(fields, inType))
					.createSerializer(new ExecutionConfig());
	@SuppressWarnings("unchecked")
	StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> operator =
			new StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(
					fields, serializer);

	OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> testHarness = new OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(operator);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4), initialTime + 1));
	testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "s", 3, "c", 2), initialTime + 2));
	testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "c", 2), initialTime + 3));
	testHarness.processWatermark(new Watermark(initialTime + 2));
	testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "a", 7), initialTime + 4));

	expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(4, 4, "b"), initialTime + 1));
	expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 2));
	expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 3));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(7, 7, "a"), initialTime + 4));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example 3
Source File: TimestampsAndWatermarksOperatorTest.java    From flink with Apache License 2.0
@Test
public void longMaxInputWatermarkIsForwarded() throws Exception {
	OneInputStreamOperatorTestHarness<Long, Long> testHarness = createTestHarness(
			WatermarkStrategy
					.forGenerator((ctx) -> new PeriodicWatermarkGenerator())
					.withTimestampAssigner((ctx) -> new LongExtractor()));

	testHarness.processWatermark(createLegacyWatermark(Long.MAX_VALUE));

	assertThat(pollNextLegacyWatermark(testHarness), is(legacyWatermark(Long.MAX_VALUE)));
}
 
Example 4
Source File: CEPMigrationTest.java    From flink with Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeAndOrSubtypConditionsPatternAfterMigrationSnapshot() throws Exception {

	KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
		private static final long serialVersionUID = -4873366487571254798L;

		@Override
		public Integer getKey(Event value) throws Exception {
			return value.getId();
		}
	};

	final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);

	OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
		new KeyedOneInputStreamOperatorTestHarness<>(
			getKeyedCepOpearator(false, new NFAComplexConditionsFactory()),
			keySelector,
			BasicTypeInfo.INT_TYPE_INFO);

	try {
		harness.setup();
		harness.open();
		harness.processElement(new StreamRecord<>(startEvent1, 5));
		harness.processWatermark(new Watermark(6));

		// do snapshot and save to file
		OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
		OperatorSnapshotUtil.writeStateHandle(snapshot,
			"src/test/resources/cep-migration-conditions-flink" + flinkGenerateSavepointVersion + "-snapshot");
	} finally {
		harness.close();
	}
}
 
Example 5
Source File: CEPMigrationTest.java    From flink with Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeSinglePatternAfterMigrationSnapshot() throws Exception {

	KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
		private static final long serialVersionUID = -4873366487571254798L;

		@Override
		public Integer getKey(Event value) throws Exception {
			return value.getId();
		}
	};

	final Event startEvent1 = new Event(42, "start", 1.0);

	OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
			new KeyedOneInputStreamOperatorTestHarness<>(
					getKeyedCepOpearator(false, new SinglePatternNFAFactory()),
					keySelector,
					BasicTypeInfo.INT_TYPE_INFO);

	try {
		harness.setup();
		harness.open();
		harness.processWatermark(new Watermark(5));

		// do snapshot and save to file
		OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
		OperatorSnapshotUtil.writeStateHandle(snapshot,
			"src/test/resources/cep-migration-single-pattern-afterwards-flink" + flinkGenerateSavepointVersion + "-snapshot");
	} finally {
		harness.close();
	}
}
 
Example 6
Source File: StreamFilterTest.java    From Flink-CEPplus with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testFilter() throws Exception {
	StreamFilter<Integer> operator = new StreamFilter<Integer>(new MyFilter());

	OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<Integer, Integer>(operator);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<Integer>(1, initialTime + 1));
	testHarness.processElement(new StreamRecord<Integer>(2, initialTime + 2));
	testHarness.processWatermark(new Watermark(initialTime + 2));
	testHarness.processElement(new StreamRecord<Integer>(3, initialTime + 3));
	testHarness.processElement(new StreamRecord<Integer>(4, initialTime + 4));
	testHarness.processElement(new StreamRecord<Integer>(5, initialTime + 5));
	testHarness.processElement(new StreamRecord<Integer>(6, initialTime + 6));
	testHarness.processElement(new StreamRecord<Integer>(7, initialTime + 7));

	expectedOutput.add(new StreamRecord<Integer>(2, initialTime + 2));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<Integer>(4, initialTime + 4));
	expectedOutput.add(new StreamRecord<Integer>(6, initialTime + 6));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example 7
Source File: LegacyKeyedProcessOperatorTest.java    From flink with Apache License 2.0
@Test
public void testTimestampAndWatermarkQuerying() throws Exception {

	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
			new LegacyKeyedProcessOperator<>(new QueryingFlatMapFunction(TimeDomain.EVENT_TIME));

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processWatermark(new Watermark(17));
	testHarness.processElement(new StreamRecord<>(5, 12L));

	testHarness.processWatermark(new Watermark(42));
	testHarness.processElement(new StreamRecord<>(6, 13L));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new Watermark(17L));
	expectedOutput.add(new StreamRecord<>("5TIME:17 TS:12", 12L));
	expectedOutput.add(new Watermark(42L));
	expectedOutput.add(new StreamRecord<>("6TIME:42 TS:13", 13L));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 8
Source File: AsyncWaitOperatorTest.java    From Flink-CEPplus with Apache License 2.0
private void testEventTime(AsyncDataStream.OutputMode mode) throws Exception {
	final AsyncWaitOperator<Integer, Integer> operator = new AsyncWaitOperator<>(
		new MyAsyncFunction(),
		TIMEOUT,
		2,
		mode);

	final OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
			new OneInputStreamOperatorTestHarness<>(operator, IntSerializer.INSTANCE);

	final long initialTime = 0L;
	final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	synchronized (testHarness.getCheckpointLock()) {
		testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
		testHarness.processElement(new StreamRecord<>(2, initialTime + 2));
		testHarness.processWatermark(new Watermark(initialTime + 2));
		testHarness.processElement(new StreamRecord<>(3, initialTime + 3));
	}

	// wait until all async collectors in the buffer have been emitted.
	synchronized (testHarness.getCheckpointLock()) {
		testHarness.close();
	}

	expectedOutput.add(new StreamRecord<>(2, initialTime + 1));
	expectedOutput.add(new StreamRecord<>(4, initialTime + 2));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<>(6, initialTime + 3));

	if (AsyncDataStream.OutputMode.ORDERED == mode) {
		TestHarnessUtil.assertOutputEquals("Output with watermark was not correct.", expectedOutput, testHarness.getOutput());
	}
	else {
		Object[] jobOutputQueue = testHarness.getOutput().toArray();

		Assert.assertEquals("Watermark should be at index 2", new Watermark(initialTime + 2), jobOutputQueue[2]);
		Assert.assertEquals("StreamRecord 3 should be at the end", new StreamRecord<>(6, initialTime + 3), jobOutputQueue[3]);

		TestHarnessUtil.assertOutputEqualsSorted(
				"Output for StreamRecords does not match",
				expectedOutput,
				testHarness.getOutput(),
				new StreamRecordComparator());
	}
}
 
Example 9
Source File: RowTimeSortOperatorTest.java    From flink with Apache License 2.0
@Test
public void testOnlySortOnRowTime() throws Exception {
	RowDataTypeInfo inputRowType = new RowDataTypeInfo(
			new BigIntType(),
			new BigIntType(),
			new VarCharType(VarCharType.MAX_LENGTH),
			new IntType());
	int rowTimeIdx = 0;
	RowDataHarnessAssertor assertor = new RowDataHarnessAssertor(inputRowType.getFieldTypes());
	RowTimeSortOperator operator = createSortOperator(inputRowType, rowTimeIdx, null);
	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);
	testHarness.open();
	testHarness.processElement(insertRecord(3L, 2L, "Hello world", 3));
	testHarness.processElement(insertRecord(2L, 2L, "Hello", 2));
	testHarness.processElement(insertRecord(6L, 3L, "Luke Skywalker", 6));
	testHarness.processElement(insertRecord(5L, 3L, "I am fine.", 5));
	testHarness.processElement(insertRecord(7L, 4L, "Comment#1", 7));
	testHarness.processElement(insertRecord(9L, 4L, "Comment#3", 9));
	testHarness.processElement(insertRecord(10L, 4L, "Comment#4", 10));
	testHarness.processElement(insertRecord(8L, 4L, "Comment#2", 8));
	testHarness.processElement(insertRecord(1L, 1L, "Hi", 2));
	testHarness.processElement(insertRecord(1L, 1L, "Hi", 1));
	testHarness.processElement(insertRecord(4L, 3L, "Helloworld, how are you?", 4));
	testHarness.processWatermark(new Watermark(9L));

	List<Object> expectedOutput = new ArrayList<>();
	expectedOutput.add(insertRecord(1L, 1L, "Hi", 2));
	expectedOutput.add(insertRecord(1L, 1L, "Hi", 1));
	expectedOutput.add(insertRecord(2L, 2L, "Hello", 2));
	expectedOutput.add(insertRecord(3L, 2L, "Hello world", 3));
	expectedOutput.add(insertRecord(4L, 3L, "Helloworld, how are you?", 4));
	expectedOutput.add(insertRecord(5L, 3L, "I am fine.", 5));
	expectedOutput.add(insertRecord(6L, 3L, "Luke Skywalker", 6));
	expectedOutput.add(insertRecord(7L, 4L, "Comment#1", 7));
	expectedOutput.add(insertRecord(8L, 4L, "Comment#2", 8));
	expectedOutput.add(insertRecord(9L, 4L, "Comment#3", 9));
	expectedOutput.add(new Watermark(9L));

	// take a snapshot; the data can later be recovered from this state
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
	assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
	testHarness.close();

	expectedOutput.clear();

	operator = createSortOperator(inputRowType, rowTimeIdx, null);
	testHarness = createTestHarness(operator);
	testHarness.initializeState(snapshot);
	testHarness.open();
	// late data will be dropped
	testHarness.processElement(insertRecord(5L, 3L, "I am fine.", 6));
	testHarness.processWatermark(new Watermark(10L));

	expectedOutput.add(insertRecord(10L, 4L, "Comment#4", 10));
	expectedOutput.add(new Watermark(10L));

	assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());

	// these watermarks produce no new records; they are simply forwarded
	testHarness.processWatermark(new Watermark(11L));
	testHarness.processWatermark(new Watermark(12L));
	expectedOutput.add(new Watermark(11L));
	expectedOutput.add(new Watermark(12L));

	assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
}
 
Example 10
Source File: WindowOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testLateness() throws Exception {
	final int windowSize = 2;
	final long lateness = 500;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
		new SumReducer(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
		new WindowOperator<>(
			TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
			PurgingTrigger.of(EventTimeTrigger.create()),
			lateness,
			lateOutputTag);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		createTestHarness(operator);

	testHarness.open();

	ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
	ConcurrentLinkedQueue<Object> lateExpected = new ConcurrentLinkedQueue<>();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 500));
	testHarness.processWatermark(new Watermark(1500));

	expected.add(new Watermark(1500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1300));
	testHarness.processWatermark(new Watermark(2300));

	expected.add(new StreamRecord<>(new Tuple2<>("key2", 2), 1999));
	expected.add(new Watermark(2300));

	// this will not be sent to the side output because window.maxTimestamp() + allowedLateness > currentWatermark
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1997));
	testHarness.processWatermark(new Watermark(6000));

	// this is 1 and not 3 because the trigger fires and purges
	expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
	expected.add(new Watermark(6000));

	// this will be side output because window.maxTimestamp() + allowedLateness < currentWatermark
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
	testHarness.processWatermark(new Watermark(7000));

	lateExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
	expected.add(new Watermark(7000));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());

	TestHarnessUtil.assertOutputEqualsSorted(
			"SideOutput was not correct.",
			lateExpected,
			(Iterable) testHarness.getSideOutput(lateOutputTag),
			new Tuple2ResultSortComparator());

	testHarness.close();
}
 
Example 11
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
public void testTumblingCountWindow() throws Exception {
	closeCalled.set(0);
	final int windowSize = 3;
	LogicalType[] windowTypes = new LogicalType[] { new BigIntType() };

	WindowOperator operator = WindowOperatorBuilder
			.builder()
			.withInputFields(inputFieldTypes)
			.countWindow(windowSize)
			.aggregateAndBuild(getCountWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(record("key2", 1, 0L));
	testHarness.processElement(record("key2", 2, 1000L));
	testHarness.processElement(record("key2", 3, 2500L));
	testHarness.processElement(record("key1", 1, 10L));
	testHarness.processElement(record("key1", 2, 1000L));

	testHarness.processWatermark(new Watermark(12000));
	testHarness.setProcessingTime(12000L);
	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 6L, 3L, 0L)));
	expectedOutput.add(new Watermark(12000));
	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshotV2 = testHarness.snapshot(0L, 0);
	testHarness.close();
	expectedOutput.clear();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshotV2);
	testHarness.open();

	testHarness.processElement(record("key1", 2, 2500L));
	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key1", 5L, 3L, 0L)));
	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(record("key2", 4, 5501L));
	testHarness.processElement(record("key2", 5, 6000L));
	testHarness.processElement(record("key2", 5, 6000L));
	testHarness.processElement(record("key2", 6, 6050L));

	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 14L, 3L, 1L)));
	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(record("key1", 3, 4000L));
	testHarness.processElement(record("key2", 10, 15000L));
	testHarness.processElement(record("key2", 20, 15000L));
	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 36L, 3L, 2L)));
	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(record("key1", 2, 2500L));
	testHarness.processElement(record("key1", 2, 2500L));
	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key1", 7L, 3L, 1L)));
	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();

	// close() was called once before the restore and once just above, hence 2
	assertEquals("Close was not called.", 2, closeCalled.get());
}
 
Example 12
Source File: WindowOperatorMigrationTest.java    From flink with Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeApplyEventTimeWindowsSnapshot() throws Exception {
	final int windowSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>()),
			EventTimeTrigger.create(),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));

	testHarness.processWatermark(new Watermark(999));
	expectedOutput.add(new Watermark(999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());

	testHarness.processWatermark(new Watermark(1999));
	expectedOutput.add(new Watermark(1999));
	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());

	// do snapshot and save to file
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
	OperatorSnapshotUtil.writeStateHandle(
		snapshot,
		"src/test/resources/win-op-migration-test-apply-event-time-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
 
Example 13
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
/**
 * This tests a custom Session window assigner that assigns some elements to "point windows",
 * windows that have the same timestamp for start and end.
 *
 * <p>In this test, elements that have 33 as the second tuple field will be put into a point
 * window.
 */
@Test
@SuppressWarnings("unchecked")
public void testPointSessions() throws Exception {
	closeCalled.set(0);

	WindowOperator operator = WindowOperatorBuilder
			.builder()
			.withInputFields(inputFieldTypes)
			.assigner(new PointSessionWindowAssigner(3000))
			.withEventTime(2)
			.aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(record("key2", 1, 0L));
	testHarness.processElement(record("key2", 33, 1000L));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
	testHarness.close();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.processElement(record("key2", 33, 2500L));

	testHarness.processElement(record("key1", 1, 10L));
	testHarness.processElement(record("key1", 2, 1000L));
	testHarness.processElement(record("key1", 33, 2500L));

	testHarness.processWatermark(new Watermark(12000));

	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key1", 36L, 3L, 10L, 4000L, 3999L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 67L, 3L, 0L, 3000L, 2999L)));
	expectedOutput.add(new Watermark(12000));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();

	// close() was called once before the restore and once just above, hence 2
	assertEquals("Close was not called.", 2, closeCalled.get());
}
 
Example 14
Source File: WindowOperatorMigrationTest.java    From flink with Apache License 2.0
@Test
public void testRestoreKryoSerializedKeysWindows() throws Exception {
	final int windowSize = 3;

	TypeInformation<Tuple2<NonPojoType, Integer>> inputType = new TypeHint<Tuple2<NonPojoType, Integer>>() {}.getTypeInfo();

	ReducingStateDescriptor<Tuple2<NonPojoType, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
		new SumReducer<>(),
		inputType.createSerializer(new ExecutionConfig()));

	TypeSerializer<NonPojoType> keySerializer = TypeInformation.of(NonPojoType.class).createSerializer(new ExecutionConfig());
	assertTrue(keySerializer instanceof KryoSerializer);

	WindowOperator<NonPojoType, Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>, TimeWindow> operator = new WindowOperator<>(
		TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
		new TimeWindow.Serializer(),
		new TupleKeySelector<>(),
		keySerializer,
		stateDesc,
		new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<>()),
		EventTimeTrigger.create(),
		0,
		null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), TypeInformation.of(NonPojoType.class));

	testHarness.setup();

	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"win-op-migration-test-kryo-serialized-key-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	testHarness.processWatermark(new Watermark(2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key1"), 3), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key2"), 3), 2999));
	expectedOutput.add(new Watermark(2999));

	testHarness.processWatermark(new Watermark(3999));
	expectedOutput.add(new Watermark(3999));

	testHarness.processWatermark(new Watermark(4999));
	expectedOutput.add(new Watermark(4999));

	testHarness.processWatermark(new Watermark(5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key2"), 2), 5999));
	expectedOutput.add(new Watermark(5999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());
	testHarness.close();
}
 
Example 15
Source File: MiniBatchedWatermarkAssignerOperatorTest.java    From flink with Apache License 2.0
@Test
public void testMiniBatchedWatermarkAssignerOperator() throws Exception {
	final MiniBatchedWatermarkAssignerOperator operator = new MiniBatchedWatermarkAssignerOperator(0, 1, 0, -1, 50);

	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness =
			new OneInputStreamOperatorTestHarness<>(operator);

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(GenericRow.of(1L)));
	testHarness.processElement(new StreamRecord<>(GenericRow.of(2L)));
	testHarness.processWatermark(new Watermark(2)); // this watermark should be ignored
	testHarness.processElement(new StreamRecord<>(GenericRow.of(3L)));
	testHarness.processElement(new StreamRecord<>(GenericRow.of(4L)));
	// this element crosses the expected watermark boundary (50), so a watermark of 49 should be emitted
	testHarness.processElement(new StreamRecord<>(GenericRow.of(50L)));

	ConcurrentLinkedQueue<Object> output = testHarness.getOutput();
	List<Watermark> watermarks = extractWatermarks(output);
	assertEquals(1, watermarks.size());
	assertEquals(new Watermark(49), watermarks.get(0));
	output.clear();

	testHarness.setProcessingTime(1001);
	output = testHarness.getOutput();
	watermarks = extractWatermarks(output);
	assertTrue(watermarks.isEmpty());
	output.clear();

	testHarness.processElement(new StreamRecord<>(GenericRow.of(99L)));
	output = testHarness.getOutput();
	watermarks = extractWatermarks(output);
	assertTrue(watermarks.isEmpty());
	output.clear();

	testHarness.processElement(new StreamRecord<>(GenericRow.of(100L)));
	output = testHarness.getOutput();
	watermarks = extractWatermarks(output);
	assertEquals(1, watermarks.size());
	assertEquals(new Watermark(99), watermarks.get(0));
	output.clear();
}
 
Example 16
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testSessionWindows() throws Exception {
	closeCalled.set(0);

	final int sessionSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
			EventTimeSessionWindows.withGap(Time.seconds(sessionSize)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new SessionWindowFunction()),
			EventTimeTrigger.create(),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
			createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
	testHarness.close();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));

	testHarness.processWatermark(new Watermark(12000));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
	expectedOutput.add(new Watermark(12000));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));

	testHarness.processWatermark(new Watermark(17999));

	expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
	expectedOutput.add(new Watermark(17999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());

	testHarness.close();
}
 
Example 17
Source File: WindowOperatorMigrationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testRestoreKryoSerializedKeysWindows() throws Exception {
	final int windowSize = 3;

	TypeInformation<Tuple2<NonPojoType, Integer>> inputType = new TypeHint<Tuple2<NonPojoType, Integer>>() {}.getTypeInfo();

	ReducingStateDescriptor<Tuple2<NonPojoType, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
		new SumReducer<>(),
		inputType.createSerializer(new ExecutionConfig()));

	TypeSerializer<NonPojoType> keySerializer = TypeInformation.of(NonPojoType.class).createSerializer(new ExecutionConfig());
	assertTrue(keySerializer instanceof KryoSerializer);

	WindowOperator<NonPojoType, Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>, TimeWindow> operator = new WindowOperator<>(
		TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
		new TimeWindow.Serializer(),
		new TupleKeySelector<>(),
		keySerializer,
		stateDesc,
		new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<>()),
		EventTimeTrigger.create(),
		0,
		null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<NonPojoType, Integer>, Tuple2<NonPojoType, Integer>> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), TypeInformation.of(NonPojoType.class));

	testHarness.setup();

	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"win-op-migration-test-kryo-serialized-key-flink" + testMigrateVersion + "-snapshot"));

	testHarness.open();

	testHarness.processWatermark(new Watermark(2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key1"), 3), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key2"), 3), 2999));
	expectedOutput.add(new Watermark(2999));

	testHarness.processWatermark(new Watermark(3999));
	expectedOutput.add(new Watermark(3999));

	testHarness.processWatermark(new Watermark(4999));
	expectedOutput.add(new Watermark(4999));

	testHarness.processWatermark(new Watermark(5999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>(new NonPojoType("key2"), 2), 5999));
	expectedOutput.add(new Watermark(5999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());
	testHarness.close();
}
 
Example 18
Source File: WindowOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCleanupTimeOverflow() throws Exception {
	final int windowSize = 1000;
	final long lateness = 2000;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
		new SumReducer(),
		STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	TumblingEventTimeWindows windowAssigner = TumblingEventTimeWindows.of(Time.milliseconds(windowSize));

	final WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
		new WindowOperator<>(
				windowAssigner,
				new TimeWindow.Serializer(),
				new TupleKeySelector(),
				BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
				stateDesc,
				new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
				EventTimeTrigger.create(),
				lateness,
				null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
		createTestHarness(operator);

	testHarness.open();

	ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();

	long timestamp = Long.MAX_VALUE - 1750;
	Collection<TimeWindow> windows = windowAssigner.assignWindows(new Tuple2<>("key2", 1), timestamp, new WindowAssigner.WindowAssignerContext() {
		@Override
		public long getCurrentProcessingTime() {
			return operator.windowAssignerContext.getCurrentProcessingTime();
		}
	});
	TimeWindow window = Iterables.getOnlyElement(windows);

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), timestamp));

	// the garbage collection timer would wrap-around
	Assert.assertTrue(window.maxTimestamp() + lateness < window.maxTimestamp());

	// and it would prematurely fire with watermark (Long.MAX_VALUE - 1500)
	Assert.assertTrue(window.maxTimestamp() + lateness < Long.MAX_VALUE - 1500);

	// if we don't correctly prevent wrap-around in the garbage collection
	// timers this watermark will clean our window state for the just-added
	// element/window
	testHarness.processWatermark(new Watermark(Long.MAX_VALUE - 1500));

	// this watermark is before the end timestamp of our only window
	Assert.assertTrue(Long.MAX_VALUE - 1500 < window.maxTimestamp());
	Assert.assertTrue(window.maxTimestamp() < Long.MAX_VALUE);

	// push in a watermark that will trigger computation of our window
	testHarness.processWatermark(new Watermark(window.maxTimestamp()));

	expected.add(new Watermark(Long.MAX_VALUE - 1500));
	expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), window.maxTimestamp()));
	expected.add(new Watermark(window.maxTimestamp()));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());
	testHarness.close();
}
 
Example 19
Source File: WindowOperatorContractTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testLateSideOutput() throws Exception {

	OutputTag<Integer> lateOutputTag = new OutputTag<Integer>("late"){};

	WindowAssigner<Integer, TimeWindow> mockAssigner = mockTimeWindowAssigner();
	Trigger<Integer, TimeWindow> mockTrigger = mockTrigger();
	InternalWindowFunction<Iterable<Integer>, Void, Integer, TimeWindow> mockWindowFunction = mockWindowFunction();

	OneInputStreamOperatorTestHarness<Integer, Void> testHarness =
			createWindowOperator(mockAssigner, mockTrigger, 0L, mockWindowFunction, lateOutputTag);

	testHarness.open();

	when(mockAssigner.assignWindows(anyInt(), anyLong(), anyAssignerContext()))
			.thenReturn(Collections.singletonList(new TimeWindow(0, 0)));

	testHarness.processWatermark(20);
	testHarness.processElement(new StreamRecord<>(0, 5L));

	verify(mockAssigner, times(1)).assignWindows(eq(0), eq(5L), anyAssignerContext());

	assertThat(testHarness.getSideOutput(lateOutputTag),
			contains(isStreamRecord(0, 5L)));

	// we should also see side output if the WindowAssigner assigns no windows
	when(mockAssigner.assignWindows(anyInt(), anyLong(), anyAssignerContext()))
			.thenReturn(Collections.<TimeWindow>emptyList());

	testHarness.processElement(new StreamRecord<>(0, 10L));

	verify(mockAssigner, times(1)).assignWindows(eq(0), eq(5L), anyAssignerContext());
	verify(mockAssigner, times(1)).assignWindows(eq(0), eq(10L), anyAssignerContext());

	assertThat(testHarness.getSideOutput(lateOutputTag),
			contains(isStreamRecord(0, 5L), isStreamRecord(0, 10L)));

}