Java Code Examples for org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#setProcessingTime()

The following examples show how to use org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness#setProcessingTime(), drawn from the open-source projects named above each example.
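
Before the examples, here is a minimal, self-contained sketch of the pattern they all share: setProcessingTime() advances the harness's mock processing-time clock, and any processing-time timers that fall due fire synchronously. The TimerFn class, the main-method wrapper, and the 10 ms delay below are illustrative assumptions, not taken from any of the projects that follow.

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.operators.KeyedProcessOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.util.Collector;

public class SetProcessingTimeSketch {

	// Illustrative helper (not from the projects below): registers a
	// processing-time timer 10 ms after each element arrives.
	private static class TimerFn extends KeyedProcessFunction<Integer, Integer, String> {
		@Override
		public void processElement(Integer value, Context ctx, Collector<String> out) throws Exception {
			ctx.timerService().registerProcessingTimeTimer(
					ctx.timerService().currentProcessingTime() + 10);
		}

		@Override
		public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) {
			out.collect("FIRED@" + timestamp);
		}
	}

	public static void main(String[] args) throws Exception {
		OneInputStreamOperatorTestHarness<Integer, String> harness =
				new KeyedOneInputStreamOperatorTestHarness<>(
						new KeyedProcessOperator<>(new TimerFn()),
						value -> value,
						BasicTypeInfo.INT_TYPE_INFO);

		harness.open();
		harness.setProcessingTime(0L);                   // pin the mock clock to a known value
		harness.processElement(new StreamRecord<>(1));   // registers a timer for t = 10
		harness.setProcessingTime(10L);                  // advancing the clock fires the timer
		System.out.println(harness.getOutput());         // output now contains "FIRED@10"
		harness.close();
	}
}

The harness clock never advances on its own, so timers fire only when the test calls setProcessingTime() explicitly; every example below follows this pattern.
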
Example 1
Source File: PythonScalarFunctionOperatorTestBase.java    From flink with Apache License 2.0
@Test
public void testFinishBundleTriggeredByTime() throws Exception {
	Configuration conf = new Configuration();
	conf.setInteger(PythonOptions.MAX_BUNDLE_SIZE, 10);
	conf.setLong(PythonOptions.MAX_BUNDLE_TIME_MILLS, 1000L);
	OneInputStreamOperatorTestHarness<IN, OUT> testHarness = getTestHarness(conf);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(newRow(true, "c1", "c2", 0L), initialTime + 1));
	assertOutputEquals("FinishBundle should not be triggered.", expectedOutput, testHarness.getOutput());

	testHarness.setProcessingTime(1000L);
	expectedOutput.add(new StreamRecord<>(newRow(true, "c1", "c2", 0L)));
	assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 2
Source File: KeyedProcessOperatorTest.java    From flink with Apache License 2.0
@Test
public void testNullOutputTagRefusal() throws Exception {
	KeyedProcessOperator<Integer, Integer, String> operator = new KeyedProcessOperator<>(new NullOutputTagEmittingProcessFunction());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(
			operator, new IdentityKeySelector<>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(17);
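	// the function emits to a null OutputTag, which the operator must reject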
	try {
		expectedException.expect(IllegalArgumentException.class);
		testHarness.processElement(new StreamRecord<>(5));
	} finally {
		testHarness.close();
	}
}
 
Example 3
Source File: BucketingSinkTest.java    From flink with Apache License 2.0
@Test
public void testInactivityPeriodWithLateNotify() throws Exception {
	final File outDir = tempFolder.newFolder();

	OneInputStreamOperatorTestHarness<String, Object> testHarness = createRescalingTestSink(outDir, 1, 0, 100);
	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(0L);

	testHarness.processElement(new StreamRecord<>("test1", 1L));
	testHarness.processElement(new StreamRecord<>("test2", 1L));
	checkLocalFs(outDir, 2, 0, 0, 0);

	testHarness.setProcessingTime(101L);	// put some in pending
	checkLocalFs(outDir, 0, 2, 0, 0);

	testHarness.snapshot(0, 0);				// put them in pending for 0
	checkLocalFs(outDir, 0, 2, 0, 0);

	testHarness.processElement(new StreamRecord<>("test3", 1L));
	testHarness.processElement(new StreamRecord<>("test4", 1L));

	testHarness.setProcessingTime(202L);	// put some in pending

	testHarness.snapshot(1, 0);				// put them in pending for 1
	checkLocalFs(outDir, 0, 4, 0, 0);

	testHarness.notifyOfCompletedCheckpoint(0);	// put the pending for 0 to the "committed" state
	checkLocalFs(outDir, 0, 2, 2, 0);

	testHarness.notifyOfCompletedCheckpoint(1); // put the pending for 1 to the "committed" state
	checkLocalFs(outDir, 0, 0, 4, 0);
}
 
Example 4
Source File: LegacyKeyedProcessOperatorTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Verifies that we don't have leakage between different keys.
 */
@Test
public void testProcessingTimeTimerWithState() throws Exception {

	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
			new LegacyKeyedProcessOperator<>(new TriggeringStatefulFlatMapFunction(TimeDomain.PROCESSING_TIME));

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(1);
	testHarness.processElement(new StreamRecord<>(17)); // should set timer for 6

	testHarness.setProcessingTime(2);
	testHarness.processElement(new StreamRecord<>(42)); // should set timer for 7

	testHarness.setProcessingTime(6);
	testHarness.setProcessingTime(7);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new StreamRecord<>("INPUT:17"));
	expectedOutput.add(new StreamRecord<>("INPUT:42"));
	expectedOutput.add(new StreamRecord<>("STATE:17"));
	expectedOutput.add(new StreamRecord<>("STATE:42"));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 5
Source File: BucketingSinkTest.java    From flink with Apache License 2.0
/**
 * This tests {@link StringWriter} with
 * non-bucketing output.
 */
@Test
public void testNonRollingStringWriter() throws Exception {
	final String outPath = hdfsURI + "/string-non-rolling-out";

	final int numElements = 20;

	BucketingSink<String> sink = new BucketingSink<String>(outPath)
		.setBucketer(new BasePathBucketer<String>())
		.setPartPrefix(PART_PREFIX)
		.setPendingPrefix("")
		.setPendingSuffix("");

	OneInputStreamOperatorTestHarness<String, Object> testHarness = createTestSink(sink, 1, 0);

	testHarness.setProcessingTime(0L);

	testHarness.setup();
	testHarness.open();

	for (int i = 0; i < numElements; i++) {
		testHarness.processElement(new StreamRecord<>("message #" + Integer.toString(i)));
	}

	testHarness.close();

	FSDataInputStream inStream = dfs.open(new Path(outPath + "/" + PART_PREFIX + "-0-0"));

	BufferedReader br = new BufferedReader(new InputStreamReader(inStream));

	for (int i = 0; i < numElements; i++) {
		String line = br.readLine();
		Assert.assertEquals("message #" + i, line);
	}

	inStream.close();
}
 
Example 6
Source File: BucketingSinkTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testInactivityPeriodWithLateNotify() throws Exception {
	final File outDir = tempFolder.newFolder();

	OneInputStreamOperatorTestHarness<String, Object> testHarness = createRescalingTestSink(outDir, 1, 0, 100);
	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(0L);

	testHarness.processElement(new StreamRecord<>("test1", 1L));
	testHarness.processElement(new StreamRecord<>("test2", 1L));
	checkLocalFs(outDir, 2, 0, 0, 0);

	testHarness.setProcessingTime(101L);	// put some in pending
	checkLocalFs(outDir, 0, 2, 0, 0);

	testHarness.snapshot(0, 0);				// put them in pending for 0
	checkLocalFs(outDir, 0, 2, 0, 0);

	testHarness.processElement(new StreamRecord<>("test3", 1L));
	testHarness.processElement(new StreamRecord<>("test4", 1L));

	testHarness.setProcessingTime(202L);	// put some in pending

	testHarness.snapshot(1, 0);				// put them in pending for 1
	checkLocalFs(outDir, 0, 4, 0, 0);

	testHarness.notifyOfCompletedCheckpoint(0);	// put the pending for 0 to the "committed" state
	checkLocalFs(outDir, 0, 2, 2, 0);

	testHarness.notifyOfCompletedCheckpoint(1); // put the pending for 1 to the "committed" state
	checkLocalFs(outDir, 0, 0, 4, 0);
}
 
Example 7
Source File: WindowOperatorContractTest.java    From flink with Apache License 2.0
public void advanceTime(OneInputStreamOperatorTestHarness testHarness, long timestamp) throws Exception {
	testHarness.setProcessingTime(timestamp);
}
 
Example 8
Source File: CEPOperatorTest.java    From flink with Apache License 2.0
@Test
public void testCEPOperatorComparatorProcessTime() throws Exception {
	Event startEvent1 = new Event(42, "start", 1.0);
	Event startEvent2 = new Event(42, "start", 2.0);
	SubEvent middleEvent1 = new SubEvent(42, "foo1", 3.0, 10.0);
	SubEvent middleEvent2 = new SubEvent(42, "foo2", 4.0, 10.0);
	Event endEvent1 = new Event(42, "end", 1.0);

	Event startEventK2 = new Event(43, "start", 1.0);

	CepOperator<Event, Integer, Map<String, List<Event>>> operator = getKeyedCepOperatorWithComparator(true);
	OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);

	try {
		harness.open();

		harness.setProcessingTime(0L);

		harness.processElement(new StreamRecord<>(startEvent1, 0L));
		harness.processElement(new StreamRecord<>(startEventK2, 0L));
		harness.processElement(new StreamRecord<>(new Event(42, "foobar", 1.0), 0L));
		harness.processElement(new StreamRecord<>(new SubEvent(42, "barfoo", 1.0, 5.0), 0L));

		assertFalse(operator.hasNonEmptySharedBuffer(42));
		assertFalse(operator.hasNonEmptySharedBuffer(43));

		harness.setProcessingTime(3L);
		assertTrue(operator.hasNonEmptySharedBuffer(42));
		assertTrue(operator.hasNonEmptySharedBuffer(43));

		harness.processElement(new StreamRecord<>(middleEvent2, 3L));
		harness.processElement(new StreamRecord<>(middleEvent1, 3L));
		harness.processElement(new StreamRecord<>(startEvent2, 3L));

		OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
		harness.close();

		CepOperator<Event, Integer, Map<String, List<Event>>> operator2 = getKeyedCepOperatorWithComparator(true);
		harness = CepOperatorTestUtilities.getCepTestHarness(operator2);
		harness.setup();
		harness.initializeState(snapshot);
		harness.open();

		harness.setProcessingTime(4L);
		harness.processElement(new StreamRecord<>(endEvent1, 5L));
		harness.setProcessingTime(5L);

		verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
		verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent2, endEvent1);
		verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent1, endEvent1);
		verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent1);
	} finally {
		harness.close();
	}
}
 
Example 9
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
public void testProcessingTimeSessionWindows() throws Throwable {
	closeCalled.set(0);

	WindowOperator operator = WindowOperatorBuilder
			.builder()
			.withInputFields(inputFieldTypes)
			.session(Duration.ofSeconds(3))
			.withProcessingTime()
			.aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);

	RowDataHarnessAssertor assertor = new RowDataHarnessAssertor(
			outputType.getFieldTypes(), new GenericRowRecordSortComparator(0, new VarCharType(VarCharType.MAX_LENGTH)));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// timestamp is ignored in processing time
	testHarness.setProcessingTime(3);
	testHarness.processElement(insertRecord("key2", 1, 1L));

	testHarness.setProcessingTime(1000);
	testHarness.processElement(insertRecord("key2", 1, 1002L));

	testHarness.setProcessingTime(5000);

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 2L, 2L, 3L, 4000L, 3999L)));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(insertRecord("key2", 1, 5000L));
	testHarness.processElement(insertRecord("key2", 1, 5000L));
	testHarness.processElement(insertRecord("key1", 1, 5000L));
	testHarness.processElement(insertRecord("key1", 1, 5000L));
	testHarness.processElement(insertRecord("key1", 1, 5000L));

	testHarness.setProcessingTime(10000);

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 2L, 2L, 5000L, 8000L, 7999L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 3L, 3L, 5000L, 8000L, 7999L)));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 10
Source File: BucketingSinkTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * This tests {@link AvroKeyValueSinkWriter}
 * with non-rolling output and with compression.
 */
@Test
public void testNonRollingAvroKeyValueWithCompressionWriter() throws Exception {
	final String outPath = hdfsURI + "/avro-kv-no-comp-non-rolling-out";

	final int numElements = 20;

	Map<String, String> properties = new HashMap<>();
	Schema keySchema = Schema.create(Schema.Type.INT);
	Schema valueSchema = Schema.create(Schema.Type.STRING);
	properties.put(AvroKeyValueSinkWriter.CONF_OUTPUT_KEY_SCHEMA, keySchema.toString());
	properties.put(AvroKeyValueSinkWriter.CONF_OUTPUT_VALUE_SCHEMA, valueSchema.toString());
	properties.put(AvroKeyValueSinkWriter.CONF_COMPRESS, String.valueOf(true));
	properties.put(AvroKeyValueSinkWriter.CONF_COMPRESS_CODEC, DataFileConstants.SNAPPY_CODEC);

	BucketingSink<Tuple2<Integer, String>> sink = new BucketingSink<Tuple2<Integer, String>>(outPath)
		.setWriter(new AvroKeyValueSinkWriter<Integer, String>(properties))
		.setBucketer(new BasePathBucketer<Tuple2<Integer, String>>())
		.setPartPrefix(PART_PREFIX)
		.setPendingPrefix("")
		.setPendingSuffix("");

	OneInputStreamOperatorTestHarness<Tuple2<Integer, String>, Object> testHarness =
		createTestSink(sink, 1, 0);

	testHarness.setProcessingTime(0L);

	testHarness.setup();
	testHarness.open();

	for (int i = 0; i < numElements; i++) {
		testHarness.processElement(new StreamRecord<>(Tuple2.of(
			i, "message #" + Integer.toString(i)
		)));
	}

	testHarness.close();

	GenericData.setStringType(valueSchema, GenericData.StringType.String);
	Schema elementSchema = AvroKeyValueSinkWriter.AvroKeyValue.getSchema(keySchema, valueSchema);

	FSDataInputStream inStream = dfs.open(new Path(outPath + "/" + PART_PREFIX + "-0-0"));

	SpecificDatumReader<GenericRecord> elementReader = new SpecificDatumReader<>(elementSchema);
	DataFileStream<GenericRecord> dataFileStream = new DataFileStream<>(inStream, elementReader);
	for (int i = 0; i < numElements; i++) {
		AvroKeyValueSinkWriter.AvroKeyValue<Integer, String> wrappedEntry =
			new AvroKeyValueSinkWriter.AvroKeyValue<>(dataFileStream.next());
		int key = wrappedEntry.getKey();
		Assert.assertEquals(i, key);
		String value = wrappedEntry.getValue();
		Assert.assertEquals("message #" + i, value);
	}

	dataFileStream.close();
	inStream.close();
}
 
Example 11
Source File: WatermarkAssignerOperatorTest.java    From flink with Apache License 2.0
@Test
public void testCustomizedWatermarkGenerator() throws Exception {
	MyWatermarkGenerator.openCalled = false;
	MyWatermarkGenerator.closeCalled = false;
	WatermarkGenerator generator = new MyWatermarkGenerator(1);

	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(0, generator, -1);

	testHarness.getExecutionConfig().setAutoWatermarkInterval(5);

	long currentTime = 0;
	List<Watermark> expected = new ArrayList<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(GenericRowData.of(1L, 0L)));
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(2L, 1L)));
	testHarness.processWatermark(new Watermark(2)); // this watermark should be ignored
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(3L, 1L)));
	currentTime = currentTime + 5;
	testHarness.setProcessingTime(currentTime);
	expected.add(new Watermark(1L));

	testHarness.processElement(new StreamRecord<>(GenericRowData.of(4L, 2L)));
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(2L, 1L)));
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(1L, 0L)));
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(6L, null)));
	currentTime = currentTime + 5;
	testHarness.setProcessingTime(currentTime);
	expected.add(new Watermark(2L));

	testHarness.processElement(new StreamRecord<>(GenericRowData.of(9L, 8L)));
	expected.add(new Watermark(8L));

	// no watermark output
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(8L, 7L)));
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(10L, null)));
	testHarness.processElement(new StreamRecord<>(GenericRowData.of(11L, 10L)));
	currentTime = currentTime + 5;
	testHarness.setProcessingTime(currentTime);
	expected.add(new Watermark(10L));

	testHarness.close();
	expected.add(Watermark.MAX_WATERMARK);

	// expected size = emitted watermarks + the 11 input records
	assertEquals(expected.size() + 11, testHarness.getOutput().size());
	List<Watermark> results = extractWatermarks(testHarness.getOutput());
	assertEquals(expected, results);
	assertTrue(MyWatermarkGenerator.openCalled);
	assertTrue(MyWatermarkGenerator.closeCalled);
}
 
Example 12
Source File: BucketingSinkTest.java    From flink with Apache License 2.0
/**
 * This tests {@link AvroKeyValueSinkWriter}
 * with non-rolling output and with compression.
 */
@Test
public void testNonRollingAvroKeyValueWithCompressionWriter() throws Exception {
	final String outPath = hdfsURI + "/avro-kv-no-comp-non-rolling-out";

	final int numElements = 20;

	Map<String, String> properties = new HashMap<>();
	Schema keySchema = Schema.create(Schema.Type.INT);
	Schema valueSchema = Schema.create(Schema.Type.STRING);
	properties.put(AvroKeyValueSinkWriter.CONF_OUTPUT_KEY_SCHEMA, keySchema.toString());
	properties.put(AvroKeyValueSinkWriter.CONF_OUTPUT_VALUE_SCHEMA, valueSchema.toString());
	properties.put(AvroKeyValueSinkWriter.CONF_COMPRESS, String.valueOf(true));
	properties.put(AvroKeyValueSinkWriter.CONF_COMPRESS_CODEC, DataFileConstants.SNAPPY_CODEC);

	BucketingSink<Tuple2<Integer, String>> sink = new BucketingSink<Tuple2<Integer, String>>(outPath)
		.setWriter(new AvroKeyValueSinkWriter<Integer, String>(properties))
		.setBucketer(new BasePathBucketer<Tuple2<Integer, String>>())
		.setPartPrefix(PART_PREFIX)
		.setPendingPrefix("")
		.setPendingSuffix("");

	OneInputStreamOperatorTestHarness<Tuple2<Integer, String>, Object> testHarness =
		createTestSink(sink, 1, 0);

	testHarness.setProcessingTime(0L);

	testHarness.setup();
	testHarness.open();

	for (int i = 0; i < numElements; i++) {
		testHarness.processElement(new StreamRecord<>(Tuple2.of(
			i, "message #" + Integer.toString(i)
		)));
	}

	testHarness.close();

	GenericData.setStringType(valueSchema, GenericData.StringType.String);
	Schema elementSchema = AvroKeyValueSinkWriter.AvroKeyValue.getSchema(keySchema, valueSchema);

	FSDataInputStream inStream = dfs.open(new Path(outPath + "/" + PART_PREFIX + "-0-0"));

	SpecificDatumReader<GenericRecord> elementReader = new SpecificDatumReader<>(elementSchema);
	DataFileStream<GenericRecord> dataFileStream = new DataFileStream<>(inStream, elementReader);
	for (int i = 0; i < numElements; i++) {
		AvroKeyValueSinkWriter.AvroKeyValue<Integer, String> wrappedEntry =
			new AvroKeyValueSinkWriter.AvroKeyValue<>(dataFileStream.next());
		int key = wrappedEntry.getKey();
		Assert.assertEquals(i, key);
		String value = wrappedEntry.getValue();
		Assert.assertEquals("message #" + i, value);
	}

	dataFileStream.close();
	inStream.close();
}
 
Example 13
Source File: WindowOperatorMigrationTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeApplyProcessingTimeWindowsSnapshot() throws Exception {
	final int windowSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			TumblingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>()),
			ProcessingTimeTrigger.create(),
			0,
			null /* late data output tag */);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(10);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));

	testHarness.setProcessingTime(3010);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key3", 1)));

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 1), 2999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 2999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator<>());

	// do snapshot and save to file
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
	OperatorSnapshotUtil.writeStateHandle(
		snapshot,
		"src/test/resources/win-op-migration-test-apply-processing-time-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
 
Example 14
Source File: WindowOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testProcessingTimeSessionWindows() throws Throwable {
	final int windowGap = 3;

	ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
			new SumReducer(),
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			ProcessingTimeSessionWindows.withGap(Time.of(windowGap, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
			ProcessingTimeTrigger.create(),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
			createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// timestamp is ignored in processing time
	testHarness.setProcessingTime(3);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1)); //Long.MAX_VALUE));

	testHarness.setProcessingTime(1000);
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1002)); //Long.MAX_VALUE));

	testHarness.setProcessingTime(5000);

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 3999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 5000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 5000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
	testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));

	testHarness.setProcessingTime(10000);

	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 7999));
	expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 7999));

	TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

	assertEquals(expectedOutput.size(), testHarness.getOutput().size());
	for (Object elem : testHarness.getOutput()) {
		if (elem instanceof StreamRecord) {
			StreamRecord<Tuple2<String, Integer>> el = (StreamRecord<Tuple2<String, Integer>>) elem;
			assertTrue(expectedOutput.contains(el));
		}
	}
	testHarness.close();
}
 
Example 15
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testEventTimeSessionWindows() throws Exception {
	closeCalled.set(0);

	WindowOperator operator = WindowOperatorBuilder
			.builder()
			.withInputFields(inputFieldTypes)
			.session(Duration.ofSeconds(3))
			.withEventTime(2)
			.aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// add elements out-of-order
	testHarness.processElement(record("key2", 1, 0L));
	testHarness.processElement(record("key2", 2, 1000L));
	testHarness.processElement(record("key2", 3, 2500L));

	testHarness.processElement(record("key1", 1, 10L));
	testHarness.processElement(record("key1", 2, 1000L));

	// do a snapshot, close and restore again
	OperatorSubtaskState snapshotV2 = testHarness.snapshot(0L, 0);
	testHarness.close();
	expectedOutput.clear();

	testHarness = createTestHarness(operator);
	testHarness.setup();
	testHarness.initializeState(snapshotV2);
	testHarness.open();

	assertEquals(0L, operator.getWatermarkLatency().getValue());

	testHarness.processElement(record("key1", 3, 2500L));

	testHarness.processElement(record("key2", 4, 5501L));
	testHarness.processElement(record("key2", 5, 6000L));
	testHarness.processElement(record("key2", 5, 6000L));
	testHarness.processElement(record("key2", 6, 6050L));

	testHarness.processWatermark(new Watermark(12000));

	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key1", 6L, 3L, 10L, 5500L, 5499L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 6L, 3L, 0L, 5500L, 5499L)));

	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 20L, 4L, 5501L, 9050L, 9049L)));
	expectedOutput.add(new Watermark(12000));

	// add a late data
	testHarness.processElement(record("key1", 3, 4000L));
	testHarness.processElement(record("key2", 10, 15000L));
	testHarness.processElement(record("key2", 20, 15000L));

	testHarness.processWatermark(new Watermark(17999));

	expectedOutput.addAll(doubleRecord(isTableAggregate, record("key2", 30L, 2L, 15000L, 18000L, 17999L)));
	expectedOutput.add(new Watermark(17999));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.setProcessingTime(18000);
	assertEquals(1L, operator.getWatermarkLatency().getValue());

	testHarness.close();

	// the operator was closed twice: once before the restore and once just above
	assertEquals("Close was not called.", 2, closeCalled.get());
	assertEquals(1, operator.getNumLateRecordsDropped().getCount());
}
 
Example 16
Source File: KeyedProcessOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testProcessingTimeTimers() throws Exception {

	final int expectedKey = 17;

	KeyedProcessOperator<Integer, Integer, Integer> operator =
			new KeyedProcessOperator<>(new TriggeringFlatMapFunction(TimeDomain.PROCESSING_TIME, expectedKey));

	OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(expectedKey));

	testHarness.setProcessingTime(5);
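	// advancing to 5 fires the timer registered by TriggeringFlatMapFunction, which emits 1777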

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new StreamRecord<>(expectedKey));
	expectedOutput.add(new StreamRecord<>(1777));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 17
Source File: LegacyKeyedProcessOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSnapshotAndRestore() throws Exception {

	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
			new LegacyKeyedProcessOperator<>(new BothTriggeringFlatMapFunction());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(5, 12L));

	// snapshot and restore from scratch
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);

	testHarness.close();

	operator = new LegacyKeyedProcessOperator<>(new BothTriggeringFlatMapFunction());

	testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.initializeState(snapshot);
	testHarness.open();

	testHarness.setProcessingTime(5);
	testHarness.processWatermark(new Watermark(6));
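	// the timers restored from the snapshot now fire: processing time at 5, event time at 6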

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new StreamRecord<>("PROC:1777"));
	expectedOutput.add(new StreamRecord<>("EVENT:1777", 6L));
	expectedOutput.add(new Watermark(6));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 18
Source File: ProcessOperatorTest.java    From flink with Apache License 2.0
@Test
public void testTimestampAndProcessingTimeQuerying() throws Exception {

	ProcessOperator<Integer, String> operator =
			new ProcessOperator<>(new QueryingProcessFunction(TimeDomain.PROCESSING_TIME));

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new OneInputStreamOperatorTestHarness<>(operator);

	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(17);
	testHarness.processElement(new StreamRecord<>(5));

	testHarness.setProcessingTime(42);
	testHarness.processElement(new StreamRecord<>(6));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new StreamRecord<>("5TIME:17 TS:null"));
	expectedOutput.add(new StreamRecord<>("6TIME:42 TS:null"));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 19
Source File: WindowOperatorTest.java    From flink with Apache License 2.0
@Test
public void testProcessingTimeSlidingWindows() throws Throwable {
	closeCalled.set(0);

	WindowOperator operator = WindowOperatorBuilder
			.builder()
			.withInputFields(inputFieldTypes)
			.sliding(Duration.ofSeconds(3), Duration.ofSeconds(1))
			.withProcessingTime()
			.aggregateAndBuild(getTimeWindowAggFunction(), equaliser, accTypes, aggResultTypes, windowTypes);

	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	// timestamp is ignored in processing time
	testHarness.setProcessingTime(3);
	testHarness.processElement(insertRecord("key2", 1, Long.MAX_VALUE));

	testHarness.setProcessingTime(1000);

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 1L, 1L, -2000L, 1000L, 999L)));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(insertRecord("key2", 1, Long.MAX_VALUE));
	testHarness.processElement(insertRecord("key2", 1, Long.MAX_VALUE));

	testHarness.setProcessingTime(2000);

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 3L, 3L, -1000L, 2000L, 1999L)));
	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(insertRecord("key1", 1, Long.MAX_VALUE));
	testHarness.processElement(insertRecord("key1", 1, Long.MAX_VALUE));

	testHarness.setProcessingTime(3000);

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 3L, 3L, 0L, 3000L, 2999L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 2L, 2L, 0L, 3000L, 2999L)));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processElement(insertRecord("key1", 1, Long.MAX_VALUE));
	testHarness.processElement(insertRecord("key1", 1, Long.MAX_VALUE));
	testHarness.processElement(insertRecord("key1", 1, Long.MAX_VALUE));

	testHarness.setProcessingTime(7000);

	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key2", 2L, 2L, 1000L, 4000L, 3999L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 5L, 5L, 1000L, 4000L, 3999L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 5L, 5L, 2000L, 5000L, 4999L)));
	expectedOutput.addAll(doubleRecord(isTableAggregate, insertRecord("key1", 3L, 3L, 3000L, 6000L, 5999L)));

	assertor.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}