Java Code Examples for org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness

The following examples show how to use org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness. They are extracted from open source projects; the source project and file are noted above each example.
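Before the project examples, here is a minimal sketch of the typical harness lifecycle: wrap an operator in the harness, open it, push elements (and, if needed, watermarks), inspect the output queue, then close. The StreamMap operator and the lambda below are illustrative assumptions for this sketch, not code taken from the projects that follow.

// Minimal lifecycle sketch (illustrative assumption, not from the projects below).
// StreamMap lives in org.apache.flink.streaming.api.operators; the harness comes
// from the flink-streaming-java test utilities.
OneInputStreamOperatorTestHarness<String, Integer> harness =
		new OneInputStreamOperatorTestHarness<>(
				new StreamMap<String, Integer>(value -> value.length()));

harness.open();                                          // initialize the operator
harness.processElement(new StreamRecord<>("flink", 1L)); // element with timestamp 1

// The output queue now holds a StreamRecord<Integer> with value 5 and timestamp 1.
System.out.println(harness.getOutput());

harness.close();

Keyed operators are exercised the same way through KeyedOneInputStreamOperatorTestHarness, which additionally takes a KeySelector and the key's TypeInformation, as many of the examples below demonstrate.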
Example 1
Source Project: flink   Source File: WindowOperatorContractTest.java    License: Apache License 2.0
@Test
public void testAssignerWithMultipleWindowsForAggregate() throws Exception {
	WindowAssigner<TimeWindow> mockAssigner = mockTimeWindowAssigner();
	Trigger<TimeWindow> mockTrigger = mockTrigger();
	NamespaceAggsHandleFunction<TimeWindow> mockAggregate = mockAggsHandleFunction();

	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness =
			createWindowOperator(mockAssigner, mockTrigger, mockAggregate, 0L);

	testHarness.open();

	when(mockAssigner.assignWindows(any(), anyLong()))
			.thenReturn(Arrays.asList(new TimeWindow(2, 4), new TimeWindow(0, 2)));

	shouldFireOnElement(mockTrigger);

	testHarness.processElement(record("String", 1, 0L));

	verify(mockAggregate, times(2)).getValue(anyTimeWindow());
	verify(mockAggregate, times(1)).getValue(eq(new TimeWindow(0, 2)));
	verify(mockAggregate, times(1)).getValue(eq(new TimeWindow(2, 4)));
}
 
Example 2
Source Project: Flink-CEPplus   Source File: FlinkKafkaProducerITCase.java    License: Apache License 2.0
@Test
public void testRunOutOfProducersInThePool() throws Exception {
	String topic = "flink-kafka-run-out-of-producers";

	try (OneInputStreamOperatorTestHarness<Integer, Object> testHarness = createTestHarness(topic)) {

		testHarness.setup();
		testHarness.open();

		for (int i = 0; i < FlinkKafkaProducer.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE * 2; i++) {
			testHarness.processElement(i, i * 2);
			testHarness.snapshot(i, i * 2 + 1);
		}
	}
	catch (Exception ex) {
		if (!ex.getCause().getMessage().startsWith("Too many ongoing")) {
			throw ex;
		}
	}
	deleteTestTopic(topic);
	checkProducerLeak();
}
 
Example 3
Source Project: flink   Source File: FlinkKafkaProducerITCase.java    License: Apache License 2.0
@Test
public void testRecoverCommittedTransaction() throws Exception {
	String topic = "flink-kafka-producer-recover-committed-transaction";

	OneInputStreamOperatorTestHarness<Integer, Object> testHarness = createTestHarness(topic);

	testHarness.setup();
	testHarness.open(); // producerA - start transaction (txn) 0
	testHarness.processElement(42, 0); // producerA - write 42 in txn 0
	OperatorSubtaskState checkpoint0 = testHarness.snapshot(0, 1); // producerA - pre commit txn 0, producerB - start txn 1
	testHarness.processElement(43, 2); // producerB - write 43 in txn 1
	testHarness.notifyOfCompletedCheckpoint(0); // producerA - commit txn 0 and return to the pool
	testHarness.snapshot(1, 3); // producerB - pre commit txn 1, producerA - start txn 2
	testHarness.processElement(44, 4); // producerA - write 44 in txn 2
	testHarness.close(); // producerA - abort txn 2

	testHarness = createTestHarness(topic);
	testHarness.initializeState(checkpoint0); // recover state 0 - producerA recover and commit txn 0
	testHarness.close();

	assertExactlyOnceForTopic(createProperties(), topic, 0, Arrays.asList(42));

	deleteTestTopic(topic);
	checkProducerLeak();
}
 
Example 4
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testTimeoutExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
	OneInputStreamOperatorTestHarness<Integer, Integer> harness =
		createTestHarness(new NoOpAsyncFunction<>(), 10L, 2, outputMode);

	harness.getEnvironment().setExpectedExternalFailureCause(Throwable.class);
	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(1, 1L);
	}

	harness.setProcessingTime(10L);

	synchronized (harness.getCheckpointLock()) {
		harness.close();
	}
}
 
Example 5
private OneInputStreamOperatorTestHarness<IN, OUT> getTestHarness(Configuration config) throws Exception {
	RowType dataType = new RowType(Arrays.asList(
		new RowType.RowField("f1", new VarCharType()),
		new RowType.RowField("f2", new VarCharType()),
		new RowType.RowField("f3", new BigIntType())));
	AbstractPythonScalarFunctionOperator<IN, OUT, UDFIN> operator = getTestOperator(
		config,
		new PythonFunctionInfo[] {
			new PythonFunctionInfo(
				AbstractPythonScalarFunctionRunnerTest.DummyPythonFunction.INSTANCE,
				new Integer[]{0})
		},
		dataType,
		dataType,
		new int[]{2},
		new int[]{0, 1}
	);

	OneInputStreamOperatorTestHarness<IN, OUT> testHarness =
		new OneInputStreamOperatorTestHarness<>(operator);
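	// Python function execution draws on Flink managed memory, so the test assigns the operator a fraction of it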
	testHarness.getStreamConfig().setManagedMemoryFraction(0.5);
	testHarness.setup(getOutputTypeSerializer(dataType));
	return testHarness;
}
 
Example 6
Source Project: flink   Source File: TestUtils.java    License: Apache License 2.0
static OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> createCustomRescalingTestSink(
		final File outDir,
		final int totalParallelism,
		final int taskIdx,
		final long bucketCheckInterval,
		final BucketAssigner<Tuple2<String, Integer>, String> bucketer,
		final Encoder<Tuple2<String, Integer>> writer,
		final RollingPolicy<Tuple2<String, Integer>, String> rollingPolicy,
		final BucketFactory<Tuple2<String, Integer>, String> bucketFactory) throws Exception {

	StreamingFileSink<Tuple2<String, Integer>> sink = StreamingFileSink
			.forRowFormat(new Path(outDir.toURI()), writer)
			.withBucketAssigner(bucketer)
			.withRollingPolicy(rollingPolicy)
			.withBucketCheckInterval(bucketCheckInterval)
			.withBucketFactory(bucketFactory)
			.build();

	return new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink), MAX_PARALLELISM, totalParallelism, taskIdx);
}
 
Example 7
Source Project: flink   Source File: KeyedProcessOperatorTest.java    License: Apache License 2.0
@Test
public void testNullOutputTagRefusal() throws Exception {
	KeyedProcessOperator<Integer, Integer, String> operator = new KeyedProcessOperator<>(new NullOutputTagEmittingProcessFunction());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
		new KeyedOneInputStreamOperatorTestHarness<>(
			operator, new IdentityKeySelector<>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(17);
	try {
		expectedException.expect(IllegalArgumentException.class);
		testHarness.processElement(new StreamRecord<>(5));
	} finally {
		testHarness.close();
	}
}
 
Example 8
Source Project: flink   Source File: TimestampsAndWatermarksOperatorTest.java    License: Apache License 2.0
@Test
public void punctuatedWatermarksDoNotRegress() throws Exception {
	OneInputStreamOperatorTestHarness<Tuple2<Boolean, Long>, Tuple2<Boolean, Long>> testHarness = createTestHarness(
			WatermarkStrategy
					.forGenerator((ctx) -> new PunctuatedWatermarkGenerator())
					.withTimestampAssigner((ctx) -> new TupleExtractor()));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>(true, 4L), 1));

	assertThat(pollNextStreamRecord(testHarness), streamRecord(new Tuple2<>(true, 4L), 4L));
	assertThat(pollNextLegacyWatermark(testHarness), is(legacyWatermark(4L)));

	testHarness.processElement(new StreamRecord<>(new Tuple2<>(true, 2L), 1));

	assertThat(pollNextStreamRecord(testHarness), streamRecord(new Tuple2<>(true, 2L), 2L));
	assertThat(testHarness.getOutput(), empty());
}
 
Example 9
Source Project: Flink-CEPplus   Source File: BucketingSinkTest.java    License: Apache License 2.0
private OneInputStreamOperatorTestHarness<String, Object> createTestSink(File dataDir, int totalParallelism, int taskIdx) throws Exception {
	BucketingSink<String> sink = new BucketingSink<String>(dataDir.getAbsolutePath())
		.setBucketer(new Bucketer<String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Path getBucketPath(Clock clock, Path basePath, String element) {
				return new Path(basePath, element);
			}
		})
		.setWriter(new StringWriter<String>())
		.setPartPrefix(PART_PREFIX)
		.setPendingPrefix("")
		.setInactiveBucketCheckInterval(5 * 60 * 1000L)
		.setInactiveBucketThreshold(5 * 60 * 1000L)
		.setPendingSuffix(PENDING_SUFFIX)
		.setInProgressSuffix(IN_PROGRESS_SUFFIX);

	return createTestSink(sink, totalParallelism, taskIdx);
}
 
Example 10
Source Project: flink   Source File: WindowOperatorContractTest.java    License: Apache License 2.0
@Test
public void testAssignerWithMultipleWindowsForAggregate() throws Exception {
	WindowAssigner<TimeWindow> mockAssigner = mockTimeWindowAssigner();
	Trigger<TimeWindow> mockTrigger = mockTrigger();
	NamespaceAggsHandleFunction<TimeWindow> mockAggregate = mockAggsHandleFunction();

	OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
			createWindowOperator(mockAssigner, mockTrigger, mockAggregate, 0L);

	testHarness.open();

	when(mockAssigner.assignWindows(any(), anyLong()))
			.thenReturn(Arrays.asList(new TimeWindow(2, 4), new TimeWindow(0, 2)));

	shouldFireOnElement(mockTrigger);

	testHarness.processElement(insertRecord("String", 1, 0L));

	verify(mockAggregate, times(2)).getValue(anyTimeWindow());
	verify(mockAggregate, times(1)).getValue(eq(new TimeWindow(0, 2)));
	verify(mockAggregate, times(1)).getValue(eq(new TimeWindow(2, 4)));
}
 
Example 11
Source Project: flink   Source File: FlinkKafkaProducer011ITCase.java    License: Apache License 2.0
/**
 * This test checks whether there is a resource leak in the form of a growing thread count.
 */
public void resourceCleanUp(Semantic semantic) throws Exception {
	String topic = "flink-kafka-producer-resource-cleanup-" + semantic;

	final int allowedEpsilonThreadCountGrow = 50;

	Optional<Integer> initialActiveThreads = Optional.empty();
	for (int i = 0; i < allowedEpsilonThreadCountGrow * 2; i++) {
		try (OneInputStreamOperatorTestHarness<Integer, Object> testHarness1 =
				createTestHarness(topic, 1, 1, 0, semantic)) {
			testHarness1.setup();
			testHarness1.open();
		}

		if (initialActiveThreads.isPresent()) {
			assertThat("active threads count",
				Thread.activeCount(),
				lessThan(initialActiveThreads.get() + allowedEpsilonThreadCountGrow));
		}
		else {
			initialActiveThreads = Optional.of(Thread.activeCount());
		}
		checkProducerLeak();
	}
}
 
Example 12
Source Project: flink   Source File: LegacyKeyedProcessOperatorTest.java    License: Apache License 2.0
@Test
public void testTimestampAndWatermarkQuerying() throws Exception {

	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
			new LegacyKeyedProcessOperator<>(new QueryingFlatMapFunction(TimeDomain.EVENT_TIME));

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processWatermark(new Watermark(17));
	testHarness.processElement(new StreamRecord<>(5, 12L));

	testHarness.processWatermark(new Watermark(42));
	testHarness.processElement(new StreamRecord<>(6, 13L));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new Watermark(17L));
	expectedOutput.add(new StreamRecord<>("5TIME:17 TS:12", 12L));
	expectedOutput.add(new Watermark(42L));
	expectedOutput.add(new StreamRecord<>("6TIME:42 TS:13", 13L));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 13
Source Project: Flink-CEPplus   Source File: CassandraSinkBaseTest.java    License: Apache License 2.0
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testWaitForPendingUpdatesOnSnapshot() throws Exception {
	final TestCassandraSink casSinkFunc = new TestCassandraSink();

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createOpenedTestHarness(casSinkFunc)) {
		CompletableFuture<ResultSet> completableFuture = new CompletableFuture<>();
		casSinkFunc.enqueueCompletableFuture(completableFuture);

		casSinkFunc.invoke("hello");
		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());

		final CountDownLatch latch = new CountDownLatch(1);
		Thread t = new CheckedThread("Flink-CassandraSinkBaseTest") {
			@Override
			public void go() throws Exception {
				testHarness.snapshot(123L, 123L);
				latch.countDown();
			}
		};
		t.start();
		while (t.getState() != Thread.State.WAITING) {
			Thread.sleep(5);
		}

		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());
		completableFuture.complete(null);
		latch.await();
		Assert.assertEquals(0, casSinkFunc.getAcquiredPermits());
	}
}
 
Example 14
Source Project: flink   Source File: KeyedProcessOperatorTest.java    License: Apache License 2.0
@Test
public void testTimestampAndWatermarkQuerying() throws Exception {

	KeyedProcessOperator<Integer, Integer, String> operator =
			new KeyedProcessOperator<>(new QueryingFlatMapFunction(TimeDomain.EVENT_TIME));

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processWatermark(new Watermark(17));
	testHarness.processElement(new StreamRecord<>(5, 12L));

	testHarness.processWatermark(new Watermark(42));
	testHarness.processElement(new StreamRecord<>(6, 13L));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new Watermark(17L));
	expectedOutput.add(new StreamRecord<>("5TIME:17 TS:12", 12L));
	expectedOutput.add(new Watermark(42L));
	expectedOutput.add(new StreamRecord<>("6TIME:42 TS:13", 13L));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 15
Source Project: flink   Source File: FlinkKinesisProducerTest.java    License: Apache License 2.0
/**
 * Test ensuring that if a snapshot call happens right after an async exception is caught, it should be rethrown.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
@Test
public void testAsyncErrorRethrownOnCheckpoint() throws Throwable {
	final DummyFlinkKinesisProducer<String> producer = new DummyFlinkKinesisProducer<>(new SimpleStringSchema());

	OneInputStreamOperatorTestHarness<String, Object> testHarness =
		new OneInputStreamOperatorTestHarness<>(new StreamSink<>(producer));

	testHarness.open();

	testHarness.processElement(new StreamRecord<>("msg-1"));

	producer.getPendingRecordFutures().get(0).setException(new Exception("artificial async exception"));

	try {
		testHarness.snapshot(123L, 123L);
	} catch (Exception e) {
		// the next checkpoint should rethrow the async exception
		Assert.assertTrue(ExceptionUtils.findThrowableWithMessage(e, "artificial async exception").isPresent());

		// test succeeded
		return;
	}

	Assert.fail();
}
 
Example 16
Source Project: Flink-CEPplus   Source File: StreamGroupedReduceTest.java    License: Apache License 2.0
@Test
public void testGroupedReduce() throws Exception {

	KeySelector<Integer, Integer> keySelector = new IntegerKeySelector();

	StreamGroupedReduce<Integer> operator = new StreamGroupedReduce<>(new MyReducer(), IntSerializer.INSTANCE);

	OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, BasicTypeInfo.INT_TYPE_INFO);

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
	testHarness.processElement(new StreamRecord<>(1, initialTime + 2));
	testHarness.processWatermark(new Watermark(initialTime + 2));
	testHarness.processElement(new StreamRecord<>(2, initialTime + 3));
	testHarness.processElement(new StreamRecord<>(2, initialTime + 4));
	testHarness.processElement(new StreamRecord<>(3, initialTime + 5));

	expectedOutput.add(new StreamRecord<>(1, initialTime + 1));
	expectedOutput.add(new StreamRecord<>(2, initialTime + 2));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<>(2, initialTime + 3));
	expectedOutput.add(new StreamRecord<>(4, initialTime + 4));
	expectedOutput.add(new StreamRecord<>(3, initialTime + 5));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example 17
Source Project: Flink-CEPplus   Source File: LocalStreamingFileSinkTest.java    License: Apache License 2.0
@Test
public void testClosingWithoutInput() throws Exception {
	final File outDir = TEMP_FOLDER.newFolder();

	try (
		OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
				TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 124L);
	) {
		testHarness.setup();
		testHarness.open();
	}
}
 
Example 18
Source Project: beam   Source File: ExecutableStageDoFnOperatorTest.java    License: Apache License 2.0
@Test
public void sdkErrorsSurfaceOnClose() throws Exception {
  TupleTag<Integer> mainOutput = new TupleTag<>("main-output");
  DoFnOperator.MultiOutputOutputManagerFactory<Integer> outputManagerFactory =
      new DoFnOperator.MultiOutputOutputManagerFactory(mainOutput, VoidCoder.of());
  ExecutableStageDoFnOperator<Integer, Integer> operator =
      getOperator(mainOutput, Collections.emptyList(), outputManagerFactory);

  OneInputStreamOperatorTestHarness<WindowedValue<Integer>, WindowedValue<Integer>> testHarness =
      new OneInputStreamOperatorTestHarness<>(operator);

  testHarness.open();

  @SuppressWarnings("unchecked")
  RemoteBundle bundle = Mockito.mock(RemoteBundle.class);
  when(stageBundleFactory.getBundle(any(), any(), any(), any())).thenReturn(bundle);

  @SuppressWarnings("unchecked")
  FnDataReceiver<WindowedValue<?>> receiver = Mockito.mock(FnDataReceiver.class);
  when(bundle.getInputReceivers()).thenReturn(ImmutableMap.of("input", receiver));

  Exception expected = new RuntimeException(new Exception());
  doThrow(expected).when(bundle).close();
  thrown.expectCause(is(expected));

  operator.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow(0)));
  testHarness.close();
}
 
Example 19
private OneInputStreamOperatorTestHarness<BaseRow, BaseRow> createTestHarness(
		MiniBatchDeduplicateKeepFirstRowFunction func)
		throws Exception {
	CountBundleTrigger<Tuple2<String, String>> trigger = new CountBundleTrigger<>(3); // finish a bundle after every 3 elements
	KeyedMapBundleOperator op = new KeyedMapBundleOperator(func, trigger);
	return new KeyedOneInputStreamOperatorTestHarness<>(op, rowKeySelector, rowKeySelector.getProducedType());
}
 
Example 20
Source Project: flink   Source File: BucketingSinkTest.java    License: Apache License 2.0
@Test
public void testInactivityPeriodWithLateNotify() throws Exception {
	final File outDir = tempFolder.newFolder();

	OneInputStreamOperatorTestHarness<String, Object> testHarness = createRescalingTestSink(outDir, 1, 0, 100);
	testHarness.setup();
	testHarness.open();

	testHarness.setProcessingTime(0L);

	testHarness.processElement(new StreamRecord<>("test1", 1L));
	testHarness.processElement(new StreamRecord<>("test2", 1L));
	checkLocalFs(outDir, 2, 0, 0, 0);

	testHarness.setProcessingTime(101L);	// put some in pending
	checkLocalFs(outDir, 0, 2, 0, 0);

	testHarness.snapshot(0, 0);				// put them in pending for 0
	checkLocalFs(outDir, 0, 2, 0, 0);

	testHarness.processElement(new StreamRecord<>("test3", 1L));
	testHarness.processElement(new StreamRecord<>("test4", 1L));

	testHarness.setProcessingTime(202L);	// put some in pending

	testHarness.snapshot(1, 0);				// put them in pending for 1
	checkLocalFs(outDir, 0, 4, 0, 0);

	testHarness.notifyOfCompletedCheckpoint(0);	// put the pending for 0 to the "committed" state
	checkLocalFs(outDir, 0, 2, 2, 0);

	testHarness.notifyOfCompletedCheckpoint(1); // put the pending for 1 to the "committed" state
	checkLocalFs(outDir, 0, 0, 4, 0);
}
 
Example 21
Source Project: flink   Source File: BucketingSinkTest.java    License: Apache License 2.0
private void testThatPartIndexIsIncremented(String partSuffix, String existingPartFile) throws Exception {
	File outDir = tempFolder.newFolder();
	long inactivityInterval = 100;

	java.nio.file.Path bucket = Paths.get(outDir.getPath());
	Files.createFile(bucket.resolve(existingPartFile));

	String basePath = outDir.getAbsolutePath();
	BucketingSink<String> sink = new BucketingSink<String>(basePath)
		.setBucketer(new BasePathBucketer<>())
		.setInactiveBucketCheckInterval(inactivityInterval)
		.setInactiveBucketThreshold(inactivityInterval)
		.setPartPrefix(PART_PREFIX)
		.setInProgressPrefix("")
		.setPendingPrefix("")
		.setValidLengthPrefix("")
		.setInProgressSuffix(IN_PROGRESS_SUFFIX)
		.setPendingSuffix(PENDING_SUFFIX)
		.setValidLengthSuffix(VALID_LENGTH_SUFFIX)
		.setPartSuffix(partSuffix)
		.setBatchSize(0);

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createTestSink(sink, 1, 0)) {
		testHarness.setup();
		testHarness.open();

		testHarness.setProcessingTime(0L);

		testHarness.processElement(new StreamRecord<>("test1", 1L));

		testHarness.setProcessingTime(101L);
		testHarness.snapshot(0, 0);
		testHarness.notifyOfCompletedCheckpoint(0);
	}

	String expectedFileName = partSuffix == null ? "part-0-1" : "part-0-1" + partSuffix;
	assertThat(Files.exists(bucket.resolve(expectedFileName)), is(true));
}
 
Example 22
Source Project: flink   Source File: AsyncLookupJoinHarnessTest.java    License: Apache License 2.0
@Test
public void testTemporalLeftAsyncJoinWithFilter() throws Exception {
	OneInputStreamOperatorTestHarness<BaseRow, BaseRow> testHarness = createHarness(
		JoinType.LEFT_JOIN,
		FilterOnTable.WITH_FILTER);

	testHarness.open();

	synchronized (testHarness.getCheckpointLock()) {
		testHarness.processElement(record(1, "a"));
		testHarness.processElement(record(2, "b"));
		testHarness.processElement(record(3, "c"));
		testHarness.processElement(record(4, "d"));
		testHarness.processElement(record(5, "e"));
	}

	// wait until all async collectors in the buffer have been emitted out.
	synchronized (testHarness.getCheckpointLock()) {
		testHarness.close();
	}

	List<Object> expectedOutput = new ArrayList<>();
	expectedOutput.add(record(1, "a", 1, "Julian"));
	expectedOutput.add(record(2, "b", null, null));
	expectedOutput.add(record(3, "c", 3, "Jackson"));
	expectedOutput.add(record(4, "d", 4, "Fabian"));
	expectedOutput.add(record(5, "e", null, null));

	assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
}
 
Example 23
Source Project: beam   Source File: DoFnOperatorTest.java    License: Apache License 2.0
/**
 * Ensures Jackson cache is cleaned to get rid of any references to the Flink Classloader. See
 * https://jira.apache.org/jira/browse/BEAM-6460
 */
@Test
public void testRemoveCachedClassReferences() throws Exception {
  OneInputStreamOperatorTestHarness<WindowedValue<String>, WindowedValue<String>> testHarness =
      new OneInputStreamOperatorTestHarness<>(getOperatorForCleanupInspection());

  LRUMap typeCache =
      (LRUMap) Whitebox.getInternalState(TypeFactory.defaultInstance(), "_typeCache");
  assertThat(typeCache.size(), greaterThan(0));
  testHarness.open();
  testHarness.close();
  assertThat(typeCache.size(), is(0));
}
 
Example 24
private OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, String> getTestHarness(
	BlockingFileInputFormat format,
	int noOfTasks,
	int taskIdx) throws Exception {
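	// the extra constructor arguments emulate subtask taskIdx of noOfTasks parallel instances (capped at maxParallelism)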
	OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, String> testHarness =
		new OneInputStreamOperatorTestHarness<>(
			new ContinuousFileReaderOperatorFactory<>(format, TypeExtractor.getInputFormatTypes(format), new ExecutionConfig()),
			maxParallelism,
			noOfTasks,
			taskIdx);
	testHarness.setTimeCharacteristic(TimeCharacteristic.EventTime);
	return testHarness;
}
 
Example 25
Source Project: flink   Source File: WindowOperatorMigrationTest.java    License: Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeSessionWindowsWithCountTriggerInMintConditionSnapshot() throws Exception {

	final int sessionSize = 3;

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
			EventTimeSessionWindows.withGap(Time.seconds(sessionSize)),
			new TimeWindow.Serializer(),
			new TupleKeySelector<String>(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new SessionWindowFunction()),
			PurgingTrigger.of(CountTrigger.of(4)),
			0,
			null /* late data output tag */);

	OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	// do snapshot and save to file
	OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
	OperatorSnapshotUtil.writeStateHandle(
		snapshot,
		"src/test/resources/win-op-migration-test-session-with-stateful-trigger-mint-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
 
Example 26
Source Project: flink   Source File: CepOperatorTestUtilities.java    License: Apache License 2.0
public static <T> OneInputStreamOperatorTestHarness<Event, T> getCepTestHarness(
	CepOperator<Event, Integer, T> cepOperator) throws Exception {
	KeySelector<Event, Integer> keySelector = new TestKeySelector();

	return new KeyedOneInputStreamOperatorTestHarness<>(
		cepOperator,
		keySelector,
		BasicTypeInfo.INT_TYPE_INFO);
}
 
Example 27
Source Project: flink   Source File: LegacyKeyedProcessOperatorTest.java    License: Apache License 2.0
/**
 * Verifies that we don't have leakage between different keys.
 */
@Test
public void testEventTimeTimerWithState() throws Exception {

	LegacyKeyedProcessOperator<Integer, Integer, String> operator =
			new LegacyKeyedProcessOperator<>(new TriggeringStatefulFlatMapFunction(TimeDomain.EVENT_TIME));

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
			new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

	testHarness.setup();
	testHarness.open();

	testHarness.processWatermark(new Watermark(1));
	testHarness.processElement(new StreamRecord<>(17, 0L)); // should set timer for 6

	testHarness.processWatermark(new Watermark(2));
	testHarness.processElement(new StreamRecord<>(42, 1L)); // should set timer for 7

	testHarness.processWatermark(new Watermark(6));
	testHarness.processWatermark(new Watermark(7));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new Watermark(1L));
	expectedOutput.add(new StreamRecord<>("INPUT:17", 0L));
	expectedOutput.add(new Watermark(2L));
	expectedOutput.add(new StreamRecord<>("INPUT:42", 1L));
	expectedOutput.add(new StreamRecord<>("STATE:17", 6L));
	expectedOutput.add(new Watermark(6L));
	expectedOutput.add(new StreamRecord<>("STATE:42", 7L));
	expectedOutput.add(new Watermark(7L));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.close();
}
 
Example 28
Source Project: flink   Source File: AbstractStreamOperatorTest.java    License: Apache License 2.0
/**
 * Extracts the result values from the test harness and clears the output queue.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private <T> List<T> extractResult(OneInputStreamOperatorTestHarness<?, T> testHarness) {
	List<StreamRecord<? extends T>> streamRecords = testHarness.extractOutputStreamRecords();
	List<T> result = new ArrayList<>();
	for (Object in : streamRecords) {
		if (in instanceof StreamRecord) {
			result.add((T) ((StreamRecord) in).getValue());
		}
	}
	testHarness.getOutput().clear();
	return result;
}
 
Example 29
Source Project: flink   Source File: ElasticsearchSinkBaseTest.java    License: Apache License 2.0
/** Tests that any item failure in the listener callbacks is rethrown on an immediately following invoke call. */
@Test
public void testItemFailureRethrownOnInvoke() throws Throwable {
	final DummyElasticsearchSink<String> sink = new DummyElasticsearchSink<>(
		new HashMap<String, String>(), new SimpleSinkFunction<String>(), new NoOpFailureHandler());

	final OneInputStreamOperatorTestHarness<String, Object> testHarness =
		new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));

	testHarness.open();

	// setup the next bulk request, and its mock item failures
	sink.setMockItemFailuresListForNextBulkItemResponses(Collections.singletonList(new Exception("artificial failure for record")));
	testHarness.processElement(new StreamRecord<>("msg"));
	verify(sink.getMockBulkProcessor(), times(1)).add(any(IndexRequest.class));

	// manually execute the next bulk request
	sink.manualBulkRequestWithAllPendingRequests();

	try {
		testHarness.processElement(new StreamRecord<>("next msg"));
	} catch (Exception e) {
		// the invoke should have failed with the failure
		Assert.assertTrue(e.getCause().getMessage().contains("artificial failure for record"));

		// test succeeded
		return;
	}

	Assert.fail();
}