org.apache.flink.streaming.api.operators.StreamMap Java Examples

The following examples show how to use org.apache.flink.streaming.api.operators.StreamMap. Each example notes the project and source file it was taken from.
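Before diving into the examples, a short orientation: StreamMap is the one-input stream operator that wraps a user-supplied MapFunction, and DataStream#map builds exactly this operator internally (see Example #30 at the end of this page). Most of the test examples below construct a StreamMap directly and either register it on a StreamConfig for a task test harness or pass it to DataStream#transform. The following minimal sketch shows that construction pattern; the class name and the upper-casing lambda are illustrative, not taken from any of the projects listed here.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.StreamMap;

public class StreamMapConfigSketch {
	public static void main(String[] args) {
		// StreamMap wraps a MapFunction as a one-input stream operator.
		StreamMap<String, String> mapOperator = new StreamMap<>(value -> value.toUpperCase());

		// In the task-level tests below, the operator is registered on a StreamConfig
		// (together with an OperatorID) before the test harness is started.
		StreamConfig streamConfig = new StreamConfig(new Configuration());
		streamConfig.setStreamOperator(mapOperator);
		streamConfig.setOperatorID(new OperatorID());
	}
}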
Example #1
Source File: StreamTaskTimerTest.java    From flink with Apache License 2.0
private StreamTaskTestHarness<?> startTestHarness() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO,
			BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setChainIndex(0);
	streamConfig.setStreamOperator(new StreamMap<String, String>(new DummyMapFunction<>()));

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	return testHarness;
}
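The DummyMapFunction used above is defined elsewhere in StreamTaskTimerTest and is not reproduced in this listing. Since the test only exercises the timer machinery, a plausible stand-in (an assumption, not the actual Flink test class) is a simple identity MapFunction; the IdentityMap helper that appears in later examples has the same shape.

import org.apache.flink.api.common.functions.MapFunction;

// Hypothetical stand-in for the DummyMapFunction referenced above:
// it forwards every element unchanged.
public class DummyMapFunction<T> implements MapFunction<T, T> {
	@Override
	public T map(T value) {
		return value;
	}
}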
 
Example #2
Source File: SiddhiCEPITCase.java    From bahir-flink with Apache License 2.0
@Test
public void testSimpleWriteAndRead() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Event> input = env.fromElements(
        Event.of(1, "start", 1.0),
        Event.of(2, "middle", 2.0),
        Event.of(3, "end", 3.0),
        Event.of(4, "start", 4.0),
        Event.of(5, "middle", 5.0),
        Event.of(6, "end", 6.0)
    );

    String path = tempFolder.newFile().toURI().toString();
    input.transform("transformer", TypeInformation.of(Event.class), new StreamMap<>(new MapFunction<Event, Event>() {
        @Override
        public Event map(Event event) throws Exception {
            return event;
        }
    })).writeAsText(path);
    env.execute();
    Assert.assertEquals(6, getLineCount(path));
}
 
Example #3
Source File: SiddhiCEPITCase.java    From flink-siddhi with Apache License 2.0
@Test
public void testSimpleWriteAndRead() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Event> input = env.fromElements(
        Event.of(1, "start", 1.0),
        Event.of(2, "middle", 2.0),
        Event.of(3, "end", 3.0),
        Event.of(4, "start", 4.0),
        Event.of(5, "middle", 5.0),
        Event.of(6, "end", 6.0)
    );

    String path = tempFolder.newFile().toURI().toString();
    input.transform("transformer", TypeInformation.of(Event.class), new StreamMap<>(new MapFunction<Event, Event>() {
        @Override
        public Event map(Event event) throws Exception {
            return event;
        }
    })).writeAsText(path);
    env.execute();
    Assert.assertEquals(6, getLineCount(path));
}
 
Example #4
Source File: KeyedStateBootstrapOperatorTest.java    From flink with Apache License 2.0
@Test
public void testNonTimerStatesRestorableByNonProcessesOperator() throws Exception {
	Path path = new Path(folder.newFolder().toURI());

	OperatorSubtaskState state;
	KeyedStateBootstrapOperator<Long, Long> bootstrapOperator = new KeyedStateBootstrapOperator<>(0L, path, new SimpleBootstrapFunction());
	try (KeyedOneInputStreamOperatorTestHarness<Long, Long, TaggedOperatorSubtaskState> harness = getHarness(bootstrapOperator)) {
		processElements(harness, 1L, 2L, 3L);
		state = getState(bootstrapOperator, harness);
	}

	StreamMap<Long, Long> mapOperator = new StreamMap<>(new StreamingFunction());
	try (KeyedOneInputStreamOperatorTestHarness<Long, Long, Long> harness = getHarness(mapOperator, state)) {
		processElements(harness, 1L, 2L, 3L);

		assertHarnessOutput(harness, 1L, 2L, 3L);
		harness.snapshot(0L, 0L);
	}
}
 
Example #5
Source File: StreamTaskTest.java    From flink with Apache License 2.0
/**
 * Tests that some StreamTask methods are called only in the main task's thread.
 * Currently, the main task's thread is the thread that creates the task.
 */
@Test
public void testThreadInvariants() throws Throwable {
	Configuration taskConfiguration = new Configuration();
	StreamConfig streamConfig = new StreamConfig(taskConfiguration);
	streamConfig.setStreamOperator(new StreamMap<>(value -> value));
	streamConfig.setOperatorID(new OperatorID());
	try (MockEnvironment mockEnvironment =
			new MockEnvironmentBuilder()
				.setTaskConfiguration(taskConfiguration)
				.build()) {

		ClassLoader taskClassLoader = new TestUserCodeClassLoader();

		RunningTask<ThreadInspectingTask> runningTask = runTask(() -> {
			Thread.currentThread().setContextClassLoader(taskClassLoader);
			return new ThreadInspectingTask(mockEnvironment);
		});
		runningTask.invocationFuture.get();

		assertThat(runningTask.streamTask.getTaskClassLoader(), is(sameInstance(taskClassLoader)));
	}
}
 
Example #6
Source File: StreamTaskTest.java    From flink with Apache License 2.0
private void testFailToConfirmCheckpointMessage(Consumer<StreamTask<?, ?>> consumer) throws Exception {
	StreamMap<Integer, Integer> streamMap = new StreamMap<>(new FailOnNotifyCheckpointMapper<>());
	MultipleInputStreamTaskTestHarnessBuilder<Integer> builder =
		new MultipleInputStreamTaskTestHarnessBuilder<>(OneInputStreamTask::new, BasicTypeInfo.INT_TYPE_INFO)
			.addInput(BasicTypeInfo.INT_TYPE_INFO);
	StreamTaskMailboxTestHarness<Integer> harness = builder
		.setupOutputForSingletonOperatorChain(streamMap)
		.build();

	try {
		consumer.accept(harness.streamTask);
		harness.streamTask.runMailboxStep();
		fail();
	} catch (ExpectedTestException expected) {
		// expected exception
	}
}
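The FailOnNotifyCheckpointMapper driving this test is defined elsewhere in StreamTaskTest and is not shown here. A plausible sketch (an assumption about its shape inferred from the catch block above, not the actual class) is a pass-through MapFunction that also implements CheckpointListener and fails when a checkpoint is acknowledged; the ExpectedTestException import below refers to Flink's test utility, with the package path assumed.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.runtime.operators.testutils.ExpectedTestException;
import org.apache.flink.runtime.state.CheckpointListener;

// Hypothetical sketch of the FailOnNotifyCheckpointMapper referenced above:
// elements pass through unchanged, but confirming a checkpoint throws the
// ExpectedTestException that the test expects to observe.
public class FailOnNotifyCheckpointMapper<T> implements MapFunction<T, T>, CheckpointListener {

	@Override
	public T map(T value) {
		return value;
	}

	@Override
	public void notifyCheckpointComplete(long checkpointId) {
		throw new ExpectedTestException();
	}
}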
 
Example #7
Source File: OneInputStreamTaskTest.java    From flink with Apache License 2.0
/**
 * This test verifies that open() and close() are correctly called. This test also verifies
 * that timestamps of emitted elements are correct. {@link StreamMap} assigns the input
 * timestamp to emitted elements.
 */
@Test
public void testOpenCloseAndTimestamps() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new TestOpenCloseMapFunction());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processElement(new StreamRecord<>("Hello", initialTime + 1));
	testHarness.processElement(new StreamRecord<>("Ciao", initialTime + 2));
	expectedOutput.add(new StreamRecord<>("Hello", initialTime + 1));
	expectedOutput.add(new StreamRecord<>("Ciao", initialTime + 2));

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	assertTrue("RichFunction methods where not called.", TestOpenCloseMapFunction.closeCalled);

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
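TestOpenCloseMapFunction is another helper from OneInputStreamTaskTest that is not part of this listing. A plausible sketch (an assumption, not the actual class) is a RichMapFunction that records its lifecycle calls in static flags, which is what the assertion on TestOpenCloseMapFunction.closeCalled above inspects.

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;

// Hypothetical sketch of TestOpenCloseMapFunction: it tracks whether the
// RichFunction lifecycle methods were invoked and otherwise forwards elements.
public class TestOpenCloseMapFunction extends RichMapFunction<String, String> {

	public static volatile boolean openCalled = false;
	public static volatile boolean closeCalled = false;

	@Override
	public void open(Configuration parameters) throws Exception {
		super.open(parameters);
		openCalled = true;
	}

	@Override
	public void close() throws Exception {
		super.close();
		closeCalled = true;
	}

	@Override
	public String map(String value) {
		if (!openCalled) {
			throw new IllegalStateException("open() was not called before map()");
		}
		return value;
	}
}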
 
Example #8
Source File: StreamTaskCancellationBarrierTest.java    From flink with Apache License 2.0
/**
 * This test verifies (for one-input tasks) that the stream tasks react in the following way to
 * receiving a checkpoint cancellation barrier:
 *   - send a "decline checkpoint" notification out (to the JobManager)
 *   - emit a cancellation barrier downstream.
 */
@Test
public void testDeclineCallOnCancelBarrierOneInput() throws Exception {

	OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			1, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	StreamMockEnvironment environment = spy(testHarness.createEnvironment());

	// start the task
	testHarness.invoke(environment);
	testHarness.waitForTaskRunning();

	// emit cancellation barriers
	testHarness.processEvent(new CancelCheckpointMarker(2L), 0, 1);
	testHarness.processEvent(new CancelCheckpointMarker(2L), 0, 0);
	testHarness.waitForInputProcessing();

	// the decline call should go to the coordinator
	verify(environment, times(1)).declineCheckpoint(eq(2L),
		argThat(new CheckpointExceptionMatcher(CheckpointFailureReason.CHECKPOINT_DECLINED_ON_CANCELLATION_BARRIER)));

	// a cancellation barrier should be downstream
	Object result = testHarness.getOutput().poll();
	assertNotNull("nothing emitted", result);
	assertTrue("wrong type emitted", result instanceof CancelCheckpointMarker);
	assertEquals("wrong checkpoint id", 2L, ((CancelCheckpointMarker) result).getCheckpointId());

	// cancel and shutdown
	testHarness.endInput();
	testHarness.waitForTaskCompletion();
}
 
Example #9
Source File: ListCheckpointedTest.java    From flink with Apache License 2.0
private static AbstractStreamOperatorTestHarness<Integer> createTestHarness(TestUserFunction userFunction) throws Exception {
	return new AbstractStreamOperatorTestHarness<>(
		new StreamMap<>(userFunction),
		1,
		1,
		0);
}
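The TestUserFunction handed to the harness here is defined elsewhere in ListCheckpointedTest. A plausible sketch (an assumption, not the actual test class) is a MapFunction that also implements the ListCheckpointed interface, so that the harness can snapshot and restore its state through the wrapping StreamMap operator.

import java.util.Collections;
import java.util.List;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;

// Hypothetical sketch of the TestUserFunction referenced above: it forwards
// integers and exposes the last seen value through the ListCheckpointed contract.
public class TestUserFunction implements MapFunction<Integer, Integer>, ListCheckpointed<Integer> {

	private Integer lastSeen = 0;

	@Override
	public Integer map(Integer value) {
		lastSeen = value;
		return value;
	}

	@Override
	public List<Integer> snapshotState(long checkpointId, long timestamp) {
		return Collections.singletonList(lastSeen);
	}

	@Override
	public void restoreState(List<Integer> state) {
		if (!state.isEmpty()) {
			lastSeen = state.get(0);
		}
	}
}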
 
Example #10
Source File: StreamTaskCancellationBarrierTest.java    From flink with Apache License 2.0
/**
 * This test verifies (for one-input tasks) that the stream tasks react in the following way to
 * receiving a checkpoint cancellation barrier:
 *   - send a "decline checkpoint" notification out (to the JobManager)
 *   - emit a cancellation barrier downstream.
 */
@Test
public void testDeclineCallOnCancelBarrierOneInput() throws Exception {

	OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			1, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	StreamMockEnvironment environment = spy(testHarness.createEnvironment());

	// start the task
	testHarness.invoke(environment);
	testHarness.waitForTaskRunning();

	// emit cancellation barriers
	testHarness.processEvent(new CancelCheckpointMarker(2L), 0, 1);
	testHarness.processEvent(new CancelCheckpointMarker(2L), 0, 0);
	testHarness.waitForInputProcessing();

	// the decline call should go to the coordinator
	verify(environment, times(1)).declineCheckpoint(eq(2L),
		argThat(new CheckpointExceptionMatcher(CheckpointFailureReason.CHECKPOINT_DECLINED_ON_CANCELLATION_BARRIER)));

	// a cancellation barrier should be downstream
	Object result = testHarness.getOutput().poll();
	assertNotNull("nothing emitted", result);
	assertTrue("wrong type emitted", result instanceof CancelCheckpointMarker);
	assertEquals("wrong checkpoint id", 2L, ((CancelCheckpointMarker) result).getCheckpointId());

	// cancel and shutdown
	testHarness.endInput();
	testHarness.waitForTaskCompletion();
}
 
Example #11
Source File: OneInputStreamTaskTest.java    From flink with Apache License 2.0
/**
 * This test verifies that open() and close() are correctly called. This test also verifies
 * that timestamps of emitted elements are correct. {@link StreamMap} assigns the input
 * timestamp to emitted elements.
 */
@Test
public void testOpenCloseAndTimestamps() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<String, String>(new TestOpenCloseMapFunction());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processElement(new StreamRecord<String>("Hello", initialTime + 1));
	testHarness.processElement(new StreamRecord<String>("Ciao", initialTime + 2));
	expectedOutput.add(new StreamRecord<String>("Hello", initialTime + 1));
	expectedOutput.add(new StreamRecord<String>("Ciao", initialTime + 2));

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	assertTrue("RichFunction methods where not called.", TestOpenCloseMapFunction.closeCalled);

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example #12
Source File: ListCheckpointedTest.java    From flink with Apache License 2.0
private static AbstractStreamOperatorTestHarness<Integer> createTestHarness(TestUserFunction userFunction) throws Exception {
	return new AbstractStreamOperatorTestHarness<>(
		new StreamMap<>(userFunction),
		1,
		1,
		0);
}
 
Example #13
Source File: OneInputStreamTaskTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * This test verifies that open() and close() are correctly called. This test also verifies
 * that timestamps of emitted elements are correct. {@link StreamMap} assigns the input
 * timestamp to emitted elements.
 */
@Test
public void testOpenCloseAndTimestamps() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<String, String>(new TestOpenCloseMapFunction());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processElement(new StreamRecord<String>("Hello", initialTime + 1));
	testHarness.processElement(new StreamRecord<String>("Ciao", initialTime + 2));
	expectedOutput.add(new StreamRecord<String>("Hello", initialTime + 1));
	expectedOutput.add(new StreamRecord<String>("Ciao", initialTime + 2));

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	assertTrue("RichFunction methods where not called.", TestOpenCloseMapFunction.closeCalled);

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example #14
Source File: StreamTaskCancellationBarrierTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * This test verifies (for one-input tasks) that the stream tasks react in the following way to
 * receiving a checkpoint cancellation barrier:
 *   - send a "decline checkpoint" notification out (to the JobManager)
 *   - emit a cancellation barrier downstream.
 */
@Test
public void testDeclineCallOnCancelBarrierOneInput() throws Exception {

	OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			1, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	StreamMockEnvironment environment = spy(testHarness.createEnvironment());

	// start the task
	testHarness.invoke(environment);
	testHarness.waitForTaskRunning();

	// emit cancellation barriers
	testHarness.processEvent(new CancelCheckpointMarker(2L), 0, 1);
	testHarness.processEvent(new CancelCheckpointMarker(2L), 0, 0);
	testHarness.waitForInputProcessing();

	// the decline call should go to the coordinator
	verify(environment, times(1)).declineCheckpoint(eq(2L), any(CheckpointDeclineOnCancellationBarrierException.class));

	// a cancellation barrier should be downstream
	Object result = testHarness.getOutput().poll();
	assertNotNull("nothing emitted", result);
	assertTrue("wrong type emitted", result instanceof CancelCheckpointMarker);
	assertEquals("wrong checkpoint id", 2L, ((CancelCheckpointMarker) result).getCheckpointId());

	// cancel and shutdown
	testHarness.endInput();
	testHarness.waitForTaskCompletion();
}
 
Example #15
Source File: ListCheckpointedTest.java    From Flink-CEPplus with Apache License 2.0
private static AbstractStreamOperatorTestHarness<Integer> createTestHarness(TestUserFunction userFunction) throws Exception {
	return new AbstractStreamOperatorTestHarness<>(
		new StreamMap<>(userFunction),
		1,
		1,
		0);
}
 
Example #16
Source File: OneInputStreamTaskTest.java    From flink with Apache License 2.0
/**
 * This test verifies that checkpoint barriers and barrier buffers work correctly with
 * concurrent checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e.
 * some inputs receive barriers from an earlier checkpoint, thereby blocking,
 * then all inputs receive barriers from a later checkpoint.
 */
@Test
public void testOvertakingCheckpointBarriers() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			2, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	long initialTime = 0L;

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

	// These elements should be forwarded, since we did not yet receive a checkpoint barrier
	// on that input, only add to same input, otherwise we would not know the ordering
	// of the output since the Task might read the inputs in any order
	testHarness.processElement(new StreamRecord<>("Hello-1-1", initialTime), 1, 1);
	testHarness.processElement(new StreamRecord<>("Ciao-1-1", initialTime), 1, 1);
	expectedOutput.add(new StreamRecord<>("Hello-1-1", initialTime));
	expectedOutput.add(new StreamRecord<>("Ciao-1-1", initialTime));

	testHarness.waitForInputProcessing();
	// we should not yet see the barrier, only the two elements from non-blocked input
	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	// Now give a later barrier to all inputs, this should unblock the first channel
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	expectedOutput.add(new CancelCheckpointMarker(0));
	expectedOutput.add(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()));

	testHarness.waitForInputProcessing();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	// Then give the earlier barrier, these should be ignored
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example #17
Source File: OneInputStreamTaskTest.java    From flink with Apache License 2.0
/**
 * This test verifies that checkpoint barriers are correctly forwarded.
 */
@Test
public void testCheckpointBarriers() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			2, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	long initialTime = 0L;

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

	// These elements should be forwarded, since we did not yet receive a checkpoint barrier
	// on that input, only add to same input, otherwise we would not know the ordering
	// of the output since the Task might read the inputs in any order
	testHarness.processElement(new StreamRecord<>("Hello-1-1", initialTime), 1, 1);
	testHarness.processElement(new StreamRecord<>("Ciao-1-1", initialTime), 1, 1);
	expectedOutput.add(new StreamRecord<>("Hello-1-1", initialTime));
	expectedOutput.add(new StreamRecord<>("Ciao-1-1", initialTime));

	testHarness.waitForInputProcessing();
	// we should not yet see the barrier, only the two elements from non-blocked input
	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	testHarness.waitForInputProcessing();

	// now we should see the barrier
	expectedOutput.add(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()));

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example #18
Source File: StreamingJobGraphGeneratorTest.java    From flink with Apache License 2.0
private YieldingTestOperatorFactory() {
	super(new StreamMap<T, T>(x -> x));
}
 
Example #19
Source File: OneInputStreamTaskTest.java    From flink with Apache License 2.0
/**
 * This test verifies that checkpoint barriers and barrier buffers work correctly with
 * concurrent checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e.
 * some inputs receive barriers from an earlier checkpoint, thereby blocking,
 * then all inputs receive barriers from a later checkpoint.
 */
@Test
public void testOvertakingCheckpointBarriers() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			2, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<String, String>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
	long initialTime = 0L;

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

	// These elements should be buffered until we receive barriers from
	// all inputs
	testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
	testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);

	// These elements should be forwarded, since we did not yet receive a checkpoint barrier
	// on that input, only add to same input, otherwise we would not know the ordering
	// of the output since the Task might read the inputs in any order
	testHarness.processElement(new StreamRecord<String>("Hello-1-1", initialTime), 1, 1);
	testHarness.processElement(new StreamRecord<String>("Ciao-1-1", initialTime), 1, 1);
	expectedOutput.add(new StreamRecord<String>("Hello-1-1", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-1-1", initialTime));

	testHarness.waitForInputProcessing();
	// we should not yet see the barrier, only the two elements from non-blocked input
	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	// Now give a later barrier to all inputs, this should unblock the first channel,
	// thereby allowing the two blocked elements through
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	expectedOutput.add(new CancelCheckpointMarker(0));
	expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));
	expectedOutput.add(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()));

	testHarness.waitForInputProcessing();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	// Then give the earlier barrier, these should be ignored
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example #20
Source File: PojoSerializerUpgradeTest.java    From flink with Apache License 2.0
private void testPojoSerializerUpgrade(String classSourceA, String classSourceB, boolean hasBField, boolean isKeyedState) throws Exception {
	final Configuration taskConfiguration = new Configuration();
	final ExecutionConfig executionConfig = new ExecutionConfig();
	final KeySelector<Long, Long> keySelector = new IdentityKeySelector<>();
	final Collection<Long> inputs = Arrays.asList(1L, 2L, 45L, 67L, 1337L);

	// run the program with classSourceA
	File rootPath = temporaryFolder.newFolder();
	File sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceA);
	compileClass(sourceFile);

	final ClassLoader classLoader = URLClassLoader.newInstance(
		new URL[]{rootPath.toURI().toURL()},
		Thread.currentThread().getContextClassLoader());

	OperatorSubtaskState stateHandles = runOperator(
		taskConfiguration,
		executionConfig,
		new StreamMap<>(new StatefulMapper(isKeyedState, false, hasBField)),
		keySelector,
		isKeyedState,
		stateBackend,
		classLoader,
		null,
		inputs);

	// run the program with classSourceB
	rootPath = temporaryFolder.newFolder();

	sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceB);
	compileClass(sourceFile);

	final ClassLoader classLoaderB = URLClassLoader.newInstance(
		new URL[]{rootPath.toURI().toURL()},
		Thread.currentThread().getContextClassLoader());

	runOperator(
		taskConfiguration,
		executionConfig,
		new StreamMap<>(new StatefulMapper(isKeyedState, true, hasBField)),
		keySelector,
		isKeyedState,
		stateBackend,
		classLoaderB,
		stateHandles,
		inputs);
}
 
Example #21
Source File: StreamTaskTimerTest.java    From flink with Apache License 2.0
@Test
public void checkScheduledTimestampe() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO,
			BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
	streamConfig.setStreamOperator(mapOperator);

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

	final AtomicReference<Throwable> errorRef = new AtomicReference<>();

	final long t1 = System.currentTimeMillis();
	final long t2 = System.currentTimeMillis() - 200;
	final long t3 = System.currentTimeMillis() + 100;
	final long t4 = System.currentTimeMillis() + 200;

	ProcessingTimeService timeService = mapTask.getProcessingTimeService();
	timeService.registerTimer(t1, new ValidatingProcessingTimeCallback(errorRef, t1, 0));
	timeService.registerTimer(t2, new ValidatingProcessingTimeCallback(errorRef, t2, 1));
	timeService.registerTimer(t3, new ValidatingProcessingTimeCallback(errorRef, t3, 2));
	timeService.registerTimer(t4, new ValidatingProcessingTimeCallback(errorRef, t4, 3));

	long deadline = System.currentTimeMillis() + 20000;
	while (errorRef.get() == null &&
			ValidatingProcessingTimeCallback.numInSequence < 4 &&
			System.currentTimeMillis() < deadline) {
		Thread.sleep(100);
	}

	// handle errors
	if (errorRef.get() != null) {
		errorRef.get().printStackTrace();
		fail(errorRef.get().getMessage());
	}

	assertEquals(4, ValidatingProcessingTimeCallback.numInSequence);

	testHarness.endInput();
	testHarness.waitForTaskCompletion();

	// wait until the trigger thread is shut down. Otherwise, the other tests may become unstable
	deadline = System.currentTimeMillis() + 4000;
	while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
		Thread.sleep(10);
	}

	assertEquals("Trigger timer thread did not properly shut down",
			0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
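ValidatingProcessingTimeCallback is defined in the same test class and not shown in this listing. A plausible sketch (an assumption inferred from how it is constructed and asserted on above, not the actual class; the ProcessingTimeCallback import path is also assumed) is a callback that checks the firing order against its expected position, counts fired timers in a static field, and publishes any failure to the shared error reference.

import java.util.concurrent.atomic.AtomicReference;

import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;

// Hypothetical sketch of ValidatingProcessingTimeCallback: verifies that timers
// fire in registration order and reports the first failure through errorRef.
public class ValidatingProcessingTimeCallback implements ProcessingTimeCallback {

	static volatile int numInSequence = 0;

	private final AtomicReference<Throwable> errorRef;
	private final long expectedTimestamp;
	private final int expectedInSequence;

	ValidatingProcessingTimeCallback(AtomicReference<Throwable> errorRef, long expectedTimestamp, int expectedInSequence) {
		this.errorRef = errorRef;
		this.expectedTimestamp = expectedTimestamp;
		this.expectedInSequence = expectedInSequence;
	}

	@Override
	public void onProcessingTime(long timestamp) {
		try {
			if (timestamp != expectedTimestamp) {
				throw new AssertionError("Unexpected timer timestamp: " + timestamp);
			}
			if (numInSequence != expectedInSequence) {
				throw new AssertionError("Timer fired out of order at position " + numInSequence);
			}
			numInSequence++;
		} catch (Throwable t) {
			errorRef.compareAndSet(null, t);
		}
	}
}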
 
Example #22
Source File: StreamTaskTimerTest.java    From flink with Apache License 2.0
@Test
public void testOpenCloseAndTimestamps() throws Exception {

	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO,
			BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();

	StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

	// first one spawns thread
	mapTask.getProcessingTimeService().registerTimer(System.currentTimeMillis(), new ProcessingTimeCallback() {
		@Override
		public void onProcessingTime(long timestamp) {
		}
	});

	assertEquals(1, StreamTask.TRIGGER_THREAD_GROUP.activeCount());

	testHarness.endInput();
	testHarness.waitForTaskCompletion();

	// thread needs to die in time
	long deadline = System.currentTimeMillis() + 4000;
	while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
		Thread.sleep(10);
	}

	assertEquals("Trigger timer thread did not properly shut down",
			0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
 
Example #23
Source File: OneInputStreamTaskTest.java    From flink with Apache License 2.0
/**
 * This test verifies that checkpoint barriers are correctly forwarded.
 */
@Test
public void testCheckpointBarriers() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			2, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<String, String>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
	long initialTime = 0L;

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

	// These elements should be buffered until we receive barriers from
	// all inputs
	testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
	testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);

	// These elements should be forwarded, since we did not yet receive a checkpoint barrier
	// on that input, only add to same input, otherwise we would not know the ordering
	// of the output since the Task might read the inputs in any order
	testHarness.processElement(new StreamRecord<String>("Hello-1-1", initialTime), 1, 1);
	testHarness.processElement(new StreamRecord<String>("Ciao-1-1", initialTime), 1, 1);
	expectedOutput.add(new StreamRecord<String>("Hello-1-1", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-1-1", initialTime));

	testHarness.waitForInputProcessing();
	// we should not yet see the barrier, only the two elements from non-blocked input
	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	testHarness.waitForInputProcessing();

	// now we should see the barrier and after that the buffered elements
	expectedOutput.add(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()));
	expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example #24
Source File: PojoSerializerUpgradeTest.java    From flink with Apache License 2.0 4 votes vote down vote up
private void testPojoSerializerUpgrade(String classSourceA, String classSourceB, boolean hasBField, boolean isKeyedState) throws Exception {
	final Configuration taskConfiguration = new Configuration();
	final ExecutionConfig executionConfig = new ExecutionConfig();
	final KeySelector<Long, Long> keySelector = new IdentityKeySelector<>();
	final Collection<Long> inputs = Arrays.asList(1L, 2L, 45L, 67L, 1337L);

	// run the program with classSourceA
	File rootPath = temporaryFolder.newFolder();
	File sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceA);
	compileClass(sourceFile);

	final ClassLoader classLoader = URLClassLoader.newInstance(
		new URL[]{rootPath.toURI().toURL()},
		Thread.currentThread().getContextClassLoader());

	OperatorSubtaskState stateHandles = runOperator(
		taskConfiguration,
		executionConfig,
		new StreamMap<>(new StatefulMapper(isKeyedState, false, hasBField)),
		keySelector,
		isKeyedState,
		stateBackend,
		classLoader,
		null,
		inputs);

	// run the program with classSourceB
	rootPath = temporaryFolder.newFolder();

	sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceB);
	compileClass(sourceFile);

	final ClassLoader classLoaderB = URLClassLoader.newInstance(
		new URL[]{rootPath.toURI().toURL()},
		Thread.currentThread().getContextClassLoader());

	runOperator(
		taskConfiguration,
		executionConfig,
		new StreamMap<>(new StatefulMapper(isKeyedState, true, hasBField)),
		keySelector,
		isKeyedState,
		stateBackend,
		classLoaderB,
		stateHandles,
		inputs);
}
 
Example #25
Source File: StreamTaskTimerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void checkScheduledTimestampe() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO,
			BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
	streamConfig.setStreamOperator(mapOperator);

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

	final AtomicReference<Throwable> errorRef = new AtomicReference<>();

	final long t1 = System.currentTimeMillis();
	final long t2 = System.currentTimeMillis() - 200;
	final long t3 = System.currentTimeMillis() + 100;
	final long t4 = System.currentTimeMillis() + 200;

	ProcessingTimeService timeService = mapTask.getProcessingTimeService();
	timeService.registerTimer(t1, new ValidatingProcessingTimeCallback(errorRef, t1, 0));
	timeService.registerTimer(t2, new ValidatingProcessingTimeCallback(errorRef, t2, 1));
	timeService.registerTimer(t3, new ValidatingProcessingTimeCallback(errorRef, t3, 2));
	timeService.registerTimer(t4, new ValidatingProcessingTimeCallback(errorRef, t4, 3));

	long deadline = System.currentTimeMillis() + 20000;
	while (errorRef.get() == null &&
			ValidatingProcessingTimeCallback.numInSequence < 4 &&
			System.currentTimeMillis() < deadline) {
		Thread.sleep(100);
	}

	// handle errors
	if (errorRef.get() != null) {
		errorRef.get().printStackTrace();
		fail(errorRef.get().getMessage());
	}

	assertEquals(4, ValidatingProcessingTimeCallback.numInSequence);

	testHarness.endInput();
	testHarness.waitForTaskCompletion();

	// wait until the trigger thread is shut down. Otherwise, the other tests may become unstable
	deadline = System.currentTimeMillis() + 4000;
	while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
		Thread.sleep(10);
	}

	assertEquals("Trigger timer thread did not properly shut down",
			0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
 
Example #26
Source File: StreamTaskTimerTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testOpenCloseAndTimestamps() throws Exception {

	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO,
			BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();

	StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

	// first one spawns thread
	mapTask.getProcessingTimeService().registerTimer(System.currentTimeMillis(), new ProcessingTimeCallback() {
		@Override
		public void onProcessingTime(long timestamp) {
		}
	});

	assertEquals(1, StreamTask.TRIGGER_THREAD_GROUP.activeCount());

	testHarness.endInput();
	testHarness.waitForTaskCompletion();

	// thread needs to die in time
	long deadline = System.currentTimeMillis() + 4000;
	while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
		Thread.sleep(10);
	}

	assertEquals("Trigger timer thread did not properly shut down",
			0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
 
Example #27
Source File: OneInputStreamTaskTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * This test verifies that checkpoint barriers and barrier buffers work correctly with
 * concurrent checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e.
 * some inputs receive barriers from an earlier checkpoint, thereby blocking,
 * then all inputs receive barriers from a later checkpoint.
 */
@Test
public void testOvertakingCheckpointBarriers() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			2, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<String, String>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
	long initialTime = 0L;

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

	// These elements should be buffered until we receive barriers from
	// all inputs
	testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
	testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);

	// These elements should be forwarded, since we did not yet receive a checkpoint barrier
	// on that input, only add to same input, otherwise we would not know the ordering
	// of the output since the Task might read the inputs in any order
	testHarness.processElement(new StreamRecord<String>("Hello-1-1", initialTime), 1, 1);
	testHarness.processElement(new StreamRecord<String>("Ciao-1-1", initialTime), 1, 1);
	expectedOutput.add(new StreamRecord<String>("Hello-1-1", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-1-1", initialTime));

	testHarness.waitForInputProcessing();
	// we should not yet see the barrier, only the two elements from non-blocked input
	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	// Now give a later barrier to all inputs, this should unblock the first channel,
	// thereby allowing the two blocked elements through
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	expectedOutput.add(new CancelCheckpointMarker(0));
	expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));
	expectedOutput.add(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()));

	testHarness.waitForInputProcessing();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	// Then give the earlier barrier, these should be ignored
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example #28
Source File: OneInputStreamTaskTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * This test verifies that checkpoint barriers are correctly forwarded.
 */
@Test
public void testCheckpointBarriers() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			2, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<String, String>(new IdentityMap());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
	long initialTime = 0L;

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

	// These elements should be buffered until we receive barriers from
	// all inputs
	testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
	testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);

	// These elements should be forwarded, since we did not yet receive a checkpoint barrier
	// on that input, only add to same input, otherwise we would not know the ordering
	// of the output since the Task might read the inputs in any order
	testHarness.processElement(new StreamRecord<String>("Hello-1-1", initialTime), 1, 1);
	testHarness.processElement(new StreamRecord<String>("Ciao-1-1", initialTime), 1, 1);
	expectedOutput.add(new StreamRecord<String>("Hello-1-1", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-1-1", initialTime));

	testHarness.waitForInputProcessing();
	// we should not yet see the barrier, only the two elements from non-blocked input
	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
	testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

	testHarness.waitForInputProcessing();

	// now we should see the barrier and after that the buffered elements
	expectedOutput.add(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()));
	expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
	expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
 
Example #29
Source File: PojoSerializerUpgradeTest.java    From Flink-CEPplus with Apache License 2.0
private void testPojoSerializerUpgrade(String classSourceA, String classSourceB, boolean hasBField, boolean isKeyedState) throws Exception {
	final Configuration taskConfiguration = new Configuration();
	final ExecutionConfig executionConfig = new ExecutionConfig();
	final KeySelector<Long, Long> keySelector = new IdentityKeySelector<>();
	final Collection<Long> inputs = Arrays.asList(1L, 2L, 45L, 67L, 1337L);

	// run the program with classSourceA
	File rootPath = temporaryFolder.newFolder();
	File sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceA);
	compileClass(sourceFile);

	final ClassLoader classLoader = URLClassLoader.newInstance(
		new URL[]{rootPath.toURI().toURL()},
		Thread.currentThread().getContextClassLoader());

	OperatorSubtaskState stateHandles = runOperator(
		taskConfiguration,
		executionConfig,
		new StreamMap<>(new StatefulMapper(isKeyedState, false, hasBField)),
		keySelector,
		isKeyedState,
		stateBackend,
		classLoader,
		null,
		inputs);

	// run the program with classSourceB
	rootPath = temporaryFolder.newFolder();

	sourceFile = writeSourceFile(rootPath, POJO_NAME + ".java", classSourceB);
	compileClass(sourceFile);

	final ClassLoader classLoaderB = URLClassLoader.newInstance(
		new URL[]{rootPath.toURI().toURL()},
		Thread.currentThread().getContextClassLoader());

	runOperator(
		taskConfiguration,
		executionConfig,
		new StreamMap<>(new StatefulMapper(isKeyedState, true, hasBField)),
		keySelector,
		isKeyedState,
		stateBackend,
		classLoaderB,
		stateHandles,
		inputs);
}
 
Example #30
Source File: DataStream.java    From Flink-CEPplus with Apache License 2.0
/**
 * Applies a Map transformation on a {@link DataStream}. The transformation
 * calls a {@link MapFunction} for each element of the DataStream. Each
 * MapFunction call returns exactly one element. The user can also extend
 * {@link RichMapFunction} to gain access to other features provided by the
 * {@link org.apache.flink.api.common.functions.RichFunction} interface.
 *
 * @param mapper
 *            The MapFunction that is called for each element of the
 *            DataStream.
 * @param <R>
 *            output type
 * @return The transformed {@link DataStream}.
 */
public <R> SingleOutputStreamOperator<R> map(MapFunction<T, R> mapper) {

	TypeInformation<R> outType = TypeExtractor.getMapReturnTypes(clean(mapper), getType(),
			Utils.getCallLocationName(), true);

	return transform("Map", outType, new StreamMap<>(clean(mapper)));
}
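For completeness, a short usage sketch (the class, variable names, and the length-computing function are illustrative): calling map() on a DataStream is equivalent to wiring the StreamMap operator in explicitly with transform(), which is exactly what the implementation above does.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.operators.StreamMap;

public class MapVersusTransform {
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		DataStream<String> words = env.fromElements("hello", "ciao");

		// Convenient form: DataStream#map wraps the MapFunction in a StreamMap internally.
		DataStream<Integer> lengths = words.map(new MapFunction<String, Integer>() {
			@Override
			public Integer map(String value) {
				return value.length();
			}
		});

		// Explicit form: hand the same StreamMap operator to transform() yourself.
		DataStream<Integer> lengthsExplicit = words.transform(
				"Map",
				BasicTypeInfo.INT_TYPE_INFO,
				new StreamMap<>(new MapFunction<String, Integer>() {
					@Override
					public Integer map(String value) {
						return value.length();
					}
				}));

		lengths.print();
		lengthsExplicit.print();
		env.execute("map vs transform sketch");
	}
}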