Java Code Examples for org.apache.flink.streaming.api.graph.StreamConfig

The following examples show how to use org.apache.flink.streaming.api.graph.StreamConfig. These examples are extracted from open source projects.
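
Before the individual examples, here is a minimal sketch of the pattern most of them share: a StreamConfig wraps a plain Configuration, and the caller populates it (operator, operator ID, time characteristic, state backend) before handing it to a task or operator. The sketch only reuses calls that appear in the examples below; imports are omitted, as in the examples themselves, and the exact setters available vary by Flink version.

// Minimal sketch of typical StreamConfig setup (assumes imports as in the examples below).
Configuration taskConfiguration = new Configuration();
StreamConfig streamConfig = new StreamConfig(taskConfiguration);

// register the head operator of the chain (an identity StreamMap, as in Example 8)
streamConfig.setStreamOperator(new StreamMap<>(value -> value));
streamConfig.setOperatorID(new OperatorID());

// optional settings used by several of the tests below
streamConfig.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
streamConfig.setStateBackend(new MemoryStateBackend());
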
Example 1
@Override
@SuppressWarnings("unchecked")
public <T extends StreamOperator<E>> T createStreamOperator(
    StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<E>> output) {
  final TypeSerializer<E> serializer =
      config.getTypeSerializerIn1(containingTask.getUserCodeClassLoader());

  final long totalMemoryUsedForFeedbackCheckpointing =
      config
          .getConfiguration()
          .getInteger(FeedbackConfiguration.TOTAL_MEMORY_USED_FOR_FEEDBACK_CHECKPOINTING);

  FeedbackUnionOperator<E> op =
      new FeedbackUnionOperator<>(
          feedbackKey,
          isBarrierMessage,
          keySelector,
          totalMemoryUsedForFeedbackCheckpointing,
          serializer,
          mailboxExecutor);

  op.setup(containingTask, config, output);

  return (T) op;
}
 
Example 2
@Test
public void testLifeCycleCancel() throws Exception {
	ACTUAL_ORDER_TRACKING.clear();

	Configuration taskManagerConfig = new Configuration();
	StreamConfig cfg = new StreamConfig(new Configuration());
	MockSourceFunction srcFun = new MockSourceFunction();
	cfg.setStreamOperator(new LifecycleTrackingStreamSource<>(srcFun, false));
	cfg.setOperatorID(new OperatorID());
	cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	try (ShuffleEnvironment shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build()) {
		Task task = StreamTaskTest.createTask(SourceStreamTask.class, shuffleEnvironment, cfg, taskManagerConfig);

		task.startTaskThread();
		LifecycleTrackingStreamSource.runStarted.await();

		// this should cancel the task even though it is blocked on runFinished
		task.cancelExecution();

		// wait for clean termination
		task.getExecutingThread().join();
		assertEquals(ExecutionState.CANCELED, task.getExecutionState());
		assertEquals(EXPECTED_CALL_ORDER_CANCEL_RUNNING, ACTUAL_ORDER_TRACKING);
	}
}
 
Example 3
Source Project: Flink-CEPplus   Source File: OperatorChain.java    License: Apache License 2.0
private RecordWriterOutput<OUT> createStreamOutput(
		RecordWriter<SerializationDelegate<StreamRecord<OUT>>> recordWriter,
		StreamEdge edge,
		StreamConfig upStreamConfig,
		Environment taskEnvironment) {
	OutputTag sideOutputTag = edge.getOutputTag(); // side output tag; null if this edge is not a side output

	TypeSerializer outSerializer = null;

	if (edge.getOutputTag() != null) {
		// side output
		outSerializer = upStreamConfig.getTypeSerializerSideOut(
				edge.getOutputTag(), taskEnvironment.getUserClassLoader());
	} else {
		// main output
		outSerializer = upStreamConfig.getTypeSerializerOut(taskEnvironment.getUserClassLoader());
	}

	return new RecordWriterOutput<>(recordWriter, outSerializer, sideOutputTag, this);
}
 
Example 4
Source Project: flink   Source File: OneInputStreamTask.java    License: Apache License 2.0
@Override
public void init() throws Exception {
	StreamConfig configuration = getConfiguration();
	int numberOfInputs = configuration.getNumberOfInputs();

	if (numberOfInputs > 0) {
		CheckpointedInputGate inputGate = createCheckpointedInputGate();
		DataOutput<IN> output = createDataOutput();
		StreamTaskInput<IN> input = createTaskInput(inputGate, output);
		inputProcessor = new StreamOneInputProcessor<>(
			input,
			output,
			operatorChain);
	}
	headOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, this.inputWatermarkGauge);
	// wrap watermark gauge since registered metrics must be unique
	getEnvironment().getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, this.inputWatermarkGauge::getValue);
}
 
Example 5
@Test
public void testLifeCycleCancel() throws Exception {
	ACTUAL_ORDER_TRACKING.clear();

	Configuration taskManagerConfig = new Configuration();
	StreamConfig cfg = new StreamConfig(new Configuration());
	MockSourceFunction srcFun = new MockSourceFunction();
	cfg.setStreamOperator(new LifecycleTrackingStreamSource<>(srcFun, false));
	cfg.setOperatorID(new OperatorID());
	cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	Task task = StreamTaskTest.createTask(SourceStreamTask.class, cfg, taskManagerConfig);

	task.startTaskThread();
	LifecycleTrackingStreamSource.runStarted.await();

	// this should cancel the task even though it is blocked on runFinished
	task.cancelExecution();

	// wait for clean termination
	task.getExecutingThread().join();
	assertEquals(ExecutionState.CANCELED, task.getExecutionState());
	assertEquals(EXPECTED_CALL_ORDER_CANCEL_RUNNING, ACTUAL_ORDER_TRACKING);
}
 
Example 6
@Override
public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) {
	ACTUAL_ORDER_TRACKING.add("OPERATOR::setup");
	super.setup(containingTask, config, output);
	if (simulateCheckpointing) {
		testCheckpointer = new Thread() {
			@Override
			public void run() {
				try {
					runStarted.await();
					if (getContainingTask().isCanceled() || getContainingTask().triggerCheckpoint(
							new CheckpointMetaData(0, System.currentTimeMillis()),
							CheckpointOptions.forCheckpointWithDefaultLocation())) {
						LifecycleTrackingStreamSource.runFinish.trigger();
					}
				} catch (Exception e) {
					e.printStackTrace();
					Assert.fail();
				}
			}
		};
		testCheckpointer.start();
	}
}
 
Example 7
Source Project: flink   Source File: StreamingRuntimeContext.java    License: Apache License 2.0
public StreamingRuntimeContext(
		Environment env,
		Map<String, Accumulator<?, ?>> accumulators,
		MetricGroup operatorMetricGroup,
		OperatorID operatorID,
		ProcessingTimeService processingTimeService,
		@Nullable KeyedStateStore keyedStateStore,
		ExternalResourceInfoProvider externalResourceInfoProvider) {
	super(checkNotNull(env).getTaskInfo(),
			env.getUserClassLoader(),
			env.getExecutionConfig(),
			accumulators,
			env.getDistributedCacheEntries(),
			operatorMetricGroup);
	this.taskEnvironment = env;
	this.streamConfig = new StreamConfig(env.getTaskConfiguration());
	this.operatorUniqueID = checkNotNull(operatorID).toString();
	this.processingTimeService = processingTimeService;
	this.keyedStateStore = keyedStateStore;
	this.externalResourceInfoProvider = externalResourceInfoProvider;
}
 
Example 8
Source Project: flink   Source File: StreamTaskTest.java    License: Apache License 2.0
/**
 * Tests that some StreamTask methods are called only in the main task's thread.
 * Currently, the main task's thread is the thread that creates the task.
 */
@Test
public void testThreadInvariants() throws Throwable {
	Configuration taskConfiguration = new Configuration();
	StreamConfig streamConfig = new StreamConfig(taskConfiguration);
	streamConfig.setStreamOperator(new StreamMap<>(value -> value));
	streamConfig.setOperatorID(new OperatorID());
	try (MockEnvironment mockEnvironment =
			new MockEnvironmentBuilder()
				.setTaskConfiguration(taskConfiguration)
				.build()) {

		ClassLoader taskClassLoader = new TestUserCodeClassLoader();

		RunningTask<ThreadInspectingTask> runningTask = runTask(() -> {
			Thread.currentThread().setContextClassLoader(taskClassLoader);
			return new ThreadInspectingTask(mockEnvironment);
		});
		runningTask.invocationFuture.get();

		assertThat(runningTask.streamTask.getTaskClassLoader(), is(sameInstance(taskClassLoader)));
	}
}
 
Example 9
private static <T> void setupSourceOperator(
		StreamSource<T, ?> operator,
		ExecutionConfig executionConfig,
		Environment env,
		ProcessingTimeService timeProvider) {

	StreamConfig cfg = new StreamConfig(new Configuration());
	cfg.setStateBackend(new MemoryStateBackend());

	cfg.setTimeCharacteristic(TimeCharacteristic.EventTime);
	cfg.setOperatorID(new OperatorID());

	try {
		MockStreamTask mockTask = new MockStreamTaskBuilder(env)
			.setConfig(cfg)
			.setExecutionConfig(executionConfig)
			.setProcessingTimeService(timeProvider)
			.build();

		operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
	} catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 10
Source Project: flink   Source File: StreamTaskTest.java    License: Apache License 2.0
@Test
public void testCancellationNotBlockedOnLock() throws Exception {
	syncLatch = new OneShotLatch();

	StreamConfig cfg = new StreamConfig(new Configuration());
	Task task = createTask(CancelLockingTask.class, cfg, new Configuration());

	// start the task and wait until it runs
	// execution state RUNNING is not enough, we need to wait until the stream task's run() method
	// is entered
	task.startTaskThread();
	syncLatch.await();

	// cancel the execution - this should lead to smooth shutdown
	task.cancelExecution();
	task.getExecutingThread().join();

	assertEquals(ExecutionState.CANCELED, task.getExecutionState());
}
 
Example 11
Source Project: flink   Source File: OneInputStreamTaskTest.java    License: Apache License 2.0
@Test
public void testQuiesceTimerServiceAfterOpClose() throws Exception {

	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new,
			2, 2,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setStreamOperator(new TestOperator());
	streamConfig.setOperatorID(new OperatorID());

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	SystemProcessingTimeService timeService = (SystemProcessingTimeService)
			testHarness.getTask().getProcessingTimeService();

	// verify that the timer service is running
	Assert.assertTrue(timeService.isAlive());

	testHarness.endInput();
	testHarness.waitForTaskCompletion();
	timeService.shutdownService();
}
 
Example 12
Source Project: flink   Source File: AsyncWaitOperator.java    License: Apache License 2.0
@Override
public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) {
	super.setup(containingTask, config, output);

	this.inStreamElementSerializer = new StreamElementSerializer<>(
		getOperatorConfig().<IN>getTypeSerializerIn1(getUserCodeClassloader()));

	switch (outputMode) {
		case ORDERED:
			queue = new OrderedStreamElementQueue<>(capacity);
			break;
		case UNORDERED:
			queue = new UnorderedStreamElementQueue<>(capacity);
			break;
		default:
			throw new IllegalStateException("Unknown async mode: " + outputMode + '.');
	}

	this.timestampedCollector = new TimestampedCollector<>(output);
}
 
Example 13
Source Project: flink   Source File: StreamTaskTest.java    License: Apache License 2.0
@Test
public void testCanceleablesCanceledOnCancelTaskError() throws Exception {
	syncLatch = new OneShotLatch();

	StreamConfig cfg = new StreamConfig(new Configuration());
	try (NettyShuffleEnvironment shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build()) {

		Task task = createTask(CancelFailingTask.class, shuffleEnvironment, cfg, new Configuration());

		// start the task and wait until it runs
		// execution state RUNNING is not enough, we need to wait until the stream task's run() method
		// is entered
		task.startTaskThread();
		syncLatch.await();

		// cancel the execution - this should lead to smooth shutdown
		task.cancelExecution();
		task.getExecutingThread().join();

		assertEquals(ExecutionState.CANCELED, task.getExecutionState());
	}
}
 
Example 14
private static <T> void setupSourceOperator(
		StreamSource<T, ?> operator,
		ExecutionConfig executionConfig,
		Environment env,
		ProcessingTimeService timeProvider) {

	StreamConfig cfg = new StreamConfig(new Configuration());
	cfg.setStateBackend(new MemoryStateBackend());

	cfg.setTimeCharacteristic(TimeCharacteristic.EventTime);
	cfg.setOperatorID(new OperatorID());

	try {
		MockStreamTask mockTask = new MockStreamTaskBuilder(env)
			.setConfig(cfg)
			.setExecutionConfig(executionConfig)
			.setProcessingTimeService(timeProvider)
			.build();

		operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
	} catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 15
Source Project: flink   Source File: InputProcessorUtil.java    License: Apache License 2.0
private static CheckpointBarrierHandler createCheckpointBarrierHandler(
		StreamConfig config,
		InputGate[] inputGates,
		SubtaskCheckpointCoordinator checkpointCoordinator,
		String taskName,
		AbstractInvokable toNotifyOnCheckpoint) {
	switch (config.getCheckpointMode()) {
		case EXACTLY_ONCE:
			if (config.isUnalignedCheckpointsEnabled()) {
				return new AlternatingCheckpointBarrierHandler(
					new CheckpointBarrierAligner(taskName, toNotifyOnCheckpoint, inputGates),
					new CheckpointBarrierUnaligner(checkpointCoordinator, taskName, toNotifyOnCheckpoint, inputGates),
					toNotifyOnCheckpoint);
			}
			return new CheckpointBarrierAligner(taskName, toNotifyOnCheckpoint, inputGates);
		case AT_LEAST_ONCE:
			if (config.isUnalignedCheckpointsEnabled()) {
				throw new IllegalStateException("Cannot use unaligned checkpoints with AT_LEAST_ONCE " +
					"checkpointing mode");
			}
			int numInputChannels = Arrays.stream(inputGates).mapToInt(InputGate::getNumberOfInputChannels).sum();
			return new CheckpointBarrierTracker(numInputChannels, toNotifyOnCheckpoint);
		default:
			throw new UnsupportedOperationException("Unrecognized Checkpointing Mode: " + config.getCheckpointMode());
	}
}
 
Example 16
Source Project: flink   Source File: SourceStreamTaskTest.java    License: Apache License 2.0
/**
 * If finishing a task doesn't swallow exceptions, this test would fail with an exception.
 */
@Test
public void finishingIgnoresExceptions() throws Exception {
	final StreamTaskTestHarness<String> testHarness = new StreamTaskTestHarness<>(
			SourceStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO);

	final CompletableFuture<Void> operatorRunningWaitingFuture = new CompletableFuture<>();
	ExceptionThrowingSource.setIsInRunLoopFuture(operatorRunningWaitingFuture);

	testHarness.setupOutputForSingletonOperatorChain();
	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setStreamOperator(new StreamSource<>(new ExceptionThrowingSource()));
	streamConfig.setOperatorID(new OperatorID());

	testHarness.invoke();
	operatorRunningWaitingFuture.get();
	testHarness.getTask().finishTask();

	testHarness.waitForTaskCompletion();
}
 
Example 17
Source Project: flink   Source File: StreamTask.java    License: Apache License 2.0
private static <OUT> List<RecordWriter<SerializationDelegate<StreamRecord<OUT>>>> createRecordWriters(
		StreamConfig configuration,
		Environment environment) {
	List<RecordWriter<SerializationDelegate<StreamRecord<OUT>>>> recordWriters = new ArrayList<>();
	List<StreamEdge> outEdgesInOrder = configuration.getOutEdgesInOrder(environment.getUserClassLoader());
	Map<Integer, StreamConfig> chainedConfigs = configuration.getTransitiveChainedTaskConfigsWithSelf(environment.getUserClassLoader());

	for (int i = 0; i < outEdgesInOrder.size(); i++) {
		StreamEdge edge = outEdgesInOrder.get(i);
		recordWriters.add(
			createRecordWriter(
				edge,
				i,
				environment,
				environment.getTaskInfo().getTaskName(),
				chainedConfigs.get(edge.getSourceId()).getBufferTimeout()));
	}
	return recordWriters;
}
 
Example 18
Source Project: flink   Source File: InputProcessorUtil.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
public static CheckpointedInputGate createCheckpointedInputGate(
		AbstractInvokable toNotifyOnCheckpoint,
		StreamConfig config,
		SubtaskCheckpointCoordinator checkpointCoordinator,
		IndexedInputGate[] inputGates,
		TaskIOMetricGroup taskIOMetricGroup,
		String taskName) {
	CheckpointedInputGate[] checkpointedInputGates = createCheckpointedMultipleInputGate(
		toNotifyOnCheckpoint,
		config,
		checkpointCoordinator,
		taskIOMetricGroup,
		taskName,
		Arrays.asList(inputGates));
	return Iterables.getOnlyElement(Arrays.asList(checkpointedInputGates));
}
 
Example 19
@Test
public void testLifeCycleFull() throws Exception {
	ACTUAL_ORDER_TRACKING.clear();

	Configuration taskManagerConfig = new Configuration();
	StreamConfig cfg = new StreamConfig(new Configuration());
	MockSourceFunction srcFun = new MockSourceFunction();

	cfg.setStreamOperator(new LifecycleTrackingStreamSource<>(srcFun, true));
	cfg.setOperatorID(new OperatorID());
	cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	Task task = StreamTaskTest.createTask(SourceStreamTask.class, cfg, taskManagerConfig);

	task.startTaskThread();

	LifecycleTrackingStreamSource.runStarted.await();

	// wait for clean termination
	task.getExecutingThread().join();
	assertEquals(ExecutionState.FINISHED, task.getExecutionState());
	assertEquals(EXPECTED_CALL_ORDER_FULL, ACTUAL_ORDER_TRACKING);
}
 
Example 20
Source Project: flink   Source File: AbstractTwoInputStreamTask.java    License: Apache License 2.0
@Override
public void init() throws Exception {
	StreamConfig configuration = getConfiguration();
	ClassLoader userClassLoader = getUserCodeClassLoader();

	TypeSerializer<IN1> inputDeserializer1 = configuration.getTypeSerializerIn1(userClassLoader);
	TypeSerializer<IN2> inputDeserializer2 = configuration.getTypeSerializerIn2(userClassLoader);

	int numberOfInputs = configuration.getNumberOfInputs();

	ArrayList<InputGate> inputList1 = new ArrayList<InputGate>();
	ArrayList<InputGate> inputList2 = new ArrayList<InputGate>();

	List<StreamEdge> inEdges = configuration.getInPhysicalEdges(userClassLoader);

	for (int i = 0; i < numberOfInputs; i++) {
		int inputType = inEdges.get(i).getTypeNumber();
		InputGate reader = getEnvironment().getInputGate(i);
		switch (inputType) {
			case 1:
				inputList1.add(reader);
				break;
			case 2:
				inputList2.add(reader);
				break;
			default:
				throw new RuntimeException("Invalid input type number: " + inputType);
		}
	}

	createInputProcessor(inputList1, inputList2, inputDeserializer1, inputDeserializer2);

	headOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, minInputWatermarkGauge);
	headOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_1_WATERMARK, input1WatermarkGauge);
	headOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_2_WATERMARK, input2WatermarkGauge);
	// wrap watermark gauge since registered metrics must be unique
	getEnvironment().getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, minInputWatermarkGauge::getValue);
}
 
Example 21
Source Project: flink   Source File: StreamOperatorParameters.java    License: Apache License 2.0
public StreamOperatorParameters(
		StreamTask<?, ?> containingTask,
		StreamConfig config,
		Output<StreamRecord<OUT>> output,
		Supplier<ProcessingTimeService> processingTimeServiceFactory,
		OperatorEventDispatcher operatorEventDispatcher) {
	this.containingTask = containingTask;
	this.config = config;
	this.output = output;
	this.processingTimeServiceFactory = processingTimeServiceFactory;
	this.operatorEventDispatcher = operatorEventDispatcher;
}
 
Example 22
Source Project: bahir-flink   Source File: AbstractSiddhiOperator.java    License: Apache License 2.0
@Override
public void setup(StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<OUT>> output) {
    super.setup(containingTask, config, output);
    if (priorityQueue == null) {
        priorityQueue = new PriorityQueue<>(INITIAL_PRIORITY_QUEUE_CAPACITY, new StreamRecordComparator<IN>());
    }
    startSiddhiRuntime();
}
 
Example 23
Source Project: flink   Source File: StreamSourceOperatorWatermarksTest.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private static <T> void setupSourceOperator(StreamSource<T, ?> operator,
											TimeCharacteristic timeChar,
											long watermarkInterval,
											final ProcessingTimeService timeProvider) throws Exception {

	ExecutionConfig executionConfig = new ExecutionConfig();
	executionConfig.setAutoWatermarkInterval(watermarkInterval);

	StreamConfig cfg = new StreamConfig(new Configuration());
	cfg.setStateBackend(new MemoryStateBackend());

	cfg.setTimeCharacteristic(timeChar);
	cfg.setOperatorID(new OperatorID());

	Environment env = new DummyEnvironment("MockTwoInputTask", 1, 0);

	StreamStatusMaintainer streamStatusMaintainer = mock(StreamStatusMaintainer.class);
	when(streamStatusMaintainer.getStreamStatus()).thenReturn(StreamStatus.ACTIVE);

	MockStreamTask mockTask = new MockStreamTaskBuilder(env)
		.setConfig(cfg)
		.setExecutionConfig(executionConfig)
		.setStreamStatusMaintainer(streamStatusMaintainer)
		.setProcessingTimeService(timeProvider)
		.build();

	operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
}
 
Example 24
Source Project: Flink-CEPplus   Source File: OneInputStreamTask.java    License: Apache License 2.0
@Override
public void init() throws Exception {
	StreamConfig configuration = getConfiguration();

	TypeSerializer<IN> inSerializer = configuration.getTypeSerializerIn1(getUserCodeClassLoader());
	int numberOfInputs = configuration.getNumberOfInputs();

	if (numberOfInputs > 0) {
		InputGate[] inputGates = getEnvironment().getAllInputGates();

		inputProcessor = new StreamInputProcessor<>(
				inputGates,
				inSerializer,
				this,
				configuration.getCheckpointMode(),
				getCheckpointLock(),
				getEnvironment().getIOManager(),
				getEnvironment().getTaskManagerInfo().getConfiguration(),
				getStreamStatusMaintainer(),
				this.headOperator,
				getEnvironment().getMetricGroup().getIOMetricGroup(),
				inputWatermarkGauge);
	}
	headOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, this.inputWatermarkGauge);
	// wrap watermark gauge since registered metrics must be unique
	getEnvironment().getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, this.inputWatermarkGauge::getValue);
}
 
Example 25
Source Project: flink   Source File: SourceStreamTaskTest.java    License: Apache License 2.0
@Test
public void testNotMarkingEndOfInputWhenTaskCancelled() throws Exception {
	final StreamTaskTestHarness<String> testHarness = new StreamTaskTestHarness<>(
		SourceStreamTask::new,
		BasicTypeInfo.STRING_TYPE_INFO);

	testHarness
		.setupOperatorChain(
			new OperatorID(),
			new StreamSource<>(new CancelTestSource(
				BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), "Hello")))
		.chain(
			new OperatorID(),
			new TestBoundedOneInputStreamOperator("Operator1"),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
		.finish();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.invoke();
	CancelTestSource.getDataProcessing().get();
	testHarness.getTask().cancel();

	try {
		testHarness.waitForTaskCompletion();
	} catch (Throwable t) {
		assertTrue(ExceptionUtils.findThrowable(t, CancelTaskException.class).isPresent());
	}

	expectedOutput.add(new StreamRecord<>("Hello"));

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example 26
Source Project: Flink-CEPplus   Source File: OperatorChain.java    License: Apache License 2.0
private <IN, OUT> WatermarkGaugeExposingOutput<StreamRecord<IN>> createChainedOperator(
		StreamTask<?, ?> containingTask,
		StreamConfig operatorConfig,
		Map<Integer, StreamConfig> chainedConfigs,
		ClassLoader userCodeClassloader,
		Map<StreamEdge, RecordWriterOutput<?>> streamOutputs,
		List<StreamOperator<?>> allOperators,
		OutputTag<IN> outputTag) {
	// create the output that the operator writes to first. this may recursively create more operators
	WatermarkGaugeExposingOutput<StreamRecord<OUT>> chainedOperatorOutput = createOutputCollector(
		containingTask,
		operatorConfig,
		chainedConfigs,
		userCodeClassloader,
		streamOutputs,
		allOperators);

	// now create the operator and give it the output collector to write its output to
	OneInputStreamOperator<IN, OUT> chainedOperator = operatorConfig.getStreamOperator(userCodeClassloader);

	chainedOperator.setup(containingTask, operatorConfig, chainedOperatorOutput);

	allOperators.add(chainedOperator);

	WatermarkGaugeExposingOutput<StreamRecord<IN>> currentOperatorOutput;
	if (containingTask.getExecutionConfig().isObjectReuseEnabled()) {
		currentOperatorOutput = new ChainingOutput<>(chainedOperator, this, outputTag);
	}
	else {
		TypeSerializer<IN> inSerializer = operatorConfig.getTypeSerializerIn1(userCodeClassloader);
		currentOperatorOutput = new CopyingChainingOutput<>(chainedOperator, inSerializer, outputTag, this);
	}

	// wrap watermark gauges since registered metrics must be unique
	chainedOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, currentOperatorOutput.getWatermarkGauge()::getValue);
	chainedOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_OUTPUT_WATERMARK, chainedOperatorOutput.getWatermarkGauge()::getValue);

	return currentOperatorOutput;
}
 
Example 27
Source Project: flink   Source File: OneInputStreamTaskTest.java    License: Apache License 2.0
/**
 * This test verifies that open() and close() are correctly called. This test also verifies
 * that timestamps of emitted elements are correct. {@link StreamMap} assigns the input
 * timestamp to emitted elements.
 */
@Test
public void testOpenCloseAndTimestamps() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
			OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	StreamMap<String, String> mapOperator = new StreamMap<>(new TestOpenCloseMapFunction());
	streamConfig.setStreamOperator(mapOperator);
	streamConfig.setOperatorID(new OperatorID());

	long initialTime = 0L;
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processElement(new StreamRecord<>("Hello", initialTime + 1));
	testHarness.processElement(new StreamRecord<>("Ciao", initialTime + 2));
	expectedOutput.add(new StreamRecord<>("Hello", initialTime + 1));
	expectedOutput.add(new StreamRecord<>("Ciao", initialTime + 2));

	testHarness.waitForInputProcessing();

	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	assertTrue("RichFunction methods where not called.", TestOpenCloseMapFunction.closeCalled);

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example 28
Source Project: Flink-CEPplus   Source File: OperatorChainTest.java    License: Apache License 2.0
@SafeVarargs
private static <T, OP extends StreamOperator<T>> OperatorChain<T, OP> setupOperatorChain(
		OneInputStreamOperator<T, T>... operators) {

	checkNotNull(operators);
	checkArgument(operators.length > 0);

	try (MockEnvironment env = MockEnvironment.builder().build()) {

		final StreamTask<?, ?> containingTask = new OneInputStreamTask<T, OneInputStreamOperator<T, T>>(env);

		final StreamStatusProvider statusProvider = mock(StreamStatusProvider.class);
		final StreamConfig cfg = new StreamConfig(new Configuration());

		final StreamOperator<?>[] ops = new StreamOperator<?>[operators.length];

		// initial output goes to nowhere
		@SuppressWarnings({"unchecked", "rawtypes"})
		WatermarkGaugeExposingOutput<StreamRecord<T>> lastWriter = new BroadcastingOutputCollector<>(
				new Output[0], statusProvider);

		// build the reverse operators array
		for (int i = 0; i < ops.length; i++) {
			OneInputStreamOperator<T, T> op = operators[ops.length - i - 1];
			op.setup(containingTask, cfg, lastWriter);
			lastWriter = new ChainingOutput<>(op, statusProvider, null);
			ops[i] = op;
		}

		@SuppressWarnings("unchecked")
		final OP head = (OP) operators[0];

		return new OperatorChain<>(
				ops,
				new RecordWriterOutput<?>[0],
				lastWriter,
				head);
	}
}
 
Example 29
Source Project: flink   Source File: StreamTaskCancellationBarrierTest.java    License: Apache License 2.0
/**
 * This test verifies (for two input tasks) that the stream tasks react in the following way to
 * receiving a checkpoint cancellation barrier:
 *   - send a "decline checkpoint" notification out (to the JobManager)
 *   - emit a cancellation barrier downstream.
 */
@Test
public void testDeclineCallOnCancelBarrierTwoInputs() throws Exception {

	TwoInputStreamTaskTestHarness<String, String, String> testHarness = new TwoInputStreamTaskTestHarness<>(
			TwoInputStreamTask::new,
			BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
	testHarness.setupOutputForSingletonOperatorChain();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	CoStreamMap<String, String, String> op = new CoStreamMap<>(new UnionCoMap());
	streamConfig.setStreamOperator(op);
	streamConfig.setOperatorID(new OperatorID());

	StreamMockEnvironment environment = spy(testHarness.createEnvironment());

	// start the task
	testHarness.invoke(environment);
	testHarness.waitForTaskRunning();

	// emit cancellation barriers
	testHarness.processEvent(new CancelCheckpointMarker(2L), 0, 0);
	testHarness.processEvent(new CancelCheckpointMarker(2L), 1, 0);
	testHarness.waitForInputProcessing();

	// the decline call should go to the coordinator
	verify(environment, times(1)).declineCheckpoint(eq(2L),
		argThat(new CheckpointBarrierAlignerTestBase.CheckpointExceptionMatcher(CheckpointFailureReason.CHECKPOINT_DECLINED_ON_CANCELLATION_BARRIER)));

	// a cancellation barrier should be downstream
	Object result = testHarness.getOutput().poll();
	assertNotNull("nothing emitted", result);
	assertTrue("wrong type emitted", result instanceof CancelCheckpointMarker);
	assertEquals("wrong checkpoint id", 2L, ((CancelCheckpointMarker) result).getCheckpointId());

	// cancel and shutdown
	testHarness.endInput();
	testHarness.waitForTaskCompletion();
}
 
Example 30
Source Project: flink   Source File: StreamTaskTest.java    License: Apache License 2.0
@Test
public void testStateBackendClosingOnFailure() throws Exception {
	Configuration taskManagerConfig = new Configuration();
	taskManagerConfig.setString(CheckpointingOptions.STATE_BACKEND, TestMemoryStateBackendFactory.class.getName());

	StreamConfig cfg = new StreamConfig(new Configuration());
	cfg.setStateKeySerializer(mock(TypeSerializer.class));
	cfg.setOperatorID(new OperatorID(4711L, 42L));
	TestStreamSource<Long, MockSourceFunction> streamSource = new TestStreamSource<>(new MockSourceFunction());
	cfg.setStreamOperator(streamSource);
	cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	Task task = createTask(StateBackendTestSource.class, cfg, taskManagerConfig);

	StateBackendTestSource.fail = true;
	task.startTaskThread();

	// wait for clean termination
	task.getExecutingThread().join();

	// ensure that the state backends and stream iterables are closed ...
	verify(TestStreamSource.operatorStateBackend).close();
	verify(TestStreamSource.keyedStateBackend).close();
	verify(TestStreamSource.rawOperatorStateInputs).close();
	verify(TestStreamSource.rawKeyedStateInputs).close();
	// ... and disposed
	verify(TestStreamSource.operatorStateBackend).dispose();
	verify(TestStreamSource.keyedStateBackend).dispose();

	assertEquals(ExecutionState.FAILED, task.getExecutionState());
}