Java Code Examples for org.apache.flink.streaming.api.datastream.AsyncDataStream

The following examples show how to use org.apache.flink.streaming.api.datastream.AsyncDataStream. These examples are extracted from open source projects; each one lists its source project, source file, and license.
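
Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: implement an AsyncFunction, complete its ResultFuture from an asynchronous callback, and wrap the input stream with AsyncDataStream.unorderedWait (or orderedWait), passing a timeout and a capacity for in-flight requests. This sketch is not taken from any of the projects below; the class name AsyncDataStreamSketch and the doubling logic in DoubleItAsyncFunction are placeholders for illustration.

import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.async.AsyncFunction;
import org.apache.flink.streaming.api.functions.async.ResultFuture;

public class AsyncDataStreamSketch {

	// Hypothetical async function: the doubling stands in for a real asynchronous client call.
	static class DoubleItAsyncFunction implements AsyncFunction<Integer, Integer> {
		@Override
		public void asyncInvoke(Integer input, ResultFuture<Integer> resultFuture) {
			CompletableFuture
				.supplyAsync(() -> input * 2)
				.thenAccept(result -> resultFuture.complete(Collections.singleton(result)));
		}
	}

	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		DataStream<Integer> input = env.fromElements(1, 2, 3);

		// unorderedWait emits results as they complete; orderedWait would preserve input order.
		DataStream<Integer> result = AsyncDataStream.unorderedWait(
			input,
			new DoubleItAsyncFunction(),
			1000, TimeUnit.MILLISECONDS,  // timeout per async request
			10);                          // capacity: max number of in-flight async requests

		result.print();
		env.execute("AsyncDataStream sketch");
	}
}

The timeout and capacity arguments passed to unorderedWait/orderedWait correspond to the timeout and capacity constructor parameters of AsyncWaitOperator seen throughout the examples below.
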
Example 1
Source Project: flink   Source File: AsyncWaitOperator.java    License: Apache License 2.0
public AsyncWaitOperator(
		AsyncFunction<IN, OUT> asyncFunction,
		long timeout,
		int capacity,
		AsyncDataStream.OutputMode outputMode) {
	super(asyncFunction);

	// TODO this is a temporary fix for the problems described under FLINK-13063 at the cost of breaking chains for
	//  AsyncOperators.
	setChainingStrategy(ChainingStrategy.HEAD);

	Preconditions.checkArgument(capacity > 0, "The number of concurrent async operation should be greater than 0.");
	this.capacity = capacity;

	this.outputMode = Preconditions.checkNotNull(outputMode, "outputMode");

	this.timeout = timeout;
}
 
Example 2
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * Test for the temporary fix to FLINK-13063.
 */
@Test
public void testAsyncOperatorIsNeverChained() {
	StreamExecutionEnvironment chainEnv = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Integer> input = chainEnv.fromElements(1);
	input = AsyncDataStream.orderedWait(
		input,
		new LazyAsyncFunction(),
		TIMEOUT,
		TimeUnit.MILLISECONDS,
		6).map((x) -> x);
	AsyncDataStream.unorderedWait(
		input,
		new MyAsyncFunction(),
		TIMEOUT,
		TimeUnit.MILLISECONDS,
		3).map((x) -> x).addSink(new DiscardingSink<>());

	final JobGraph jobGraph = chainEnv.getStreamGraph().getJobGraph();

	Assert.assertEquals(3, jobGraph.getVerticesSortedTopologicallyFromSources().size());
}
 
Example 3
private DataStream<Long> createAsyncOperator(DataStreamSource<Long> source) {
	switch (outputMode) {
		case ORDERED:
			return AsyncDataStream.orderedWait(
					source,
					new BenchmarkAsyncFunctionExecutor(),
					0,
					TimeUnit.MILLISECONDS);
		case UNORDERED:
			return AsyncDataStream.unorderedWait(
					source,
					new BenchmarkAsyncFunctionExecutor(),
					0,
					TimeUnit.MILLISECONDS);
		default:
			throw new UnsupportedOperationException("Unknown mode");
	}
}
 
Example 4
Source Project: flink   Source File: AsyncWaitOperator.java    License: Apache License 2.0
public AsyncWaitOperator(
		@Nonnull AsyncFunction<IN, OUT> asyncFunction,
		long timeout,
		int capacity,
		@Nonnull AsyncDataStream.OutputMode outputMode,
		@Nonnull ProcessingTimeService processingTimeService,
		@Nonnull MailboxExecutor mailboxExecutor) {
	super(asyncFunction);

	setChainingStrategy(ChainingStrategy.ALWAYS);

	Preconditions.checkArgument(capacity > 0, "The number of concurrent async operation should be greater than 0.");
	this.capacity = capacity;

	this.outputMode = Preconditions.checkNotNull(outputMode, "outputMode");

	this.timeout = timeout;

	this.processingTimeService = Preconditions.checkNotNull(processingTimeService);

	this.mailboxExecutor = mailboxExecutor;
}
 
Example 5
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * FLINK-5652
 * Tests that registered timers are properly canceled upon completion of a
 * {@link StreamElement} in order to avoid resource leaks because TriggerTasks hold
 * a reference on the StreamRecordQueueEntry.
 */
@Test
public void testTimeoutCleanup() throws Exception {
	OneInputStreamOperatorTestHarness<Integer, Integer> harness =
		createTestHarness(new MyAsyncFunction(), TIMEOUT, 1, AsyncDataStream.OutputMode.UNORDERED);

	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(42, 1L);
	}

	synchronized (harness.getCheckpointLock()) {
		harness.endInput();
		harness.close();
	}

	// check that we actually outputted the result of the single input
	assertEquals(Arrays.asList(new StreamRecord(42 * 2, 1L)), new ArrayList<>(harness.getOutput()));

	// check that we have cancelled our registered timeout
	assertEquals(0, harness.getProcessingTimeService().getNumActiveTimers());
}
 
Example 6
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testUserExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
	OneInputStreamOperatorTestHarness<Integer, Integer> harness =
		createTestHarness(new UserExceptionAsyncFunction(), TIMEOUT, 2, outputMode);

	harness.getEnvironment().setExpectedExternalFailureCause(Throwable.class);
	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(1, 1L);
	}

	synchronized (harness.getCheckpointLock()) {
		harness.close();
	}

	assertTrue(harness.getEnvironment().getActualExternalFailureCause().isPresent());
}
 
Example 7
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testTimeoutExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
	OneInputStreamOperatorTestHarness<Integer, Integer> harness =
		createTestHarness(new NoOpAsyncFunction<>(), 10L, 2, outputMode);

	harness.getEnvironment().setExpectedExternalFailureCause(Throwable.class);
	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(1, 1L);
	}

	harness.setProcessingTime(10L);

	synchronized (harness.getCheckpointLock()) {
		harness.close();
	}
}
 
Example 8
Source Project: Flink-CEPplus   Source File: AsyncWaitOperator.java    License: Apache License 2.0
public AsyncWaitOperator(
		AsyncFunction<IN, OUT> asyncFunction,
		long timeout,
		int capacity,
		AsyncDataStream.OutputMode outputMode) {
	super(asyncFunction);
	chainingStrategy = ChainingStrategy.ALWAYS;

	Preconditions.checkArgument(capacity > 0, "The number of concurrent async operation should be greater than 0.");
	this.capacity = capacity;

	this.outputMode = Preconditions.checkNotNull(outputMode, "outputMode");

	this.timeout = timeout;
}
 
Example 9
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
public TestAsyncWaitOperator(
		AsyncFunction<IN, OUT> asyncFunction,
		long timeout,
		int capacity,
		AsyncDataStream.OutputMode outputMode,
		OneShotLatch closingLatch) {
	super(asyncFunction, timeout, capacity, outputMode);

	this.closingLatch = Preconditions.checkNotNull(closingLatch);
}
 
Example 10
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testUserExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
	UserExceptionAsyncFunction asyncWaitFunction = new UserExceptionAsyncFunction();
	long timeout = 2000L;

	AsyncWaitOperator<Integer, Integer> asyncWaitOperator = new AsyncWaitOperator<>(
		asyncWaitFunction,
		TIMEOUT,
		2,
		outputMode);

	final MockEnvironment mockEnvironment = createMockEnvironment();
	mockEnvironment.setExpectedExternalFailureCause(Throwable.class);

	OneInputStreamOperatorTestHarness<Integer, Integer> harness = new OneInputStreamOperatorTestHarness<>(
		asyncWaitOperator,
		IntSerializer.INSTANCE,
		mockEnvironment);

	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(1, 1L);
	}

	synchronized (harness.getCheckpointLock()) {
		harness.close();
	}

	assertTrue(harness.getEnvironment().getActualExternalFailureCause().isPresent());
}
 
Example 11
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testTimeoutExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
	AsyncFunction<Integer, Integer> asyncFunction = new NoOpAsyncFunction<>();
	long timeout = 10L; // 10 milliseconds

	AsyncWaitOperator<Integer, Integer> asyncWaitOperator = new AsyncWaitOperator<>(
		asyncFunction,
		timeout,
		2,
		outputMode);

	final MockEnvironment mockEnvironment = createMockEnvironment();
	mockEnvironment.setExpectedExternalFailureCause(Throwable.class);

	OneInputStreamOperatorTestHarness<Integer, Integer> harness = new OneInputStreamOperatorTestHarness<>(
		asyncWaitOperator,
		IntSerializer.INSTANCE,
		mockEnvironment);

	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(1, 1L);
	}

	harness.setProcessingTime(10L);

	synchronized (harness.getCheckpointLock()) {
		harness.close();
	}
}
 
Example 12
Source Project: flink-learning   Source File: AsyncIOAlert.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
    StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);

    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
    FlinkKafkaConsumer011<MetricEvent> consumer = new FlinkKafkaConsumer011<>(
            parameterTool.get("metrics.topic"),
            new MetricSchema(),
            properties);
    SingleOutputStreamOperator<MetricEvent> machineData = env.addSource(consumer)
            .assignTimestampsAndWatermarks(new MetricWatermark());

    AsyncDataStream.unorderedWait(machineData, new AlertRuleAsyncIOFunction(), 10000, TimeUnit.MICROSECONDS, 100)
            .map(metricEvent -> {
                List<String> ma = (List<String>) metricEvent.getFields().get("xx");
                AlertEvent alertEvent = new AlertEvent();
                alertEvent.setType(metricEvent.getName());
                alertEvent.setTrigerTime(metricEvent.getTimestamp());
                alertEvent.setMetricEvent(metricEvent);
                if (metricEvent.getTags().get("recover") != null && Boolean.valueOf(metricEvent.getTags().get("recover"))) {
                    alertEvent.setRecover(true);
                    alertEvent.setRecoverTime(metricEvent.getTimestamp());
                } else {
                    alertEvent.setRecover(false);
                }
                return alertEvent;
            })
            .print();

    env.execute("Async IO get MySQL data");
}
 
Example 13
Source Project: flink   Source File: AsyncLookupJoinHarnessTest.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private OneInputStreamOperatorTestHarness<BaseRow, BaseRow> createHarness(
		JoinType joinType,
		FilterOnTable filterOnTable) throws Exception {
	RichAsyncFunction<BaseRow, BaseRow> joinRunner;
	boolean isLeftJoin = joinType == JoinType.LEFT_JOIN;
	if (filterOnTable == FilterOnTable.WITHOUT_FILTER) {
		joinRunner = new AsyncLookupJoinRunner(
			new GeneratedFunctionWrapper(new TestingFetcherFunction()),
			new GeneratedResultFutureWrapper<>(new TestingFetcherResultFuture()),
			fetcherReturnType,
			rightRowTypeInfo,
			isLeftJoin,
			ASYNC_BUFFER_CAPACITY);
	} else {
		joinRunner = new AsyncLookupJoinWithCalcRunner(
			new GeneratedFunctionWrapper(new TestingFetcherFunction()),
			new GeneratedFunctionWrapper<>(new CalculateOnTemporalTable()),
			new GeneratedResultFutureWrapper<>(new TestingFetcherResultFuture()),
			fetcherReturnType,
			rightRowTypeInfo,
			isLeftJoin,
			ASYNC_BUFFER_CAPACITY);
	}

	AsyncWaitOperator<BaseRow, BaseRow> operator = new AsyncWaitOperator<>(
		joinRunner,
		ASYNC_TIMEOUT_MS,
		ASYNC_BUFFER_CAPACITY,
		AsyncDataStream.OutputMode.ORDERED);

	return new OneInputStreamOperatorTestHarness<>(
		operator,
		inSerializer);
}
 
Example 14
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
public TestAsyncWaitOperator(
		AsyncFunction<IN, OUT> asyncFunction,
		long timeout,
		int capacity,
		AsyncDataStream.OutputMode outputMode,
		OneShotLatch closingLatch) {
	super(asyncFunction, timeout, capacity, outputMode);

	this.closingLatch = Preconditions.checkNotNull(closingLatch);
}
 
Example 15
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testUserExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
	UserExceptionAsyncFunction asyncWaitFunction = new UserExceptionAsyncFunction();
	long timeout = 2000L;

	AsyncWaitOperator<Integer, Integer> asyncWaitOperator = new AsyncWaitOperator<>(
		asyncWaitFunction,
		TIMEOUT,
		2,
		outputMode);

	final MockEnvironment mockEnvironment = createMockEnvironment();
	mockEnvironment.setExpectedExternalFailureCause(Throwable.class);

	OneInputStreamOperatorTestHarness<Integer, Integer> harness = new OneInputStreamOperatorTestHarness<>(
		asyncWaitOperator,
		IntSerializer.INSTANCE,
		mockEnvironment);

	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(1, 1L);
	}

	synchronized (harness.getCheckpointLock()) {
		harness.close();
	}

	assertTrue(harness.getEnvironment().getActualExternalFailureCause().isPresent());
}
 
Example 16
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testTimeoutExceptionHandling(AsyncDataStream.OutputMode outputMode) throws Exception {
	AsyncFunction<Integer, Integer> asyncFunction = new NoOpAsyncFunction<>();
	long timeout = 10L; // 10 milliseconds

	AsyncWaitOperator<Integer, Integer> asyncWaitOperator = new AsyncWaitOperator<>(
		asyncFunction,
		timeout,
		2,
		outputMode);

	final MockEnvironment mockEnvironment = createMockEnvironment();
	mockEnvironment.setExpectedExternalFailureCause(Throwable.class);

	OneInputStreamOperatorTestHarness<Integer, Integer> harness = new OneInputStreamOperatorTestHarness<>(
		asyncWaitOperator,
		IntSerializer.INSTANCE,
		mockEnvironment);

	harness.open();

	synchronized (harness.getCheckpointLock()) {
		harness.processElement(1, 1L);
	}

	harness.setProcessingTime(10L);

	synchronized (harness.getCheckpointLock()) {
		harness.close();
	}
}
 
Example 17
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * This helper function is needed to check that the temporary fix for FLINK-13063 can be backwards compatible with
 * the old chaining behavior by setting the ChainingStrategy manually. TODO: remove after a proper fix for
 * FLINK-13063 is in place that allows chaining.
 */
private <IN, OUT> SingleOutputStreamOperator<OUT> addAsyncOperatorLegacyChained(
	DataStream<IN> in,
	AsyncFunction<IN, OUT> func,
	long timeout,
	int bufSize,
	AsyncDataStream.OutputMode mode) {

	TypeInformation<OUT> outTypeInfo = TypeExtractor.getUnaryOperatorReturnType(
		func,
		AsyncFunction.class,
		0,
		1,
		new int[]{1, 0},
		in.getType(),
		Utils.getCallLocationName(),
		true);

	// create transform
	AsyncWaitOperator<IN, OUT> operator = new AsyncWaitOperator<>(
		in.getExecutionEnvironment().clean(func),
		timeout,
		bufSize,
		mode);

	operator.setChainingStrategy(ChainingStrategy.ALWAYS);

	return in.transform("async wait operator", outTypeInfo, operator);
}
 
Example 18
Source Project: alchemy   Source File: SideStream.java    License: Apache License 2.0
public static DataStream<Row> buildStream(StreamTableEnvironment env, SqlSelect sqlSelect, Alias leftAlias,
    Alias sideAlias, SourceDescriptor sideSource) throws Exception {
    SqlSelect leftSelect = SideParser.newSelect(sqlSelect, leftAlias.getTable(), leftAlias.getAlias(), true, false);
    // register leftTable
    Table leftTable = env.sqlQuery(leftSelect.toString());
    DataStream<Row> leftStream = env.toAppendStream(leftTable, Row.class);
    SqlSelect rightSelect
        = SideParser.newSelect(sqlSelect, sideAlias.getTable(), sideAlias.getAlias(), false, false);
    SqlJoin sqlJoin = (SqlJoin)sqlSelect.getFrom();
    List<String> equalFields = SideParser.findConditionFields(sqlJoin.getCondition(), leftAlias.getAlias());
    if (sideSource.getSide().isPartition()) {
        leftStream = leftStream.keyBy(equalFields.toArray(new String[equalFields.size()]));
    }
    RowTypeInfo sideType = createSideType(rightSelect.getSelectList(), sideSource.getSchema());
    RowTypeInfo returnType = createReturnType(leftTable.getSchema(), sideType);
    SideTable sideTable = createSideTable(leftTable.getSchema(), sideType, sqlJoin.getJoinType(), rightSelect,
        equalFields, sideAlias, sideSource.getSide());
    DataStream<Row> returnStream;
    if (sideSource.getSide().isAsync()) {
        AbstractAsyncSideFunction reqRow = sideSource.transform(sideTable);
        returnStream = AsyncDataStream.orderedWait(leftStream, reqRow, sideSource.getSide().getTimeout(),
            TimeUnit.MILLISECONDS, sideSource.getSide().getCapacity());
    } else {
        AbstractSyncSideFunction syncReqRow = sideSource.transform(sideTable);
        returnStream = leftStream.flatMap(syncReqRow);
    }
    returnStream.getTransformation().setOutputType(returnType);
    return returnStream;
}
 
Example 19
Source Project: sylph   Source File: AsyncFunctionHelper.java    License: Apache License 2.0
public static DataStream<Row> translate(
        DataStream<Row> inputStream,
        RealTimeTransForm transForm)
{
    RowTypeInfo streamRowType = (RowTypeInfo) inputStream.getType();
    AsyncFunction<Row, Row> asyncFunction = new RichAsyncFunctionImpl(transForm, streamRowType);

    DataStream<Row> joinResultStream = AsyncDataStream.orderedWait(
            inputStream, asyncFunction,
            1000, TimeUnit.MILLISECONDS, // timeout for each async request
            100);  // maximum number of in-flight async requests

    return joinResultStream;
}
 
Example 20
Source Project: flink-crawler   Source File: CrawlTopologyBuilder.java    License: Apache License 2.0
/**
 * Create a snippet of the topology that takes a RawUrl DataStream and applies an async lengthener, then a
 * normalizer and a validator. What we get out is a CrawlStateUrl DataStream.
 * 
 * @param rawUrls
 * @return
 */
private DataStream<CrawlStateUrl> cleanUrls(DataStream<RawUrl> rawUrls) {
    return AsyncDataStream
            .unorderedWait(rawUrls, new LengthenUrlsFunction(_urlLengthener),
                    _urlLengthener.getTimeoutInSeconds(), TimeUnit.SECONDS)
            .name("LengthenUrlsFunction")
            .flatMap(new NormalizeUrlsFunction(_urlNormalizer))
            .name("NormalizeUrlsFunction")
            .flatMap(new ValidUrlsFilter(_urlFilter))
            .name("ValidUrlsFilter");
}
 
Example 21
Source Project: flink   Source File: AsyncLookupJoinHarnessTest.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private OneInputStreamOperatorTestHarness<RowData, RowData> createHarness(
		JoinType joinType,
		FilterOnTable filterOnTable) throws Exception {
	RichAsyncFunction<RowData, RowData> joinRunner;
	boolean isLeftJoin = joinType == JoinType.LEFT_JOIN;
	if (filterOnTable == FilterOnTable.WITHOUT_FILTER) {
		joinRunner = new AsyncLookupJoinRunner(
			new GeneratedFunctionWrapper(new TestingFetcherFunction()),
			new GeneratedResultFutureWrapper<>(new TestingFetcherResultFuture()),
			fetcherReturnType,
			rightRowTypeInfo,
			isLeftJoin,
			ASYNC_BUFFER_CAPACITY);
	} else {
		joinRunner = new AsyncLookupJoinWithCalcRunner(
			new GeneratedFunctionWrapper(new TestingFetcherFunction()),
			new GeneratedFunctionWrapper<>(new CalculateOnTemporalTable()),
			new GeneratedResultFutureWrapper<>(new TestingFetcherResultFuture()),
			fetcherReturnType,
			rightRowTypeInfo,
			isLeftJoin,
			ASYNC_BUFFER_CAPACITY);
	}

	return new OneInputStreamOperatorTestHarness<>(
		new AsyncWaitOperatorFactory<>(
			joinRunner,
			ASYNC_TIMEOUT_MS,
			ASYNC_BUFFER_CAPACITY,
			AsyncDataStream.OutputMode.ORDERED),
		inSerializer);
}
 
Example 22
Source Project: flink   Source File: AsyncWaitOperatorFactory.java    License: Apache License 2.0
public AsyncWaitOperatorFactory(
		AsyncFunction<IN, OUT> asyncFunction,
		long timeout,
		int capacity,
		AsyncDataStream.OutputMode outputMode) {
	this.asyncFunction = asyncFunction;
	this.timeout = timeout;
	this.capacity = capacity;
	this.outputMode = outputMode;
	this.chainingStrategy = ChainingStrategy.ALWAYS;
}
 
Example 23
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * This helper function is needed to check that the temporary fix for FLINK-13063 can be backwards compatible with
 * the old chaining behavior by setting the ChainingStrategy manually. TODO: remove after a proper fix for
 * FLINK-13063 is in place that allows chaining.
 */
private <IN, OUT> SingleOutputStreamOperator<OUT> addAsyncOperatorLegacyChained(
	DataStream<IN> in,
	AsyncFunction<IN, OUT> func,
	long timeout,
	int bufSize,
	AsyncDataStream.OutputMode mode) {

	TypeInformation<OUT> outTypeInfo = TypeExtractor.getUnaryOperatorReturnType(
		func,
		AsyncFunction.class,
		0,
		1,
		new int[]{1, 0},
		in.getType(),
		Utils.getCallLocationName(),
		true);

	// create transform
	AsyncWaitOperatorFactory<IN, OUT> factory = new AsyncWaitOperatorFactory<>(
		in.getExecutionEnvironment().clean(func),
		timeout,
		bufSize,
		mode);

	factory.setChainingStrategy(ChainingStrategy.ALWAYS);

	return in.transform("async wait operator", outTypeInfo, factory);
}
 
Example 24
Source Project: flink   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private static <OUT> OneInputStreamOperatorTestHarness<Integer, OUT> createTestHarness(
		AsyncFunction<Integer, OUT> function,
		long timeout,
		int capacity,
		AsyncDataStream.OutputMode outputMode) throws Exception {

	return new OneInputStreamOperatorTestHarness<>(
		new AsyncWaitOperatorFactory<>(function, timeout, capacity, outputMode),
		IntSerializer.INSTANCE);
}
 
Example 25
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * Test the AsyncWaitOperator with ordered mode and event time.
 */
@Test
public void testEventTimeOrdered() throws Exception {
	testEventTime(AsyncDataStream.OutputMode.ORDERED);
}
 
Example 26
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * Test the AsyncWaitOperator with unordered mode and event time.
 */
@Test
public void testWaterMarkUnordered() throws Exception {
	testEventTime(AsyncDataStream.OutputMode.UNORDERED);
}
 
Example 27
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
private void testEventTime(AsyncDataStream.OutputMode mode) throws Exception {
	final AsyncWaitOperator<Integer, Integer> operator = new AsyncWaitOperator<>(
		new MyAsyncFunction(),
		TIMEOUT,
		2,
		mode);

	final OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
			new OneInputStreamOperatorTestHarness<>(operator, IntSerializer.INSTANCE);

	final long initialTime = 0L;
	final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.open();

	synchronized (testHarness.getCheckpointLock()) {
		testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
		testHarness.processElement(new StreamRecord<>(2, initialTime + 2));
		testHarness.processWatermark(new Watermark(initialTime + 2));
		testHarness.processElement(new StreamRecord<>(3, initialTime + 3));
	}

	// wait until all async collectors in the buffer have been emitted out.
	synchronized (testHarness.getCheckpointLock()) {
		testHarness.close();
	}

	expectedOutput.add(new StreamRecord<>(2, initialTime + 1));
	expectedOutput.add(new StreamRecord<>(4, initialTime + 2));
	expectedOutput.add(new Watermark(initialTime + 2));
	expectedOutput.add(new StreamRecord<>(6, initialTime + 3));

	if (AsyncDataStream.OutputMode.ORDERED == mode) {
		TestHarnessUtil.assertOutputEquals("Output with watermark was not correct.", expectedOutput, testHarness.getOutput());
	}
	else {
		Object[] jobOutputQueue = testHarness.getOutput().toArray();

		Assert.assertEquals("Watermark should be at index 2", new Watermark(initialTime + 2), jobOutputQueue[2]);
		Assert.assertEquals("StreamRecord 3 should be at the end", new StreamRecord<>(6, initialTime + 3), jobOutputQueue[3]);

		TestHarnessUtil.assertOutputEqualsSorted(
				"Output for StreamRecords does not match",
				expectedOutput,
				testHarness.getOutput(),
				new StreamRecordComparator());
	}
}
 
Example 28
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * Test the AsyncWaitOperator with ordered mode and processing time.
 */
@Test
public void testProcessingTimeOrdered() throws Exception {
	testProcessingTime(AsyncDataStream.OutputMode.ORDERED);
}
 
Example 29
Source Project: Flink-CEPplus   Source File: AsyncWaitOperatorTest.java    License: Apache License 2.0
/**
 * Test the AsyncWaitOperator with unordered mode and processing time.
 */
@Test
public void testProcessingUnordered() throws Exception {
	testProcessingTime(AsyncDataStream.OutputMode.UNORDERED);
}