Java Code Examples for org.apache.flink.streaming.runtime.streamrecord.StreamRecord

The following examples show how to use org.apache.flink.streaming.runtime.streamrecord.StreamRecord. These examples are extracted from open source projects; the originating project, source file, and license are noted above each example where available.
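For orientation before the examples: StreamRecord is the wrapper the Flink streaming runtime uses to carry a user value together with an optional event timestamp between operators. The snippet below is a minimal sketch of the accessors exercised throughout the examples (constructors, getValue, getTimestamp, hasTimestamp, replace, copy); the class name and the literal values are illustrative only and not taken from any of the projects listed here.

import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

public class StreamRecordSketch {

	public static void main(String[] args) {
		// a record without an event timestamp
		StreamRecord<String> plain = new StreamRecord<>("hello");
		System.out.println(plain.hasTimestamp());        // false

		// a record carrying an event timestamp
		StreamRecord<String> timestamped = new StreamRecord<>("hello", 42L);
		System.out.println(timestamped.getValue());      // hello
		System.out.println(timestamped.getTimestamp());  // 42

		// replace() reuses the same record object with a new value (and optionally a new timestamp)
		StreamRecord<Integer> replaced = timestamped.replace(5, 43L);

		// copy() creates a new StreamRecord with the given value and the same timestamp
		StreamRecord<Integer> copied = replaced.copy(7);
		System.out.println(copied.getTimestamp());       // 43
	}
}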
Example 1
private Tuple2<Long, Long> validateElement(Object element, long nextElementValue, long currentWatermark) {
	if (element instanceof StreamRecord) {
		@SuppressWarnings("unchecked")
		StreamRecord<Long> record = (StreamRecord<Long>) element;
		assertEquals(nextElementValue, record.getValue().longValue());
		assertEquals(nextElementValue, record.getTimestamp());
		return new Tuple2<>(nextElementValue + 1, currentWatermark);
	}
	else if (element instanceof Watermark) {
		long wt = ((Watermark) element).getTimestamp();
		assertTrue(wt > currentWatermark);
		return new Tuple2<>(nextElementValue, wt);
	}
	else {
		throw new IllegalArgumentException("unrecognized element: " + element);
	}
}
 
Example 2
Source Project: Flink-CEPplus   Source File: CopyingDirectedOutput.java    License: Apache License 2.0
@Override
public void collect(StreamRecord<OUT> record) {
	Set<Output<StreamRecord<OUT>>> selectedOutputs = selectOutputs(record);

	if (selectedOutputs.isEmpty()) {
		return;
	}

	Iterator<Output<StreamRecord<OUT>>> it = selectedOutputs.iterator();

	while (true) {
		Output<StreamRecord<OUT>> out = it.next();
		if (it.hasNext()) {
			// we don't have the last output
			// perform a shallow copy
			StreamRecord<OUT> shallowCopy = record.copy(record.getValue());
			out.collect(shallowCopy);
		} else {
			// this is the last output
			out.collect(record);
			break;
		}
	}
}
 
Example 3
Source Project: flink   Source File: StreamElementQueueTest.java    License: Apache License 2.0
@Test
public void testPop() {
	StreamElementQueue<Integer> queue = createStreamElementQueue(2);

	// add two elements to reach capacity
	putSuccessfully(queue, new Watermark(0L));
	ResultFuture<Integer> recordResult = putSuccessfully(queue, new StreamRecord<>(42, 1L));

	assertEquals(2, queue.size());

	// remove completed elements (watermarks are always completed)
	assertEquals(Arrays.asList(new Watermark(0L)), popCompleted(queue));
	assertEquals(1, queue.size());

	// now complete the stream record
	recordResult.complete(Collections.singleton(43));

	assertEquals(Arrays.asList(new StreamRecord<>(43, 1L)), popCompleted(queue));
	assertEquals(0, queue.size());
	assertTrue(queue.isEmpty());
}
 
Example 4
Source Project: flink   Source File: AbstractStreamOperatorTestHarness.java    License: Apache License 2.0
@Override
public <X> void collect(OutputTag<X> outputTag, StreamRecord<X> record) {
	sideOutputSerializer = TypeExtractor.getForObject(record.getValue()).createSerializer(executionConfig);

	ConcurrentLinkedQueue<Object> sideOutputList = sideOutputLists.get(outputTag);
	if (sideOutputList == null) {
		sideOutputList = new ConcurrentLinkedQueue<>();
		sideOutputLists.put(outputTag, sideOutputList);
	}
	if (record.hasTimestamp()) {
		sideOutputList.add(new StreamRecord<>(sideOutputSerializer.copy(record.getValue()), record.getTimestamp()));
	} else {
		sideOutputList.add(new StreamRecord<>(sideOutputSerializer.copy(record.getValue())));
	}

}
 
Example 5
Source Project: flink   Source File: NFATest.java    License: Apache License 2.0
/**
 * Tests that pruning of shared buffer elements and computation states uses the same window border
 * semantics (left side inclusive and right side exclusive).
 */
@Test
public void testTimeoutWindowPruningWindowBorders() throws Exception {
	List<StreamRecord<Event>> streamEvents = new ArrayList<>();

	streamEvents.add(new StreamRecord<>(new Event(1, "start", 1.0), 1L));
	streamEvents.add(new StreamRecord<>(new Event(2, "start", 2.0), 2L));
	streamEvents.add(new StreamRecord<>(new Event(3, "foobar", 3.0), 3L));
	streamEvents.add(new StreamRecord<>(new Event(4, "end", 4.0), 3L));

	List<Map<String, List<Event>>> expectedPatterns = new ArrayList<>();

	Map<String, List<Event>> secondPattern = new HashMap<>();
	secondPattern.put("start", Collections.singletonList(new Event(2, "start", 2.0)));
	secondPattern.put("end", Collections.singletonList(new Event(4, "end", 4.0)));

	expectedPatterns.add(secondPattern);

	NFA<Event> nfa = createStartEndNFA();
	NFATestHarness nfaTestHarness = NFATestHarness.forNFA(nfa).build();

	Collection<Map<String, List<Event>>> actualPatterns = nfaTestHarness.consumeRecords(streamEvents);

	assertEquals(expectedPatterns, actualPatterns);
}
 
Example 6
@Test
public void testEventTimestamp() throws Exception {
	final Event event = event().withId(1).build();
	final long timestamp = 3;

	final Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new IterativeCondition<Event>() {
		@Override
		public boolean filter(Event value, Context<Event> ctx) throws Exception {
			return ctx.timestamp() == timestamp;
		}
	});

	final NFATestHarness testHarness = forPattern(pattern).build();

	final List<List<Event>> resultingPattern = testHarness.feedRecord(new StreamRecord<>(event, timestamp));

	comparePatterns(resultingPattern, Collections.singletonList(
		Collections.singletonList(event)
	));
}
 
Example 7
Source Project: flink   Source File: NFATest.java    License: Apache License 2.0
@Test
public void testTimeoutWindowPruning() throws Exception {
	List<StreamRecord<Event>> streamEvents = new ArrayList<>();

	streamEvents.add(new StreamRecord<>(new Event(1, "start", 1.0), 1L));
	streamEvents.add(new StreamRecord<>(new Event(2, "bar", 2.0), 2L));
	streamEvents.add(new StreamRecord<>(new Event(3, "start", 3.0), 3L));
	streamEvents.add(new StreamRecord<>(new Event(4, "end", 4.0), 4L));

	List<Map<String, List<Event>>> expectedPatterns = new ArrayList<>();

	Map<String, List<Event>> secondPattern = new HashMap<>();
	secondPattern.put("start", Collections.singletonList(new Event(3, "start", 3.0)));
	secondPattern.put("end", Collections.singletonList(new Event(4, "end", 4.0)));

	expectedPatterns.add(secondPattern);

	NFA<Event> nfa = createStartEndNFA();
	NFATestHarness nfaTestHarness = NFATestHarness.forNFA(nfa).build();

	Collection<Map<String, List<Event>>> actualPatterns = nfaTestHarness.consumeRecords(streamEvents);

	assertEquals(expectedPatterns, actualPatterns);
}
 
Example 8
Source Project: Flink-CEPplus   Source File: WindowOperatorContractTest.java    License: Apache License 2.0
@Test
public void testLateWindowDropping() throws Exception {
	WindowAssigner<Integer, TimeWindow> mockAssigner = mockTimeWindowAssigner();
	Trigger<Integer, TimeWindow> mockTrigger = mockTrigger();
	InternalWindowFunction<Iterable<Integer>, Void, Integer, TimeWindow> mockWindowFunction = mockWindowFunction();

	KeyedOneInputStreamOperatorTestHarness<Integer, Integer, Void> testHarness =
			createWindowOperator(mockAssigner, mockTrigger, 20L, mockWindowFunction);

	testHarness.open();

	when(mockAssigner.assignWindows(anyInt(), anyLong(), anyAssignerContext()))
			.thenReturn(Arrays.asList(new TimeWindow(0, 2)));

	assertEquals(0, testHarness.getOutput().size());
	assertEquals(0, testHarness.numKeyedStateEntries());

	shouldFireOnElement(mockTrigger);

	// window.maxTime() == 1 plus 20L of allowed lateness
	testHarness.processWatermark(new Watermark(21));

	testHarness.processElement(new StreamRecord<>(0, 0L));

	// there should be nothing
	assertEquals(0, testHarness.numKeyedStateEntries());
	assertEquals(0, testHarness.numEventTimeTimers());
	assertEquals(0, testHarness.numProcessingTimeTimers());

	// the late element should have been dropped, so no output was emitted
	assertEquals(0, testHarness.extractOutputStreamRecords().size());
}
 
Example 9
Source Project: flink   Source File: TemporalRowTimeJoinOperator.java    License: Apache License 2.0
@Override
public void processElement2(StreamRecord<BaseRow> element) throws Exception {
	BaseRow row = element.getValue();
	checkNotRetraction(row);

	long rowTime = getRightTime(row);
	rightState.put(rowTime, row);
	registerSmallestTimer(rowTime); // Timer to clean up the state

	registerProcessingCleanupTimer();
}
 
Example 10
Source Project: flink   Source File: SourceStreamTaskTest.java    License: Apache License 2.0
@Test
public void testNotMarkingEndOfInputWhenTaskCancelled() throws Exception {
	final StreamTaskTestHarness<String> testHarness = new StreamTaskTestHarness<>(
		SourceStreamTask::new,
		BasicTypeInfo.STRING_TYPE_INFO);

	testHarness
		.setupOperatorChain(
			new OperatorID(),
			new StreamSource<>(new CancelTestSource(
				BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), "Hello")))
		.chain(
			new OperatorID(),
			new TestBoundedOneInputStreamOperator("Operator1"),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
		.finish();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.invoke();
	CancelTestSource.getDataProcessing().await();
	testHarness.getTask().cancel();

	try {
		testHarness.waitForTaskCompletion();
	} catch (Throwable t) {
		if (!ExceptionUtils.findThrowable(t, CancelTaskException.class).isPresent()) {
			throw t;
		}
	}

	expectedOutput.add(new StreamRecord<>("Hello"));

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example 11
Source Project: flink   Source File: OneInputStreamTaskTest.java    License: Apache License 2.0
@Test
public void testHandlingEndOfInput() throws Exception {
	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
		OneInputStreamTask::new,
		BasicTypeInfo.STRING_TYPE_INFO,
		BasicTypeInfo.STRING_TYPE_INFO);

	testHarness
		.setupOperatorChain(new OperatorID(), new TestBoundedOneInputStreamOperator("Operator0"))
		.chain(
			new OperatorID(),
			new TestBoundedOneInputStreamOperator("Operator1"),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
		.finish();

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.invoke();
	testHarness.waitForTaskRunning();

	testHarness.processElement(new StreamRecord<>("Hello"));
	testHarness.endInput();

	testHarness.waitForTaskCompletion();

	expectedOutput.add(new StreamRecord<>("Hello"));
	expectedOutput.add(new StreamRecord<>("[Operator0]: Bye"));
	expectedOutput.add(new StreamRecord<>("[Operator1]: Bye"));

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example 12
Source Project: flink   Source File: WindowOperatorContractTest.java    License: Apache License 2.0
@Test
public void testWindowStateNotAvailableToMergingWindows() throws Exception {
	WindowAssigner<Integer, TimeWindow> mockAssigner = mockMergingAssigner();
	Trigger<Integer, TimeWindow> mockTrigger = mockTrigger();
	InternalWindowFunction<Iterable<Integer>, Void, Integer, TimeWindow> mockWindowFunction = mockWindowFunction();

	KeyedOneInputStreamOperatorTestHarness<Integer, Integer, Void> testHarness =
		createWindowOperator(mockAssigner, mockTrigger, 20L, mockWindowFunction);

	testHarness.open();

	when(mockTrigger.onElement(anyInt(), anyLong(), anyTimeWindow(), anyTriggerContext()))
		.thenReturn(TriggerResult.FIRE);

	when(mockAssigner.assignWindows(anyInt(), anyLong(), anyAssignerContext()))
		.thenReturn(Arrays.asList(new TimeWindow(0, 20)));

	doAnswer(new Answer<Object>() {
		@Override
		public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
			InternalWindowFunction.InternalWindowContext context = (InternalWindowFunction.InternalWindowContext) invocationOnMock.getArguments()[2];
			context.windowState().getState(valueStateDescriptor).update("hello");
			return null;
		}
	}).when(mockWindowFunction).process(anyInt(), anyTimeWindow(), anyInternalWindowContext(), anyIntIterable(), WindowOperatorContractTest.<Void>anyCollector());

	expectedException.expect(UnsupportedOperationException.class);
	expectedException.expectMessage("Per-window state is not allowed when using merging windows.");
	testHarness.processElement(new StreamRecord<>(0, 0L));
}
 
Example 13
Source Project: Flink-CEPplus   Source File: EventTimeTriggerTest.java    License: Apache License 2.0
/**
 * Verify that state of separate windows does not leak into other windows.
 */
@Test
public void testWindowSeparationAndFiring() throws Exception {
	TriggerTestHarness<Object, TimeWindow> testHarness =
			new TriggerTestHarness<>(EventTimeTrigger.create(), new TimeWindow.Serializer());

	// inject several elements
	assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
	assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
	assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
	assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));
	assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));

	assertEquals(0, testHarness.numStateEntries());
	assertEquals(0, testHarness.numProcessingTimeTimers());
	assertEquals(2, testHarness.numEventTimeTimers());
	assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
	assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));

	assertEquals(TriggerResult.FIRE, testHarness.advanceWatermark(2, new TimeWindow(0, 2)));

	assertEquals(0, testHarness.numStateEntries());
	assertEquals(0, testHarness.numProcessingTimeTimers());
	assertEquals(1, testHarness.numEventTimeTimers());
	assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
	assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));

	assertEquals(TriggerResult.FIRE, testHarness.advanceWatermark(4, new TimeWindow(2, 4)));

	assertEquals(0, testHarness.numStateEntries());
	assertEquals(0, testHarness.numProcessingTimeTimers());
	assertEquals(0, testHarness.numEventTimeTimers());
}
 
Example 14
Source Project: flink   Source File: KeyedCoProcessOperator.java    License: Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
	if (outputTag == null) {
		throw new IllegalArgumentException("OutputTag must not be null.");
	}

	output.collect(outputTag, new StreamRecord<>(value, timer.getTimestamp()));
}
 
Example 15
Source Project: flink   Source File: CEPOperatorTest.java    License: Apache License 2.0
private void verifyPattern(Object outputObject, Event start, SubEvent middle, Event end) {
	assertTrue(outputObject instanceof StreamRecord);

	StreamRecord<?> resultRecord = (StreamRecord<?>) outputObject;
	assertTrue(resultRecord.getValue() instanceof Map);

	@SuppressWarnings("unchecked")
	Map<String, List<Event>> patternMap = (Map<String, List<Event>>) resultRecord.getValue();
	assertEquals(start, patternMap.get("start").get(0));
	assertEquals(middle, patternMap.get("middle").get(0));
	assertEquals(end, patternMap.get("end").get(0));
}
 
Example 16
Source Project: Flink-CEPplus   Source File: TestHarnessUtil.java    License: Apache License 2.0
/**
 * Extracts the raw elements from the given output list.
 */
@SuppressWarnings("unchecked")
public static <OUT> List<OUT> getRawElementsFromOutput(Queue<Object> output) {
	List<OUT> resultElements = new LinkedList<>();
	for (Object e: output) {
		if (e instanceof StreamRecord) {
			resultElements.add(((StreamRecord<OUT>) e).getValue());
		}
	}
	return resultElements;
}
 
Example 17
Source Project: flink   Source File: LegacyKeyedCoProcessOperator.java    License: Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
	if (outputTag == null) {
		throw new IllegalArgumentException("OutputTag must not be null.");
	}

	output.collect(outputTag, new StreamRecord<>(value, element.getTimestamp()));
}
 
Example 18
Source Project: Flink-CEPplus   Source File: ProcessOperator.java    License: Apache License 2.0
@Override
public void processElement(StreamRecord<IN> element) throws Exception {
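	// make the element's timestamp available to the output collector and to the
	// user function's context for the duration of this call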
	collector.setTimestamp(element);
	context.element = element;
	userFunction.processElement(element.getValue(), context, collector);
	context.element = null;
}
 
Example 19
Source Project: flink   Source File: LegacyKeyedCoProcessOperator.java    License: Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
	if (outputTag == null) {
		throw new IllegalArgumentException("OutputTag must not be null.");
	}

	output.collect(outputTag, new StreamRecord<>(value, timer.getTimestamp()));
}
 
Example 20
public ParallelFromStreamRecordsFunction(TypeSerializer<StreamRecord<T>> serializer,
                                         Iterable<StreamRecord<T>> input,
                                         Boolean flushOpenWindows
) throws IOException {
    this.serializer = serializer;
    elementsSerialized = serializeOutput(input, serializer).toByteArray();
    numElements = Iterables.size(input);
    this.flushOpenWindows = flushOpenWindows;
}
 
Example 21
@SuppressWarnings("unchecked")
private void validHarnessResult(
	KeyedOneInputStreamOperatorTestHarness<?, String, ?> harness,
	Integer expectedValue,
	String... records) throws Exception {
	for (String record : records) {
		harness.processElement(new StreamRecord<>(record, 1));
		StreamRecord<Integer> outputRecord = (StreamRecord<Integer>) harness.getOutput().poll();
		Assert.assertNotNull(outputRecord);
		Assert.assertEquals(expectedValue, outputRecord.getValue());
	}
}
 
Example 22
Source Project: beam   Source File: DoFnOperatorTest.java    License: Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testSingleOutput() throws Exception {

  Coder<WindowedValue<String>> coder = WindowedValue.getValueOnlyCoder(StringUtf8Coder.of());

  TupleTag<String> outputTag = new TupleTag<>("main-output");

  DoFnOperator<String, String> doFnOperator =
      new DoFnOperator<>(
          new IdentityDoFn<>(),
          "stepName",
          coder,
          Collections.emptyMap(),
          outputTag,
          Collections.emptyList(),
          new DoFnOperator.MultiOutputOutputManagerFactory<>(outputTag, coder),
          WindowingStrategy.globalDefault(),
          new HashMap<>(), /* side-input mapping */
          Collections.emptyList(), /* side inputs */
          PipelineOptionsFactory.as(FlinkPipelineOptions.class),
          null,
          null,
          DoFnSchemaInformation.create(),
          Collections.emptyMap());

  OneInputStreamOperatorTestHarness<WindowedValue<String>, WindowedValue<String>> testHarness =
      new OneInputStreamOperatorTestHarness<>(doFnOperator);

  testHarness.open();

  testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("Hello")));

  assertThat(
      stripStreamRecordFromWindowedValue(testHarness.getOutput()),
      contains(WindowedValue.valueInGlobalWindow("Hello")));

  testHarness.close();
}
 
Example 23
Source Project: Flink-CEPplus   Source File: NFATestHarness.java    License: Apache License 2.0
public List<List<Event>> feedRecord(StreamRecord<Event> inputEvent) throws Exception {
	final List<List<Event>> resultingPatterns = new ArrayList<>();
	final Collection<Map<String, List<Event>>> matches = consumeRecord(inputEvent);
	for (Map<String, List<Event>> p : matches) {
		List<Event> res = new ArrayList<>();
		for (List<Event> le : p.values()) {
			res.addAll(le);
		}
		resultingPatterns.add(res);
	}
	return resultingPatterns;
}
 
Example 24
Source Project: flink   Source File: SourceStreamTaskTest.java    License: Apache License 2.0
@Test
public void testMarkingEndOfInput() throws Exception {
	final StreamTaskTestHarness<String> testHarness = new StreamTaskTestHarness<>(
		SourceStreamTask::new,
		BasicTypeInfo.STRING_TYPE_INFO);

	testHarness
		.setupOperatorChain(
			new OperatorID(),
			new StreamSource<>(new FromElementsFunction<>(
				BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), "Hello")))
		.chain(
			new OperatorID(),
			new TestBoundedOneInputStreamOperator("Operator1"),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
		.finish();

	StreamConfig streamConfig = testHarness.getStreamConfig();
	streamConfig.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	testHarness.invoke();
	testHarness.waitForTaskCompletion();

	expectedOutput.add(new StreamRecord<>("Hello"));
	expectedOutput.add(new StreamRecord<>("[Operator1]: Bye"));

	TestHarnessUtil.assertOutputEquals("Output was not correct.",
		expectedOutput,
		testHarness.getOutput());
}
 
Example 25
Source Project: Flink-CEPplus   Source File: AsyncWaitOperator.java    License: Apache License 2.0
@Override
public void processElement(StreamRecord<IN> element) throws Exception {
	final StreamRecordQueueEntry<OUT> streamRecordBufferEntry = new StreamRecordQueueEntry<>(element);

	if (timeout > 0L) {
		// register a timeout for this AsyncStreamRecordBufferEntry
		long timeoutTimestamp = timeout + getProcessingTimeService().getCurrentProcessingTime();

		final ScheduledFuture<?> timerFuture = getProcessingTimeService().registerTimer(
			timeoutTimestamp,
			new ProcessingTimeCallback() {
				@Override
				public void onProcessingTime(long timestamp) throws Exception {
					userFunction.timeout(element.getValue(), streamRecordBufferEntry);
				}
			});

		// Cancel the timer once we've completed the stream record buffer entry. This will remove
		// the registered trigger task
		streamRecordBufferEntry.onComplete(
			(StreamElementQueueEntry<Collection<OUT>> value) -> {
				timerFuture.cancel(true);
			},
			executor);
	}

	addAsyncBufferEntry(streamRecordBufferEntry);

	userFunction.asyncInvoke(element.getValue(), streamRecordBufferEntry);
}
 
Example 26
Source Project: flink   Source File: StreamTaskSelectiveReadingTest.java    License: Apache License 2.0
@Test
public void testSequentialReading() throws Exception {
	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
	expectedOutput.add(new StreamRecord<>("[Operator0-1]: Hello-1"));
	expectedOutput.add(new StreamRecord<>("[Operator0-1]: Hello-2"));
	expectedOutput.add(new StreamRecord<>("[Operator0-1]: Hello-3"));
	expectedOutput.add(new StreamRecord<>("[Operator0-2]: 1"));
	expectedOutput.add(new StreamRecord<>("[Operator0-2]: 2"));
	expectedOutput.add(new StreamRecord<>("[Operator0-2]: 3"));
	expectedOutput.add(new StreamRecord<>("[Operator0-2]: 4"));

	testBase(new TestSequentialReadingStreamOperator("Operator0"), false, expectedOutput, true);
}
 
Example 27
Source Project: Flink-CEPplus   Source File: CepOperator.java    License: Apache License 2.0
@Override
public <X> void output(final OutputTag<X> outputTag, final X value) {
	final StreamRecord<X> record;
	if (isProcessingTime) {
		record = new StreamRecord<>(value);
	} else {
		record = new StreamRecord<>(value, timestamp());
	}
	output.collect(outputTag, record);
}
 
Example 28
Source Project: flink   Source File: EvictingWindowOperator.java    License: Apache License 2.0
private void clearAllState(
		W window,
		ListState<StreamRecord<IN>> windowState,
		MergingWindowSet<W> mergingWindows) throws Exception {
	windowState.clear();
	triggerContext.clear();
	processContext.window = window;
	processContext.clear();
	if (mergingWindows != null) {
		mergingWindows.retireWindow(window);
		mergingWindows.persist();
	}
}
 
Example 29
Source Project: Flink-CEPplus   Source File: ProcessOperatorTest.java    License: Apache License 2.0
/**
 * This also verifies that the timestamps of side-emitted records are correct.
 */
@Test
public void testSideOutput() throws Exception {
	ProcessOperator<Integer, String> operator =
		new ProcessOperator<>(new SideOutputProcessFunction());

	OneInputStreamOperatorTestHarness<Integer, String> testHarness =
		new OneInputStreamOperatorTestHarness<>(operator);

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>(42, 17L /* timestamp */));

	ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

	expectedOutput.add(new StreamRecord<>("IN:42", 17L /* timestamp */));

	TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

	ConcurrentLinkedQueue<StreamRecord<Integer>> expectedIntSideOutput = new ConcurrentLinkedQueue<>();
	expectedIntSideOutput.add(new StreamRecord<>(42, 17L /* timestamp */));
	ConcurrentLinkedQueue<StreamRecord<Integer>> intSideOutput =
		testHarness.getSideOutput(SideOutputProcessFunction.INTEGER_OUTPUT_TAG);
	TestHarnessUtil.assertOutputEquals(
		"Side output was not correct.",
		expectedIntSideOutput,
		intSideOutput);

	ConcurrentLinkedQueue<StreamRecord<Long>> expectedLongSideOutput = new ConcurrentLinkedQueue<>();
	expectedLongSideOutput.add(new StreamRecord<>(42L, 17L /* timestamp */));
	ConcurrentLinkedQueue<StreamRecord<Long>> longSideOutput =
		testHarness.getSideOutput(SideOutputProcessFunction.LONG_OUTPUT_TAG);
	TestHarnessUtil.assertOutputEquals(
		"Side output was not correct.",
		expectedLongSideOutput,
		longSideOutput);

	testHarness.close();
}
 
Example 30
@Override
public void processElement(StreamRecord<T> element) throws Exception {
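	// ask the user-defined extractor for a new timestamp; if the record does not yet
	// carry a timestamp, Long.MIN_VALUE is passed as the previous timestamp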
	final long newTimestamp = userFunction.extractTimestamp(element.getValue(),
			element.hasTimestamp() ? element.getTimestamp() : Long.MIN_VALUE);

	output.collect(element.replace(element.getValue(), newTimestamp));
}