org.apache.flink.streaming.api.windowing.time.Time Java Examples
The following examples show how to use
org.apache.flink.streaming.api.windowing.time.Time.
Each example is drawn from an open-source project; the project it comes from, the source file, and its license are noted above the code.
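Before the individual examples, here is a minimal, self-contained sketch of the Time factory methods and the toMilliseconds() conversion that the examples below rely on. The class name TimeUsageSketch is made up purely for illustration; only the Time calls themselves are taken from the examples.

import java.util.concurrent.TimeUnit;
import org.apache.flink.streaming.api.windowing.time.Time;

public class TimeUsageSketch {
    public static void main(String[] args) {
        // Factory methods for common units, as used throughout the examples below.
        Time gap = Time.seconds(5);
        Time slide = Time.milliseconds(100);
        Time lateness = Time.minutes(1);
        Time interval = Time.hours(1);
        Time generic = Time.of(1, TimeUnit.SECONDS); // explicit TimeUnit variant

        // Every Time value can be converted back to milliseconds.
        System.out.println(gap.toMilliseconds());      // 5000
        System.out.println(slide.toMilliseconds());    // 100
        System.out.println(lateness.toMilliseconds()); // 60000
        System.out.println(interval.toMilliseconds()); // 3600000
        System.out.println(generic.toMilliseconds());  // 1000
    }
}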
Example #1
Source File: ProcessingTimeSessionWindowsTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testWindowAssignment() {
    WindowAssigner.WindowAssignerContext mockContext =
            mock(WindowAssigner.WindowAssignerContext.class);

    ProcessingTimeSessionWindows assigner = ProcessingTimeSessionWindows.withGap(Time.milliseconds(5000));

    when(mockContext.getCurrentProcessingTime()).thenReturn(0L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(0, 5000)));

    when(mockContext.getCurrentProcessingTime()).thenReturn(4999L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(4999, 9999)));

    when(mockContext.getCurrentProcessingTime()).thenReturn(5000L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(5000, 10000)));
}
Example #2
Source File: FoldFunctionDemo.java From flink-simple-tutorial with Apache License 2.0

public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Build the input data
    List<Tuple2<String, Long>> data = new ArrayList<>();
    Tuple2<String, Long> a = new Tuple2<>("first event", 1L);
    Tuple2<String, Long> b = new Tuple2<>("second event", 2L);
    data.add(a);
    data.add(b);
    DataStreamSource<Tuple2<String, Long>> input = env.fromCollection(data);

    input.keyBy(x -> x.f1)
        .timeWindow(Time.seconds(10), Time.seconds(1))
        .fold("flink", (acc, t) -> t.f0 + acc);

    env.execute();
}
Example #3
Source File: KafkaItemTransactionJob.java From flink-tutorials with Apache License 2.0

public DataStream<ItemTransaction> readTransactionStream(ParameterTool params, StreamExecutionEnvironment env) {
    // We read the ItemTransaction objects directly using the schema
    FlinkKafkaConsumer<ItemTransaction> transactionSource = new FlinkKafkaConsumer<>(
            params.getRequired(TRANSACTION_INPUT_TOPIC_KEY), new TransactionSchema(),
            Utils.readKafkaProperties(params, true));

    transactionSource.setCommitOffsetsOnCheckpoints(true);
    transactionSource.setStartFromEarliest();

    // In case event time processing is enabled we assign trailing watermarks for each partition
    transactionSource.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<ItemTransaction>(Time.minutes(1)) {
        @Override
        public long extractTimestamp(ItemTransaction transaction) {
            return transaction.ts;
        }
    });

    return env.addSource(transactionSource)
            .name("Kafka Transaction Source")
            .uid("Kafka Transaction Source");
}
Example #4
Source File: EventTimeSessionWindowsTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testMergeConsecutiveWindows() {
    MergingWindowAssigner.MergeCallback callback = mock(MergingWindowAssigner.MergeCallback.class);

    EventTimeSessionWindows assigner = EventTimeSessionWindows.withGap(Time.milliseconds(5000));

    assigner.mergeWindows(
            Lists.newArrayList(
                    new TimeWindow(0, 1),
                    new TimeWindow(1, 2),
                    new TimeWindow(2, 3),
                    new TimeWindow(4, 5),
                    new TimeWindow(5, 6)),
            callback);

    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(0, 1), new TimeWindow(1, 2), new TimeWindow(2, 3))),
            eq(new TimeWindow(0, 3)));

    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(4, 5), new TimeWindow(5, 6))),
            eq(new TimeWindow(4, 6)));

    verify(callback, times(2)).merge(anyCollection(), Matchers.anyObject());
}
Example #5
Source File: SummaryBulkAggregation.java From gelly-streaming with Apache License 2.0

@SuppressWarnings("unchecked")
@Override
public DataStream<T> run(final DataStream<Edge<K, EV>> edgeStream) {
    // For parallel window support we key the edge stream by partition and apply a parallel fold per partition.
    // Finally, we merge all locally combined results into our final graph aggregation property.
    TupleTypeInfo edgeTypeInfo = (TupleTypeInfo) edgeStream.getType();
    TypeInformation<S> returnType = TypeExtractor.createTypeInfo(EdgesFold.class, getUpdateFun().getClass(), 2,
            edgeTypeInfo.getTypeAt(0), edgeTypeInfo.getTypeAt(2));

    TypeInformation<Tuple2<Integer, Edge<K, EV>>> typeInfo =
            new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, edgeStream.getType());

    DataStream<S> partialAgg = edgeStream
            .map(new PartitionMapper<>()).returns(typeInfo)
            .keyBy(0)
            .timeWindow(Time.of(timeMillis, TimeUnit.MILLISECONDS))
            .fold(getInitialValue(), new PartialAgg<>(getUpdateFun(), returnType))
            .timeWindowAll(Time.of(timeMillis, TimeUnit.MILLISECONDS))
            .reduce(getCombineFun())
            .flatMap(getAggregator(edgeStream)).setParallelism(1);

    if (getTransform() != null) {
        return partialAgg.map(getTransform());
    }

    return (DataStream<T>) partialAgg;
}
Example #6
Source File: AllWindowTranslationTest.java From flink with Apache License 2.0

@Test
@SuppressWarnings("rawtypes")
public void testFoldProcessingTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window = source
            .windowAll(SlidingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .fold(new Tuple3<>("", "", 0), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #7
Source File: NumberOfWikiEditsPerWindow.java From flink-examples with MIT License

public static void main(String... args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<WikipediaEditEvent> edits = env.addSource(new WikipediaEditsSource());

    edits
        .timeWindowAll(Time.minutes(1))
        .apply(new AllWindowFunction<WikipediaEditEvent, Tuple3<Date, Long, Long>, TimeWindow>() {
            @Override
            public void apply(TimeWindow timeWindow, Iterable<WikipediaEditEvent> iterable, Collector<Tuple3<Date, Long, Long>> collector) throws Exception {
                long count = 0;
                long bytesChanged = 0;
                for (WikipediaEditEvent event : iterable) {
                    count++;
                    bytesChanged += event.getByteDiff();
                }
                collector.collect(new Tuple3<>(new Date(timeWindow.getEnd()), count, bytesChanged));
            }
        })
        .print();

    env.execute();
}
Example #8
Source File: TumblingProcessingTimeWindowsTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testTimeUnits() {
    // sanity check with one other time unit

    WindowAssigner.WindowAssignerContext mockContext =
            mock(WindowAssigner.WindowAssignerContext.class);

    TumblingProcessingTimeWindows assigner = TumblingProcessingTimeWindows.of(Time.seconds(5), Time.seconds(1));

    when(mockContext.getCurrentProcessingTime()).thenReturn(1000L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(1000, 6000)));

    when(mockContext.getCurrentProcessingTime()).thenReturn(5999L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(1000, 6000)));

    when(mockContext.getCurrentProcessingTime()).thenReturn(6000L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(6000, 11000)));
}
Example #9
Source File: AllWindowTranslationTest.java From flink with Apache License 2.0

@Test
@SuppressWarnings("rawtypes")
public void testFoldWithCustomTrigger() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window1 = source
            .windowAll(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .trigger(CountTrigger.of(1))
            .fold(new Tuple3<>("", "", 1), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof CountTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #10
Source File: Socket.java From blog_demos with Apache License 2.0

public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Listen on local port 9999 and read strings
    DataStream<String> socketDataStream = env.socketTextStream("localhost", 9999);

    // Every five seconds, split all strings received in that window on whitespace,
    // count the words, and print the result
    socketDataStream
        .flatMap(new Splitter())
        .keyBy(0)
        .timeWindow(Time.seconds(5))
        .sum(1)
        .print();

    env.execute("API DataSource demo : socket");
}
Example #11
Source File: ProcessingTimeSessionWindowsTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testMergeConsecutiveWindows() {
    MergingWindowAssigner.MergeCallback callback = mock(MergingWindowAssigner.MergeCallback.class);

    ProcessingTimeSessionWindows assigner = ProcessingTimeSessionWindows.withGap(Time.milliseconds(5000));

    assigner.mergeWindows(
            Lists.newArrayList(
                    new TimeWindow(0, 1),
                    new TimeWindow(1, 2),
                    new TimeWindow(2, 3),
                    new TimeWindow(4, 5),
                    new TimeWindow(5, 6)),
            callback);

    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(0, 1), new TimeWindow(1, 2), new TimeWindow(2, 3))),
            eq(new TimeWindow(0, 3)));

    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(4, 5), new TimeWindow(5, 6))),
            eq(new TimeWindow(4, 6)));

    verify(callback, times(2)).merge(anyCollection(), Matchers.anyObject());
}
Example #12
Source File: StateDescriptorPassingTest.java From flink with Apache License 2.0

@Test
public void testReduceWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
            .keyBy(new KeySelector<File, String>() {
                @Override
                public String getKey(File value) {
                    return null;
                }
            })
            .timeWindow(Time.milliseconds(1000))
            .reduce(new ReduceFunction<File>() {
                @Override
                public File reduce(File value1, File value2) {
                    return null;
                }
            });

    validateStateDescriptorConfigured(result);
}
Example #13
Source File: StateDescriptorPassingTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testReduceWindowAllState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
            .timeWindowAll(Time.milliseconds(1000))
            .reduce(new ReduceFunction<File>() {
                @Override
                public File reduce(File value1, File value2) {
                    return null;
                }
            });

    validateStateDescriptorConfigured(result);
}
Example #14
Source File: WindowTranslationTest.java From flink with Apache License 2.0

@Test
@SuppressWarnings("rawtypes")
public void testFoldProcessingTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window = source
            .keyBy(new TupleKeySelector())
            .window(SlidingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .fold(new Tuple3<>("", "", 0), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #15
Source File: WindowOperatorTest.java From flink with Apache License 2.0

@Test
@SuppressWarnings("unchecked")
public void testSlidingEventTimeWindowsReduce() throws Exception {
    closeCalled.set(0);

    final int windowSize = 3;
    final int windowSlide = 1;

    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents",
            new SumReducer(),
            STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
            SlidingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS), Time.of(windowSlide, TimeUnit.SECONDS)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
            EventTimeTrigger.create(),
            0,
            null /* late data output tag */);

    testSlidingEventTimeWindows(operator);
}
Example #16
Source File: ProcessingTimeSessionWindowsTest.java From flink with Apache License 2.0

@Test
public void testTimeUnits() {
    // sanity check with one other time unit

    WindowAssigner.WindowAssignerContext mockContext =
            mock(WindowAssigner.WindowAssignerContext.class);

    ProcessingTimeSessionWindows assigner = ProcessingTimeSessionWindows.withGap(Time.seconds(5));

    when(mockContext.getCurrentProcessingTime()).thenReturn(0L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(0, 5000)));

    when(mockContext.getCurrentProcessingTime()).thenReturn(4999L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(4999, 9999)));

    when(mockContext.getCurrentProcessingTime()).thenReturn(5000L);
    assertThat(assigner.assignWindows("String", Long.MIN_VALUE, mockContext), contains(timeWindow(5000, 10000)));
}
Example #17
Source File: AllWindowTranslationTest.java From flink with Apache License 2.0

@Test
@SuppressWarnings("rawtypes")
public void testReduceProcessingTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple2<String, Integer>> window1 = source
            .windowAll(SlidingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .reduce(new DummyReducer());

    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ReducingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #18
Source File: TumblingEventTimeWindowsTest.java From flink with Apache License 2.0

@Test
public void testProperties() {
    TumblingEventTimeWindows assigner = TumblingEventTimeWindows.of(Time.seconds(5), Time.milliseconds(100));

    assertTrue(assigner.isEventTime());
    assertEquals(new TimeWindow.Serializer(), assigner.getWindowSerializer(new ExecutionConfig()));
    assertThat(assigner.getDefaultTrigger(mock(StreamExecutionEnvironment.class)), instanceOf(EventTimeTrigger.class));
}
Example #19
Source File: AllWindowTranslationTest.java From flink with Apache License 2.0

@Test
@SuppressWarnings("rawtypes")
public void testApplyProcessingTimeTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple2<String, Integer>> window1 = source
            .windowAll(TumblingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS)))
            .apply(new AllWindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow>() {
                private static final long serialVersionUID = 1L;

                @Override
                public void apply(
                        TimeWindow window,
                        Iterable<Tuple2<String, Integer>> values,
                        Collector<Tuple2<String, Integer>> out) throws Exception {
                    for (Tuple2<String, Integer> in : values) {
                        out.collect(in);
                    }
                }
            });

    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof TumblingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #20
Source File: ContinuousEventTimeTriggerTest.java From flink with Apache License 2.0

/**
 * Verify that late elements trigger immediately and also that we don't set a timer
 * for those.
 */
@Test
public void testLateElementTriggersImmediately() throws Exception {
    TriggerTestHarness<Object, TimeWindow> testHarness =
            new TriggerTestHarness<>(ContinuousEventTimeTrigger.<TimeWindow>of(Time.hours(1)), new TimeWindow.Serializer());

    testHarness.advanceWatermark(2);

    assertEquals(TriggerResult.FIRE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(0, testHarness.numEventTimeTimers());
}
Example #21
Source File: BoundedOutOfOrdernessTimestampExtractorTest.java From flink with Apache License 2.0

@Test
public void testInitializationAndRuntime() {
    Time maxAllowedLateness = Time.milliseconds(10L);
    BoundedOutOfOrdernessTimestampExtractor<Long> extractor =
            new LongExtractor(maxAllowedLateness);

    assertEquals(maxAllowedLateness.toMilliseconds(), extractor.getMaxOutOfOrdernessInMillis());

    runValidTests(extractor);
}
Example #22
Source File: WindowTranslationTest.java From flink with Apache License 2.0

@Test
public void testAggregateProcessingTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    DataStream<Tuple3<String, String, Integer>> source = env.fromElements(
            Tuple3.of("hello", "hallo", 1),
            Tuple3.of("hello", "hallo", 2));

    DataStream<Integer> window1 = source
            .keyBy(new Tuple3KeySelector())
            .window(SlidingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .aggregate(new DummyAggregationFunction());

    final OneInputTransformation<Tuple3<String, String, Integer>, Integer> transform =
            (OneInputTransformation<Tuple3<String, String, Integer>, Integer>) window1.getTransformation();

    final OneInputStreamOperator<Tuple3<String, String, Integer>, Integer> operator = transform.getOperator();

    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple3<String, String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple3<String, String, Integer>, ?, ?, ?>) operator;

    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof AggregatingStateDescriptor);

    processElementAndEnsureOutput(
            winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple3<>("hello", "hallo", 1));
}
Example #23
Source File: App.java From Mastering-Apache-Flink with MIT License

public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<TemperatureEvent> inputEventStream = env.fromElements(
            new TemperatureEvent("xyz", 22.0), new TemperatureEvent("xyz", 20.1),
            new TemperatureEvent("xyz", 21.1), new TemperatureEvent("xyz", 22.2),
            new TemperatureEvent("xyz", 29.1), new TemperatureEvent("xyz", 22.3),
            new TemperatureEvent("xyz", 22.1), new TemperatureEvent("xyz", 22.4),
            new TemperatureEvent("xyz", 22.7), new TemperatureEvent("xyz", 27.0));

    Pattern<TemperatureEvent, ?> warningPattern = Pattern.<TemperatureEvent> begin("first")
            .subtype(TemperatureEvent.class)
            .where(new FilterFunction<TemperatureEvent>() {
                private static final long serialVersionUID = 1L;

                public boolean filter(TemperatureEvent value) {
                    if (value.getTemperature() >= 26.0) {
                        return true;
                    }
                    return false;
                }
            })
            .within(Time.seconds(10));

    DataStream<Alert> patternStream = CEP.pattern(inputEventStream, warningPattern)
            .select(new PatternSelectFunction<TemperatureEvent, Alert>() {
                private static final long serialVersionUID = 1L;

                public Alert select(Map<String, TemperatureEvent> event) throws Exception {
                    return new Alert("Temperature Rise Detected");
                }
            });

    patternStream.print();

    env.execute("CEP on Temperature Sensor");
}
Example #24
Source File: MergingWindowSetTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testRestoreFromState() throws Exception {
    @SuppressWarnings("unchecked")
    ListState<Tuple2<TimeWindow, TimeWindow>> mockState = mock(ListState.class);
    when(mockState.get()).thenReturn(Lists.newArrayList(
            new Tuple2<>(new TimeWindow(17, 42), new TimeWindow(42, 17)),
            new Tuple2<>(new TimeWindow(1, 2), new TimeWindow(3, 4))
    ));

    MergingWindowSet<TimeWindow> windowSet =
            new MergingWindowSet<>(EventTimeSessionWindows.withGap(Time.milliseconds(3)), mockState);

    assertEquals(new TimeWindow(42, 17), windowSet.getStateWindow(new TimeWindow(17, 42)));
    assertEquals(new TimeWindow(3, 4), windowSet.getStateWindow(new TimeWindow(1, 2)));
}
Example #25
Source File: ContinuousEventTimeTriggerTest.java From flink with Apache License 2.0

/**
 * Verify that clear() does not leak across windows.
 */
@Test
public void testClear() throws Exception {
    TriggerTestHarness<Object, TimeWindow> testHarness =
            new TriggerTestHarness<>(ContinuousEventTimeTrigger.<TimeWindow>of(Time.hours(1)), new TimeWindow.Serializer());

    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));

    assertEquals(2, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(4, testHarness.numEventTimeTimers());
    assertEquals(2, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(2, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));

    testHarness.clearTriggerState(new TimeWindow(2, 4));

    assertEquals(1, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(3, testHarness.numEventTimeTimers());
    assertEquals(2, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));

    testHarness.clearTriggerState(new TimeWindow(0, 2));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(2, testHarness.numEventTimeTimers()); // doesn't clean up timers
}
Example #26
Source File: EventTimeSessionWindowsTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testProperties() {
    EventTimeSessionWindows assigner = EventTimeSessionWindows.withGap(Time.seconds(5));

    assertTrue(assigner.isEventTime());
    assertEquals(new TimeWindow.Serializer(), assigner.getWindowSerializer(new ExecutionConfig()));
    assertThat(assigner.getDefaultTrigger(mock(StreamExecutionEnvironment.class)), instanceOf(EventTimeTrigger.class));
}
Example #27
Source File: IntervalJoinITCase.java From Flink-CEPplus with Apache License 2.0

@Test
public void testBoundsCanBeExclusive() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);

    DataStream<Tuple2<String, Integer>> streamOne = env.fromElements(
            Tuple2.of("key", 0),
            Tuple2.of("key", 1),
            Tuple2.of("key", 2)
    ).assignTimestampsAndWatermarks(new AscendingTuple2TimestampExtractor());

    DataStream<Tuple2<String, Integer>> streamTwo = env.fromElements(
            Tuple2.of("key", 0),
            Tuple2.of("key", 1),
            Tuple2.of("key", 2)
    ).assignTimestampsAndWatermarks(new AscendingTuple2TimestampExtractor());

    streamOne.keyBy(new Tuple2KeyExtractor())
            .intervalJoin(streamTwo.keyBy(new Tuple2KeyExtractor()))
            .between(Time.milliseconds(0), Time.milliseconds(2))
            .upperBoundExclusive()
            .lowerBoundExclusive()
            .process(new CombineToStringJoinFunction())
            .addSink(new ResultSink());

    env.execute();

    expectInAnyOrder(
            "(key,0):(key,1)",
            "(key,1):(key,2)"
    );
}
Example #28
Source File: AllWindowTranslationTest.java From flink with Apache License 2.0

@Test
@SuppressWarnings("rawtypes")
public void testFoldWithProcessAllWindowFunctionEventTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple2<String, Integer>> window = source
            .windowAll(TumblingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS)))
            .fold(new Tuple3<>("", "", 0), new DummyFolder(), new ProcessAllWindowFunction<Tuple3<String, String, Integer>, Tuple2<String, Integer>, TimeWindow>() {
                private static final long serialVersionUID = 1L;

                @Override
                public void process(
                        Context ctx,
                        Iterable<Tuple3<String, String, Integer>> values,
                        Collector<Tuple2<String, Integer>> out) throws Exception {
                    for (Tuple3<String, String, Integer> in : values) {
                        out.collect(new Tuple2<>(in.f0, in.f2));
                    }
                }
            });

    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof TumblingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #29
Source File: JoinedStreamsTest.java From flink with Apache License 2.0

@Test
public void testSetAllowedLateness() {
    Time lateness = Time.milliseconds(42L);

    JoinedStreams.WithWindow<String, String, String, TimeWindow> withLateness = dataStream1
            .join(dataStream2)
            .where(keySelector)
            .equalTo(keySelector)
            .window(tsAssigner)
            .allowedLateness(lateness);

    Assert.assertEquals(lateness.toMilliseconds(), withLateness.getAllowedLateness().toMilliseconds());
}
Example #30
Source File: NFACompiler.java From Flink-CEPplus with Apache License 2.0

/**
 * Creates all the states between Start and Final state.
 *
 * @param sinkState the state that last state should point to (always the Final state)
 * @return the next state after Start in the resulting graph
 */
private State<T> createMiddleStates(final State<T> sinkState) {
    State<T> lastSink = sinkState;
    while (currentPattern.getPrevious() != null) {

        if (currentPattern.getQuantifier().getConsumingStrategy() == Quantifier.ConsumingStrategy.NOT_FOLLOW) {
            // skip notFollow patterns, they are converted into edge conditions
        } else if (currentPattern.getQuantifier().getConsumingStrategy() == Quantifier.ConsumingStrategy.NOT_NEXT) {
            final State<T> notNext = createState(currentPattern.getName(), State.StateType.Normal);
            final IterativeCondition<T> notCondition = getTakeCondition(currentPattern);
            final State<T> stopState = createStopState(notCondition, currentPattern.getName());

            if (lastSink.isFinal()) {
                // so that the proceed to final is not fired
                notNext.addIgnore(lastSink, new RichNotCondition<>(notCondition));
            } else {
                notNext.addProceed(lastSink, new RichNotCondition<>(notCondition));
            }
            notNext.addProceed(stopState, notCondition);
            lastSink = notNext;
        } else {
            lastSink = convertPattern(lastSink);
        }

        // we traverse the pattern graph backwards
        followingPattern = currentPattern;
        currentPattern = currentPattern.getPrevious();

        final Time currentWindowTime = currentPattern.getWindowTime();
        if (currentWindowTime != null && currentWindowTime.toMilliseconds() < windowTime) {
            // the window time is the global minimum of all window times of each state
            windowTime = currentWindowTime.toMilliseconds();
        }
    }
    return lastSink;
}