org.apache.flink.api.common.functions.FilterFunction Java Examples

The following examples show how to use org.apache.flink.api.common.functions.FilterFunction. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source Project: Flink-CEPplus   Author: ljygz   File: NamesTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies the auto-generated default name of a filter operator, which embeds the
 * call site of {@code filter(...)} as {@code "Filter at method(File.java:line)"}.
 *
 * <p>The expected string hard-codes line 55 of the original NamesTest.java, so the
 * anonymous class below must stay at its exact source position.
 */
@Test
public void testDefaultName() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<String> strs = env.fromCollection(Arrays.asList("a", "b"));

	// WARNING: The test will fail if this line is being moved down in the file (the line-number is hard-coded)
	strs.filter(new FilterFunction<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public boolean filter(String value) throws Exception {
			return value.equals("a");
		}
	}).output(new DiscardingOutputFormat<String>());
	Plan plan = env.createProgramPlan();
	// testForName asserts that an operator with exactly this name exists in the plan
	testForName("Filter at testDefaultName(NamesTest.java:55)", plan);
}
 
Example #2
Source Project: Flink-CEPplus   Author: ljygz   File: GraphOperationsITCase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Tests {@code Graph#filterOnVertices}: only edges whose source and target
 * vertices both pass the vertex predicate remain in the resulting sub-graph.
 */
@SuppressWarnings("serial")
@Test
public void testFilterVertices() throws Exception {
	/*
	 * Test filterOnVertices:
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	// keep only vertices with value > 2; edges touching removed vertices are dropped
	DataSet<Edge<Long, Long>> data = graph.filterOnVertices(new FilterFunction<Vertex<Long, Long>>() {
		@Override
		public boolean filter(Vertex<Long, Long> vertex) throws Exception {
			return (vertex.getValue() > 2);
		}
	}).getEdges();

	List<Edge<Long, Long>> result = data.collect();

	expectedResult = "3,4,34\n" +
		"3,5,35\n" +
		"4,5,45\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #3
Source Project: Flink-CEPplus   Author: ljygz   File: GraphOperationsITCase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Tests {@code Graph#filterOnEdges}: the vertex set is untouched and only edges
 * passing the edge predicate remain.
 */
@SuppressWarnings("serial")
@Test
public void testFilterEdges() throws Exception {
	/*
	 * Test filterOnEdges:
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	// keep only edges with value > 34
	DataSet<Edge<Long, Long>> data = graph.filterOnEdges(new FilterFunction<Edge<Long, Long>>() {
		@Override
		public boolean filter(Edge<Long, Long> edge) throws Exception {
			return (edge.getValue() > 34);
		}
	}).getEdges();

	List<Edge<Long, Long>> result = data.collect();

	expectedResult = "3,5,35\n" +
		"4,5,45\n" +
		"5,1,51\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #4
Source Project: Flink-CEPplus   Author: ljygz   File: SlotAllocationTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that an explicit {@code slotSharingGroup(...)} on an operator wins
 * over the group that would otherwise be inherited from its inputs.
 */
@Test
public void testInheritOverride() {
	// verify that we can explicitly disable inheritance of the input slot sharing groups

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	// predicate content is irrelevant; only the job topology matters here
	FilterFunction<Long> dropEverything = value -> false;

	DataStream<Long> firstSource = env.generateSequence(1, 10).slotSharingGroup("group-1");
	DataStream<Long> secondSource = env.generateSequence(1, 10).slotSharingGroup("group-1");

	// this should not inherit group but be in "default"
	firstSource.union(secondSource).filter(dropEverything).slotSharingGroup("default");

	JobGraph jobGraph = env.getStreamGraph().getJobGraph();
	List<JobVertex> vertices = jobGraph.getVerticesSortedTopologicallyFromSources();

	// both sources share "group-1", the filter sits alone in "default"
	assertEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(1).getSlotSharingGroup());
	assertNotEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(2).getSlotSharingGroup());
	assertNotEquals(vertices.get(1).getSlotSharingGroup(), vertices.get(2).getSlotSharingGroup());
}
 
Example #5
Source Project: flink-learning   Author: zhisheng17   File: HBaseReadMain.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Batch job that reads (rowKey, value) pairs from an HBase table and prints
 * only the rows whose value starts with "zhisheng".
 *
 * <p>NOTE(review): the input format reuses one Tuple2 instance across rows —
 * presumably safe because records are serialized between operators; confirm
 * object-reuse mode is not enabled for this job.
 */
public static void main(String[] args) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.createInput(new TableInputFormat<Tuple2<String, String>>() {

        // single reusable output tuple, avoids a per-row allocation
        private Tuple2<String, String> reuse = new Tuple2<String, String>();

        @Override
        protected Scan getScanner() {
            // restrict the scan to the one column (family INFO, qualifier BAR) we need
            Scan scan = new Scan();
            scan.addColumn(INFO, BAR);
            return scan;
        }

        @Override
        protected String getTableName() {
            return HBASE_TABLE_NAME;
        }

        @Override
        protected Tuple2<String, String> mapResultToTuple(Result result) {
            // row key -> field 0, cell value -> field 1
            String key = Bytes.toString(result.getRow());
            String val = Bytes.toString(result.getValue(INFO, BAR));
            reuse.setField(key, 0);
            reuse.setField(val, 1);
            return reuse;
        }
    }).filter(new FilterFunction<Tuple2<String, String>>() {
        @Override
        public boolean filter(Tuple2<String, String> value) throws Exception {
            return value.f1.startsWith("zhisheng");
        }
    }).print();
}
 
Example #6
Source Project: flink   Author: flink-tpc-ds   File: NamesTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testDefaultName() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<String> strs = env.fromCollection(Arrays.asList("a", "b"));

	// WARNING: The test will fail if this line is being moved down in the file (the line-number is hard-coded)
	strs.filter(new FilterFunction<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public boolean filter(String value) throws Exception {
			return value.equals("a");
		}
	}).output(new DiscardingOutputFormat<String>());
	Plan plan = env.createProgramPlan();
	testForName("Filter at testDefaultName(NamesTest.java:55)", plan);
}
 
Example #7
Source Project: flink   Author: flink-tpc-ds   File: GraphOperationsITCase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Tests {@code Graph#filterOnVertices}: only edges whose source and target
 * vertices both pass the vertex predicate remain in the resulting sub-graph.
 */
@SuppressWarnings("serial")
@Test
public void testFilterVertices() throws Exception {
	/*
	 * Test filterOnVertices:
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	// keep only vertices with value > 2; edges touching removed vertices are dropped
	DataSet<Edge<Long, Long>> data = graph.filterOnVertices(new FilterFunction<Vertex<Long, Long>>() {
		@Override
		public boolean filter(Vertex<Long, Long> vertex) throws Exception {
			return (vertex.getValue() > 2);
		}
	}).getEdges();

	List<Edge<Long, Long>> result = data.collect();

	expectedResult = "3,4,34\n" +
		"3,5,35\n" +
		"4,5,45\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #8
Source Project: flink   Author: flink-tpc-ds   File: GraphOperationsITCase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Tests {@code Graph#filterOnEdges}: the vertex set is untouched and only edges
 * passing the edge predicate remain.
 */
@SuppressWarnings("serial")
@Test
public void testFilterEdges() throws Exception {
	/*
	 * Test filterOnEdges:
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	// keep only edges with value > 34
	DataSet<Edge<Long, Long>> data = graph.filterOnEdges(new FilterFunction<Edge<Long, Long>>() {
		@Override
		public boolean filter(Edge<Long, Long> edge) throws Exception {
			return (edge.getValue() > 34);
		}
	}).getEdges();

	List<Edge<Long, Long>> result = data.collect();

	expectedResult = "3,5,35\n" +
		"4,5,45\n" +
		"5,1,51\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #9
Source Project: flink   Author: flink-tpc-ds   File: SlotAllocationTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that an explicit {@code slotSharingGroup(...)} on an operator wins
 * over the group that would otherwise be inherited from its inputs.
 */
@Test
public void testInheritOverride() {
	// verify that we can explicitly disable inheritance of the input slot sharing groups

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	// predicate content is irrelevant; only the job topology matters here
	FilterFunction<Long> dropEverything = value -> false;

	DataStream<Long> firstSource = env.generateSequence(1, 10).slotSharingGroup("group-1");
	DataStream<Long> secondSource = env.generateSequence(1, 10).slotSharingGroup("group-1");

	// this should not inherit group but be in "default"
	firstSource.union(secondSource).filter(dropEverything).slotSharingGroup("default");

	JobGraph jobGraph = env.getStreamGraph().getJobGraph();
	List<JobVertex> vertices = jobGraph.getVerticesSortedTopologicallyFromSources();

	// both sources share "group-1", the filter sits alone in "default"
	assertEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(1).getSlotSharingGroup());
	assertNotEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(2).getSlotSharingGroup());
	assertNotEquals(vertices.get(1).getSlotSharingGroup(), vertices.get(2).getSlotSharingGroup());
}
 
Example #10
Source Project: bravo   Author: king   File: RocksDBCheckpointIterator.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Opens the RocksDB instance backing an incremental keyed state handle so that
 * its contents can be iterated.
 *
 * <p>Downloads the state files referenced by {@code handle} into {@code localPath},
 * opens a RocksDB database over them and creates one iterator per state column
 * family selected by {@code stateFilter}.
 *
 * @param handle incremental checkpoint handle pointing at the state files
 * @param stateFilter selects which named states get a column iterator
 * @param localPath local directory the checkpoint data is transferred into
 * @throws IllegalStateException if transferring or opening the database fails
 */
public RocksDBCheckpointIterator(IncrementalKeyedStateHandle handle, FilterFunction<String> stateFilter,
		String localPath) {
	this.localPath = localPath;
	this.cancelStreamRegistry = new CloseableRegistry();
	// read the state meta information from the checkpoint's metadata handle
	List<StateMetaInfoSnapshot> stateMetaInfoSnapshots = StateMetadataUtils
			.getKeyedBackendSerializationProxy(handle.getMetaStateHandle()).getStateMetaInfoSnapshots();

	// size + 1 — presumably extra room for RocksDB's default column family; confirm
	stateColumnFamilyHandles = new ArrayList<>(stateMetaInfoSnapshots.size() + 1);
	List<ColumnFamilyDescriptor> stateColumnFamilyDescriptors = createAndRegisterColumnFamilyDescriptors(
			stateMetaInfoSnapshots);
	try {
		transferAllStateDataToDirectory(handle, new Path(localPath));
		this.db = openDB(localPath, stateColumnFamilyDescriptors, stateColumnFamilyHandles);
		createColumnIterators(stateFilter, stateMetaInfoSnapshots);
	} catch (Exception e) {
		// wrap checked exceptions; callers treat a failed open as fatal
		throw new IllegalStateException(e);
	}
}
 
Example #11
Source Project: Alink   Author: alibaba   File: MemoryDataBridge.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns the rows matching the given filter, or the backing list itself when
 * no filter is supplied. A checked exception thrown by the filter is rethrown
 * wrapped in a {@link RuntimeException}.
 */
@Override
public List <Row> read(FilterFunction <Row> filter) {
	// null means "no filtering": hand back the stored rows unchanged
	if (filter == null) {
		return rows;
	}

	// adapt the checked-exception FilterFunction to a stream-friendly predicate
	java.util.function.Predicate<Row> predicate = row -> {
		try {
			return filter.filter(row);
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	};

	return rows.stream().filter(predicate).collect(Collectors.toList());
}
 
Example #12
Source Project: blog_demos   Author: zq2599   File: GenerateSequence.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Generates the numbers 1..10, keeps only the even ones and prints them.
 */
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // run with a single parallel task
    env.setParallelism(1);

    // generateSequence yields a DataStream of Longs
    DataStream<Long> numbers = env.generateSequence(1, 10);

    // keep only the even values, then print them
    numbers.filter(value -> value % 2L == 0L).print();

    env.execute("API DataSource demo : collection");
}
 
Example #13
Source Project: Flink-CEPplus   Author: ljygz   File: OverwriteObjects.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the base data set restricted to tuples whose first field is even.
 */
private DataSet<Tuple2<IntValue, IntValue>> getFilteredDataSet(ExecutionEnvironment env) {
	return getDataSet(env)
		.filter(new FilterFunction<Tuple2<IntValue, IntValue>>() {
			@Override
			public boolean filter(Tuple2<IntValue, IntValue> record) throws Exception {
				// keep the record when field 0 holds an even value
				int first = record.f0.getValue();
				return first % 2 == 0;
			}
		});
}
 
Example #14
Source Project: Flink-CEPplus   Author: ljygz   File: FilterOperator.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a filter operator over {@code input} applying {@code function}.
 * A filter never changes the record type, so the input's type carries over.
 *
 * @param input the data set to filter
 * @param function the user-defined filter predicate
 * @param defaultName name used for this operator when none is set explicitly
 */
public FilterOperator(DataSet<T> input, FilterFunction<T> function, String defaultName) {
	super(input, input.getType());

	this.function = function;
	this.defaultName = defaultName;

	// static analysis of the UDF — presumably to derive semantic properties; see UdfOperatorUtils
	UdfOperatorUtils.analyzeSingleInputUdf(this, FilterFunction.class, defaultName, function, null);
}
 
Example #15
Source Project: Flink-CEPplus   Author: ljygz   File: NamesTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that a name set explicitly via {@code name(...)} shows up verbatim
 * in the generated plan.
 */
@Test
public void testGivenName() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<String> input = env.fromCollection(Arrays.asList("a", "b"));

	FilterFunction<String> keepA = new FilterFunction<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public boolean filter(String value) throws Exception {
			return "a".equals(value);
		}
	};
	input.filter(keepA).name("GivenName").output(new DiscardingOutputFormat<String>());

	Plan plan = env.createProgramPlan();
	testForName("GivenName", plan);
}
 
Example #16
Source Project: Flink-CEPplus   Author: ljygz   File: UdfAnalyzerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * The UDF analyzer must reject {@code FilterMod1}, a filter that modifies its
 * input, by throwing {@link CodeErrorException}.
 */
@Test
public void testFilterModificationException1() {
	try {
		final UdfAnalyzer analyzer = new UdfAnalyzer(FilterFunction.class, FilterMod1.class, "operator",
			STRING_STRING_TUPLE2_TYPE_INFO, null, null, null, null, true);
		analyzer.analyze();
		// reaching this point means the illegal modification went undetected
		Assert.fail();
	}
	catch (CodeErrorException e) {
		// expected: the analyzer flagged the input modification
	}
}
 
Example #17
Source Project: Flink-CEPplus   Author: ljygz   File: UdfAnalyzerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * The UDF analyzer must reject {@code FilterMod2}, a filter that modifies its
 * input, by throwing {@link CodeErrorException}.
 */
@Test
public void testFilterModificationException2() {
	try {
		final UdfAnalyzer analyzer = new UdfAnalyzer(FilterFunction.class, FilterMod2.class, "operator",
			STRING_STRING_TUPLE2_TYPE_INFO, null, null, null, null, true);
		analyzer.analyze();
		// reaching this point means the illegal modification went undetected
		Assert.fail();
	}
	catch (CodeErrorException e) {
		// expected: the analyzer flagged the input modification
	}
}
 
Example #18
Source Project: Flink-CEPplus   Author: ljygz   File: CollectionExecutionIterationTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Runs a bulk iteration in the collection environment with a termination
 * criterion: the loop stops as soon as the criterion data set (values < 50)
 * becomes empty, well before the 100-iteration cap.
 *
 * <p>NOTE(review): the expected final value 56 depends on the behavior of
 * AddSuperstepNumberMapper, which is defined elsewhere — confirm there.
 */
@Test
public void testBulkIterationWithTerminationCriterion() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		// start from the single element 1, allow at most 100 supersteps
		IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);

		DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

		// the iteration terminates once this filtered set is empty
		DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
			public boolean filter(Integer value) {
				return value < 50;
			}
		});

		List<Integer> collected = new ArrayList<Integer>();

		iteration.closeWith(iterationResult, terminationCriterion)
				.output(new LocalCollectionOutputFormat<Integer>(collected));

		env.execute();

		assertEquals(1, collected.size());
		assertEquals(56, collected.get(0).intValue());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #19
Source Project: Flink-CEPplus   Author: ljygz   File: IncrementalSSSP.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Function that verifies whether the edge to be removed is part of the SSSP or not.
 * If it is, the src vertex will be invalidated.
 *
 * @param edgeToBeRemoved
 * @param edgesInSSSP
 * @return true or false
 */
public static boolean isInSSSP(final Edge<Long, Double> edgeToBeRemoved, DataSet<Edge<Long, Double>> edgesInSSSP) throws Exception {

	return edgesInSSSP.filter(new FilterFunction<Edge<Long, Double>>() {
		@Override
		public boolean filter(Edge<Long, Double> edge) throws Exception {
			return edge.equals(edgeToBeRemoved);
		}
	}).count() > 0;
}
 
Example #20
Source Project: Flink-CEPplus   Author: ljygz   File: Graph.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Apply filtering functions to the graph and return a sub-graph that
 * satisfies the predicates for both vertices and edges.
 *
 * @param vertexFilter the filter function for vertices.
 * @param edgeFilter the filter function for edges.
 * @return the resulting sub-graph.
 */
public Graph<K, VV, EV> subgraph(FilterFunction<Vertex<K, VV>> vertexFilter, FilterFunction<Edge<K, EV>> edgeFilter) {

	DataSet<Vertex<K, VV>> filteredVertices = this.vertices.filter(vertexFilter);

	DataSet<Edge<K, EV>> remainingEdges = this.edges.join(filteredVertices)
			.where(0).equalTo(0).with(new ProjectEdge<>())
			.join(filteredVertices).where(1).equalTo(0)
			.with(new ProjectEdge<>()).name("Subgraph");

	DataSet<Edge<K, EV>> filteredEdges = remainingEdges.filter(edgeFilter);

	return new Graph<>(filteredVertices, filteredEdges, this.context);
}
 
Example #21
Source Project: Flink-CEPplus   Author: ljygz   File: Graph.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Apply a filtering function to the graph and return a sub-graph that
 * satisfies the predicates only for the vertices.
 *
 * @param vertexFilter the filter function for vertices.
 * @return the resulting sub-graph.
 */
public Graph<K, VV, EV> filterOnVertices(FilterFunction<Vertex<K, VV>> vertexFilter) {

	DataSet<Vertex<K, VV>> filteredVertices = this.vertices.filter(vertexFilter);

	DataSet<Edge<K, EV>> remainingEdges = this.edges.join(filteredVertices)
			.where(0).equalTo(0).with(new ProjectEdge<>())
			.join(filteredVertices).where(1).equalTo(0)
			.with(new ProjectEdge<>()).name("Filter on vertices");

	return new Graph<>(filteredVertices, remainingEdges, this.context);
}
 
Example #22
Source Project: Flink-CEPplus   Author: ljygz   File: GraphOperationsITCase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Tests {@code Graph#subgraph}: the result must honor both the vertex and the
 * edge predicate at once.
 */
@SuppressWarnings("serial")
@Test
public void testSubGraph() throws Exception {
	/*
	 * Test subgraph:
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	// vertex predicate: value > 2; edge predicate: value > 34
	DataSet<Edge<Long, Long>> data = graph.subgraph(
		new FilterFunction<Vertex<Long, Long>>() {
			@Override
			public boolean filter(Vertex<Long, Long> vertex) throws Exception {
				return (vertex.getValue() > 2);
			}
		},
		new FilterFunction<Edge<Long, Long>>() {
			@Override
			public boolean filter(Edge<Long, Long> edge) throws Exception {
				return (edge.getValue() > 34);
			}
		}).getEdges();

	List<Edge<Long, Long>> result = data.collect();

	expectedResult = "3,5,35\n" +
		"4,5,45\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #23
Source Project: Flink-CEPplus   Author: ljygz   File: FilterWithMethodReference.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Demonstrates that a method reference ({@code WordFilter::filter}) can serve
 * as a {@link FilterFunction}.
 *
 * <p>NOTE(review): {@code print()} on a DataSet triggers eager execution in
 * Flink's batch API; the trailing {@code env.execute()} would then find no new
 * sinks — confirm against the Flink version this example targets.
 */
public static void main(String[] args) throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<String> input = env.fromElements("Please filter", "the words", "but not this");

	// a method reference satisfies the FilterFunction functional interface
	FilterFunction<String> filter = WordFilter::filter;

	DataSet<String> output = input.filter(filter);
	output.print();

	env.execute();
}
 
Example #24
Source Project: Flink-CEPplus   Author: ljygz   File: SlotAllocationTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Verifies slot-sharing-group inheritance through {@code union}: an operator
 * inherits a group only when all of its inputs agree on one.
 */
@Test
public void testUnion() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	// predicate content is irrelevant; only the job topology matters here
	FilterFunction<Long> dropEverything = value -> false;

	DataStream<Long> first = env.generateSequence(1, 10);
	DataStream<Long> second = env.generateSequence(1, 10).slotSharingGroup("src-1");

	// this should not inherit group "src-1"
	first.union(second).filter(dropEverything);

	DataStream<Long> third = env.generateSequence(1, 10).slotSharingGroup("group-1");
	DataStream<Long> fourth = env.generateSequence(1, 10).slotSharingGroup("group-1");

	// this should inherit "group-1" now
	third.union(fourth).filter(dropEverything);

	JobGraph jobGraph = env.getStreamGraph().getJobGraph();
	List<JobVertex> vertices = jobGraph.getVerticesSortedTopologicallyFromSources();

	// first pipeline
	assertEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(4).getSlotSharingGroup());
	assertNotEquals(vertices.get(0).getSlotSharingGroup(), vertices.get(1).getSlotSharingGroup());
	assertNotEquals(vertices.get(1).getSlotSharingGroup(), vertices.get(4).getSlotSharingGroup());

	// second pipeline
	assertEquals(vertices.get(2).getSlotSharingGroup(), vertices.get(3).getSlotSharingGroup());
	assertEquals(vertices.get(2).getSlotSharingGroup(), vertices.get(5).getSlotSharingGroup());
	assertEquals(vertices.get(3).getSlotSharingGroup(), vertices.get(5).getSlotSharingGroup());
}
 
Example #25
Source Project: Flink-CEPplus   Author: ljygz   File: TaskCheckpointingBehaviourTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates the operator with a filter that rejects every element; the
 * predicate's result is irrelevant for this test, only the operator
 * lifecycle is exercised.
 */
public FilterOperator() {
	super(new FilterFunction<Object>() {
		@Override
		public boolean filter(Object value) {
			return false;
		}
	});
}
 
Example #26
Source Project: Flink-CEPplus   Author: ljygz   File: TaskCheckpointingBehaviourTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates the operator with a filter that rejects every element; the
 * predicate's result is irrelevant for this test, only the operator
 * lifecycle is exercised.
 */
public TestOperator() {
	super(new FilterFunction<Object>() {
		@Override
		public boolean filter(Object value) {
			return false;
		}
	});
}
 
Example #27
Source Project: flink   Author: flink-tpc-ds   File: OverwriteObjects.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the base data set restricted to tuples whose first field is even.
 */
private DataSet<Tuple2<IntValue, IntValue>> getFilteredDataSet(ExecutionEnvironment env) {
	return getDataSet(env)
		.filter(new FilterFunction<Tuple2<IntValue, IntValue>>() {
			@Override
			public boolean filter(Tuple2<IntValue, IntValue> record) throws Exception {
				// keep the record when field 0 holds an even value
				int first = record.f0.getValue();
				return first % 2 == 0;
			}
		});
}
 
Example #28
Source Project: incubator-samoa   Author: apache   File: Utils.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds a predicate that keeps only the SamoaType records belonging to the
 * given stream (matched against field {@code f2}).
 */
public static FilterFunction<SamoaType> getFilter(final String streamID) {
	return sample -> sample.f2.equals(streamID);
}
 
Example #29
Source Project: flink   Author: flink-tpc-ds   File: NamesTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that a name set explicitly via {@code name(...)} shows up verbatim
 * in the generated plan.
 */
@Test
public void testGivenName() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<String> input = env.fromCollection(Arrays.asList("a", "b"));

	FilterFunction<String> keepA = new FilterFunction<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public boolean filter(String value) throws Exception {
			return "a".equals(value);
		}
	};
	input.filter(keepA).name("GivenName").output(new DiscardingOutputFormat<String>());

	Plan plan = env.createProgramPlan();
	testForName("GivenName", plan);
}
 
Example #30
Source Project: flink   Author: flink-tpc-ds   File: CollectionExecutionIterationTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Runs a bulk iteration in the collection environment with a termination
 * criterion: the loop stops as soon as the criterion data set (values < 50)
 * becomes empty, well before the 100-iteration cap.
 *
 * <p>NOTE(review): the expected final value 56 depends on the behavior of
 * AddSuperstepNumberMapper, which is defined elsewhere — confirm there.
 */
@Test
public void testBulkIterationWithTerminationCriterion() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		// start from the single element 1, allow at most 100 supersteps
		IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);

		DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

		// the iteration terminates once this filtered set is empty
		DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
			public boolean filter(Integer value) {
				return value < 50;
			}
		});

		List<Integer> collected = new ArrayList<Integer>();

		iteration.closeWith(iterationResult, terminationCriterion)
				.output(new LocalCollectionOutputFormat<Integer>(collected));

		env.execute();

		assertEquals(1, collected.size());
		assertEquals(56, collected.get(0).intValue());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}