Java Code Examples for org.apache.flink.api.java.ExecutionEnvironment#createCollectionsEnvironment()

The following examples show how to use org.apache.flink.api.java.ExecutionEnvironment#createCollectionsEnvironment(). The examples are extracted from open source projects; the source file and originating project are noted above each example.
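Before the individual examples, here is a minimal, self-contained sketch of the usage pattern they all share: createCollectionsEnvironment() returns a collection-based environment that runs the whole DataSet program serially inside the current JVM on Java collections, which is why it appears so often in unit tests. The class name and sample data below are invented for illustration; the API calls themselves (createCollectionsEnvironment, fromElements, LocalCollectionOutputFormat, execute) are the ones used throughout the examples that follow.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.LocalCollectionOutputFormat;

public class CollectionsEnvironmentSketch {

	public static void main(String[] args) throws Exception {
		// Collection-based environment: executes the program in this JVM
		// on Java collections, without starting a Flink cluster.
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		DataSet<Integer> numbers = env.fromElements(1, 2, 3, 4, 5);

		// Gather the results into a local list, as several of the tests below do.
		List<Integer> doubled = new ArrayList<>();
		numbers.map(new MapFunction<Integer, Integer>() {
			@Override
			public Integer map(Integer value) {
				return value * 2;
			}
		}).output(new LocalCollectionOutputFormat<>(doubled));

		// A collection environment still needs an explicit execute() call.
		env.execute();

		System.out.println(doubled); // prints [2, 4, 6, 8, 10]
	}
}
 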
Example 1
Source File: CollectionModeSuperstepITCase.java    From flink with Apache License 2.0
@Test
public void testProgram() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromCollection(TestGraphUtils.getLongLongVertices(),
			TestGraphUtils.getLongLongEdges(), env).mapVertices(new AssignOneMapper());

	Graph<Long, Long, Long> result = graph.runScatterGatherIteration(
			new MessageFunction(), new UpdateFunction(), 10);

	result.getVertices().map(
		new VertexToTuple2Map<>()).output(
			new DiscardingOutputFormat<>());

	env.execute();
}
 
Example 2
Source File: TranslateTest.java    From flink with Apache License 2.0
@Before
public void setup() {
	ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

	int count = 10;

	List<Vertex<LongValue, LongValue>> vertexList = new LinkedList<>();
	List<Edge<LongValue, LongValue>> edgeList = new LinkedList<>();

	for (long l = 0; l < count; l++) {
		LongValue lv0 = new LongValue(l);
		LongValue lv1 = new LongValue(l + 1);
		LongValue lv2 = new LongValue(l + 2);
		vertexList.add(new Vertex<>(lv0, lv1));
		edgeList.add(new Edge<>(lv0, lv1, lv2));
	}

	graph = Graph.fromCollection(vertexList, edgeList, env);
}
 
Example 3
Source File: CollectionExecutionIterationTest.java    From flink with Apache License 2.0
@Test
public void testBulkIteration() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(10);

		DataSet<Integer> result = iteration.closeWith(iteration.map(new AddSuperstepNumberMapper()));

		List<Integer> collected = new ArrayList<Integer>();
		result.output(new LocalCollectionOutputFormat<Integer>(collected));

		env.execute();

		assertEquals(1, collected.size());
		assertEquals(56, collected.get(0).intValue());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 4
Source File: WordCountTable.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
	BatchTableEnvironment tEnv = BatchTableEnvironment.create(env);

	DataSet<WC> input = env.fromElements(
			new WC("Hello", 1),
			new WC("Ciao", 1),
			new WC("Hello", 1));

	Table table = tEnv.fromDataSet(input);

	Table filtered = table
			.groupBy("word")
			.select("word, frequency.sum as frequency")
			.filter("frequency = 2");

	DataSet<WC> result = tEnv.toDataSet(filtered, WC.class);

	result.print();
}
 
Example 5
Source File: CollectionModeSuperstepITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testProgram() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromCollection(TestGraphUtils.getLongLongVertices(),
			TestGraphUtils.getLongLongEdges(), env).mapVertices(new AssignOneMapper());

	Graph<Long, Long, Long> result = graph.runScatterGatherIteration(
			new MessageFunction(), new UpdateFunction(), 10);

	result.getVertices().map(
		new VertexToTuple2Map<>()).output(
			new DiscardingOutputFormat<>());

	env.execute();
}
 
Example 6
Source File: TranslateTest.java    From Flink-CEPplus with Apache License 2.0
@Before
public void setup() {
	ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

	int count = 10;

	List<Vertex<LongValue, LongValue>> vertexList = new LinkedList<>();
	List<Edge<LongValue, LongValue>> edgeList = new LinkedList<>();

	for (long l = 0; l < count; l++) {
		LongValue lv0 = new LongValue(l);
		LongValue lv1 = new LongValue(l + 1);
		LongValue lv2 = new LongValue(l + 2);
		vertexList.add(new Vertex<>(lv0, lv1));
		edgeList.add(new Edge<>(lv0, lv1, lv2));
	}

	graph = Graph.fromCollection(vertexList, edgeList, env);
}
 
Example 7
Source File: BipartiteGraphTest.java    From Flink-CEPplus with Apache License 2.0
private BipartiteGraph<Integer, Integer, String, String, String> createBipartiteGraph() {
	ExecutionEnvironment executionEnvironment = ExecutionEnvironment.createCollectionsEnvironment();

	DataSet<Vertex<Integer, String>> topVertices = executionEnvironment.fromCollection(Arrays.asList(
		new Vertex<>(4, "top4"),
		new Vertex<>(5, "top5"),
		new Vertex<>(6, "top6")
	));

	DataSet<Vertex<Integer, String>> bottomVertices = executionEnvironment.fromCollection(Arrays.asList(
		new Vertex<>(1, "bottom1"),
		new Vertex<>(2, "bottom2"),
		new Vertex<>(3, "bottom3")
	));

	DataSet<BipartiteEdge<Integer, Integer, String>> edges = executionEnvironment.fromCollection(Arrays.asList(
		new BipartiteEdge<>(4, 1, "4-1"),
		new BipartiteEdge<>(5, 1, "5-1"),
		new BipartiteEdge<>(5, 2, "5-2"),
		new BipartiteEdge<>(6, 2, "6-2"),
		new BipartiteEdge<>(6, 3, "6-3")
	));

	return BipartiteGraph.fromDataSet(topVertices, bottomVertices, edges, executionEnvironment);
}
 
Example 8
Source File: CollectionExecutionWithBroadcastVariableTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testUnaryOp() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		DataSet<String> bcData = env.fromElements(SUFFIX);

		List<String> result = new ArrayList<String>();

		env.fromElements(TEST_DATA)
				.map(new SuffixAppender()).withBroadcastSet(bcData, BC_VAR_NAME)
				.output(new LocalCollectionOutputFormat<String>(result));

		env.execute();

		assertEquals(TEST_DATA.length, result.size());
		for (String s : result) {
			assertTrue(s.indexOf(SUFFIX) > 0);
		}
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 9
Source File: CollectionExecutionWithBroadcastVariableTest.java    From flink with Apache License 2.0
@Test
public void testUnaryOp() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		DataSet<String> bcData = env.fromElements(SUFFIX);

		List<String> result = new ArrayList<String>();

		env.fromElements(TEST_DATA)
				.map(new SuffixAppender()).withBroadcastSet(bcData, BC_VAR_NAME)
				.output(new LocalCollectionOutputFormat<String>(result));

		env.execute();

		assertEquals(TEST_DATA.length, result.size());
		for (String s : result) {
			assertTrue(s.indexOf(SUFFIX) > 0);
		}
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 10
Source File: CollectionExecutionIterationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testBulkIteration() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(10);

		DataSet<Integer> result = iteration.closeWith(iteration.map(new AddSuperstepNumberMapper()));

		List<Integer> collected = new ArrayList<Integer>();
		result.output(new LocalCollectionOutputFormat<Integer>(collected));

		env.execute();

		assertEquals(1, collected.size());
		assertEquals(56, collected.get(0).intValue());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 11
Source File: BipartiteGraphTest.java    From flink with Apache License 2.0
private BipartiteGraph<Integer, Integer, String, String, String> createBipartiteGraph() {
	ExecutionEnvironment executionEnvironment = ExecutionEnvironment.createCollectionsEnvironment();

	DataSet<Vertex<Integer, String>> topVertices = executionEnvironment.fromCollection(Arrays.asList(
		new Vertex<>(4, "top4"),
		new Vertex<>(5, "top5"),
		new Vertex<>(6, "top6")
	));

	DataSet<Vertex<Integer, String>> bottomVertices = executionEnvironment.fromCollection(Arrays.asList(
		new Vertex<>(1, "bottom1"),
		new Vertex<>(2, "bottom2"),
		new Vertex<>(3, "bottom3")
	));

	DataSet<BipartiteEdge<Integer, Integer, String>> edges = executionEnvironment.fromCollection(Arrays.asList(
		new BipartiteEdge<>(4, 1, "4-1"),
		new BipartiteEdge<>(5, 1, "5-1"),
		new BipartiteEdge<>(5, 2, "5-2"),
		new BipartiteEdge<>(6, 2, "6-2"),
		new BipartiteEdge<>(6, 3, "6-3")
	));

	return BipartiteGraph.fromDataSet(topVertices, bottomVertices, edges, executionEnvironment);
}
 
Example 12
Source File: CollectionExecutionIterationTest.java    From flink with Apache License 2.0
@Test
public void testBulkIterationWithTerminationCriterion() {
	try {
		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

		IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);

		DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

		DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
			public boolean filter(Integer value) {
				return value < 50;
			}
		});

		List<Integer> collected = new ArrayList<Integer>();

		iteration.closeWith(iterationResult, terminationCriterion)
				.output(new LocalCollectionOutputFormat<Integer>(collected));

		env.execute();

		assertEquals(1, collected.size());
		assertEquals(56, collected.get(0).intValue());
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example 13
Source File: FlinkCollectionsEnvBenchMark.java    From marble with Apache License 2.0
public double runSqlForJoin(int limit, String sql) throws Throwable {
  Stopwatch s = Stopwatch.createStarted();
  try (Connection connection = BenchMarkUtil.getDBConnection()) {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
    BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(env);
    String fetchSql1 = BenchMarkUtil.generateFetchSql("item1", "i_item_sk", limit);
    ResultSet resultSet1 = connection
        .createStatement()
        .executeQuery(fetchSql1);
    RowTypeInfo rowTypeInfo1 = typeOfJdbc(resultSet1.getMetaData());
    DataSet ds1 = env.createInput(
        JDBCInputFormat.buildJDBCInputFormat()
            .setDrivername(BenchMarkUtil.DB_DRIVER)
            .setDBUrl(BenchMarkUtil.DB_CONNECTION_URL)
            .setQuery(fetchSql1)
            .setRowTypeInfo(rowTypeInfo1)
            .finish()
    );
    ds1.collect();
    tEnv.registerDataSet("item1", ds1);

    String fetchSql2 = BenchMarkUtil.generateFetchSql("item2", "i_item_sk", limit);
    ResultSet resultSet2 = connection
        .createStatement()
        .executeQuery(fetchSql2);
    RowTypeInfo rowTypeInfo2 = typeOfJdbc(resultSet2.getMetaData());
    DataSet ds2 = env.createInput(
        JDBCInputFormat.buildJDBCInputFormat()
            .setDrivername(BenchMarkUtil.DB_DRIVER)
            .setDBUrl(BenchMarkUtil.DB_CONNECTION_URL)
            .setQuery(fetchSql2)
            .setRowTypeInfo(rowTypeInfo2)
            .finish()
    );
    ds2.collect();
    tEnv.registerDataSet("item2", ds2);
    s.stop();
    return s.elapsed(TimeUnit.MICROSECONDS) * 0.001 + sqlQuery(tEnv, sql);
  }
}
 
Example 14
Source File: GraphGeneratorTestBase.java    From Flink-CEPplus with Apache License 2.0
@Before
public void setup() {
	env = ExecutionEnvironment.createCollectionsEnvironment();
}
 
Example 15
Source File: AsmTestBase.java    From flink with Apache License 2.0
@Before
public void setup() throws Exception {
	env = ExecutionEnvironment.createCollectionsEnvironment();
	env.getConfig().enableObjectReuse();

	// a "fish" graph
	Object[][] edges = new Object[][]{
		new Object[]{0, 1},
		new Object[]{0, 2},
		new Object[]{2, 1},
		new Object[]{2, 3},
		new Object[]{3, 1},
		new Object[]{3, 4},
		new Object[]{5, 3},
	};

	List<Edge<IntValue, NullValue>> directedEdgeList = new LinkedList<>();

	for (Object[] edge : edges) {
		directedEdgeList.add(new Edge<>(new IntValue((int) edge[0]), new IntValue((int) edge[1]), NullValue.getInstance()));
	}

	directedSimpleGraph = Graph.fromCollection(directedEdgeList, env);
	undirectedSimpleGraph = directedSimpleGraph
		.getUndirected();

	// complete graph
	completeGraph = new CompleteGraph(env, completeGraphVertexCount)
		.generate();

	// empty graph with vertices but no edges
	emptyGraphWithVertices = new EmptyGraph(env, emptyGraphVertexCount)
		.generate();

	// empty graph with no vertices or edges
	emptyGraphWithoutVertices = new EmptyGraph(env, 0)
		.generate();

	// star graph
	starGraph = new StarGraph(env, starGraphVertexCount)
		.generate();
}
 
Example 16
Source File: CountTest.java    From flink with Apache License 2.0
@Before
public void setup() throws Exception {
	env = ExecutionEnvironment.createCollectionsEnvironment();
	env.getConfig().enableObjectReuse();
}
 
Example 17
Source File: OperatorTest.java    From flink with Apache License 2.0
public MockOperator() {
	super(ExecutionEnvironment.createCollectionsEnvironment(), ValueTypeInfo.NULL_VALUE_TYPE_INFO);
}