org.apache.flink.graph.Graph Java Examples

The following examples show how to use org.apache.flink.graph.Graph. They are drawn from open-source projects; the source file, originating project, and license are noted above each example.
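
Before the examples, here is a minimal, self-contained sketch of the pattern most of them share: build a Graph, apply a transformation, and execute to collect or print the result. It is not taken from any of the projects below; the class name GraphQuickStart and the toy data are hypothetical, and it assumes the Gelly DataSet API (Graph.fromCollection, mapVertices with an explicit TypeHint) available in the Flink versions these examples target.

import java.util.Arrays;
import java.util.List;

import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;
import org.apache.flink.types.NullValue;

public class GraphQuickStart {

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// Hypothetical toy data: three vertices with Long values and two edges without values.
		List<Vertex<Long, Long>> vertices = Arrays.asList(
			new Vertex<>(1L, 10L),
			new Vertex<>(2L, 20L),
			new Vertex<>(3L, 30L));

		List<Edge<Long, NullValue>> edges = Arrays.asList(
			new Edge<>(1L, 2L, NullValue.getInstance()),
			new Edge<>(2L, 3L, NullValue.getInstance()));

		// Build the graph from in-memory collections; the examples below typically
		// use Graph.fromDataSet with DataSets produced by test utilities or generators.
		Graph<Long, Long, NullValue> graph = Graph.fromCollection(vertices, edges, env);

		// Apply a simple vertex transformation: double every vertex value.
		// The explicit TypeHint is needed because the lambda's return type is erased.
		Graph<Long, Long, NullValue> doubled = graph.mapVertices(
			v -> v.getValue() * 2L,
			new TypeHint<Vertex<Long, Long>>(){}.getTypeInfo());

		// print() triggers execution and writes the resulting vertices to stdout.
		doubled.getVertices().print();
	}
}
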
Example #1
Source File: ReduceOnNeighborMethodsITCase.java    From flink with Apache License 2.0
@Test
public void testSumOfInNeighborsNoValue() throws Exception {
	/*
	 * Get the sum of in-neighbor values
	 * times the edge weights for each vertex
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	DataSet<Tuple2<Long, Long>> verticesWithSum =
		graph.groupReduceOnNeighbors(new SumInNeighborsNoValue(), EdgeDirection.IN);
	List<Tuple2<Long, Long>> result = verticesWithSum.collect();

	expectedResult = "1,255\n" +
		"2,12\n" +
		"3,59\n" +
		"4,102\n" +
		"5,285\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #2
Source File: SummarizationITCase.java    From flink with Apache License 2.0
@Test
public void testWithVertexAndEdgeLongValues() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> input = Graph.fromDataSet(
			SummarizationData.getVertices(env),
			SummarizationData.getEdges(env),
			env)
		.run(new TranslateVertexValues<>(new StringToLong()))
		.run(new TranslateEdgeValues<>(new StringToLong()));

	List<Vertex<Long, Summarization.VertexValue<Long>>> summarizedVertices = new ArrayList<>();
	List<Edge<Long, EdgeValue<Long>>> summarizedEdges = new ArrayList<>();

	Graph<Long, Summarization.VertexValue<Long>, EdgeValue<Long>> output =
		input.run(new Summarization<>());

	output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices));
	output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges));

	env.execute();

	validateVertices(SummarizationData.EXPECTED_VERTICES, summarizedVertices);
	validateEdges(SummarizationData.EXPECTED_EDGES_WITH_VALUES, summarizedEdges);
}
 
Example #3
Source File: GlobalClusteringCoefficient.java    From flink with Apache License 2.0
@Override
public GlobalClusteringCoefficient<K, VV, EV> run(Graph<K, VV, EV> input)
		throws Exception {
	super.run(input);

	triangleCount = new Count<>();

	DataSet<TriangleListing.Result<K>> triangles = input
		.run(new TriangleListing<K, VV, EV>()
			.setSortTriangleVertices(false)
			.setParallelism(parallelism));

	triangleCount.run(triangles);

	vertexMetrics = new VertexMetrics<K, VV, EV>()
		.setParallelism(parallelism);

	input.run(vertexMetrics);

	return this;
}
 
Example #4
Source File: GraphCreationWithMapperITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWithTuple2ValueMapper() throws Exception {
	/*
	 * Test create() with edge dataset and a mapper that assigns a Tuple2 as value
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	Graph<Long, Tuple2<Long, Long>, Long> graph = Graph.fromDataSet(
		TestGraphUtils.getLongLongEdgeData(env), new AssignTuple2ValueMapper(), env);

	DataSet<Vertex<Long, Tuple2<Long, Long>>> data = graph.getVertices();
	List<Vertex<Long, Tuple2<Long, Long>>> result = data.collect();

	expectedResult = "1,(2,42)\n" +
		"2,(4,42)\n" +
		"3,(6,42)\n" +
		"4,(8,42)\n" +
		"5,(10,42)\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #5
Source File: RMatGraph.java    From flink with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
	int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1);

	// Edges
	int cyclesPerEdge = noiseEnabled ? 5 * scale : scale;

	List<BlockInfo<T>> generatorBlocks = randomGenerableFactory
		.getRandomGenerables(edgeCount, cyclesPerEdge);

	DataSet<Edge<LongValue, NullValue>> edges = env
		.fromCollection(generatorBlocks)
			.name("Random generators")
		.rebalance()
			.setParallelism(parallelism)
			.name("Rebalance")
		.flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise))
			.setParallelism(parallelism)
			.name("RMat graph edges");

	// Vertices
	DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism);

	// Graph
	return Graph.fromDataSet(vertices, edges, env);
}
 
Example #6
Source File: EdgeDegreesPair.java    From Flink-CEPplus with Apache License 2.0
@Override
public DataSet<Edge<K, Tuple3<EV, Degrees, Degrees>>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// s, t, d(s)
	DataSet<Edge<K, Tuple2<EV, Degrees>>> edgeSourceDegrees = input
		.run(new EdgeSourceDegrees<K, VV, EV>()
			.setParallelism(parallelism));

	// t, d(t)
	DataSet<Vertex<K, Degrees>> vertexDegrees = input
		.run(new VertexDegrees<K, VV, EV>()
			.setParallelism(parallelism));

	// s, t, (d(s), d(t))
	return edgeSourceDegrees
		.join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND)
		.where(1)
		.equalTo(0)
		.with(new JoinEdgeDegreeWithVertexDegree<>())
			.setParallelism(parallelism)
			.name("Edge target degree");
}
 
Example #7
Source File: GraphCreationWithMapperITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWithConstantValueMapper() throws Exception {
	/*
	 * Test create() with edge dataset with String key type
	 * and a mapper that assigns a double constant as value
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	Graph<String, Double, Long> graph = Graph.fromDataSet(TestGraphUtils.getStringLongEdgeData(env),
		new AssignDoubleConstantMapper(), env);

	DataSet<Vertex<String, Double>> data = graph.getVertices();
	List<Vertex<String, Double>> result = data.collect();

	expectedResult = "1,0.1\n" +
		"2,0.1\n" +
		"3,0.1\n" +
		"4,0.1\n" +
		"5,0.1\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #8
Source File: ReduceOnNeighborMethodsITCase.java    From flink with Apache License 2.0
@Test
public void testSumOfOutNeighborsNoValue() throws Exception {
	/*
	 * Get the sum of out-neighbor values
	 * for each vertex
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	DataSet<Tuple2<Long, Long>> verticesWithSumOfOutNeighborValues =
		graph.reduceOnNeighbors(new SumNeighbors(), EdgeDirection.OUT);
	List<Tuple2<Long, Long>> result = verticesWithSumOfOutNeighborValues.collect();

	expectedResult = "1,5\n" +
		"2,3\n" +
		"3,9\n" +
		"4,5\n" +
		"5,1\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #9
Source File: GraphCreationITCase.java    From flink with Apache License 2.0
@Test
public void testCreateWithoutVertexValues() throws Exception {
	/*
	 * Test create() with edge dataset and no vertex values
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	Graph<Long, NullValue, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongEdgeData(env), env);

	DataSet<Vertex<Long, NullValue>> data = graph.getVertices();
	List<Vertex<Long, NullValue>> result = data.collect();

	expectedResult = "1,(null)\n" +
		"2,(null)\n" +
		"3,(null)\n" +
		"4,(null)\n" +
		"5,(null)\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #10
Source File: DegreesITCase.java    From flink with Apache License 2.0
@Test
public void testInDegreesWithNoInEdge() throws Exception {
	/*
	 * Test inDegrees() with a vertex that has no incoming edges
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeDataWithZeroDegree(env), env);

	DataSet<Tuple2<Long, LongValue>> data = graph.inDegrees();
	List<Tuple2<Long, LongValue>> result = data.collect();

	expectedResult = "1,0\n" +
		"2,1\n" +
		"3,1\n" +
		"4,1\n" +
		"5,3\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #11
Source File: ReduceOnEdgesWithExceptionITCase.java    From flink with Apache License 2.0
/**
 * Test groupReduceOnEdges() with an edge having a trgId that does not exist in the vertex DataSet.
 */
@Test
public void testGroupReduceOnEdgesInvalidEdgeTrgId() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(PARALLELISM);

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
			TestGraphUtils.getLongLongEdgeInvalidTrgData(env), env);

	try {
		DataSet<Tuple2<Long, Long>> verticesWithAllNeighbors =
				graph.groupReduceOnEdges(new SelectNeighborsValueGreaterThanFour(), EdgeDirection.ALL);

		verticesWithAllNeighbors.output(new DiscardingOutputFormat<>());
		env.execute();

		fail("Expected an exception.");
	} catch (Exception e) {
		// We expect the job to fail with an exception
	}
}
 
Example #12
Source File: ReduceOnNeighborsWithExceptionITCase.java    From flink with Apache License 2.0
/**
 * Test groupReduceOnNeighbors() -NeighborsFunctionWithVertexValue-
 * with an edge having a srcId that does not exist in the vertex DataSet.
 */
@Test
public void testGroupReduceOnNeighborsWithVVInvalidEdgeSrcId() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(PARALLELISM);
	env.getConfig().disableSysoutLogging();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
			TestGraphUtils.getLongLongEdgeInvalidSrcData(env), env);

	try {
		DataSet<Tuple2<Long, Long>> verticesWithSumOfOutNeighborValues =
				graph.groupReduceOnNeighbors(new SumAllNeighbors(), EdgeDirection.ALL);

		verticesWithSumOfOutNeighborValues.output(new DiscardingOutputFormat<>());
		env.execute();

		fail("Expected an exception.");
	} catch (Exception e) {
		// We expect the job to fail with an exception
	}
}
 
Example #13
Source File: MapVerticesITCase.java    From flink with Apache License 2.0
@Test
public void testWithtuple1Value() throws Exception {
	/*
	 * Test mapVertices() and change the value type to a Tuple1
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	DataSet<Vertex<Long, Tuple1<Long>>> mappedVertices = graph.mapVertices(new ToTuple1Mapper()).getVertices();
	List<Vertex<Long, Tuple1<Long>>> result = mappedVertices.collect();

	expectedResult = "1,(1)\n" +
		"2,(2)\n" +
		"3,(3)\n" +
		"4,(4)\n" +
		"5,(5)\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #14
Source File: CompleteGraphTest.java    From flink with Apache License 2.0
@Test
public void testGraphMetrics() throws Exception {
	int vertexCount = 10;

	Graph<LongValue, NullValue, NullValue> graph = new CompleteGraph(env, vertexCount)
		.generate();

	assertEquals(vertexCount, graph.numberOfVertices());
	assertEquals(vertexCount * (vertexCount - 1), graph.numberOfEdges());

	long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
	long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();
	long maxInDegree = graph.inDegrees().max(1).collect().get(0).f1.getValue();
	long maxOutDegree = graph.outDegrees().max(1).collect().get(0).f1.getValue();

	assertEquals(vertexCount - 1, minInDegree);
	assertEquals(vertexCount - 1, minOutDegree);
	assertEquals(vertexCount - 1, maxInDegree);
	assertEquals(vertexCount - 1, maxOutDegree);
}
 
Example #15
Source File: VertexMetrics.java    From Flink-CEPplus with Apache License 2.0
@Override
public VertexMetrics<K, VV, EV> run(Graph<K, VV, EV> input)
		throws Exception {
	super.run(input);

	DataSet<Vertex<K, LongValue>> vertexDegree = input
		.run(new VertexDegree<K, VV, EV>()
			.setIncludeZeroDegreeVertices(includeZeroDegreeVertices)
			.setReduceOnTargetId(reduceOnTargetId)
			.setParallelism(parallelism));

	vertexMetricsHelper = new VertexMetricsHelper<>();

	vertexDegree
		.output(vertexMetricsHelper)
			.name("Vertex metrics");

	return this;
}
 
Example #16
Source File: CommunityDetectionTest.java    From flink with Apache License 2.0
@Test
public void testWithSingletonEdgeGraph() throws Exception {
	Graph<LongValue, Long, Double> result = new SingletonEdgeGraph(env, 1)
		.generate()
		.mapVertices(v -> v.getId().getValue(),
			new TypeHint<Vertex<LongValue, Long>>(){}.getTypeInfo())
		.mapEdges(e -> 1.0,
			new TypeHint<Edge<LongValue, Double>>(){}.getTypeInfo())
		.run(new CommunityDetection<>(10, 0.5));

	String expectedResult =
		"(0,0)\n" +
		"(1,1)\n";

	TestBaseUtils.compareResultAsText(result.getVertices().collect(), expectedResult);
}
 
Example #17
Source File: SingletonEdgeGraphTest.java    From flink with Apache License 2.0
@Test
public void testGraphMetrics() throws Exception {
	int vertexPairCount = 10;

	Graph<LongValue, NullValue, NullValue> graph = new SingletonEdgeGraph(env, vertexPairCount)
		.generate();

	assertEquals(2 * vertexPairCount, graph.numberOfVertices());
	assertEquals(2 * vertexPairCount, graph.numberOfEdges());

	long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
	long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();
	long maxInDegree = graph.inDegrees().max(1).collect().get(0).f1.getValue();
	long maxOutDegree = graph.outDegrees().max(1).collect().get(0).f1.getValue();

	assertEquals(1, minInDegree);
	assertEquals(1, minOutDegree);
	assertEquals(1, maxInDegree);
	assertEquals(1, maxOutDegree);
}
 
Example #18
Source File: ConnectedComponentsWithRandomisedEdgesITCase.java    From flink with Apache License 2.0
@Override
protected void testProgram() throws Exception {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Long> vertexIds = env.generateSequence(1, NUM_VERTICES);
	DataSet<String> edgeString = env.fromElements(ConnectedComponentsData.getRandomOddEvenEdges(NUM_EDGES, NUM_VERTICES, SEED).split("\n"));

	DataSet<Edge<Long, NullValue>> edges = edgeString.map(new EdgeParser());

	DataSet<Vertex<Long, Long>> initialVertices = vertexIds.map(new IdAssigner());

	Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);

	DataSet<Vertex<Long, Long>> result = graph.run(new ConnectedComponents<>(100));

	result.writeAsCsv(resultPath, "\n", " ");
	env.execute();
}
 
Example #19
Source File: CommunityDetectionTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWithSingletonEdgeGraph() throws Exception {
	Graph<LongValue, Long, Double> result = new SingletonEdgeGraph(env, 1)
		.generate()
		.mapVertices(v -> v.getId().getValue(),
			new TypeHint<Vertex<LongValue, Long>>(){}.getTypeInfo())
		.mapEdges(e -> 1.0,
			new TypeHint<Edge<LongValue, Double>>(){}.getTypeInfo())
		.run(new CommunityDetection<>(10, 0.5));

	String expectedResult =
		"(0,0)\n" +
		"(1,1)\n";

	TestBaseUtils.compareResultAsText(result.getVertices().collect(), expectedResult);
}
 
Example #20
Source File: TriadicCensus.java    From flink with Apache License 2.0
@Override
public TriadicCensus<K, VV, EV> run(Graph<K, VV, EV> input)
		throws Exception {
	super.run(input);

	triangleListingHelper = new TriangleListingHelper<>();

	input
		.run(new TriangleListing<K, VV, EV>()
			.setParallelism(parallelism))
		.output(triangleListingHelper)
			.name("Triangle counts");

	vertexDegreesHelper = new VertexDegreesHelper<>();

	input
		.run(new VertexDegrees<K, VV, EV>()
			.setParallelism(parallelism))
		.output(vertexDegreesHelper)
			.name("Edge and triplet counts");

	return this;
}
 
Example #21
Source File: StarGraph.java    From Flink-CEPplus with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
	Preconditions.checkState(vertexCount >= 2);

	// Vertices
	DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);

	// Edges
	LongValueSequenceIterator iterator = new LongValueSequenceIterator(1, this.vertexCount - 1);

	DataSet<Edge<LongValue, NullValue>> edges = env
		.fromParallelCollection(iterator, LongValue.class)
			.setParallelism(parallelism)
			.name("Edge iterators")
		.flatMap(new LinkVertexToCenter())
			.setParallelism(parallelism)
			.name("Star graph edges");

	// Graph
	return Graph.fromDataSet(vertices, edges, env);
}
 
Example #22
Source File: SingletonEdgeGraph.java    From Flink-CEPplus with Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
	Preconditions.checkState(vertexPairCount > 0);

	// Vertices
	long vertexCount = 2 * vertexPairCount;

	DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);

	// Edges
	LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, vertexCount - 1);

	DataSet<Edge<LongValue, NullValue>> edges = env
		.fromParallelCollection(iterator, LongValue.class)
			.setParallelism(parallelism)
			.name("Edge iterators")
		.map(new LinkVertexToSingletonNeighbor())
			.setParallelism(parallelism)
			.name("Complete graph edges");

	// Graph
	return Graph.fromDataSet(vertices, edges, env);
}
 
Example #23
Source File: GraphMutationsITCase.java    From flink with Apache License 2.0
@Test
public void testAddVertexExisting() throws Exception {
	/*
	 * Test addVertex() -- add an existing vertex
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
			TestGraphUtils.getLongLongEdgeData(env), env);

	graph = graph.addVertex(new Vertex<>(1L, 1L));

	DataSet<Vertex<Long, Long>> data = graph.getVertices();
	List<Vertex<Long, Long>> result = data.collect();

	expectedResult = "1,1\n" +
			"2,2\n" +
			"3,3\n" +
			"4,4\n" +
			"5,5\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #24
Source File: JoinWithVerticesITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testWithDifferentType() throws Exception {
	/*
	 * Test joinWithVertices with an input DataSet, passed as a parameter, that contains
	 * fewer elements than the vertex DataSet and has a different value type (Boolean)
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	Graph<Long, Long, Long> res = graph.joinWithVertices(graph.getVertices().first(3)
		.map(new ProjectIdWithTrue()), new DoubleIfTrueMapper());

	DataSet<Vertex<Long, Long>> data = res.getVertices();
	List<Vertex<Long, Long>> result = data.collect();

	expectedResult = "1,2\n" +
		"2,4\n" +
		"3,6\n" +
		"4,4\n" +
		"5,5\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #25
Source File: ReduceOnEdgesWithExceptionITCase.java    From flink with Apache License 2.0
/**
 * Test groupReduceOnEdges() with an edge having a srcId that does not exist in the vertex DataSet.
 */
@Test
public void testGroupReduceOnEdgesInvalidEdgeSrcId() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(PARALLELISM);

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
			TestGraphUtils.getLongLongEdgeInvalidSrcData(env), env);

	try {
		DataSet<Tuple2<Long, Long>> verticesWithAllNeighbors =
				graph.groupReduceOnEdges(new SelectNeighborsValueGreaterThanFour(), EdgeDirection.ALL);

		verticesWithAllNeighbors.output(new DiscardingOutputFormat<>());
		env.execute();

		fail("Expected an exception.");
	} catch (Exception e) {
		// We expect the job to fail with an exception
	}
}
 
Example #26
Source File: DegreesWithExceptionITCase.java    From flink with Apache License 2.0
/**
 * Test outDegrees() with an edge having a srcId that does not exist in the vertex DataSet.
 */
@Test
public void testOutDegreesInvalidEdgeSrcId() throws Exception {

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(PARALLELISM);

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
			TestGraphUtils.getLongLongEdgeInvalidSrcData(env), env);

	try {
		graph.outDegrees().output(new DiscardingOutputFormat<>());
		env.execute();

		fail("graph.outDegrees() did not fail.");
	} catch (Exception e) {
		// We expect the job to fail with an exception
	}
}
 
Example #27
Source File: PathGraphTest.java    From flink with Apache License 2.0
@Test
public void testGraphMetrics() throws Exception {
	int vertexCount = 100;

	Graph<LongValue, NullValue, NullValue> graph = new PathGraph(env, vertexCount)
		.generate();

	assertEquals(vertexCount, graph.numberOfVertices());
	assertEquals(2 * (vertexCount - 1), graph.numberOfEdges());

	long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
	long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();
	long maxInDegree = graph.inDegrees().max(1).collect().get(0).f1.getValue();
	long maxOutDegree = graph.outDegrees().max(1).collect().get(0).f1.getValue();

	assertEquals(1, minInDegree);
	assertEquals(1, minOutDegree);
	assertEquals(2, maxInDegree);
	assertEquals(2, maxOutDegree);
}
 
Example #28
Source File: JoinWithVerticesITCase.java    From flink with Apache License 2.0
@Test
public void testWithCustomType() throws Exception {
	/*
	 * Test joinWithVertices with a DataSet containing input values of a custom parameterized type
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
		TestGraphUtils.getLongLongEdgeData(env), env);

	Graph<Long, Long, Long> res = graph.joinWithVertices(TestGraphUtils.getLongCustomTuple2Data(env),
		new CustomValueMapper());

	DataSet<Vertex<Long, Long>> data = res.getVertices();
	List<Vertex<Long, Long>> result = data.collect();

	expectedResult = "1,10\n" +
		"2,20\n" +
		"3,30\n" +
		"4,40\n" +
		"5,5\n";

	compareResultAsTuples(result, expectedResult);
}
 
Example #29
Source File: Simplify.java    From Flink-CEPplus with Apache License 2.0
@Override
public Graph<K, VV, EV> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// Edges
	DataSet<Edge<K, EV>> edges = input
		.getEdges()
		.filter(new RemoveSelfLoops<>())
			.setParallelism(parallelism)
			.name("Remove self-loops")
		.distinct(0, 1)
			.setCombineHint(CombineHint.NONE)
			.setParallelism(parallelism)
			.name("Remove duplicate edges");

	// Graph
	return Graph.fromDataSet(input.getVertices(), edges, input.getContext());
}
 
Example #30
Source File: LabelPropagationITCase.java    From flink with Apache License 2.0
@Test
public void testSingleIteration() throws Exception {
	/*
	 * Test one iteration of label propagation example with a simple graph
	 */
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Graph<Long, Long, NullValue> inputGraph = Graph.fromDataSet(
		LabelPropagationData.getDefaultVertexSet(env),
		LabelPropagationData.getDefaultEdgeDataSet(env), env);

	List<Vertex<Long, Long>> result = inputGraph
		.run(new LabelPropagation<>(1))
		.collect();

	expectedResult = LabelPropagationData.LABELS_AFTER_1_ITERATION;
	compareResultAsTuples(result, expectedResult);
}