org.apache.flink.graph.Graph Java Examples
The following examples show how to use
org.apache.flink.graph.Graph.
Each example is taken from an open-source project; the source project, author, file, and license are noted above each snippet.
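Before the individual examples, here is a minimal, self-contained sketch of the pattern most of them follow: build a Graph from vertex and edge data, ask it for a property or transformation, and collect the result. The API calls (Graph.fromCollection, numberOfVertices, inDegrees, collect) are the same Gelly calls used in the examples below; the class name GraphQuickStart and the toy vertex/edge values are made up for illustration.

import java.util.Arrays;
import java.util.List;

import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;

public class GraphQuickStart {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Toy data for illustration only: a directed triangle 1 -> 2 -> 3 -> 1
        // with arbitrary vertex labels and edge weights.
        List<Vertex<Long, String>> vertices = Arrays.asList(
            new Vertex<>(1L, "a"),
            new Vertex<>(2L, "b"),
            new Vertex<>(3L, "c"));
        List<Edge<Long, Double>> edges = Arrays.asList(
            new Edge<>(1L, 2L, 1.0),
            new Edge<>(2L, 3L, 2.0),
            new Edge<>(3L, 1L, 3.0));

        // Construct the Graph and query simple properties.
        Graph<Long, String, Double> graph = Graph.fromCollection(vertices, edges, env);

        System.out.println("vertices: " + graph.numberOfVertices());
        System.out.println("in-degrees: " + graph.inDegrees().collect());
    }
}

Every example below follows this build-transform-collect shape, varying only in how the graph is constructed (from DataSets, collections, or generators) and which operation is applied.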
Example #1
Source Project: flink Author: apache File: ReduceOnEdgesMethodsITCase.java License: Apache License 2.0
@Test
public void testAllOutNeighborsNoValue() throws Exception {
    /*
     * Get all the out-neighbors for each vertex except for the vertex with id 5.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeData(env), env);

    DataSet<Tuple2<Long, Long>> verticesWithAllOutNeighbors =
        graph.groupReduceOnEdges(new SelectOutNeighborsExcludeFive(), EdgeDirection.OUT);

    List<Tuple2<Long, Long>> result = verticesWithAllOutNeighbors.collect();

    expectedResult = "1,2\n" +
        "1,3\n" +
        "2,3\n" +
        "3,4\n" +
        "3,5\n" +
        "4,5";

    compareResultAsTuples(result, expectedResult);
}
Example #2
Source Project: flink Author: apache File: CommunityDetectionTest.java License: Apache License 2.0
@Test
public void testWithSingletonEdgeGraph() throws Exception {
    Graph<LongValue, Long, Double> result = new SingletonEdgeGraph(env, 1)
        .generate()
        .mapVertices(v -> v.getId().getValue(),
            new TypeHint<Vertex<LongValue, Long>>(){}.getTypeInfo())
        .mapEdges(e -> 1.0,
            new TypeHint<Edge<LongValue, Double>>(){}.getTypeInfo())
        .run(new CommunityDetection<>(10, 0.5));

    String expectedResult =
        "(0,0)\n" +
        "(1,1)\n";

    TestBaseUtils.compareResultAsText(result.getVertices().collect(), expectedResult);
}
Example #3
Source Project: flink Author: apache File: GSAConnectedComponents.java License: Apache License 2.0
@Override
public DataSet<Vertex<K, VV>> run(Graph<K, VV, EV> graph) throws Exception {
    // get type information for vertex value
    TypeInformation<VV> valueTypeInfo = ((TupleTypeInfo<?>) graph.getVertices().getType()).getTypeAt(1);

    Graph<K, VV, NullValue> undirectedGraph = graph
        .mapEdges(new MapTo<>(NullValue.getInstance()))
        .getUndirected();

    return undirectedGraph.runGatherSumApplyIteration(
        new GatherNeighborIds<>(valueTypeInfo),
        new SelectMinId<>(valueTypeInfo),
        new UpdateComponentId<>(valueTypeInfo),
        maxIterations).getVertices();
}
Example #4
Source Project: flink Author: apache File: RMatGraph.java License: Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
    int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1);

    // Edges
    int cyclesPerEdge = noiseEnabled ? 5 * scale : scale;

    List<BlockInfo<T>> generatorBlocks = randomGenerableFactory
        .getRandomGenerables(edgeCount, cyclesPerEdge);

    DataSet<Edge<LongValue, NullValue>> edges = env
        .fromCollection(generatorBlocks)
            .name("Random generators")
        .rebalance()
            .setParallelism(parallelism)
            .name("Rebalance")
        .flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise))
            .setParallelism(parallelism)
            .name("RMat graph edges");

    // Vertices
    DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism);

    // Graph
    return Graph.fromDataSet(vertices, edges, env);
}
Example #5
Source Project: flink Author: flink-tpc-ds File: GlobalClusteringCoefficient.java License: Apache License 2.0
@Override
public GlobalClusteringCoefficient<K, VV, EV> run(Graph<K, VV, EV> input) throws Exception {
    super.run(input);

    triangleCount = new Count<>();

    DataSet<TriangleListing.Result<K>> triangles = input
        .run(new TriangleListing<K, VV, EV>()
            .setSortTriangleVertices(false)
            .setParallelism(parallelism));

    triangleCount.run(triangles);

    vertexMetrics = new VertexMetrics<K, VV, EV>()
        .setParallelism(parallelism);

    input.run(vertexMetrics);

    return this;
}
Example #6
Source Project: flink Author: apache File: GraphCreationITCase.java License: Apache License 2.0
@Test
public void testCreateWithoutVertexValues() throws Exception {
    /*
     * Test create() with edge dataset and no vertex values
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, NullValue, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongEdgeData(env), env);

    DataSet<Vertex<Long, NullValue>> data = graph.getVertices();
    List<Vertex<Long, NullValue>> result = data.collect();

    expectedResult = "1,(null)\n" +
        "2,(null)\n" +
        "3,(null)\n" +
        "4,(null)\n" +
        "5,(null)\n";

    compareResultAsTuples(result, expectedResult);
}
Example #7
Source Project: Flink-CEPplus Author: ljygz File: EdgeDegreesPair.java License: Apache License 2.0
@Override
public DataSet<Edge<K, Tuple3<EV, Degrees, Degrees>>> runInternal(Graph<K, VV, EV> input) throws Exception {
    // s, t, d(s)
    DataSet<Edge<K, Tuple2<EV, Degrees>>> edgeSourceDegrees = input
        .run(new EdgeSourceDegrees<K, VV, EV>()
            .setParallelism(parallelism));

    // t, d(t)
    DataSet<Vertex<K, Degrees>> vertexDegrees = input
        .run(new VertexDegrees<K, VV, EV>()
            .setParallelism(parallelism));

    // s, t, (d(s), d(t))
    return edgeSourceDegrees
        .join(vertexDegrees, JoinHint.REPARTITION_HASH_SECOND)
        .where(1)
        .equalTo(0)
        .with(new JoinEdgeDegreeWithVertexDegree<>())
            .setParallelism(parallelism)
            .name("Edge target degree");
}
Example #8
Source Project: Flink-CEPplus Author: ljygz File: GraphCreationWithMapperITCase.java License: Apache License 2.0
@Test
public void testWithConstantValueMapper() throws Exception {
    /*
     * Test create() with edge dataset with String key type
     * and a mapper that assigns a double constant as value
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<String, Double, Long> graph = Graph.fromDataSet(TestGraphUtils.getStringLongEdgeData(env),
        new AssignDoubleConstantMapper(), env);

    DataSet<Vertex<String, Double>> data = graph.getVertices();
    List<Vertex<String, Double>> result = data.collect();

    expectedResult = "1,0.1\n" +
        "2,0.1\n" +
        "3,0.1\n" +
        "4,0.1\n" +
        "5,0.1\n";

    compareResultAsTuples(result, expectedResult);
}
Example #9
Source Project: flink Author: flink-tpc-ds File: FromCollectionITCase.java License: Apache License 2.0
@Test
public void testFromCollectionEdgesWithInitialValue() throws Exception {
    /*
     * Test fromCollection(edges) with vertices initialised by a
     * function that takes the id and doubles it
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromCollection(TestGraphUtils.getLongLongEdges(),
        new InitVerticesMapper(), env);

    DataSet<Vertex<Long, Long>> data = graph.getVertices();
    List<Vertex<Long, Long>> result = data.collect();

    expectedResult = "1,2\n" +
        "2,4\n" +
        "3,6\n" +
        "4,8\n" +
        "5,10\n";

    compareResultAsTuples(result, expectedResult);
}
Example #10
Source Project: flink Author: flink-tpc-ds File: ReduceOnNeighborMethodsITCase.java License: Apache License 2.0
@Test
public void testSumOfOutNeighborsNoValue() throws Exception {
    /*
     * Get the sum of out-neighbor values
     * for each vertex
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeData(env), env);

    DataSet<Tuple2<Long, Long>> verticesWithSumOfOutNeighborValues =
        graph.reduceOnNeighbors(new SumNeighbors(), EdgeDirection.OUT);

    List<Tuple2<Long, Long>> result = verticesWithSumOfOutNeighborValues.collect();

    expectedResult = "1,5\n" +
        "2,3\n" +
        "3,9\n" +
        "4,5\n" +
        "5,1\n";

    compareResultAsTuples(result, expectedResult);
}
Example #11
Source Project: Flink-CEPplus Author: ljygz File: TranslateTest.java License: Apache License 2.0
@Before
public void setup() {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

    int count = 10;

    List<Vertex<LongValue, LongValue>> vertexList = new LinkedList<>();
    List<Edge<LongValue, LongValue>> edgeList = new LinkedList<>();

    for (long l = 0; l < count; l++) {
        LongValue lv0 = new LongValue(l);
        LongValue lv1 = new LongValue(l + 1);
        LongValue lv2 = new LongValue(l + 2);
        vertexList.add(new Vertex<>(lv0, lv1));
        edgeList.add(new Edge<>(lv0, lv1, lv2));
    }

    graph = Graph.fromCollection(vertexList, edgeList, env);
}
Example #12
Source Project: Flink-CEPplus Author: ljygz File: GraphCreationWithMapperITCase.java License: Apache License 2.0
@Test
public void testWithTuple2ValueMapper() throws Exception {
    /*
     * Test create() with edge dataset and a mapper that assigns a Tuple2 as value
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Tuple2<Long, Long>, Long> graph = Graph.fromDataSet(
        TestGraphUtils.getLongLongEdgeData(env), new AssignTuple2ValueMapper(), env);

    DataSet<Vertex<Long, Tuple2<Long, Long>>> data = graph.getVertices();
    List<Vertex<Long, Tuple2<Long, Long>>> result = data.collect();

    expectedResult = "1,(2,42)\n" +
        "2,(4,42)\n" +
        "3,(6,42)\n" +
        "4,(8,42)\n" +
        "5,(10,42)\n";

    compareResultAsTuples(result, expectedResult);
}
Example #13
Source Project: flink Author: flink-tpc-ds File: MapVerticesITCase.java License: Apache License 2.0
@Test
public void testWithStringValue() throws Exception {
    /*
     * Test mapVertices() and change the value type to String
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeData(env), env);

    DataSet<Vertex<Long, String>> mappedVertices = graph.mapVertices(new ToStringMapper()).getVertices();
    List<Vertex<Long, String>> result = mappedVertices.collect();

    expectedResult = "1,one\n" +
        "2,two\n" +
        "3,three\n" +
        "4,four\n" +
        "5,five\n";

    compareResultAsTuples(result, expectedResult);
}
Example #14
Source Project: flink Author: apache File: SingletonEdgeGraph.java License: Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
    Preconditions.checkState(vertexPairCount > 0);

    // Vertices
    long vertexCount = 2 * vertexPairCount;

    DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);

    // Edges
    LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, vertexCount - 1);

    DataSet<Edge<LongValue, NullValue>> edges = env
        .fromParallelCollection(iterator, LongValue.class)
            .setParallelism(parallelism)
            .name("Edge iterators")
        .map(new LinkVertexToSingletonNeighbor())
            .setParallelism(parallelism)
            .name("Complete graph edges");

    // Graph
    return Graph.fromDataSet(vertices, edges, env);
}
Example #15
Source Project: flink Author: flink-tpc-ds File: SummarizationITCase.java License: Apache License 2.0
@Test
public void testWithVertexAndEdgeLongValues() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> input = Graph.fromDataSet(
        SummarizationData.getVertices(env),
        SummarizationData.getEdges(env), env)
        .run(new TranslateVertexValues<>(new StringToLong()))
        .run(new TranslateEdgeValues<>(new StringToLong()));

    List<Vertex<Long, Summarization.VertexValue<Long>>> summarizedVertices = new ArrayList<>();
    List<Edge<Long, EdgeValue<Long>>> summarizedEdges = new ArrayList<>();

    Graph<Long, Summarization.VertexValue<Long>, EdgeValue<Long>> output =
        input.run(new Summarization<>());

    output.getVertices().output(new LocalCollectionOutputFormat<>(summarizedVertices));
    output.getEdges().output(new LocalCollectionOutputFormat<>(summarizedEdges));

    env.execute();

    validateVertices(SummarizationData.EXPECTED_VERTICES, summarizedVertices);
    validateEdges(SummarizationData.EXPECTED_EDGES_WITH_VALUES, summarizedEdges);
}
Example #16
Source Project: flink Author: flink-tpc-ds File: DegreesITCase.java License: Apache License 2.0
@Test
public void testInDegreesWithNoInEdge() throws Exception {
    /*
     * Test inDegrees() with no incoming edge
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeDataWithZeroDegree(env), env);

    DataSet<Tuple2<Long, LongValue>> data = graph.inDegrees();
    List<Tuple2<Long, LongValue>> result = data.collect();

    expectedResult = "1,0\n" +
        "2,1\n" +
        "3,1\n" +
        "4,1\n" +
        "5,3\n";

    compareResultAsTuples(result, expectedResult);
}
Example #17
Source Project: flink Author: apache File: ReduceOnEdgesWithExceptionITCase.java License: Apache License 2.0
/**
 * Test groupReduceOnEdges() with an edge having a trgId that does not exist in the vertex DataSet.
 */
@Test
public void testGroupReduceOnEdgesInvalidEdgeTrgId() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeInvalidTrgData(env), env);

    try {
        DataSet<Tuple2<Long, Long>> verticesWithAllNeighbors =
            graph.groupReduceOnEdges(new SelectNeighborsValueGreaterThanFour(), EdgeDirection.ALL);

        verticesWithAllNeighbors.output(new DiscardingOutputFormat<>());
        env.execute();

        fail("Expected an exception.");
    } catch (Exception e) {
        // We expect the job to fail with an exception
    }
}
Example #18
Source Project: flink Author: flink-tpc-ds File: ReduceOnNeighborsWithExceptionITCase.java License: Apache License 2.0
/**
 * Test groupReduceOnNeighbors() -NeighborsFunctionWithVertexValue-
 * with an edge having a srcId that does not exist in the vertex DataSet.
 */
@Test
public void testGroupReduceOnNeighborsWithVVInvalidEdgeSrcId() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);
    env.getConfig().disableSysoutLogging();

    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeInvalidSrcData(env), env);

    try {
        DataSet<Tuple2<Long, Long>> verticesWithSumOfOutNeighborValues =
            graph.groupReduceOnNeighbors(new SumAllNeighbors(), EdgeDirection.ALL);

        verticesWithSumOfOutNeighborValues.output(new DiscardingOutputFormat<>());
        env.execute();

        fail("Expected an exception.");
    } catch (Exception e) {
        // We expect the job to fail with an exception
    }
}
Example #19
Source Project: flink Author: apache File: MapVerticesITCase.java License: Apache License 2.0
@Test
public void testWithtuple1Value() throws Exception {
    /*
     * Test mapVertices() and change the value type to a Tuple1
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeData(env), env);

    DataSet<Vertex<Long, Tuple1<Long>>> mappedVertices = graph.mapVertices(new ToTuple1Mapper()).getVertices();
    List<Vertex<Long, Tuple1<Long>>> result = mappedVertices.collect();

    expectedResult = "1,(1)\n" +
        "2,(2)\n" +
        "3,(3)\n" +
        "4,(4)\n" +
        "5,(5)\n";

    compareResultAsTuples(result, expectedResult);
}
Example #20
Source Project: flink Author: flink-tpc-ds File: CompleteGraphTest.java License: Apache License 2.0
@Test
public void testGraphMetrics() throws Exception {
    int vertexCount = 10;

    Graph<LongValue, NullValue, NullValue> graph = new CompleteGraph(env, vertexCount)
        .generate();

    assertEquals(vertexCount, graph.numberOfVertices());
    assertEquals(vertexCount * (vertexCount - 1), graph.numberOfEdges());

    long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
    long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();
    long maxInDegree = graph.inDegrees().max(1).collect().get(0).f1.getValue();
    long maxOutDegree = graph.outDegrees().max(1).collect().get(0).f1.getValue();

    assertEquals(vertexCount - 1, minInDegree);
    assertEquals(vertexCount - 1, minOutDegree);
    assertEquals(vertexCount - 1, maxInDegree);
    assertEquals(vertexCount - 1, maxOutDegree);
}
Example #21
Source Project: Flink-CEPplus Author: ljygz File: VertexMetrics.java License: Apache License 2.0
@Override
public VertexMetrics<K, VV, EV> run(Graph<K, VV, EV> input) throws Exception {
    super.run(input);

    DataSet<Vertex<K, LongValue>> vertexDegree = input
        .run(new VertexDegree<K, VV, EV>()
            .setIncludeZeroDegreeVertices(includeZeroDegreeVertices)
            .setReduceOnTargetId(reduceOnTargetId)
            .setParallelism(parallelism));

    vertexMetricsHelper = new VertexMetricsHelper<>();

    vertexDegree
        .output(vertexMetricsHelper)
            .name("Vertex metrics");

    return this;
}
Example #22
Source Project: flink Author: flink-tpc-ds File: ReduceOnNeighborMethodsITCase.java License: Apache License 2.0
@Test
public void testSumOfInNeighborsNoValue() throws Exception {
    /*
     * Get the sum of in-neighbor values
     * times the edge weights for each vertex
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeData(env), env);

    DataSet<Tuple2<Long, Long>> verticesWithSum =
        graph.groupReduceOnNeighbors(new SumInNeighborsNoValue(), EdgeDirection.IN);

    List<Tuple2<Long, Long>> result = verticesWithSum.collect();

    expectedResult = "1,255\n" +
        "2,12\n" +
        "3,59\n" +
        "4,102\n" +
        "5,285\n";

    compareResultAsTuples(result, expectedResult);
}
Example #23
Source Project: flink Author: flink-tpc-ds File: SingletonEdgeGraphTest.java License: Apache License 2.0
@Test
public void testGraphMetrics() throws Exception {
    int vertexPairCount = 10;

    Graph<LongValue, NullValue, NullValue> graph = new SingletonEdgeGraph(env, vertexPairCount)
        .generate();

    assertEquals(2 * vertexPairCount, graph.numberOfVertices());
    assertEquals(2 * vertexPairCount, graph.numberOfEdges());

    long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
    long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();
    long maxInDegree = graph.inDegrees().max(1).collect().get(0).f1.getValue();
    long maxOutDegree = graph.outDegrees().max(1).collect().get(0).f1.getValue();

    assertEquals(1, minInDegree);
    assertEquals(1, minOutDegree);
    assertEquals(1, maxInDegree);
    assertEquals(1, maxOutDegree);
}
Example #24
Source Project: flink Author: flink-tpc-ds File: ConnectedComponentsWithRandomisedEdgesITCase.java License: Apache License 2.0
@Override
protected void testProgram() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Long> vertexIds = env.generateSequence(1, NUM_VERTICES);
    DataSet<String> edgeString = env.fromElements(
        ConnectedComponentsData.getRandomOddEvenEdges(NUM_EDGES, NUM_VERTICES, SEED).split("\n"));

    DataSet<Edge<Long, NullValue>> edges = edgeString.map(new EdgeParser());

    DataSet<Vertex<Long, Long>> initialVertices = vertexIds.map(new IdAssigner());

    Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);

    DataSet<Vertex<Long, Long>> result = graph.run(new ConnectedComponents<>(100));

    result.writeAsCsv(resultPath, "\n", " ");
    env.execute();
}
Example #25
Source Project: flink Author: flink-tpc-ds File: TriadicCensus.java License: Apache License 2.0
@Override
public TriadicCensus<K, VV, EV> run(Graph<K, VV, EV> input) throws Exception {
    super.run(input);

    triangleListingHelper = new TriangleListingHelper<>();

    input
        .run(new TriangleListing<K, VV, EV>()
            .setParallelism(parallelism))
        .output(triangleListingHelper)
            .name("Triangle counts");

    vertexDegreesHelper = new VertexDegreesHelper<>();

    input
        .run(new VertexDegrees<K, VV, EV>()
            .setParallelism(parallelism))
        .output(vertexDegreesHelper)
            .name("Edge and triplet counts");

    return this;
}
Example #26
Source Project: Flink-CEPplus Author: ljygz File: StarGraph.java License: Apache License 2.0
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
    Preconditions.checkState(vertexCount >= 2);

    // Vertices
    DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);

    // Edges
    LongValueSequenceIterator iterator = new LongValueSequenceIterator(1, this.vertexCount - 1);

    DataSet<Edge<LongValue, NullValue>> edges = env
        .fromParallelCollection(iterator, LongValue.class)
            .setParallelism(parallelism)
            .name("Edge iterators")
        .flatMap(new LinkVertexToCenter())
            .setParallelism(parallelism)
            .name("Star graph edges");

    // Graph
    return Graph.fromDataSet(vertices, edges, env);
}
Example #27
Source Project: flink Author: apache File: GraphMutationsITCase.java License: Apache License 2.0
@Test
public void testAddVertexExisting() throws Exception {
    /*
     * Test addVertex() -- add an existing vertex
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeData(env), env);

    graph = graph.addVertex(new Vertex<>(1L, 1L));

    DataSet<Vertex<Long, Long>> data = graph.getVertices();
    List<Vertex<Long, Long>> result = data.collect();

    expectedResult = "1,1\n" +
        "2,2\n" +
        "3,3\n" +
        "4,4\n" +
        "5,5\n";

    compareResultAsTuples(result, expectedResult);
}
Example #28
Source Project: Flink-CEPplus Author: ljygz File: JoinWithVerticesITCase.java License: Apache License 2.0
@Test
public void testWithDifferentType() throws Exception {
    /*
     * Test joinWithVertices with the input DataSet passed as a parameter containing
     * less elements than the vertex DataSet and of a different type (Boolean)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
        TestGraphUtils.getLongLongEdgeData(env), env);

    Graph<Long, Long, Long> res = graph.joinWithVertices(graph.getVertices().first(3)
        .map(new ProjectIdWithTrue()), new DoubleIfTrueMapper());

    DataSet<Vertex<Long, Long>> data = res.getVertices();
    List<Vertex<Long, Long>> result = data.collect();

    expectedResult = "1,2\n" +
        "2,4\n" +
        "3,6\n" +
        "4,4\n" +
        "5,5\n";

    compareResultAsTuples(result, expectedResult);
}