org.apache.flink.graph.asm.degree.annotate.undirected.EdgeDegreePair Java Examples

The following examples show how to use org.apache.flink.graph.asm.degree.annotate.undirected.EdgeDegreePair. Each example is taken from the source file and open-source project named in its header.
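Before the extracted examples, here is a minimal self-contained sketch of running EdgeDegreePair on a small undirected graph. The graph data, class name, and job setup are illustrative assumptions rather than code from the projects below; only the EdgeDegreePair call and its setReduceOnTargetId option follow the usage shown in the examples.

import java.util.Arrays;

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.asm.degree.annotate.undirected.EdgeDegreePair;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class EdgeDegreePairExample {

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// Small undirected graph: a triangle {1, 2, 3} with a pendant vertex 4.
		// Reverse edges are added with getUndirected() so that each undirected
		// edge is represented in both directions, which is how the undirected
		// degree utilities are typically driven.
		Graph<Long, NullValue, NullValue> graph = Graph.fromCollection(
			Arrays.asList(
				new Edge<>(1L, 2L, NullValue.getInstance()),
				new Edge<>(2L, 3L, NullValue.getInstance()),
				new Edge<>(1L, 3L, NullValue.getInstance()),
				new Edge<>(3L, 4L, NullValue.getInstance())),
			env)
			.getUndirected();

		// Annotate every edge with (edge value, deg(source), deg(target)).
		DataSet<Edge<Long, Tuple3<NullValue, LongValue, LongValue>>> degreePairs = graph
			.run(new EdgeDegreePair<Long, NullValue, NullValue>()
				.setReduceOnTargetId(true));

		// print() triggers execution and writes one annotated edge per line.
		degreePairs.print();
	}
}
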
Example #1
Source File: EdgeMetrics.java    From Flink-CEPplus with Apache License 2.0
@Override
public EdgeMetrics<K, VV, EV> run(Graph<K, VV, EV> input)
		throws Exception {
	super.run(input);

	// s, t, (d(s), d(t))
	DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> edgeDegreePair = input
		.run(new EdgeDegreePair<K, VV, EV>()
			.setReduceOnTargetId(reduceOnTargetId)
			.setParallelism(parallelism));

	// s, d(s), count of (u, v) where deg(u) < deg(v) or (deg(u) == deg(v) and u < v)
	DataSet<Tuple3<K, LongValue, LongValue>> edgeStats = edgeDegreePair
		.map(new EdgeStats<>())
			.setParallelism(parallelism)
			.name("Edge stats")
		.groupBy(0)
		.reduce(new SumEdgeStats<>())
		.setCombineHint(CombineHint.HASH)
			.setParallelism(parallelism)
			.name("Sum edge stats");

	edgeMetricsHelper = new EdgeMetricsHelper<>();

	edgeStats
		.output(edgeMetricsHelper)
			.setParallelism(parallelism)
			.name("Edge metrics");

	return this;
}
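
The run(...) above only wires the metrics computation into the execution plan; the aggregated result is read after the job runs. A hedged sketch of driving the analytic, reusing graph and env from the sketch at the top and assuming the undirected EdgeMetrics from org.apache.flink.graph.library.metric.undirected:

// Build the plan for the analytic; run(...) returns the analytic itself.
EdgeMetrics<Long, NullValue, NullValue> edgeMetrics =
	new EdgeMetrics<Long, NullValue, NullValue>()
		.run(graph);

// The metrics are collected during job execution and read afterwards.
env.execute("EdgeMetrics example");
System.out.println(edgeMetrics.getResult());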
 
Example #2
Source File: EdgeMetrics.java    From flink with Apache License 2.0
@Override
public EdgeMetrics<K, VV, EV> run(Graph<K, VV, EV> input)
		throws Exception {
	super.run(input);

	// s, t, (d(s), d(t))
	DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> edgeDegreePair = input
		.run(new EdgeDegreePair<K, VV, EV>()
			.setReduceOnTargetId(reduceOnTargetId)
			.setParallelism(parallelism));

	// s, d(s), count of (u, v) where deg(u) < deg(v) or (deg(u) == deg(v) and u < v)
	DataSet<Tuple3<K, LongValue, LongValue>> edgeStats = edgeDegreePair
		.map(new EdgeStats<>())
			.setParallelism(parallelism)
			.name("Edge stats")
		.groupBy(0)
		.reduce(new SumEdgeStats<>())
		.setCombineHint(CombineHint.HASH)
			.setParallelism(parallelism)
			.name("Sum edge stats");

	edgeMetricsHelper = new EdgeMetricsHelper<>();

	edgeStats
		.output(edgeMetricsHelper)
			.setParallelism(parallelism)
			.name("Edge metrics");

	return this;
}
 
Example #3
Source File: TriangleListing.java    From Flink-CEPplus with Apache License 2.0
@Override
public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// u, v where u < v
	DataSet<Tuple2<K, K>> filteredByID = input
		.getEdges()
		.flatMap(new FilterByID<>())
			.setParallelism(parallelism)
			.name("Filter by ID");

	// u, v, (edge value, deg(u), deg(v))
	DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> pairDegree = input
		.run(new EdgeDegreePair<K, VV, EV>()
			.setParallelism(parallelism));

	// u, v where deg(u) < deg(v) or (deg(u) == deg(v) and u < v)
	DataSet<Tuple2<K, K>> filteredByDegree = pairDegree
		.flatMap(new FilterByDegree<>())
			.setParallelism(parallelism)
			.name("Filter by degree");

	// u, v, w where (u, v) and (u, w) are edges in graph, v < w
	DataSet<Tuple3<K, K, K>> triplets = filteredByDegree
		.groupBy(0)
		.sortGroup(1, Order.ASCENDING)
		.reduceGroup(new GenerateTriplets<>())
			.name("Generate triplets");

	// u, v, w where (u, v), (u, w), and (v, w) are edges in graph, v < w
	DataSet<Result<K>> triangles = triplets
		.join(filteredByID, JoinOperatorBase.JoinHint.REPARTITION_HASH_SECOND)
		.where(1, 2)
		.equalTo(0, 1)
		.with(new ProjectTriangles<>())
			.name("Triangle listing");

	if (permuteResults) {
		triangles = triangles
			.flatMap(new PermuteResult<>())
				.name("Permute triangle vertices");
	} else if (sortTriangleVertices.get()) {
		triangles = triangles
			.map(new SortTriangleVertices<>())
				.name("Sort triangle vertices");
	}

	return triangles;
}
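
The runInternal(...) above is invoked through the public GraphAlgorithm interface. A hedged sketch, reusing graph from the sketch at the top and assuming the undirected TriangleListing from org.apache.flink.graph.library.clustering.undirected together with its setSortTriangleVertices option:

// List all triangles; each result's vertex IDs are emitted in sorted order.
graph
	.run(new TriangleListing<Long, NullValue, NullValue>()
		.setSortTriangleVertices(true))
	.print();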
 
Example #4
Source File: TriangleListing.java    From flink with Apache License 2.0
@Override
public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input)
		throws Exception {
	// u, v where u < v
	DataSet<Tuple2<K, K>> filteredByID = input
		.getEdges()
		.flatMap(new FilterByID<>())
			.setParallelism(parallelism)
			.name("Filter by ID");

	// u, v, (edge value, deg(u), deg(v))
	DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> pairDegree = input
		.run(new EdgeDegreePair<K, VV, EV>()
			.setParallelism(parallelism));

	// u, v where deg(u) < deg(v) or (deg(u) == deg(v) and u < v)
	DataSet<Tuple2<K, K>> filteredByDegree = pairDegree
		.flatMap(new FilterByDegree<>())
			.setParallelism(parallelism)
			.name("Filter by degree");

	// u, v, w where (u, v) and (u, w) are edges in graph, v < w
	DataSet<Tuple3<K, K, K>> triplets = filteredByDegree
		.groupBy(0)
		.sortGroup(1, Order.ASCENDING)
		.reduceGroup(new GenerateTriplets<>())
			.name("Generate triplets");

	// u, v, w where (u, v), (u, w), and (v, w) are edges in graph, v < w
	DataSet<Result<K>> triangles = triplets
		.join(filteredByID, JoinOperatorBase.JoinHint.REPARTITION_HASH_SECOND)
		.where(1, 2)
		.equalTo(0, 1)
		.with(new ProjectTriangles<>())
			.name("Triangle listing");

	if (permuteResults) {
		triangles = triangles
			.flatMap(new PermuteResult<>())
				.name("Permute triangle vertices");
	} else if (sortTriangleVertices.get()) {
		triangles = triangles
			.map(new SortTriangleVertices<>())
				.name("Sort triangle vertices");
	}

	return triangles;
}
 