Java Code Examples for org.apache.flink.runtime.jobgraph.JobGraph#getVertices()

The following examples show how to use org.apache.flink.runtime.jobgraph.JobGraph#getVertices(). They are taken from open-source projects; each example lists its source file, the project it comes from, and its license.
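Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: building a JobGraph from a streaming program and iterating its vertices with getVertices(). It is not taken from any of the projects below; it assumes a Flink 1.x runtime on the classpath, and the class name GetVerticesSketch is made up for illustration.

import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class GetVerticesSketch {

	public static void main(String[] args) throws Exception {
		// Build a trivial streaming job so that a JobGraph can be generated.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
		env.disableOperatorChaining();
		env.fromElements(1, 2, 3).map(x -> x + 1).print();

		// Translate the StreamGraph into a JobGraph, as the test examples below do.
		JobGraph jobGraph = env.getStreamGraph().getJobGraph();

		// getVertices() returns an Iterable<JobVertex> over all vertices of the graph.
		for (JobVertex vertex : jobGraph.getVertices()) {
			System.out.println(vertex.getID() + " -> " + vertex.getName());
		}
	}
}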
Example 1
Source File: StreamingJobGraphGeneratorNodeHashTest.java    From flink with Apache License 2.0
/**
 * Tests that there are no collisions with two identical intermediate nodes connected to the
 * same predecessor.
 *
 * <pre>
 *             /-> [ (map) ] -> [ (sink) ]
 * [ (src) ] -+
 *             \-> [ (map) ] -> [ (sink) ]
 * </pre>
 */
@Test
public void testNodeHashIdenticalNodes() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(4);
	env.disableOperatorChaining();

	DataStream<String> src = env.addSource(new NoOpSourceFunction());

	src.map(new NoOpMapFunction()).addSink(new DiscardingSink<>());

	src.map(new NoOpMapFunction()).addSink(new DiscardingSink<>());

	JobGraph jobGraph = env.getStreamGraph().getJobGraph();
	Set<JobVertexID> vertexIds = new HashSet<>();
	for (JobVertex vertex : jobGraph.getVertices()) {
		assertTrue(vertexIds.add(vertex.getID()));
	}
}
 
Example 2
Source File: Dispatcher.java    From flink with Apache License 2.0
private boolean isPartialResourceConfigured(JobGraph jobGraph) {
	boolean hasVerticesWithUnknownResource = false;
	boolean hasVerticesWithConfiguredResource = false;

	for (JobVertex jobVertex : jobGraph.getVertices()) {
		if (jobVertex.getMinResources() == ResourceSpec.UNKNOWN) {
			hasVerticesWithUnknownResource = true;
		} else {
			hasVerticesWithConfiguredResource = true;
		}

		if (hasVerticesWithUnknownResource && hasVerticesWithConfiguredResource) {
			return true;
		}
	}

	return false;
}
 
Example 3
Source File: StreamingJobGraphGeneratorNodeHashTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests that there are no collisions with two identical intermediate nodes connected to the
 * same predecessor.
 *
 * <pre>
 *             /-> [ (map) ] -> [ (sink) ]
 * [ (src) ] -+
 *             \-> [ (map) ] -> [ (sink) ]
 * </pre>
 */
@Test
public void testNodeHashIdenticalNodes() throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(4);
	env.disableOperatorChaining();

	DataStream<String> src = env.addSource(new NoOpSourceFunction());

	src.map(new NoOpMapFunction()).addSink(new NoOpSinkFunction());

	src.map(new NoOpMapFunction()).addSink(new NoOpSinkFunction());

	JobGraph jobGraph = env.getStreamGraph().getJobGraph();
	Set<JobVertexID> vertexIds = new HashSet<>();
	for (JobVertex vertex : jobGraph.getVertices()) {
		assertTrue(vertexIds.add(vertex.getID()));
	}
}
 
Example 4
Source File: StreamingJobGraphGeneratorNodeHashTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Verifies that no {@link JobVertexID} of the {@link JobGraph} is contained in the given map.
 */
private void verifyIdsNotEqual(JobGraph jobGraph, Map<JobVertexID, String> ids) {
	// Verify same number of vertices
	assertEquals(jobGraph.getNumberOfVertices(), ids.size());

	// Verify that none of the job vertex IDs is contained in the given map
	for (JobVertex vertex : jobGraph.getVertices()) {
		assertFalse(ids.containsKey(vertex.getID()));
	}
}
 
Example 5
Source File: TempInIterationsTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testTempInIterationTest() throws Exception {

	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple2<Long, Long>> input = env.readCsvFile("file:///does/not/exist").types(Long.class, Long.class);

	DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
			input.iterateDelta(input, 1, 0);

	DataSet<Tuple2<Long, Long>> update = iteration.getWorkset()
			.join(iteration.getSolutionSet()).where(0).equalTo(0)
				.with(new DummyFlatJoinFunction<Tuple2<Long, Long>>());

	iteration.closeWith(update, update)
			.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());


	Plan plan = env.createProgramPlan();
	OptimizedPlan oPlan = (new Optimizer(new Configuration())).compile(plan);

	JobGraphGenerator jgg = new JobGraphGenerator();
	JobGraph jg = jgg.compileJobGraph(oPlan);

	boolean solutionSetUpdateChecked = false;
	for(JobVertex v : jg.getVertices()) {
		if(v.getName().equals("SolutionSet Delta")) {

			// check whether the input of the solution set delta is temped (asynchronously materialized)
			TaskConfig tc = new TaskConfig(v.getConfiguration());
			assertTrue(tc.isInputAsynchronouslyMaterialized(0));
			solutionSetUpdateChecked = true;
		}
	}
	assertTrue(solutionSetUpdateChecked);

}
 
Example 6
Source File: StreamingJobGraphGeneratorNodeHashTest.java    From flink with Apache License 2.0
/**
 * Returns a {@link JobVertexID} to vertex name mapping for the given graph.
 */
private Map<JobVertexID, String> rememberIds(JobGraph jobGraph) {
	final Map<JobVertexID, String> ids = new HashMap<>();
	for (JobVertex vertex : jobGraph.getVertices()) {
		ids.put(vertex.getID(), vertex.getName());
	}
	return ids;
}
 
Example 7
Source File: JsonGeneratorTest.java    From flink with Apache License 2.0
private void checkVertexExists(String vertexId, JobGraph graph) {
	// validate that the given id string parses to a well-formed JobVertexID
	JobVertexID id = JobVertexID.fromHexString(vertexId);
	for (JobVertex vertex : graph.getVertices()) {
		if (vertex.getID().equals(id)) {
			return;
		}
	}
	fail("could not find vertex with id " + vertexId + " in JobGraph");
}
 
Example 8
Source File: JsonGeneratorTest.java    From Flink-CEPplus with Apache License 2.0
private void checkVertexExists(String vertexId, JobGraph graph) {
	// validate that the given id string parses to a well-formed JobVertexID
	JobVertexID id = JobVertexID.fromHexString(vertexId);
	for (JobVertex vertex : graph.getVertices()) {
		if (vertex.getID().equals(id)) {
			return;
		}
	}
	fail("could not find vertex with id " + vertexId + " in JobGraph");
}
 
Example 9
Source File: StreamingJobGraphGeneratorNodeHashTest.java    From flink with Apache License 2.0
/**
 * Verifies that each {@link JobVertexID} of the {@link JobGraph} is contained in the given map
 * and mapped to the same vertex name.
 */
private void verifyIdsEqual(JobGraph jobGraph, Map<JobVertexID, String> ids) {
	// Verify same number of vertices
	assertEquals(jobGraph.getNumberOfVertices(), ids.size());

	// Verify that all IDs->name mappings are identical
	for (JobVertex vertex : jobGraph.getVertices()) {
		String expectedName = ids.get(vertex.getID());
		assertNotNull(expectedName);
		assertEquals(expectedName, vertex.getName());
	}
}
 
Example 10
Source File: StreamingJobGraphGeneratorNodeHashTest.java    From flink with Apache License 2.0
/**
 * Verifies that no {@link JobVertexID} of the {@link JobGraph} is contained in the given map.
 */
private void verifyIdsNotEqual(JobGraph jobGraph, Map<JobVertexID, String> ids) {
	// Verify same number of vertices
	assertEquals(jobGraph.getNumberOfVertices(), ids.size());

	// Verify that none of the job vertex IDs is contained in the given map
	for (JobVertex vertex : jobGraph.getVertices()) {
		assertFalse(ids.containsKey(vertex.getID()));
	}
}
 
Example 11
Source File: TempInIterationsTest.java    From flink with Apache License 2.0
@Test
public void testTempInIterationTest() throws Exception {

	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple2<Long, Long>> input = env.readCsvFile("file:///does/not/exist").types(Long.class, Long.class);

	DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
			input.iterateDelta(input, 1, 0);

	DataSet<Tuple2<Long, Long>> update = iteration.getWorkset()
			.join(iteration.getSolutionSet()).where(0).equalTo(0)
				.with(new DummyFlatJoinFunction<Tuple2<Long, Long>>());

	iteration.closeWith(update, update)
			.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());


	Plan plan = env.createProgramPlan();
	OptimizedPlan oPlan = (new Optimizer(new Configuration())).compile(plan);

	JobGraphGenerator jgg = new JobGraphGenerator();
	JobGraph jg = jgg.compileJobGraph(oPlan);

	boolean solutionSetUpdateChecked = false;
	for(JobVertex v : jg.getVertices()) {
		if(v.getName().equals("SolutionSet Delta")) {

			// check whether the input of the solution set delta is temped (asynchronously materialized)
			TaskConfig tc = new TaskConfig(v.getConfiguration());
			assertTrue(tc.isInputAsynchronouslyMaterialized(0));
			solutionSetUpdateChecked = true;
		}
	}
	assertTrue(solutionSetUpdateChecked);

}
 
Example 12
Source File: StreamingJobGraphGeneratorNodeHashTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Returns a {@link JobVertexID} to vertex name mapping for the given graph.
 */
private Map<JobVertexID, String> rememberIds(JobGraph jobGraph) {
	final Map<JobVertexID, String> ids = new HashMap<>();
	for (JobVertex vertex : jobGraph.getVertices()) {
		ids.put(vertex.getID(), vertex.getName());
	}
	return ids;
}
 
Example 13
Source File: SchedulerTestingUtils.java    From flink with Apache License 2.0
public static void enableCheckpointing(final JobGraph jobGraph, @Nullable StateBackend stateBackend) {
	final List<JobVertexID> triggerVertices = new ArrayList<>();
	final List<JobVertexID> allVertices = new ArrayList<>();

	for (JobVertex vertex : jobGraph.getVertices()) {
		if (vertex.isInputVertex()) {
			triggerVertices.add(vertex.getID());
		}
		allVertices.add(vertex.getID());
	}

	final CheckpointCoordinatorConfiguration config = new CheckpointCoordinatorConfiguration(
		Long.MAX_VALUE, // disable periodic checkpointing
		DEFAULT_CHECKPOINT_TIMEOUT_MS,
		0,
		1,
		CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION,
		false,
		false,
		false,
		0);

	SerializedValue<StateBackend> serializedStateBackend = null;
	if (stateBackend != null) {
		try {
			serializedStateBackend = new SerializedValue<>(stateBackend);
		} catch (IOException e) {
			throw new RuntimeException("could not serialize state backend", e);
		}
	}

	jobGraph.setSnapshotSettings(new JobCheckpointingSettings(
			triggerVertices, allVertices, allVertices,
			config, serializedStateBackend));
}
 
Example 14
Source File: StreamingJobGraphGeneratorTest.java    From flink with Apache License 2.0
/**
 * Test iteration job, check slot sharing group and co-location group.
 */
@Test
public void testIteration() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Integer> source = env.fromElements(1, 2, 3).name("source");
	IterativeStream<Integer> iteration = source.iterate(3000);
	iteration.name("iteration").setParallelism(2);
	DataStream<Integer> map = iteration.map(x -> x + 1).name("map").setParallelism(2);
	DataStream<Integer> filter = map.filter((x) -> false).name("filter").setParallelism(2);
	iteration.closeWith(filter).print();

	JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());

	SlotSharingGroup slotSharingGroup = jobGraph.getVerticesAsArray()[0].getSlotSharingGroup();
	assertNotNull(slotSharingGroup);

	CoLocationGroup iterationSourceCoLocationGroup = null;
	CoLocationGroup iterationSinkCoLocationGroup = null;

	for (JobVertex jobVertex : jobGraph.getVertices()) {
		// all vertices have same slot sharing group by default
		assertEquals(slotSharingGroup, jobVertex.getSlotSharingGroup());

		// all iteration vertices have same co-location group,
		// others have no co-location group by default
		if (jobVertex.getName().startsWith(StreamGraph.ITERATION_SOURCE_NAME_PREFIX)) {
			iterationSourceCoLocationGroup = jobVertex.getCoLocationGroup();
			assertTrue(iterationSourceCoLocationGroup.getVertices().contains(jobVertex));
		} else if (jobVertex.getName().startsWith(StreamGraph.ITERATION_SINK_NAME_PREFIX)) {
			iterationSinkCoLocationGroup = jobVertex.getCoLocationGroup();
			assertTrue(iterationSinkCoLocationGroup.getVertices().contains(jobVertex));
		} else {
			assertNull(jobVertex.getCoLocationGroup());
		}
	}

	assertNotNull(iterationSourceCoLocationGroup);
	assertNotNull(iterationSinkCoLocationGroup);
	assertEquals(iterationSourceCoLocationGroup, iterationSinkCoLocationGroup);
}
 
Example 15
Source File: StreamingJobGraphGeneratorTest.java    From flink with Apache License 2.0
/**
 * Tests the slot sharing group and co-location group of iteration vertices when slot sharing is disabled.
 */
@Test
public void testDisableSlotSharingForIteration() {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Integer> source = env.fromElements(1, 2, 3).name("source");
	IterativeStream<Integer> iteration = source.iterate(3000);
	iteration.name("iteration").setParallelism(2);
	DataStream<Integer> map = iteration.map(x -> x + 1).name("map").setParallelism(2);
	DataStream<Integer> filter = map.filter((x) -> false).name("filter").setParallelism(2);
	iteration.closeWith(filter).print();

	List<Transformation<?>> transformations = new ArrayList<>();
	transformations.add(source.getTransformation());
	transformations.add(iteration.getTransformation());
	transformations.add(map.getTransformation());
	transformations.add(filter.getTransformation());
	// when slot sharing group is disabled
	// all job vertices except iteration vertex would have no slot sharing group
	// iteration vertices would be set slot sharing group automatically
	StreamGraphGenerator generator = new StreamGraphGenerator(transformations, env.getConfig(), env.getCheckpointConfig());
	generator.setSlotSharingEnabled(false);

	JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(generator.generate());

	SlotSharingGroup iterationSourceSlotSharingGroup = null;
	SlotSharingGroup iterationSinkSlotSharingGroup = null;

	CoLocationGroup iterationSourceCoLocationGroup = null;
	CoLocationGroup iterationSinkCoLocationGroup = null;

	for (JobVertex jobVertex : jobGraph.getVertices()) {
		if (jobVertex.getName().startsWith(StreamGraph.ITERATION_SOURCE_NAME_PREFIX)) {
			iterationSourceSlotSharingGroup = jobVertex.getSlotSharingGroup();
			iterationSourceCoLocationGroup = jobVertex.getCoLocationGroup();
		} else if (jobVertex.getName().startsWith(StreamGraph.ITERATION_SINK_NAME_PREFIX)) {
			iterationSinkSlotSharingGroup = jobVertex.getSlotSharingGroup();
			iterationSinkCoLocationGroup = jobVertex.getCoLocationGroup();
		} else {
			assertNull(jobVertex.getSlotSharingGroup());
		}
	}

	assertNotNull(iterationSourceSlotSharingGroup);
	assertNotNull(iterationSinkSlotSharingGroup);
	assertEquals(iterationSourceSlotSharingGroup, iterationSinkSlotSharingGroup);

	assertNotNull(iterationSourceCoLocationGroup);
	assertNotNull(iterationSinkCoLocationGroup);
	assertEquals(iterationSourceCoLocationGroup, iterationSinkCoLocationGroup);
}