org.apache.flink.runtime.rest.messages.JobVertexIdPathParameter Java Examples

The following examples show how to use org.apache.flink.runtime.rest.messages.JobVertexIdPathParameter. This class is the REST message path parameter that carries a JobVertexID in a request URL: handlers resolve it with HandlerRequest#getPathParameter(JobVertexIdPathParameter.class), tests put JobVertexIdPathParameter.KEY into a path-parameter map, and JSON archivers substitute ':' + JobVertexIdPathParameter.KEY into the endpoint URL template. The examples come from the Apache Flink project and the Flink-CEPplus fork; the original source file and license are noted above each example.
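
Before the individual examples, here is a minimal, self-contained sketch of the URL pattern that the archiving examples below share: building a concrete REST path by replacing the ':' + KEY placeholders in an endpoint URL template. The class name JobVertexIdPathParameterSketch and the hard-coded template shape are illustrative assumptions; in the handlers below the template comes from getMessageHeaders().getTargetRestEndpointURL().

import org.apache.flink.api.common.JobID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.rest.messages.JobIDPathParameter;
import org.apache.flink.runtime.rest.messages.JobVertexIdPathParameter;

public class JobVertexIdPathParameterSketch {

	// Builds a concrete REST path from a URL template, mirroring the
	// archiveJsonWithPath(...) examples below.
	static String resolveVertexUrl(String template, JobID jobId, JobVertexID vertexId) {
		return template
			.replace(':' + JobIDPathParameter.KEY, jobId.toString())
			.replace(':' + JobVertexIdPathParameter.KEY, vertexId.toString());
	}

	public static void main(String[] args) {
		// Assumed template shape; real handlers obtain it from getMessageHeaders().getTargetRestEndpointURL().
		String template = "/jobs/:" + JobIDPathParameter.KEY + "/vertices/:" + JobVertexIdPathParameter.KEY;
		System.out.println(resolveVertexUrl(template, new JobID(), new JobVertexID()));
	}
}
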
Example #1
Source File: AggregatingSubtasksMetricsHandler.java    From flink with Apache License 2.0
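// Resolves the job and vertex ids from the request path, then returns either all subtask
// metric stores of that task or only the stores for the requested subtask index ranges.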
@Nonnull
@Override
Collection<? extends MetricStore.ComponentMetricStore> getStores(MetricStore store, HandlerRequest<EmptyRequestBody, AggregatedSubtaskMetricsParameters> request) {
	JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	JobVertexID taskID = request.getPathParameter(JobVertexIdPathParameter.class);

	Collection<String> subtaskRanges = request.getQueryParameter(SubtasksFilterQueryParameter.class);
	if (subtaskRanges.isEmpty()) {
		MetricStore.TaskMetricStore taskMetricStore = store.getTaskMetricStore(jobID.toString(), taskID.toString());
		if (taskMetricStore != null) {
			return taskMetricStore.getAllSubtaskMetricStores();
		} else {
			return Collections.emptyList();
		}
	} else {
		Iterable<Integer> subtasks = getIntegerRangeFromString(subtaskRanges);
		Collection<MetricStore.ComponentMetricStore> subtaskStores = new ArrayList<>(8);
		for (int subtask : subtasks) {
			MetricStore.ComponentMetricStore subtaskMetricStore = store.getSubtaskMetricStore(jobID.toString(), taskID.toString(), subtask);
			if (subtaskMetricStore != null) {
				subtaskStores.add(subtaskMetricStore);
			}
		}
		return subtaskStores;
	}
}
 
Example #2
Source File: JobVertexBackPressureHandlerTest.java    From Flink-CEPplus with Apache License 2.0
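// Requests back-pressure info for a vertex of a job whose back-pressure stats are absent
// and expects the handler to answer with the DEPRECATED status.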
@Test
public void testAbsentBackPressure() throws Exception {
	final Map<String, String> pathParameters = new HashMap<>();
	pathParameters.put(JobIDPathParameter.KEY, TEST_JOB_ID_BACK_PRESSURE_STATS_ABSENT.toString());
	pathParameters.put(JobVertexIdPathParameter.KEY, new JobVertexID().toString());

	final HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request =
		new HandlerRequest<>(
			EmptyRequestBody.getInstance(),
			new JobVertexMessageParameters(), pathParameters, Collections.emptyMap());

	final CompletableFuture<JobVertexBackPressureInfo> jobVertexBackPressureInfoCompletableFuture =
		jobVertexBackPressureHandler.handleRequest(request, restfulGateway);
	final JobVertexBackPressureInfo jobVertexBackPressureInfo = jobVertexBackPressureInfoCompletableFuture.get();

	assertThat(jobVertexBackPressureInfo.getStatus(), equalTo(VertexBackPressureStatus.DEPRECATED));
}
 
Example #3
Source File: JobVertexBackPressureHandlerTest.java    From flink with Apache License 2.0
@Test
public void testAbsentBackPressure() throws Exception {
	final Map<String, String> pathParameters = new HashMap<>();
	pathParameters.put(JobIDPathParameter.KEY, TEST_JOB_ID_BACK_PRESSURE_STATS_ABSENT.toString());
	pathParameters.put(JobVertexIdPathParameter.KEY, new JobVertexID().toString());

	final HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request =
		new HandlerRequest<>(
			EmptyRequestBody.getInstance(),
			new JobVertexMessageParameters(), pathParameters, Collections.emptyMap());

	final CompletableFuture<JobVertexBackPressureInfo> jobVertexBackPressureInfoCompletableFuture =
		jobVertexBackPressureHandler.handleRequest(request, restfulGateway);
	final JobVertexBackPressureInfo jobVertexBackPressureInfo = jobVertexBackPressureInfoCompletableFuture.get();

	assertThat(jobVertexBackPressureInfo.getStatus(), equalTo(VertexBackPressureStatus.DEPRECATED));
}
 
Example #4
Source File: TaskCheckpointStatisticDetailsHandler.java    From flink with Apache License 2.0
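// Archives the checkpoint detail JSON for every task of every checkpoint in the history;
// each archive path is built by substituting the job, checkpoint and vertex id keys into
// the endpoint URL template.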
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	CheckpointStatsSnapshot stats = graph.getCheckpointStatsSnapshot();
	if (stats == null) {
		return Collections.emptyList();
	}
	CheckpointStatsHistory history = stats.getHistory();
	List<ArchivedJson> archive = new ArrayList<>(history.getCheckpoints().size());
	for (AbstractCheckpointStats checkpoint : history.getCheckpoints()) {
		for (TaskStateStats subtaskStats : checkpoint.getAllTaskStateStats()) {
			ResponseBody json = createCheckpointDetails(checkpoint, subtaskStats);
			String path = getMessageHeaders().getTargetRestEndpointURL()
				.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
				.replace(':' + CheckpointIdPathParameter.KEY, String.valueOf(checkpoint.getCheckpointId()))
				.replace(':' + JobVertexIdPathParameter.KEY, subtaskStats.getJobVertexId().toString());
			archive.add(new ArchivedJson(path, json));
		}
	}
	return archive;
}
 
Example #5
Source File: SubtaskCurrentAttemptDetailsHandler.java    From flink with Apache License 2.0
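// Reads the job and vertex ids from the request path, folds the IO metrics of the current
// execution attempt into MutableIOMetrics, and returns the attempt details.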
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, SubtaskMessageParameters> request,
		AccessExecutionVertex executionVertex) throws RestHandlerException {

	final AccessExecution execution = executionVertex.getCurrentExecutionAttempt();

	final MutableIOMetrics ioMetrics = new MutableIOMetrics();

	final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);

	ioMetrics.addIOMetrics(
		execution,
		metricFetcher,
		jobID.toString(),
		jobVertexID.toString()
	);

	return SubtaskExecutionAttemptDetailsInfo.create(execution, ioMetrics);
}
 
Example #6
Source File: TaskCheckpointStatisticDetailsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	CheckpointStatsSnapshot stats = graph.getCheckpointStatsSnapshot();
	if (stats == null) {
		return Collections.emptyList();
	}
	CheckpointStatsHistory history = stats.getHistory();
	List<ArchivedJson> archive = new ArrayList<>(history.getCheckpoints().size());
	for (AbstractCheckpointStats checkpoint : history.getCheckpoints()) {
		for (TaskStateStats subtaskStats : checkpoint.getAllTaskStateStats()) {
			ResponseBody json = createCheckpointDetails(checkpoint, subtaskStats);
			String path = getMessageHeaders().getTargetRestEndpointURL()
				.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
				.replace(':' + CheckpointIdPathParameter.KEY, String.valueOf(checkpoint.getCheckpointId()))
				.replace(':' + JobVertexIdPathParameter.KEY, subtaskStats.getJobVertexId().toString());
			archive.add(new ArchivedJson(path, json));
		}
	}
	return archive;
}
 
Example #7
Source File: AggregatingSubtasksMetricsHandler.java    From Flink-CEPplus with Apache License 2.0
@Nonnull
@Override
Collection<? extends MetricStore.ComponentMetricStore> getStores(MetricStore store, HandlerRequest<EmptyRequestBody, AggregatedSubtaskMetricsParameters> request) {
	JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	JobVertexID taskID = request.getPathParameter(JobVertexIdPathParameter.class);

	Collection<String> subtaskRanges = request.getQueryParameter(SubtasksFilterQueryParameter.class);
	if (subtaskRanges.isEmpty()) {
		MetricStore.TaskMetricStore taskMetricStore = store.getTaskMetricStore(jobID.toString(), taskID.toString());
		if (taskMetricStore != null) {
			return taskMetricStore.getAllSubtaskMetricStores();
		} else {
			return Collections.emptyList();
		}
	} else {
		Iterable<Integer> subtasks = getIntegerRangeFromString(subtaskRanges);
		Collection<MetricStore.ComponentMetricStore> subtaskStores = new ArrayList<>(8);
		for (int subtask : subtasks) {
			MetricStore.ComponentMetricStore subtaskMetricStore = store.getSubtaskMetricStore(jobID.toString(), taskID.toString(), subtask);
			if (subtaskMetricStore != null) {
				subtaskStores.add(subtaskMetricStore);
			}
		}
		return subtaskStores;
	}
}
 
Example #8
Source File: SubtaskCurrentAttemptDetailsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, SubtaskMessageParameters> request,
		AccessExecutionVertex executionVertex) throws RestHandlerException {

	final AccessExecution execution = executionVertex.getCurrentExecutionAttempt();

	final MutableIOMetrics ioMetrics = new MutableIOMetrics();

	final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);

	ioMetrics.addIOMetrics(
		execution,
		metricFetcher,
		jobID.toString(),
		jobVertexID.toString()
	);

	return SubtaskExecutionAttemptDetailsInfo.create(execution, ioMetrics);
}
 
Example #9
Source File: JobVertexBackPressureHandlerTest.java    From flink with Apache License 2.0
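// Requests back-pressure info for a job with available stats and asserts the vertex-level
// status and level as well as the per-subtask ratios, levels and subtask indices.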
@Test
public void testGetBackPressure() throws Exception {
	final Map<String, String> pathParameters = new HashMap<>();
	pathParameters.put(JobIDPathParameter.KEY, TEST_JOB_ID_BACK_PRESSURE_STATS_AVAILABLE.toString());
	pathParameters.put(JobVertexIdPathParameter.KEY, new JobVertexID().toString());

	final HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request =
		new HandlerRequest<>(
			EmptyRequestBody.getInstance(),
			new JobVertexMessageParameters(), pathParameters, Collections.emptyMap());

	final CompletableFuture<JobVertexBackPressureInfo> jobVertexBackPressureInfoCompletableFuture =
		jobVertexBackPressureHandler.handleRequest(request, restfulGateway);
	final JobVertexBackPressureInfo jobVertexBackPressureInfo = jobVertexBackPressureInfoCompletableFuture.get();

	assertThat(jobVertexBackPressureInfo.getStatus(), equalTo(VertexBackPressureStatus.OK));
	assertThat(jobVertexBackPressureInfo.getBackpressureLevel(), equalTo(HIGH));

	assertThat(jobVertexBackPressureInfo.getSubtasks()
		.stream()
		.map(JobVertexBackPressureInfo.SubtaskBackPressureInfo::getRatio)
		.collect(Collectors.toList()), contains(1.0, 0.5, 0.1));

	assertThat(jobVertexBackPressureInfo.getSubtasks()
		.stream()
		.map(JobVertexBackPressureInfo.SubtaskBackPressureInfo::getBackpressureLevel)
		.collect(Collectors.toList()), contains(HIGH, LOW, OK));

	assertThat(jobVertexBackPressureInfo.getSubtasks()
		.stream()
		.map(JobVertexBackPressureInfo.SubtaskBackPressureInfo::getSubtask)
		.collect(Collectors.toList()), contains(0, 1, 2));
}
 
Example #10
Source File: SubtaskMetricsHandlerTest.java    From flink with Apache License 2.0
@Override
Map<String, String> getPathParameters() {
	final Map<String, String> pathParameters = new HashMap<>();
	pathParameters.put(JobIDPathParameter.KEY, TEST_JOB_ID);
	pathParameters.put(JobVertexIdPathParameter.KEY, TEST_VERTEX_ID);
	pathParameters.put(SubtaskIndexPathParameter.KEY, Integer.toString(TEST_SUBTASK_INDEX));
	return pathParameters;
}
 
Example #11
Source File: JobVertexDetailsHandler.java    From flink with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	Collection<? extends AccessExecutionJobVertex> vertices = graph.getAllVertices().values();
	List<ArchivedJson> archive = new ArrayList<>(vertices.size());
	for (AccessExecutionJobVertex task : vertices) {
		ResponseBody json = createJobVertexDetailsInfo(task, graph.getJobID(), null);
		String path = getMessageHeaders().getTargetRestEndpointURL()
			.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
			.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString());
		archive.add(new ArchivedJson(path, json));
	}
	return archive;
}
 
Example #12
Source File: JobVertexWatermarksHandlerTest.java    From flink with Apache License 2.0
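// Test setup: mocks the metric store and fetcher, creates the watermark handler, builds a
// HandlerRequest whose path parameters carry the test job and vertex ids, and mocks a
// vertex with two subtasks.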
@Before
public void before() throws Exception {
	taskMetricStore = Mockito.mock(MetricStore.TaskMetricStore.class);

	MetricStore metricStore = Mockito.mock(MetricStore.class);
	Mockito.when(metricStore.getTaskMetricStore(TEST_JOB_ID.toString(), TEST_VERTEX_ID.toString()))
		.thenReturn(taskMetricStore);

	metricFetcher = Mockito.mock(MetricFetcher.class);
	Mockito.when(metricFetcher.getMetricStore()).thenReturn(metricStore);

	watermarkHandler = new JobVertexWatermarksHandler(
		Mockito.mock(LeaderGatewayRetriever.class),
		Time.seconds(1),
		Collections.emptyMap(),
		metricFetcher,
		NoOpExecutionGraphCache.INSTANCE,
		Mockito.mock(Executor.class));

	final Map<String, String> pathParameters = new HashMap<>();
	pathParameters.put(JobIDPathParameter.KEY, TEST_JOB_ID.toString());
	pathParameters.put(JobVertexIdPathParameter.KEY, TEST_VERTEX_ID.toString());

	request = new HandlerRequest<>(EmptyRequestBody.getInstance(), new JobVertexMessageParameters(),
			pathParameters, Collections.emptyMap());

	vertex = Mockito.mock(AccessExecutionJobVertex.class);
	Mockito.when(vertex.getJobVertexId()).thenReturn(TEST_VERTEX_ID);

	AccessExecutionVertex firstTask = Mockito.mock(AccessExecutionVertex.class);
	AccessExecutionVertex secondTask = Mockito.mock(AccessExecutionVertex.class);
	Mockito.when(firstTask.getParallelSubtaskIndex()).thenReturn(0);
	Mockito.when(secondTask.getParallelSubtaskIndex()).thenReturn(1);

	AccessExecutionVertex[] accessExecutionVertices = {firstTask, secondTask};
	Mockito.when(vertex.getTaskVertices()).thenReturn(accessExecutionVertices);
}
 
Example #13
Source File: JobVertexDetailsHandler.java    From flink with Apache License 2.0
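// Looks up the vertex identified by the path parameter in the execution graph and throws
// NotFoundException when it does not exist.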
@Override
protected JobVertexDetailsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionGraph executionGraph) throws NotFoundException {
	JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);
	AccessExecutionJobVertex jobVertex = executionGraph.getJobVertex(jobVertexID);

	if (jobVertex == null) {
		throw new NotFoundException(String.format("JobVertex %s not found", jobVertexID));
	}

	return createJobVertexDetailsInfo(jobVertex, jobID, metricFetcher);
}
 
Example #14
Source File: JobVertexBackPressureHandler.java    From flink with Apache License 2.0
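// Resolves the job and vertex ids, asks the gateway for the operator back-pressure stats,
// and maps them to a JobVertexBackPressureInfo, falling back to the deprecated placeholder
// when no stats are available.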
@Override
protected CompletableFuture<JobVertexBackPressureInfo> handleRequest(
		@Nonnull HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		@Nonnull RestfulGateway gateway) throws RestHandlerException {
	final JobID jobId = request.getPathParameter(JobIDPathParameter.class);
	final JobVertexID jobVertexId = request.getPathParameter(JobVertexIdPathParameter.class);
	return gateway
		.requestOperatorBackPressureStats(jobId, jobVertexId)
		.thenApply(
			operatorBackPressureStats ->
				operatorBackPressureStats.getOperatorBackPressureStats().map(
					JobVertexBackPressureHandler::createJobVertexBackPressureInfo).orElse(
					JobVertexBackPressureInfo.deprecated()));
}
 
Example #15
Source File: SubtaskExecutionAttemptAccumulatorsHandler.java    From flink with Apache License 2.0
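// Archives accumulator JSON for the current and all prior execution attempts of every
// subtask; each path substitutes the job, vertex, subtask index and attempt number keys.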
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	List<ArchivedJson> archive = new ArrayList<>(16);
	for (AccessExecutionJobVertex task : graph.getAllVertices().values()) {
		for (AccessExecutionVertex subtask : task.getTaskVertices()) {
			ResponseBody curAttemptJson = createAccumulatorInfo(subtask.getCurrentExecutionAttempt());
			String curAttemptPath = getMessageHeaders().getTargetRestEndpointURL()
				.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
				.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
				.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
				.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(subtask.getCurrentExecutionAttempt().getAttemptNumber()));

			archive.add(new ArchivedJson(curAttemptPath, curAttemptJson));

			for (int x = 0; x < subtask.getCurrentExecutionAttempt().getAttemptNumber(); x++) {
				AccessExecution attempt = subtask.getPriorExecutionAttempt(x);
				if (attempt != null) {
					ResponseBody json = createAccumulatorInfo(attempt);
					String path = getMessageHeaders().getTargetRestEndpointURL()
						.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
						.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
						.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
						.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(attempt.getAttemptNumber()));
					archive.add(new ArchivedJson(path, json));
				}
			}
		}
	}
	return archive;
}
 
Example #16
Source File: AggregatingSubtasksMetricsHandlerTest.java    From flink with Apache License 2.0
@Override
protected Map<String, String> getPathParameters() {
	Map<String, String> pathParameters = new HashMap<>(4);
	pathParameters.put(JobIDPathParameter.KEY, JOB_ID.toString());
	pathParameters.put(JobVertexIdPathParameter.KEY, TASK_ID.toString());
	return pathParameters;
}
 
Example #17
Source File: JobVertexTaskManagersHandler.java    From flink with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	Collection<? extends AccessExecutionJobVertex> vertices = graph.getAllVertices().values();
	List<ArchivedJson> archive = new ArrayList<>(vertices.size());
	for (AccessExecutionJobVertex task : vertices) {
		ResponseBody json = createJobVertexTaskManagersInfo(task, graph.getJobID(), null);
		String path = getMessageHeaders().getTargetRestEndpointURL()
			.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
			.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString());
		archive.add(new ArchivedJson(path, json));
	}
	return archive;
}
 
Example #18
Source File: SubtaskCurrentAttemptDetailsHandler.java    From flink with Apache License 2.0
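// A different version of the handler shown in Examples #5 and #8: the metric lookup is
// delegated to SubtaskExecutionAttemptDetailsInfo.create, which receives the metric fetcher
// together with the ids resolved from the request path.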
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, SubtaskMessageParameters> request,
		AccessExecutionVertex executionVertex) throws RestHandlerException {

	final AccessExecution execution = executionVertex.getCurrentExecutionAttempt();

	final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);

	return SubtaskExecutionAttemptDetailsInfo.create(execution, metricFetcher, jobID, jobVertexID);
}
 
Example #19
Source File: SubtasksTimesHandler.java    From flink with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	Collection<? extends AccessExecutionJobVertex> allVertices = graph.getAllVertices().values();
	List<ArchivedJson> archive = new ArrayList<>(allVertices.size());
	for (AccessExecutionJobVertex task : allVertices) {
		ResponseBody json = createSubtaskTimesInfo(task);
		String path = getMessageHeaders().getTargetRestEndpointURL()
			.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
			.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString());
		archive.add(new ArchivedJson(path, json));
	}
	return archive;
}
 
Example #20
Source File: JobVertexTaskManagersHandler.java    From flink with Apache License 2.0
@Override
protected JobVertexTaskManagersInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionGraph executionGraph) throws RestHandlerException {
	JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);
	AccessExecutionJobVertex jobVertex = executionGraph.getJobVertex(jobVertexID);

	if (jobVertex == null) {
		throw new NotFoundException(String.format("JobVertex %s not found", jobVertexID));
	}

	return createJobVertexTaskManagersInfo(jobVertex, jobID, metricFetcher);
}