org.apache.flink.runtime.executiongraph.AccessExecutionVertex Java Examples

The following examples show how to use org.apache.flink.runtime.executiongraph.AccessExecutionVertex. Each example is an excerpt from the named source file and project, with the project's license noted in the header.
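
Before the examples, the short sketch below shows the access pattern most of them share: an AccessExecutionGraph exposes its job vertices, each AccessExecutionJobVertex exposes its parallel subtasks as AccessExecutionVertex instances, and each subtask exposes its latest execution attempt as an AccessExecution. This is a minimal illustration rather than code from any of the projects below; the class and method names and the assumption that a populated AccessExecutionGraph is already available (for example from a REST handler or an ArchivedExecutionGraph) are mine.

import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.AccessExecution;
import org.apache.flink.runtime.executiongraph.AccessExecutionGraph;
import org.apache.flink.runtime.executiongraph.AccessExecutionJobVertex;
import org.apache.flink.runtime.executiongraph.AccessExecutionVertex;

// Minimal sketch: walk every subtask of a job and report the state of its
// latest execution attempt. The surrounding class and method are illustrative;
// the API calls are the same ones used by the handlers in the examples below.
public final class AccessExecutionVertexTour {

	static void printSubtaskStates(AccessExecutionGraph graph) {
		for (AccessExecutionJobVertex jobVertex : graph.getVerticesTopologically()) {
			for (AccessExecutionVertex subtask : jobVertex.getTaskVertices()) {
				final AccessExecution attempt = subtask.getCurrentExecutionAttempt();
				final ExecutionState state = subtask.getExecutionState();
				System.out.println(subtask.getTaskNameWithSubtaskIndex()
					+ " (attempt " + attempt.getAttemptNumber() + "): " + state);
			}
		}
	}
}
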
Example #1
Source File: AbstractSubtaskAttemptHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
protected R handleRequest(HandlerRequest<EmptyRequestBody, M> request, AccessExecutionVertex executionVertex) throws RestHandlerException {
	final Integer attemptNumber = request.getPathParameter(SubtaskAttemptPathParameter.class);

	final AccessExecution currentAttempt = executionVertex.getCurrentExecutionAttempt();
	if (attemptNumber == currentAttempt.getAttemptNumber()) {
		return handleRequest(request, currentAttempt);
	} else if (attemptNumber >= 0 && attemptNumber < currentAttempt.getAttemptNumber()) {
		final AccessExecution execution = executionVertex.getPriorExecutionAttempt(attemptNumber);

		if (execution != null) {
			return handleRequest(request, execution);
		} else {
			throw new RestHandlerException("Attempt " + attemptNumber + " not found in subtask " +
				executionVertex.getTaskNameWithSubtaskIndex(), HttpResponseStatus.NOT_FOUND);
		}
	} else {
		throw new RestHandlerException("Invalid attempt num " + attemptNumber, HttpResponseStatus.NOT_FOUND);
	}
}
 
Example #2
Source File: SubtaskCurrentAttemptDetailsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, SubtaskMessageParameters> request,
		AccessExecutionVertex executionVertex) throws RestHandlerException {

	final AccessExecution execution = executionVertex.getCurrentExecutionAttempt();

	final MutableIOMetrics ioMetrics = new MutableIOMetrics();

	final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);

	ioMetrics.addIOMetrics(
		execution,
		metricFetcher,
		jobID.toString(),
		jobVertexID.toString()
	);

	return SubtaskExecutionAttemptDetailsInfo.create(execution, ioMetrics);
}
 
Example #3
Source File: JobVertexDetailsHandler.java    From flink with Apache License 2.0
private static JobVertexDetailsInfo createJobVertexDetailsInfo(AccessExecutionJobVertex jobVertex, JobID jobID, @Nullable MetricFetcher metricFetcher) {
	List<SubtaskExecutionAttemptDetailsInfo> subtasks = new ArrayList<>();
	final long now = System.currentTimeMillis();
	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
		final AccessExecution execution = vertex.getCurrentExecutionAttempt();
		final JobVertexID jobVertexID = jobVertex.getJobVertexId();
		subtasks.add(SubtaskExecutionAttemptDetailsInfo.create(execution, metricFetcher, jobID, jobVertexID));
	}

	return new JobVertexDetailsInfo(
		jobVertex.getJobVertexId(),
		jobVertex.getName(),
		jobVertex.getParallelism(),
		now,
		subtasks);
}
 
Example #4
Source File: SubtaskCurrentAttemptDetailsHandler.java    From flink with Apache License 2.0
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, SubtaskMessageParameters> request,
		AccessExecutionVertex executionVertex) throws RestHandlerException {

	final AccessExecution execution = executionVertex.getCurrentExecutionAttempt();

	final MutableIOMetrics ioMetrics = new MutableIOMetrics();

	final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);

	ioMetrics.addIOMetrics(
		execution,
		metricFetcher,
		jobID.toString(),
		jobVertexID.toString()
	);

	return SubtaskExecutionAttemptDetailsInfo.create(execution, ioMetrics);
}
 
Example #5
Source File: AbstractSubtaskAttemptHandler.java    From flink with Apache License 2.0
@Override
protected R handleRequest(HandlerRequest<EmptyRequestBody, M> request, AccessExecutionVertex executionVertex) throws RestHandlerException {
	final Integer attemptNumber = request.getPathParameter(SubtaskAttemptPathParameter.class);

	final AccessExecution currentAttempt = executionVertex.getCurrentExecutionAttempt();
	if (attemptNumber == currentAttempt.getAttemptNumber()) {
		return handleRequest(request, currentAttempt);
	} else if (attemptNumber >= 0 && attemptNumber < currentAttempt.getAttemptNumber()) {
		final AccessExecution execution = executionVertex.getPriorExecutionAttempt(attemptNumber);

		if (execution != null) {
			return handleRequest(request, execution);
		} else {
			throw new RestHandlerException("Attempt " + attemptNumber + " not found in subtask " +
				executionVertex.getTaskNameWithSubtaskIndex(), HttpResponseStatus.NOT_FOUND);
		}
	} else {
		throw new RestHandlerException("Invalid attempt num " + attemptNumber, HttpResponseStatus.NOT_FOUND);
	}
}
 
Example #6
Source File: JobMasterTest.java    From flink with Apache License 2.0
private static Collection<AccessExecution> getExecutions(final JobMasterGateway jobMasterGateway) {
	final ArchivedExecutionGraph archivedExecutionGraph = requestExecutionGraph(jobMasterGateway);

	return archivedExecutionGraph.getAllVertices().values()
		.stream()
		.flatMap(vertex -> Arrays.stream(vertex.getTaskVertices()))
		.map(AccessExecutionVertex::getCurrentExecutionAttempt)
		.collect(Collectors.toList());
}
 
Example #7
Source File: JobVertexWatermarksHandlerTest.java    From flink with Apache License 2.0
@Before
public void before() throws Exception {
	taskMetricStore = Mockito.mock(MetricStore.TaskMetricStore.class);

	MetricStore metricStore = Mockito.mock(MetricStore.class);
	Mockito.when(metricStore.getTaskMetricStore(TEST_JOB_ID.toString(), TEST_VERTEX_ID.toString()))
		.thenReturn(taskMetricStore);

	metricFetcher = Mockito.mock(MetricFetcher.class);
	Mockito.when(metricFetcher.getMetricStore()).thenReturn(metricStore);

	watermarkHandler = new JobVertexWatermarksHandler(
		Mockito.mock(LeaderGatewayRetriever.class),
		Time.seconds(1),
		Collections.emptyMap(),
		metricFetcher,
		NoOpExecutionGraphCache.INSTANCE,
		Mockito.mock(Executor.class));

	final Map<String, String> pathParameters = new HashMap<>();
	pathParameters.put(JobIDPathParameter.KEY, TEST_JOB_ID.toString());
	pathParameters.put(JobVertexIdPathParameter.KEY, TEST_VERTEX_ID.toString());

	request = new HandlerRequest<>(EmptyRequestBody.getInstance(), new JobVertexMessageParameters(),
			pathParameters, Collections.emptyMap());

	vertex = Mockito.mock(AccessExecutionJobVertex.class);
	Mockito.when(vertex.getJobVertexId()).thenReturn(TEST_VERTEX_ID);

	AccessExecutionVertex firstTask = Mockito.mock(AccessExecutionVertex.class);
	AccessExecutionVertex secondTask = Mockito.mock(AccessExecutionVertex.class);
	Mockito.when(firstTask.getParallelSubtaskIndex()).thenReturn(0);
	Mockito.when(secondTask.getParallelSubtaskIndex()).thenReturn(1);

	AccessExecutionVertex[] accessExecutionVertices = {firstTask, secondTask};
	Mockito.when(vertex.getTaskVertices()).thenReturn(accessExecutionVertices);
}
 
Example #8
Source File: JobMasterTest.java    From flink with Apache License 2.0
private static List<AccessExecution> getExecutions(final JobMasterGateway jobMasterGateway, final JobVertexID jobVertexId) {
	final ArchivedExecutionGraph archivedExecutionGraph = requestExecutionGraph(jobMasterGateway);

	return Optional.ofNullable(archivedExecutionGraph.getAllVertices().get(jobVertexId))
		.map(accessExecutionJobVertex -> Arrays.asList(accessExecutionJobVertex.getTaskVertices()))
		.orElse(Collections.emptyList())
		.stream()
		.map(AccessExecutionVertex::getCurrentExecutionAttempt)
		.collect(Collectors.toList());
}
 
Example #9
Source File: WebMonitorUtils.java    From flink with Apache License 2.0
public static JobDetails createDetailsForJob(AccessExecutionGraph job) {
	JobStatus status = job.getState();

	long started = job.getStatusTimestamp(JobStatus.CREATED);
	long finished = status.isGloballyTerminalState() ? job.getStatusTimestamp(status) : -1L;
	long duration = (finished >= 0L ? finished : System.currentTimeMillis()) - started;

	int[] countsPerStatus = new int[ExecutionState.values().length];
	long lastChanged = 0;
	int numTotalTasks = 0;

	for (AccessExecutionJobVertex ejv : job.getVerticesTopologically()) {
		AccessExecutionVertex[] vertices = ejv.getTaskVertices();
		numTotalTasks += vertices.length;

		for (AccessExecutionVertex vertex : vertices) {
			ExecutionState state = vertex.getExecutionState();
			countsPerStatus[state.ordinal()]++;
			lastChanged = Math.max(lastChanged, vertex.getStateTimestamp(state));
		}
	}

	lastChanged = Math.max(lastChanged, finished);

	return new JobDetails(
		job.getJobID(),
		job.getJobName(),
		started,
		finished,
		duration,
		status,
		lastChanged,
		countsPerStatus,
		numTotalTasks);
}
 
Example #10
Source File: SubtasksTimesHandler.java    From flink with Apache License 2.0
private static SubtasksTimesInfo createSubtaskTimesInfo(AccessExecutionJobVertex jobVertex) {
	final String id = jobVertex.getJobVertexId().toString();
	final String name = jobVertex.getName();
	final long now = System.currentTimeMillis();
	final List<SubtasksTimesInfo.SubtaskTimeInfo> subtasks = new ArrayList<>();

	int num = 0;
	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {

		long[] timestamps = vertex.getCurrentExecutionAttempt().getStateTimestamps();
		ExecutionState status = vertex.getExecutionState();

		long scheduledTime = timestamps[ExecutionState.SCHEDULED.ordinal()];

		long start = scheduledTime > 0 ? scheduledTime : -1;
		long end = status.isTerminal() ? timestamps[status.ordinal()] : now;
		long duration = start >= 0 ? end - start : -1L;

		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		Map<ExecutionState, Long> timestampMap = new HashMap<>(ExecutionState.values().length);
		for (ExecutionState state : ExecutionState.values()) {
			timestampMap.put(state, timestamps[state.ordinal()]);
		}

		subtasks.add(new SubtasksTimesInfo.SubtaskTimeInfo(
			num++,
			locationString,
			duration,
			timestampMap));
	}
	return new SubtasksTimesInfo(id, name, now, subtasks);
}
 
Example #11
Source File: AbstractSubtaskHandler.java    From flink with Apache License 2.0
@Override
protected R handleRequest(
		HandlerRequest<EmptyRequestBody, M> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	final Integer subtaskIndex = request.getPathParameter(SubtaskIndexPathParameter.class);
	final AccessExecutionVertex[] executionVertices = jobVertex.getTaskVertices();

	if (subtaskIndex >= executionVertices.length || subtaskIndex < 0) {
		throw new RestHandlerException("Invalid subtask index for vertex " + jobVertex.getJobVertexId(), HttpResponseStatus.NOT_FOUND);
	}

	return handleRequest(request, executionVertices[subtaskIndex]);
}
 
Example #12
Source File: SubtaskCurrentAttemptDetailsHandler.java    From flink with Apache License 2.0
@Override
protected SubtaskExecutionAttemptDetailsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, SubtaskMessageParameters> request,
		AccessExecutionVertex executionVertex) throws RestHandlerException {

	final AccessExecution execution = executionVertex.getCurrentExecutionAttempt();

	final JobID jobID = request.getPathParameter(JobIDPathParameter.class);
	final JobVertexID jobVertexID = request.getPathParameter(JobVertexIdPathParameter.class);

	return SubtaskExecutionAttemptDetailsInfo.create(execution, metricFetcher, jobID, jobVertexID);
}
 
Example #13
Source File: JobVertexWatermarksHandler.java    From flink with Apache License 2.0
@Override
protected MetricCollectionResponseBody handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	String jobID = request.getPathParameter(JobIDPathParameter.class).toString();
	String taskID = jobVertex.getJobVertexId().toString();

	metricFetcher.update();
	MetricStore.TaskMetricStore taskMetricStore = metricFetcher.getMetricStore().getTaskMetricStore(jobID, taskID);
	if (taskMetricStore == null) {
		return new MetricCollectionResponseBody(Collections.emptyList());
	}

	AccessExecutionVertex[] taskVertices = jobVertex.getTaskVertices();
	List<Metric> metrics = new ArrayList<>(taskVertices.length);

	for (AccessExecutionVertex taskVertex : taskVertices) {
		String id = taskVertex.getParallelSubtaskIndex() + "." + MetricNames.IO_CURRENT_INPUT_WATERMARK;
		String watermarkValue = taskMetricStore.getMetric(id);
		if (watermarkValue != null) {
			metrics.add(new Metric(id, watermarkValue));
		}
	}

	return new MetricCollectionResponseBody(metrics);
}
 
Example #14
Source File: SubtaskExecutionAttemptDetailsHandler.java    From flink with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	List<ArchivedJson> archive = new ArrayList<>(16);
	for (AccessExecutionJobVertex task : graph.getAllVertices().values()) {
		for (AccessExecutionVertex subtask : task.getTaskVertices()) {
			ResponseBody curAttemptJson = SubtaskExecutionAttemptDetailsInfo.create(subtask.getCurrentExecutionAttempt(), null, graph.getJobID(), task.getJobVertexId());
			String curAttemptPath = getMessageHeaders().getTargetRestEndpointURL()
				.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
				.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
				.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
				.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(subtask.getCurrentExecutionAttempt().getAttemptNumber()));

			archive.add(new ArchivedJson(curAttemptPath, curAttemptJson));

			for (int x = 0; x < subtask.getCurrentExecutionAttempt().getAttemptNumber(); x++) {
				AccessExecution attempt = subtask.getPriorExecutionAttempt(x);
				if (attempt != null) {
					ResponseBody json = SubtaskExecutionAttemptDetailsInfo.create(attempt, null, graph.getJobID(), task.getJobVertexId());
					String path = getMessageHeaders().getTargetRestEndpointURL()
						.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
						.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
						.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
						.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(attempt.getAttemptNumber()));
					archive.add(new ArchivedJson(path, json));
				}
			}
		}
	}
	return archive;
}
 
Example #15
Source File: SubtasksAllAccumulatorsHandler.java    From flink with Apache License 2.0
@Override
protected SubtasksAllAccumulatorsInfo handleRequest(HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request, AccessExecutionJobVertex jobVertex) throws RestHandlerException {
	JobVertexID jobVertexId = jobVertex.getJobVertexId();
	int parallelism = jobVertex.getParallelism();

	final List<SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo> subtaskAccumulatorsInfos = new ArrayList<>();

	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		StringifiedAccumulatorResult[] accs = vertex.getCurrentExecutionAttempt().getUserAccumulatorsStringified();
		List<UserAccumulator> userAccumulators = new ArrayList<>(accs.length);
		for (StringifiedAccumulatorResult acc : accs) {
			userAccumulators.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
		}

		subtaskAccumulatorsInfos.add(
			new SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo(
				vertex.getCurrentExecutionAttempt().getParallelSubtaskIndex(),
				vertex.getCurrentExecutionAttempt().getAttemptNumber(),
				locationString,
				userAccumulators
			));
	}

	return new SubtasksAllAccumulatorsInfo(jobVertexId, parallelism, subtaskAccumulatorsInfos);
}
 
Example #16
Source File: JobExceptionsHandler.java    From flink with Apache License 2.0
private static JobExceptionsInfo createJobExceptionsInfo(AccessExecutionGraph executionGraph, int exceptionToReportMaxSize) {
	ErrorInfo rootException = executionGraph.getFailureInfo();
	String rootExceptionMessage = null;
	Long rootTimestamp = null;
	if (rootException != null) {
		rootExceptionMessage = rootException.getExceptionAsString();
		rootTimestamp = rootException.getTimestamp();
	}

	List<JobExceptionsInfo.ExecutionExceptionInfo> taskExceptionList = new ArrayList<>();
	boolean truncated = false;
	for (AccessExecutionVertex task : executionGraph.getAllExecutionVertices()) {
		String t = task.getFailureCauseAsString();
		if (t != null && !t.equals(ExceptionUtils.STRINGIFIED_NULL_EXCEPTION)) {
			if (taskExceptionList.size() >= exceptionToReportMaxSize) {
				truncated = true;
				break;
			}

			TaskManagerLocation location = task.getCurrentAssignedResourceLocation();
			String locationString = location != null ?
				location.getFQDNHostname() + ':' + location.dataPort() : "(unassigned)";
			long timestamp = task.getStateTimestamp(ExecutionState.FAILED);
			taskExceptionList.add(new JobExceptionsInfo.ExecutionExceptionInfo(
				t,
				task.getTaskNameWithSubtaskIndex(),
				locationString,
				timestamp == 0 ? -1 : timestamp));
		}
	}

	return new JobExceptionsInfo(rootExceptionMessage, rootTimestamp, taskExceptionList, truncated);
}
 
Example #17
Source File: SubtaskExecutionAttemptAccumulatorsHandler.java    From flink with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	List<ArchivedJson> archive = new ArrayList<>(16);
	for (AccessExecutionJobVertex task : graph.getAllVertices().values()) {
		for (AccessExecutionVertex subtask : task.getTaskVertices()) {
			ResponseBody curAttemptJson = createAccumulatorInfo(subtask.getCurrentExecutionAttempt());
			String curAttemptPath = getMessageHeaders().getTargetRestEndpointURL()
				.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
				.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
				.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
				.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(subtask.getCurrentExecutionAttempt().getAttemptNumber()));

			archive.add(new ArchivedJson(curAttemptPath, curAttemptJson));

			for (int x = 0; x < subtask.getCurrentExecutionAttempt().getAttemptNumber(); x++) {
				AccessExecution attempt = subtask.getPriorExecutionAttempt(x);
if (attempt != null) {
					ResponseBody json = createAccumulatorInfo(attempt);
					String path = getMessageHeaders().getTargetRestEndpointURL()
						.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
						.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
						.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
						.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(attempt.getAttemptNumber()));
					archive.add(new ArchivedJson(path, json));
				}
			}
		}
	}
	return archive;
}
 
Example #18
Source File: WebMonitorUtils.java    From Flink-CEPplus with Apache License 2.0
public static JobDetails createDetailsForJob(AccessExecutionGraph job) {
	JobStatus status = job.getState();

	long started = job.getStatusTimestamp(JobStatus.CREATED);
	long finished = status.isGloballyTerminalState() ? job.getStatusTimestamp(status) : -1L;
	long duration = (finished >= 0L ? finished : System.currentTimeMillis()) - started;

	int[] countsPerStatus = new int[ExecutionState.values().length];
	long lastChanged = 0;
	int numTotalTasks = 0;

	for (AccessExecutionJobVertex ejv : job.getVerticesTopologically()) {
		AccessExecutionVertex[] vertices = ejv.getTaskVertices();
		numTotalTasks += vertices.length;

		for (AccessExecutionVertex vertex : vertices) {
			ExecutionState state = vertex.getExecutionState();
			countsPerStatus[state.ordinal()]++;
			lastChanged = Math.max(lastChanged, vertex.getStateTimestamp(state));
		}
	}

	lastChanged = Math.max(lastChanged, finished);

	return new JobDetails(
		job.getJobID(),
		job.getJobName(),
		started,
		finished,
		duration,
		status,
		lastChanged,
		countsPerStatus,
		numTotalTasks);
}
 
Example #19
Source File: SubtasksTimesHandler.java    From Flink-CEPplus with Apache License 2.0
private static SubtasksTimesInfo createSubtaskTimesInfo(AccessExecutionJobVertex jobVertex) {
	final String id = jobVertex.getJobVertexId().toString();
	final String name = jobVertex.getName();
	final long now = System.currentTimeMillis();
	final List<SubtasksTimesInfo.SubtaskTimeInfo> subtasks = new ArrayList<>();

	int num = 0;
	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {

		long[] timestamps = vertex.getCurrentExecutionAttempt().getStateTimestamps();
		ExecutionState status = vertex.getExecutionState();

		long scheduledTime = timestamps[ExecutionState.SCHEDULED.ordinal()];

		long start = scheduledTime > 0 ? scheduledTime : -1;
		long end = status.isTerminal() ? timestamps[status.ordinal()] : now;
		long duration = start >= 0 ? end - start : -1L;

		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		Map<ExecutionState, Long> timestampMap = new HashMap<>(ExecutionState.values().length);
		for (ExecutionState state : ExecutionState.values()) {
			timestampMap.put(state, timestamps[state.ordinal()]);
		}

		subtasks.add(new SubtasksTimesInfo.SubtaskTimeInfo(
			num++,
			locationString,
			duration,
			timestampMap));
	}
	return new SubtasksTimesInfo(id, name, now, subtasks);
}
 
Example #20
Source File: AbstractSubtaskHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
protected R handleRequest(
		HandlerRequest<EmptyRequestBody, M> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	final Integer subtaskIndex = request.getPathParameter(SubtaskIndexPathParameter.class);
	final AccessExecutionVertex[] executionVertices = jobVertex.getTaskVertices();

	if (subtaskIndex >= executionVertices.length || subtaskIndex < 0) {
		throw new RestHandlerException("Invalid subtask index for vertex " + jobVertex.getJobVertexId(), HttpResponseStatus.NOT_FOUND);
	}

	return handleRequest(request, executionVertices[subtaskIndex]);
}
 
Example #21
Source File: SubtaskExecutionAttemptDetailsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	List<ArchivedJson> archive = new ArrayList<>(16);
	for (AccessExecutionJobVertex task : graph.getAllVertices().values()) {
		for (AccessExecutionVertex subtask : task.getTaskVertices()) {
			ResponseBody curAttemptJson = createDetailsInfo(subtask.getCurrentExecutionAttempt(), graph.getJobID(), task.getJobVertexId(), null);
			String curAttemptPath = getMessageHeaders().getTargetRestEndpointURL()
				.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
				.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
				.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
				.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(subtask.getCurrentExecutionAttempt().getAttemptNumber()));

			archive.add(new ArchivedJson(curAttemptPath, curAttemptJson));

			for (int x = 0; x < subtask.getCurrentExecutionAttempt().getAttemptNumber(); x++) {
				AccessExecution attempt = subtask.getPriorExecutionAttempt(x);
				if (attempt != null) {
					ResponseBody json = createDetailsInfo(attempt, graph.getJobID(), task.getJobVertexId(), null);
					String path = getMessageHeaders().getTargetRestEndpointURL()
						.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
						.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
						.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
						.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(attempt.getAttemptNumber()));
					archive.add(new ArchivedJson(path, json));
				}
			}
		}
	}
	return archive;
}
 
Example #22
Source File: SubtasksAllAccumulatorsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
protected SubtasksAllAccumulatorsInfo handleRequest(HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request, AccessExecutionJobVertex jobVertex) throws RestHandlerException {
	JobVertexID jobVertexId = jobVertex.getJobVertexId();
	int parallelism = jobVertex.getParallelism();

	final List<SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo> subtaskAccumulatorsInfos = new ArrayList<>();

	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		StringifiedAccumulatorResult[] accs = vertex.getCurrentExecutionAttempt().getUserAccumulatorsStringified();
		List<UserAccumulator> userAccumulators = new ArrayList<>(accs.length);
		for (StringifiedAccumulatorResult acc : accs) {
			userAccumulators.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
		}

		subtaskAccumulatorsInfos.add(
			new SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo(
				vertex.getCurrentExecutionAttempt().getParallelSubtaskIndex(),
				vertex.getCurrentExecutionAttempt().getAttemptNumber(),
				locationString,
				userAccumulators
			));
	}

	return new SubtasksAllAccumulatorsInfo(jobVertexId, parallelism, subtaskAccumulatorsInfos);
}
 
Example #23
Source File: JobExceptionsHandler.java    From Flink-CEPplus with Apache License 2.0
private static JobExceptionsInfo createJobExceptionsInfo(AccessExecutionGraph executionGraph) {
	ErrorInfo rootException = executionGraph.getFailureInfo();
	String rootExceptionMessage = null;
	Long rootTimestamp = null;
	if (rootException != null) {
		rootExceptionMessage = rootException.getExceptionAsString();
		rootTimestamp = rootException.getTimestamp();
	}

	List<JobExceptionsInfo.ExecutionExceptionInfo> taskExceptionList = new ArrayList<>();
	boolean truncated = false;
	for (AccessExecutionVertex task : executionGraph.getAllExecutionVertices()) {
		String t = task.getFailureCauseAsString();
		if (t != null && !t.equals(ExceptionUtils.STRINGIFIED_NULL_EXCEPTION)) {
			if (taskExceptionList.size() >= MAX_NUMBER_EXCEPTION_TO_REPORT) {
				truncated = true;
				break;
			}

			TaskManagerLocation location = task.getCurrentAssignedResourceLocation();
			String locationString = location != null ?
				location.getFQDNHostname() + ':' + location.dataPort() : "(unassigned)";
			long timestamp = task.getStateTimestamp(ExecutionState.FAILED);
			taskExceptionList.add(new JobExceptionsInfo.ExecutionExceptionInfo(
				t,
				task.getTaskNameWithSubtaskIndex(),
				locationString,
				timestamp == 0 ? -1 : timestamp));
		}
	}

	return new JobExceptionsInfo(rootExceptionMessage, rootTimestamp, taskExceptionList, truncated);
}
 
Example #24
Source File: SubtaskExecutionAttemptAccumulatorsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
	List<ArchivedJson> archive = new ArrayList<>(16);
	for (AccessExecutionJobVertex task : graph.getAllVertices().values()) {
		for (AccessExecutionVertex subtask : task.getTaskVertices()) {
			ResponseBody curAttemptJson = createAccumulatorInfo(subtask.getCurrentExecutionAttempt());
			String curAttemptPath = getMessageHeaders().getTargetRestEndpointURL()
				.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
				.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
				.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
				.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(subtask.getCurrentExecutionAttempt().getAttemptNumber()));

			archive.add(new ArchivedJson(curAttemptPath, curAttemptJson));

			for (int x = 0; x < subtask.getCurrentExecutionAttempt().getAttemptNumber(); x++) {
				AccessExecution attempt = subtask.getPriorExecutionAttempt(x);
if (attempt != null) {
					ResponseBody json = createAccumulatorInfo(attempt);
					String path = getMessageHeaders().getTargetRestEndpointURL()
						.replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString())
						.replace(':' + JobVertexIdPathParameter.KEY, task.getJobVertexId().toString())
						.replace(':' + SubtaskIndexPathParameter.KEY, String.valueOf(subtask.getParallelSubtaskIndex()))
						.replace(':' + SubtaskAttemptPathParameter.KEY, String.valueOf(attempt.getAttemptNumber()));
					archive.add(new ArchivedJson(path, json));
				}
			}
		}
	}
	return archive;
}
 
Example #25
Source File: ArchivedJobGenerationUtils.java    From Flink-CEPplus with Apache License 2.0
public static AccessExecutionVertex getTestSubtask() throws Exception {
	synchronized (lock) {
		if (originalJob == null) {
			generateArchivedJob();
		}
	}
	return originalSubtask;
}
 