org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult Java Examples

The following examples show how to use org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult. They are extracted from open-source projects; the source file, originating project, and license are listed in the header above each example.
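
Before the project examples, here is a minimal, self-contained sketch of the typical life cycle of the class, assuming the Flink runtime dependencies are on the classpath; the accumulator name and value are made up for illustration. Raw user accumulators are wrapped in OptionalFailure, passed to the static stringifyAccumulatorResults helper, and each resulting entry exposes getName(), getType(), and getValue().

import org.apache.flink.api.common.accumulators.Accumulator;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult;
import org.apache.flink.util.OptionalFailure;

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

// Illustrative stand-alone sketch, not taken from any of the projects below.
public class StringifiedAccumulatorResultSketch {

	public static void main(String[] args) {
		// An accumulator as a task would report it ("records-processed" is a made-up name).
		IntCounter counter = new IntCounter();
		counter.add(42);

		Map<String, Accumulator<?, ?>> userAccumulators = new HashMap<>();
		userAccumulators.put("records-processed", counter);

		// Wrap the accumulators in OptionalFailure, the input type expected by the stringify helper
		// (the same conversion appears in Execution.getUserAccumulatorsStringified below).
		Map<String, OptionalFailure<Accumulator<?, ?>>> wrapped = userAccumulators.entrySet()
			.stream()
			.collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue())));

		// Convert to the string-based representation used by the REST handlers and archived graphs.
		StringifiedAccumulatorResult[] results = StringifiedAccumulatorResult.stringifyAccumulatorResults(wrapped);

		for (StringifiedAccumulatorResult result : results) {
			System.out.println(result.getName() + " (" + result.getType() + ") = " + result.getValue());
		}
	}
}

The handler, builder, and test examples below follow the same pattern inside the Flink runtime.
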
Example #1
Source File: JobVertexAccumulatorsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
protected JobVertexAccumulatorsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {
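	// Aggregate the user accumulators across all subtasks of this vertex and expose them in the REST response.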

	StringifiedAccumulatorResult[] accs = jobVertex.getAggregatedUserAccumulatorsStringified();
	ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(
			new UserAccumulator(
				acc.getName(),
				acc.getType(),
				acc.getValue()));
	}

	return new JobVertexAccumulatorsInfo(jobVertex.getJobVertexId().toString(), userAccumulatorList);
}
 
Example #2
Source File: ArchivedExecutionGraphBuilder.java    From flink with Apache License 2.0
public ArchivedExecutionGraph build() {
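	// Build the ArchivedExecutionGraph, substituting defaults (e.g. an empty StringifiedAccumulatorResult array) for every field that was not explicitly set.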
	JobID jobID = this.jobID != null ? this.jobID : new JobID();
	String jobName = this.jobName != null ? this.jobName : "job_" + RANDOM.nextInt();

	if (tasks == null) {
		tasks = Collections.emptyMap();
	}

	return new ArchivedExecutionGraph(
		jobID,
		jobName,
		tasks,
		verticesInCreationOrder != null ? verticesInCreationOrder : new ArrayList<>(tasks.values()),
		stateTimestamps != null ? stateTimestamps : new long[JobStatus.values().length],
		state != null ? state : JobStatus.FINISHED,
		failureCause,
		jsonPlan != null ? jsonPlan : "{\"jobid\":\"" + jobID + "\", \"name\":\"" + jobName + "\", \"nodes\":[]}",
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0],
		serializedUserAccumulators != null ? serializedUserAccumulators : Collections.emptyMap(),
		archivedExecutionConfig != null ? archivedExecutionConfig : new ArchivedExecutionConfigBuilder().build(),
		isStoppable,
		null,
		null
	);
}
 
Example #3
Source File: ArchivedExecutionGraphBuilder.java    From Flink-CEPplus with Apache License 2.0
public ArchivedExecutionGraph build() {
	JobID jobID = this.jobID != null ? this.jobID : new JobID();
	String jobName = this.jobName != null ? this.jobName : "job_" + RANDOM.nextInt();

	if (tasks == null) {
		tasks = Collections.emptyMap();
	}

	return new ArchivedExecutionGraph(
		jobID,
		jobName,
		tasks,
		verticesInCreationOrder != null ? verticesInCreationOrder : new ArrayList<>(tasks.values()),
		stateTimestamps != null ? stateTimestamps : new long[JobStatus.values().length],
		state != null ? state : JobStatus.FINISHED,
		failureCause,
		jsonPlan != null ? jsonPlan : "{\"jobid\":\"" + jobID + "\", \"name\":\"" + jobName + "\", \"nodes\":[]}",
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0],
		serializedUserAccumulators != null ? serializedUserAccumulators : Collections.emptyMap(),
		archivedExecutionConfig != null ? archivedExecutionConfig : new ArchivedExecutionConfigBuilder().build(),
		isStoppable,
		null,
		null
	);
}
 
Example #4
Source File: ExecutionGraphCacheTest.java    From flink with Apache License 2.0
public SuspendableAccessExecutionGraph(JobID jobId) {
	super(
		jobId,
		"ExecutionGraphCacheTest",
		Collections.emptyMap(),
		Collections.emptyList(),
		new long[0],
		JobStatus.RUNNING,
		new ErrorInfo(new FlinkException("Test"), 42L),
		"",
		new StringifiedAccumulatorResult[0],
		Collections.emptyMap(),
		new ArchivedExecutionConfig(new ExecutionConfig()),
		false,
		null,
		null);

	jobStatus = super.getState();
}
 
Example #5
Source File: ExecutionGraphCacheTest.java    From Flink-CEPplus with Apache License 2.0
public SuspendableAccessExecutionGraph(JobID jobId) {
	super(
		jobId,
		"ExecutionGraphCacheTest",
		Collections.emptyMap(),
		Collections.emptyList(),
		new long[0],
		JobStatus.RUNNING,
		new ErrorInfo(new FlinkException("Test"), 42L),
		"",
		new StringifiedAccumulatorResult[0],
		Collections.emptyMap(),
		new ArchivedExecutionConfig(new ExecutionConfig()),
		false,
		null,
		null);

	jobStatus = super.getState();
}
 
Example #6
Source File: ArchivedExecutionJobVertex.java    From flink with Apache License 2.0
public ArchivedExecutionJobVertex(
		ArchivedExecutionVertex[] taskVertices,
		JobVertexID id,
		String name,
		int parallelism,
		int maxParallelism,
		ResourceProfile resourceProfile,
		StringifiedAccumulatorResult[] archivedUserAccumulators) {
	this.taskVertices = taskVertices;
	this.id = id;
	this.name = name;
	this.parallelism = parallelism;
	this.maxParallelism = maxParallelism;
	this.resourceProfile = resourceProfile;
	this.archivedUserAccumulators = archivedUserAccumulators;
}
 
Example #7
Source File: JobVertexAccumulatorsHandler.java    From flink with Apache License 2.0
@Override
protected JobVertexAccumulatorsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	StringifiedAccumulatorResult[] accs = jobVertex.getAggregatedUserAccumulatorsStringified();
	ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(
			new UserAccumulator(
				acc.getName(),
				acc.getType(),
				acc.getValue()));
	}

	return new JobVertexAccumulatorsInfo(jobVertex.getJobVertexId().toString(), userAccumulatorList);
}
 
Example #8
Source File: ArchivedExecution.java    From flink with Apache License 2.0
public ArchivedExecution(
		StringifiedAccumulatorResult[] userAccumulators, IOMetrics ioMetrics,
		ExecutionAttemptID attemptId, int attemptNumber, ExecutionState state, String failureCause,
		TaskManagerLocation assignedResourceLocation, AllocationID assignedAllocationID,  int parallelSubtaskIndex,
		long[] stateTimestamps) {
	this.userAccumulators = userAccumulators;
	this.ioMetrics = ioMetrics;
	this.failureCause = failureCause;
	this.assignedResourceLocation = assignedResourceLocation;
	this.attemptNumber = attemptNumber;
	this.attemptId = attemptId;
	this.state = state;
	this.stateTimestamps = stateTimestamps;
	this.parallelSubtaskIndex = parallelSubtaskIndex;
	this.assignedAllocationID = assignedAllocationID;
}
 
Example #9
Source File: JobVertexAccumulatorsHandler.java    From flink with Apache License 2.0
@Override
protected JobVertexAccumulatorsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	StringifiedAccumulatorResult[] accs = jobVertex.getAggregatedUserAccumulatorsStringified();
	ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(
			new UserAccumulator(
				acc.getName(),
				acc.getType(),
				acc.getValue()));
	}

	return new JobVertexAccumulatorsInfo(jobVertex.getJobVertexId().toString(), userAccumulatorList);
}
 
Example #10
Source File: ArchivedExecution.java    From flink with Apache License 2.0
public ArchivedExecution(
		StringifiedAccumulatorResult[] userAccumulators, IOMetrics ioMetrics,
		ExecutionAttemptID attemptId, int attemptNumber, ExecutionState state, String failureCause,
		TaskManagerLocation assignedResourceLocation, AllocationID assignedAllocationID,  int parallelSubtaskIndex,
		long[] stateTimestamps) {
	this.userAccumulators = userAccumulators;
	this.ioMetrics = ioMetrics;
	this.failureCause = failureCause;
	this.assignedResourceLocation = assignedResourceLocation;
	this.attemptNumber = attemptNumber;
	this.attemptId = attemptId;
	this.state = state;
	this.stateTimestamps = stateTimestamps;
	this.parallelSubtaskIndex = parallelSubtaskIndex;
	this.assignedAllocationID = assignedAllocationID;
}
 
Example #11
Source File: ArchivedExecutionJobVertex.java    From flink with Apache License 2.0
public ArchivedExecutionJobVertex(
		ArchivedExecutionVertex[] taskVertices,
		JobVertexID id,
		String name,
		int parallelism,
		int maxParallelism,
		ResourceProfile resourceProfile,
		StringifiedAccumulatorResult[] archivedUserAccumulators) {
	this.taskVertices = taskVertices;
	this.id = id;
	this.name = name;
	this.parallelism = parallelism;
	this.maxParallelism = maxParallelism;
	this.resourceProfile = resourceProfile;
	this.archivedUserAccumulators = archivedUserAccumulators;
}
 
Example #12
Source File: ArchivedExecutionGraphBuilder.java    From flink with Apache License 2.0
public ArchivedExecutionGraph build() {
	JobID jobID = this.jobID != null ? this.jobID : new JobID();
	String jobName = this.jobName != null ? this.jobName : "job_" + RANDOM.nextInt();

	if (tasks == null) {
		tasks = Collections.emptyMap();
	}

	return new ArchivedExecutionGraph(
		jobID,
		jobName,
		tasks,
		verticesInCreationOrder != null ? verticesInCreationOrder : new ArrayList<>(tasks.values()),
		stateTimestamps != null ? stateTimestamps : new long[JobStatus.values().length],
		state != null ? state : JobStatus.FINISHED,
		failureCause,
		jsonPlan != null ? jsonPlan : "{\"jobid\":\"" + jobID + "\", \"name\":\"" + jobName + "\", \"nodes\":[]}",
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0],
		serializedUserAccumulators != null ? serializedUserAccumulators : Collections.emptyMap(),
		archivedExecutionConfig != null ? archivedExecutionConfig : new ArchivedExecutionConfigBuilder().build(),
		isStoppable,
		null,
		null,
		"stateBackendName"
	);
}
 
Example #13
Source File: ArchivedExecution.java    From Flink-CEPplus with Apache License 2.0
public ArchivedExecution(
		StringifiedAccumulatorResult[] userAccumulators, IOMetrics ioMetrics,
		ExecutionAttemptID attemptId, int attemptNumber, ExecutionState state, String failureCause,
		TaskManagerLocation assignedResourceLocation, AllocationID assignedAllocationID,  int parallelSubtaskIndex,
		long[] stateTimestamps) {
	this.userAccumulators = userAccumulators;
	this.ioMetrics = ioMetrics;
	this.failureCause = failureCause;
	this.assignedResourceLocation = assignedResourceLocation;
	this.attemptNumber = attemptNumber;
	this.attemptId = attemptId;
	this.state = state;
	this.stateTimestamps = stateTimestamps;
	this.parallelSubtaskIndex = parallelSubtaskIndex;
	this.assignedAllocationID = assignedAllocationID;
}
 
Example #14
Source File: DefaultExecutionGraphCacheTest.java    From flink with Apache License 2.0
public SuspendableAccessExecutionGraph(JobID jobId) {
	super(
		jobId,
		"DefaultExecutionGraphCacheTest",
		Collections.emptyMap(),
		Collections.emptyList(),
		new long[0],
		JobStatus.RUNNING,
		new ErrorInfo(new FlinkException("Test"), 42L),
		"",
		new StringifiedAccumulatorResult[0],
		Collections.emptyMap(),
		new ArchivedExecutionConfig(new ExecutionConfig()),
		false,
		null,
		null,
		"stateBackendName");

	jobStatus = super.getState();
}
 
Example #15
Source File: Execution.java    From flink with Apache License 2.0
@Override
public StringifiedAccumulatorResult[] getUserAccumulatorsStringified() {
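	// Wrap each raw user accumulator in OptionalFailure so the map matches the input type of stringifyAccumulatorResults.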
	Map<String, OptionalFailure<Accumulator<?, ?>>> accumulators =
		userAccumulators == null ?
			null :
			userAccumulators.entrySet()
				.stream()
				.collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue())));
	return StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulators);
}
 
Example #16
Source File: ArchivedExecutionGraph.java    From flink with Apache License 2.0
public ArchivedExecutionGraph(
		JobID jobID,
		String jobName,
		Map<JobVertexID, ArchivedExecutionJobVertex> tasks,
		List<ArchivedExecutionJobVertex> verticesInCreationOrder,
		long[] stateTimestamps,
		JobStatus state,
		@Nullable ErrorInfo failureCause,
		String jsonPlan,
		StringifiedAccumulatorResult[] archivedUserAccumulators,
		Map<String, SerializedValue<OptionalFailure<Object>>> serializedUserAccumulators,
		ArchivedExecutionConfig executionConfig,
		boolean isStoppable,
		@Nullable CheckpointCoordinatorConfiguration jobCheckpointingConfiguration,
		@Nullable CheckpointStatsSnapshot checkpointStatsSnapshot) {

	this.jobID = Preconditions.checkNotNull(jobID);
	this.jobName = Preconditions.checkNotNull(jobName);
	this.tasks = Preconditions.checkNotNull(tasks);
	this.verticesInCreationOrder = Preconditions.checkNotNull(verticesInCreationOrder);
	this.stateTimestamps = Preconditions.checkNotNull(stateTimestamps);
	this.state = Preconditions.checkNotNull(state);
	this.failureCause = failureCause;
	this.jsonPlan = Preconditions.checkNotNull(jsonPlan);
	this.archivedUserAccumulators = Preconditions.checkNotNull(archivedUserAccumulators);
	this.serializedUserAccumulators = Preconditions.checkNotNull(serializedUserAccumulators);
	this.archivedExecutionConfig = Preconditions.checkNotNull(executionConfig);
	this.isStoppable = isStoppable;
	this.jobCheckpointingConfiguration = jobCheckpointingConfiguration;
	this.checkpointStatsSnapshot = checkpointStatsSnapshot;
}
 
Example #17
Source File: ExecutionJobVertex.java    From flink with Apache License 2.0
public StringifiedAccumulatorResult[] getAggregatedUserAccumulatorsStringified() {
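	// Merge the user accumulators of all subtasks' current execution attempts, then stringify the merged result.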
	Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>();

	for (ExecutionVertex vertex : taskVertices) {
		Map<String, Accumulator<?, ?>> next = vertex.getCurrentExecutionAttempt().getUserAccumulators();
		if (next != null) {
			AccumulatorHelper.mergeInto(userAccumulators, next);
		}
	}

	return StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators);
}
 
Example #18
Source File: ArchivedExecutionGraph.java    From Flink-CEPplus with Apache License 2.0
public ArchivedExecutionGraph(
		JobID jobID,
		String jobName,
		Map<JobVertexID, ArchivedExecutionJobVertex> tasks,
		List<ArchivedExecutionJobVertex> verticesInCreationOrder,
		long[] stateTimestamps,
		JobStatus state,
		@Nullable ErrorInfo failureCause,
		String jsonPlan,
		StringifiedAccumulatorResult[] archivedUserAccumulators,
		Map<String, SerializedValue<OptionalFailure<Object>>> serializedUserAccumulators,
		ArchivedExecutionConfig executionConfig,
		boolean isStoppable,
		@Nullable CheckpointCoordinatorConfiguration jobCheckpointingConfiguration,
		@Nullable CheckpointStatsSnapshot checkpointStatsSnapshot) {

	this.jobID = Preconditions.checkNotNull(jobID);
	this.jobName = Preconditions.checkNotNull(jobName);
	this.tasks = Preconditions.checkNotNull(tasks);
	this.verticesInCreationOrder = Preconditions.checkNotNull(verticesInCreationOrder);
	this.stateTimestamps = Preconditions.checkNotNull(stateTimestamps);
	this.state = Preconditions.checkNotNull(state);
	this.failureCause = failureCause;
	this.jsonPlan = Preconditions.checkNotNull(jsonPlan);
	this.archivedUserAccumulators = Preconditions.checkNotNull(archivedUserAccumulators);
	this.serializedUserAccumulators = Preconditions.checkNotNull(serializedUserAccumulators);
	this.archivedExecutionConfig = Preconditions.checkNotNull(executionConfig);
	this.isStoppable = isStoppable;
	this.jobCheckpointingConfiguration = jobCheckpointingConfiguration;
	this.checkpointStatsSnapshot = checkpointStatsSnapshot;
}
 
Example #19
Source File: SubtasksAllAccumulatorsHandler.java    From Flink-CEPplus with Apache License 2.0
@Override
protected SubtasksAllAccumulatorsInfo handleRequest(HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request, AccessExecutionJobVertex jobVertex) throws RestHandlerException {
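	// For every subtask, collect its stringified user accumulators together with its attempt number and TaskManager location.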
	JobVertexID jobVertexId = jobVertex.getJobVertexId();
	int parallelism = jobVertex.getParallelism();

	final List<SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo> subtaskAccumulatorsInfos = new ArrayList<>();

	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		StringifiedAccumulatorResult[] accs = vertex.getCurrentExecutionAttempt().getUserAccumulatorsStringified();
		List<UserAccumulator> userAccumulators = new ArrayList<>(accs.length);
		for (StringifiedAccumulatorResult acc : accs) {
			userAccumulators.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
		}

		subtaskAccumulatorsInfos.add(
			new SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo(
				vertex.getCurrentExecutionAttempt().getParallelSubtaskIndex(),
				vertex.getCurrentExecutionAttempt().getAttemptNumber(),
				locationString,
				userAccumulators
			));
	}

	return new SubtasksAllAccumulatorsInfo(jobVertexId, parallelism, subtaskAccumulatorsInfos);
}
 
Example #20
Source File: ExecutionJobVertex.java    From Flink-CEPplus with Apache License 2.0
public StringifiedAccumulatorResult[] getAggregatedUserAccumulatorsStringified() {
	Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>();

	for (ExecutionVertex vertex : taskVertices) {
		Map<String, Accumulator<?, ?>> next = vertex.getCurrentExecutionAttempt().getUserAccumulators();
		if (next != null) {
			AccumulatorHelper.mergeInto(userAccumulators, next);
		}
	}

	return StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators);
}
 
Example #21
Source File: ArchivedExecutionJobVertexBuilder.java    From flink with Apache License 2.0
public ArchivedExecutionJobVertex build() {
	Preconditions.checkNotNull(taskVertices);
	return new ArchivedExecutionJobVertex(
		taskVertices,
		id != null ? id : new JobVertexID(),
		name != null ? name : "task_" + RANDOM.nextInt(),
		parallelism,
		maxParallelism,
		ResourceProfile.UNKNOWN,
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0]
	);
}
 
Example #22
Source File: ArchivedExecutionBuilder.java    From flink with Apache License 2.0
public ArchivedExecution build() throws UnknownHostException {
	return new ArchivedExecution(
		userAccumulators != null ? userAccumulators : new StringifiedAccumulatorResult[0],
		ioMetrics != null ? ioMetrics : new TestIOMetrics(),
		attemptId != null ? attemptId : new ExecutionAttemptID(),
		attemptNumber,
		state != null ? state : ExecutionState.FINISHED,
		failureCause != null ? failureCause : "(null)",
		assignedResourceLocation != null ? assignedResourceLocation : new TaskManagerLocation(new ResourceID("tm"), InetAddress.getLocalHost(), 1234),
		assignedAllocationID != null ? assignedAllocationID : new AllocationID(0L, 0L),
		parallelSubtaskIndex,
		stateTimestamps != null ? stateTimestamps : new long[]{1, 2, 3, 4, 5, 5, 5, 5}
	);
}
 
Example #23
Source File: ArchivedExecutionGraphTest.java    From flink with Apache License 2.0
private static void compareStringifiedAccumulators(StringifiedAccumulatorResult[] runtimeAccs, StringifiedAccumulatorResult[] archivedAccs) {
	assertEquals(runtimeAccs.length, archivedAccs.length);

	for (int x = 0; x < runtimeAccs.length; x++) {
		StringifiedAccumulatorResult runtimeResult = runtimeAccs[x];
		StringifiedAccumulatorResult archivedResult = archivedAccs[x];

		assertEquals(runtimeResult.getName(), archivedResult.getName());
		assertEquals(runtimeResult.getType(), archivedResult.getType());
		assertEquals(runtimeResult.getValue(), archivedResult.getValue());
	}
}
 
Example #24
Source File: SubtasksAllAccumulatorsHandler.java    From flink with Apache License 2.0
@Override
protected SubtasksAllAccumulatorsInfo handleRequest(HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request, AccessExecutionJobVertex jobVertex) throws RestHandlerException {
	JobVertexID jobVertexId = jobVertex.getJobVertexId();
	int parallelism = jobVertex.getParallelism();

	final List<SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo> subtaskAccumulatorsInfos = new ArrayList<>();

	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		StringifiedAccumulatorResult[] accs = vertex.getCurrentExecutionAttempt().getUserAccumulatorsStringified();
		List<UserAccumulator> userAccumulators = new ArrayList<>(accs.length);
		for (StringifiedAccumulatorResult acc : accs) {
			userAccumulators.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
		}

		subtaskAccumulatorsInfos.add(
			new SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo(
				vertex.getCurrentExecutionAttempt().getParallelSubtaskIndex(),
				vertex.getCurrentExecutionAttempt().getAttemptNumber(),
				locationString,
				userAccumulators
			));
	}

	return new SubtasksAllAccumulatorsInfo(jobVertexId, parallelism, subtaskAccumulatorsInfos);
}
 
Example #25
Source File: SubtaskExecutionAttemptAccumulatorsHandler.java    From flink with Apache License 2.0
private static SubtaskExecutionAttemptAccumulatorsInfo createAccumulatorInfo(AccessExecution execution) {
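	// Convert the stringified accumulators of a single execution attempt into the REST response type.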
	final StringifiedAccumulatorResult[] accs = execution.getUserAccumulatorsStringified();
	final ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
	}

	return new SubtaskExecutionAttemptAccumulatorsInfo(
		execution.getParallelSubtaskIndex(),
		execution.getAttemptNumber(),
		execution.getAttemptId().toString(),
		userAccumulatorList);
}
 
Example #26
Source File: Execution.java    From flink with Apache License 2.0
@Override
public StringifiedAccumulatorResult[] getUserAccumulatorsStringified() {
	Map<String, OptionalFailure<Accumulator<?, ?>>> accumulators =
		userAccumulators == null ?
			null :
			userAccumulators.entrySet()
				.stream()
				.collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue())));
	return StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulators);
}
 
Example #27
Source File: ArchivedExecutionGraph.java    From flink with Apache License 2.0
public ArchivedExecutionGraph(
		JobID jobID,
		String jobName,
		Map<JobVertexID, ArchivedExecutionJobVertex> tasks,
		List<ArchivedExecutionJobVertex> verticesInCreationOrder,
		long[] stateTimestamps,
		JobStatus state,
		@Nullable ErrorInfo failureCause,
		String jsonPlan,
		StringifiedAccumulatorResult[] archivedUserAccumulators,
		Map<String, SerializedValue<OptionalFailure<Object>>> serializedUserAccumulators,
		ArchivedExecutionConfig executionConfig,
		boolean isStoppable,
		@Nullable CheckpointCoordinatorConfiguration jobCheckpointingConfiguration,
		@Nullable CheckpointStatsSnapshot checkpointStatsSnapshot,
		@Nullable String stateBackendName) {

	this.jobID = Preconditions.checkNotNull(jobID);
	this.jobName = Preconditions.checkNotNull(jobName);
	this.tasks = Preconditions.checkNotNull(tasks);
	this.verticesInCreationOrder = Preconditions.checkNotNull(verticesInCreationOrder);
	this.stateTimestamps = Preconditions.checkNotNull(stateTimestamps);
	this.state = Preconditions.checkNotNull(state);
	this.failureCause = failureCause;
	this.jsonPlan = Preconditions.checkNotNull(jsonPlan);
	this.archivedUserAccumulators = Preconditions.checkNotNull(archivedUserAccumulators);
	this.serializedUserAccumulators = Preconditions.checkNotNull(serializedUserAccumulators);
	this.archivedExecutionConfig = Preconditions.checkNotNull(executionConfig);
	this.isStoppable = isStoppable;
	this.jobCheckpointingConfiguration = jobCheckpointingConfiguration;
	this.checkpointStatsSnapshot = checkpointStatsSnapshot;
	this.stateBackendName = stateBackendName;
}
 
Example #28
Source File: ExecutionJobVertex.java    From flink with Apache License 2.0
public StringifiedAccumulatorResult[] getAggregatedUserAccumulatorsStringified() {
	Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>();

	for (ExecutionVertex vertex : taskVertices) {
		Map<String, Accumulator<?, ?>> next = vertex.getCurrentExecutionAttempt().getUserAccumulators();
		if (next != null) {
			AccumulatorHelper.mergeInto(userAccumulators, next);
		}
	}

	return StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators);
}
 
Example #29
Source File: JobExceptionsHandlerTest.java    From flink with Apache License 2.0
private static ArchivedExecutionJobVertex createArchivedExecutionJobVertex(JobVertexID jobVertexID) {
	final StringifiedAccumulatorResult[] emptyAccumulators = new StringifiedAccumulatorResult[0];
	final long[] timestamps = new long[ExecutionState.values().length];
	final ExecutionState expectedState = ExecutionState.RUNNING;

	final LocalTaskManagerLocation assignedResourceLocation = new LocalTaskManagerLocation();
	final AllocationID allocationID = new AllocationID();

	final int subtaskIndex = 1;
	final int attempt = 2;
	return new ArchivedExecutionJobVertex(
		new ArchivedExecutionVertex[]{
			new ArchivedExecutionVertex(
				subtaskIndex,
				"test task",
				new ArchivedExecution(
					new StringifiedAccumulatorResult[0],
					null,
					new ExecutionAttemptID(),
					attempt,
					expectedState,
					"error",
					assignedResourceLocation,
					allocationID,
					subtaskIndex,
					timestamps),
				new EvictingBoundedList<>(0)
			)
		},
		jobVertexID,
		jobVertexID.toString(),
		1,
		1,
		ResourceProfile.UNKNOWN,
		emptyAccumulators);
}
 
Example #30
Source File: ArchivedExecutionGraphTest.java    From flink with Apache License 2.0
private static void compareStringifiedAccumulators(StringifiedAccumulatorResult[] runtimeAccs, StringifiedAccumulatorResult[] archivedAccs) {
	assertEquals(runtimeAccs.length, archivedAccs.length);

	for (int x = 0; x < runtimeAccs.length; x++) {
		StringifiedAccumulatorResult runtimeResult = runtimeAccs[x];
		StringifiedAccumulatorResult archivedResult = archivedAccs[x];

		assertEquals(runtimeResult.getName(), archivedResult.getName());
		assertEquals(runtimeResult.getType(), archivedResult.getType());
		assertEquals(runtimeResult.getValue(), archivedResult.getValue());
	}
}