org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult Java Examples

The following examples show how to use org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult. They are extracted from open source projects; the header above each example names the source project, author, file, and license.
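Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the small API surface they rely on: constructing a StringifiedAccumulatorResult from name/type/value strings, reading it back via getName(), getType() and getValue(), and converting a map of live accumulators with the static stringifyAccumulatorResults(...) helper, as Execution#getUserAccumulatorsStringified() does further down. The class name, accumulator name, and values in this sketch are illustrative placeholders.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.api.common.accumulators.Accumulator;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult;
import org.apache.flink.util.OptionalFailure;

public class StringifiedAccumulatorResultSketch {

	public static void main(String[] args) {
		// Build a result directly from name / type / value strings,
		// as the builder and test utilities in the examples below do.
		StringifiedAccumulatorResult direct =
			new StringifiedAccumulatorResult("numRecords", "IntCounter", "42");
		System.out.println(direct.getName() + " (" + direct.getType() + ") = " + direct.getValue());

		// Stringify a map of live accumulators, mirroring the pattern used by
		// Execution#getUserAccumulatorsStringified() in the examples below.
		IntCounter counter = new IntCounter();
		counter.add(42);

		Map<String, OptionalFailure<Accumulator<?, ?>>> accumulators = new HashMap<>();
		accumulators.put("numRecords", OptionalFailure.<Accumulator<?, ?>>of(counter));

		StringifiedAccumulatorResult[] results =
			StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulators);
		for (StringifiedAccumulatorResult result : results) {
			System.out.println(result.getName() + " (" + result.getType() + ") = " + result.getValue());
		}
	}
}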
Example #1
Source Project: Flink-CEPplus   Author: ljygz   File: JobVertexAccumulatorsHandler.java    License: Apache License 2.0
@Override
protected JobVertexAccumulatorsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	StringifiedAccumulatorResult[] accs = jobVertex.getAggregatedUserAccumulatorsStringified();
	ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(
			new UserAccumulator(
				acc.getName(),
				acc.getType(),
				acc.getValue()));
	}

	return new JobVertexAccumulatorsInfo(jobVertex.getJobVertexId().toString(), userAccumulatorList);
}
 
Example #2
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedExecution.java    License: Apache License 2.0
public ArchivedExecution(
		StringifiedAccumulatorResult[] userAccumulators, IOMetrics ioMetrics,
		ExecutionAttemptID attemptId, int attemptNumber, ExecutionState state, String failureCause,
		TaskManagerLocation assignedResourceLocation, AllocationID assignedAllocationID, int parallelSubtaskIndex,
		long[] stateTimestamps) {
	this.userAccumulators = userAccumulators;
	this.ioMetrics = ioMetrics;
	this.failureCause = failureCause;
	this.assignedResourceLocation = assignedResourceLocation;
	this.attemptNumber = attemptNumber;
	this.attemptId = attemptId;
	this.state = state;
	this.stateTimestamps = stateTimestamps;
	this.parallelSubtaskIndex = parallelSubtaskIndex;
	this.assignedAllocationID = assignedAllocationID;
}
 
Example #3
Source Project: Flink-CEPplus   Author: ljygz   File: ExecutionGraphCacheTest.java    License: Apache License 2.0
public SuspendableAccessExecutionGraph(JobID jobId) {
	super(
		jobId,
		"ExecutionGraphCacheTest",
		Collections.emptyMap(),
		Collections.emptyList(),
		new long[0],
		JobStatus.RUNNING,
		new ErrorInfo(new FlinkException("Test"), 42L),
		"",
		new StringifiedAccumulatorResult[0],
		Collections.emptyMap(),
		new ArchivedExecutionConfig(new ExecutionConfig()),
		false,
		null,
		null);

	jobStatus = super.getState();
}
 
Example #4
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedExecutionGraphBuilder.java    License: Apache License 2.0
public ArchivedExecutionGraph build() {
	JobID jobID = this.jobID != null ? this.jobID : new JobID();
	String jobName = this.jobName != null ? this.jobName : "job_" + RANDOM.nextInt();

	if (tasks == null) {
		tasks = Collections.emptyMap();
	}

	return new ArchivedExecutionGraph(
		jobID,
		jobName,
		tasks,
		verticesInCreationOrder != null ? verticesInCreationOrder : new ArrayList<>(tasks.values()),
		stateTimestamps != null ? stateTimestamps : new long[JobStatus.values().length],
		state != null ? state : JobStatus.FINISHED,
		failureCause,
		jsonPlan != null ? jsonPlan : "{\"jobid\":\"" + jobID + "\", \"name\":\"" + jobName + "\", \"nodes\":[]}",
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0],
		serializedUserAccumulators != null ? serializedUserAccumulators : Collections.emptyMap(),
		archivedExecutionConfig != null ? archivedExecutionConfig : new ArchivedExecutionConfigBuilder().build(),
		isStoppable,
		null,
		null
	);
}
 
Example #5
Source Project: flink   Author: flink-tpc-ds   File: JobVertexAccumulatorsHandler.java    License: Apache License 2.0
@Override
protected JobVertexAccumulatorsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	StringifiedAccumulatorResult[] accs = jobVertex.getAggregatedUserAccumulatorsStringified();
	ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(
			new UserAccumulator(
				acc.getName(),
				acc.getType(),
				acc.getValue()));
	}

	return new JobVertexAccumulatorsInfo(jobVertex.getJobVertexId().toString(), userAccumulatorList);
}
 
Example #6
Source Project: flink   Author: flink-tpc-ds   File: ArchivedExecution.java    License: Apache License 2.0
public ArchivedExecution(
		StringifiedAccumulatorResult[] userAccumulators, IOMetrics ioMetrics,
		ExecutionAttemptID attemptId, int attemptNumber, ExecutionState state, String failureCause,
		TaskManagerLocation assignedResourceLocation, AllocationID assignedAllocationID, int parallelSubtaskIndex,
		long[] stateTimestamps) {
	this.userAccumulators = userAccumulators;
	this.ioMetrics = ioMetrics;
	this.failureCause = failureCause;
	this.assignedResourceLocation = assignedResourceLocation;
	this.attemptNumber = attemptNumber;
	this.attemptId = attemptId;
	this.state = state;
	this.stateTimestamps = stateTimestamps;
	this.parallelSubtaskIndex = parallelSubtaskIndex;
	this.assignedAllocationID = assignedAllocationID;
}
 
Example #7
Source Project: flink   Author: flink-tpc-ds   File: ArchivedExecutionJobVertex.java    License: Apache License 2.0
public ArchivedExecutionJobVertex(
		ArchivedExecutionVertex[] taskVertices,
		JobVertexID id,
		String name,
		int parallelism,
		int maxParallelism,
		ResourceProfile resourceProfile,
		StringifiedAccumulatorResult[] archivedUserAccumulators) {
	this.taskVertices = taskVertices;
	this.id = id;
	this.name = name;
	this.parallelism = parallelism;
	this.maxParallelism = maxParallelism;
	this.resourceProfile = resourceProfile;
	this.archivedUserAccumulators = archivedUserAccumulators;
}
 
Example #8
Source Project: flink   Author: flink-tpc-ds   File: ExecutionGraphCacheTest.java    License: Apache License 2.0
public SuspendableAccessExecutionGraph(JobID jobId) {
	super(
		jobId,
		"ExecutionGraphCacheTest",
		Collections.emptyMap(),
		Collections.emptyList(),
		new long[0],
		JobStatus.RUNNING,
		new ErrorInfo(new FlinkException("Test"), 42L),
		"",
		new StringifiedAccumulatorResult[0],
		Collections.emptyMap(),
		new ArchivedExecutionConfig(new ExecutionConfig()),
		false,
		null,
		null);

	jobStatus = super.getState();
}
 
Example #9
Source Project: flink   Author: flink-tpc-ds   File: ArchivedExecutionGraphBuilder.java    License: Apache License 2.0
public ArchivedExecutionGraph build() {
	JobID jobID = this.jobID != null ? this.jobID : new JobID();
	String jobName = this.jobName != null ? this.jobName : "job_" + RANDOM.nextInt();

	if (tasks == null) {
		tasks = Collections.emptyMap();
	}

	return new ArchivedExecutionGraph(
		jobID,
		jobName,
		tasks,
		verticesInCreationOrder != null ? verticesInCreationOrder : new ArrayList<>(tasks.values()),
		stateTimestamps != null ? stateTimestamps : new long[JobStatus.values().length],
		state != null ? state : JobStatus.FINISHED,
		failureCause,
		jsonPlan != null ? jsonPlan : "{\"jobid\":\"" + jobID + "\", \"name\":\"" + jobName + "\", \"nodes\":[]}",
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0],
		serializedUserAccumulators != null ? serializedUserAccumulators : Collections.emptyMap(),
		archivedExecutionConfig != null ? archivedExecutionConfig : new ArchivedExecutionConfigBuilder().build(),
		isStoppable,
		null,
		null
	);
}
 
Example #10
Source Project: flink   Author: apache   File: JobVertexAccumulatorsHandler.java    License: Apache License 2.0
@Override
protected JobVertexAccumulatorsInfo handleRequest(
		HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request,
		AccessExecutionJobVertex jobVertex) throws RestHandlerException {

	StringifiedAccumulatorResult[] accs = jobVertex.getAggregatedUserAccumulatorsStringified();
	ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(
			new UserAccumulator(
				acc.getName(),
				acc.getType(),
				acc.getValue()));
	}

	return new JobVertexAccumulatorsInfo(jobVertex.getJobVertexId().toString(), userAccumulatorList);
}
 
Example #11
Source Project: flink   Author: apache   File: ArchivedExecution.java    License: Apache License 2.0
public ArchivedExecution(
		StringifiedAccumulatorResult[] userAccumulators, IOMetrics ioMetrics,
		ExecutionAttemptID attemptId, int attemptNumber, ExecutionState state, String failureCause,
		TaskManagerLocation assignedResourceLocation, AllocationID assignedAllocationID, int parallelSubtaskIndex,
		long[] stateTimestamps) {
	this.userAccumulators = userAccumulators;
	this.ioMetrics = ioMetrics;
	this.failureCause = failureCause;
	this.assignedResourceLocation = assignedResourceLocation;
	this.attemptNumber = attemptNumber;
	this.attemptId = attemptId;
	this.state = state;
	this.stateTimestamps = stateTimestamps;
	this.parallelSubtaskIndex = parallelSubtaskIndex;
	this.assignedAllocationID = assignedAllocationID;
}
 
Example #12
Source Project: flink   Author: apache   File: ArchivedExecutionJobVertex.java    License: Apache License 2.0
public ArchivedExecutionJobVertex(
		ArchivedExecutionVertex[] taskVertices,
		JobVertexID id,
		String name,
		int parallelism,
		int maxParallelism,
		ResourceProfile resourceProfile,
		StringifiedAccumulatorResult[] archivedUserAccumulators) {
	this.taskVertices = taskVertices;
	this.id = id;
	this.name = name;
	this.parallelism = parallelism;
	this.maxParallelism = maxParallelism;
	this.resourceProfile = resourceProfile;
	this.archivedUserAccumulators = archivedUserAccumulators;
}
 
Example #13
Source Project: flink   Author: apache   File: ArchivedExecutionGraphBuilder.java    License: Apache License 2.0
public ArchivedExecutionGraph build() {
	JobID jobID = this.jobID != null ? this.jobID : new JobID();
	String jobName = this.jobName != null ? this.jobName : "job_" + RANDOM.nextInt();

	if (tasks == null) {
		tasks = Collections.emptyMap();
	}

	return new ArchivedExecutionGraph(
		jobID,
		jobName,
		tasks,
		verticesInCreationOrder != null ? verticesInCreationOrder : new ArrayList<>(tasks.values()),
		stateTimestamps != null ? stateTimestamps : new long[JobStatus.values().length],
		state != null ? state : JobStatus.FINISHED,
		failureCause,
		jsonPlan != null ? jsonPlan : "{\"jobid\":\"" + jobID + "\", \"name\":\"" + jobName + "\", \"nodes\":[]}",
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0],
		serializedUserAccumulators != null ? serializedUserAccumulators : Collections.emptyMap(),
		archivedExecutionConfig != null ? archivedExecutionConfig : new ArchivedExecutionConfigBuilder().build(),
		isStoppable,
		null,
		null,
		"stateBackendName"
	);
}
 
Example #14
Source Project: flink   Author: apache   File: DefaultExecutionGraphCacheTest.java    License: Apache License 2.0
public SuspendableAccessExecutionGraph(JobID jobId) {
	super(
		jobId,
		"DefaultExecutionGraphCacheTest",
		Collections.emptyMap(),
		Collections.emptyList(),
		new long[0],
		JobStatus.RUNNING,
		new ErrorInfo(new FlinkException("Test"), 42L),
		"",
		new StringifiedAccumulatorResult[0],
		Collections.emptyMap(),
		new ArchivedExecutionConfig(new ExecutionConfig()),
		false,
		null,
		null,
		"stateBackendName");

	jobStatus = super.getState();
}
 
Example #15
Source Project: Flink-CEPplus   Author: ljygz   File: SubtasksAllAccumulatorsHandler.java    License: Apache License 2.0
@Override
protected SubtasksAllAccumulatorsInfo handleRequest(HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request, AccessExecutionJobVertex jobVertex) throws RestHandlerException {
	JobVertexID jobVertexId = jobVertex.getJobVertexId();
	int parallelism = jobVertex.getParallelism();

	final List<SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo> subtaskAccumulatorsInfos = new ArrayList<>();

	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		StringifiedAccumulatorResult[] accs = vertex.getCurrentExecutionAttempt().getUserAccumulatorsStringified();
		List<UserAccumulator> userAccumulators = new ArrayList<>(accs.length);
		for (StringifiedAccumulatorResult acc : accs) {
			userAccumulators.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
		}

		subtaskAccumulatorsInfos.add(
			new SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo(
				vertex.getCurrentExecutionAttempt().getParallelSubtaskIndex(),
				vertex.getCurrentExecutionAttempt().getAttemptNumber(),
				locationString,
				userAccumulators
			));
	}

	return new SubtasksAllAccumulatorsInfo(jobVertexId, parallelism, subtaskAccumulatorsInfos);
}
 
Example #16
Source Project: Flink-CEPplus   Author: ljygz   File: SubtaskExecutionAttemptAccumulatorsHandler.java    License: Apache License 2.0
private static SubtaskExecutionAttemptAccumulatorsInfo createAccumulatorInfo(AccessExecution execution) {
	final StringifiedAccumulatorResult[] accs = execution.getUserAccumulatorsStringified();
	final ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
	}

	return new SubtaskExecutionAttemptAccumulatorsInfo(
		execution.getParallelSubtaskIndex(),
		execution.getAttemptNumber(),
		execution.getAttemptId().toString(),
		userAccumulatorList);
}
 
Example #17
Source Project: Flink-CEPplus   Author: ljygz   File: Execution.java    License: Apache License 2.0
@Override
public StringifiedAccumulatorResult[] getUserAccumulatorsStringified() {
	Map<String, OptionalFailure<Accumulator<?, ?>>> accumulators =
		userAccumulators == null ?
			null :
			userAccumulators.entrySet()
				.stream()
				.collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue())));
	return StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulators);
}
 
Example #18
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedExecutionGraph.java    License: Apache License 2.0
public ArchivedExecutionGraph(
		JobID jobID,
		String jobName,
		Map<JobVertexID, ArchivedExecutionJobVertex> tasks,
		List<ArchivedExecutionJobVertex> verticesInCreationOrder,
		long[] stateTimestamps,
		JobStatus state,
		@Nullable ErrorInfo failureCause,
		String jsonPlan,
		StringifiedAccumulatorResult[] archivedUserAccumulators,
		Map<String, SerializedValue<OptionalFailure<Object>>> serializedUserAccumulators,
		ArchivedExecutionConfig executionConfig,
		boolean isStoppable,
		@Nullable CheckpointCoordinatorConfiguration jobCheckpointingConfiguration,
		@Nullable CheckpointStatsSnapshot checkpointStatsSnapshot) {

	this.jobID = Preconditions.checkNotNull(jobID);
	this.jobName = Preconditions.checkNotNull(jobName);
	this.tasks = Preconditions.checkNotNull(tasks);
	this.verticesInCreationOrder = Preconditions.checkNotNull(verticesInCreationOrder);
	this.stateTimestamps = Preconditions.checkNotNull(stateTimestamps);
	this.state = Preconditions.checkNotNull(state);
	this.failureCause = failureCause;
	this.jsonPlan = Preconditions.checkNotNull(jsonPlan);
	this.archivedUserAccumulators = Preconditions.checkNotNull(archivedUserAccumulators);
	this.serializedUserAccumulators = Preconditions.checkNotNull(serializedUserAccumulators);
	this.archivedExecutionConfig = Preconditions.checkNotNull(executionConfig);
	this.isStoppable = isStoppable;
	this.jobCheckpointingConfiguration = jobCheckpointingConfiguration;
	this.checkpointStatsSnapshot = checkpointStatsSnapshot;
}
 
Example #19
Source Project: Flink-CEPplus   Author: ljygz   File: ExecutionJobVertex.java    License: Apache License 2.0
public StringifiedAccumulatorResult[] getAggregatedUserAccumulatorsStringified() {
	Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>();

	for (ExecutionVertex vertex : taskVertices) {
		Map<String, Accumulator<?, ?>> next = vertex.getCurrentExecutionAttempt().getUserAccumulators();
		if (next != null) {
			AccumulatorHelper.mergeInto(userAccumulators, next);
		}
	}

	return StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators);
}
 
Example #20
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedExecutionJobVertex.java    License: Apache License 2.0
public ArchivedExecutionJobVertex(
		ArchivedExecutionVertex[] taskVertices,
		JobVertexID id,
		String name,
		int parallelism,
		int maxParallelism,
		StringifiedAccumulatorResult[] archivedUserAccumulators) {
	this.taskVertices = taskVertices;
	this.id = id;
	this.name = name;
	this.parallelism = parallelism;
	this.maxParallelism = maxParallelism;
	this.archivedUserAccumulators = archivedUserAccumulators;
}
 
Example #21
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedJobGenerationUtils.java    License: Apache License 2.0
private static void generateArchivedJob() throws Exception {
	// Attempt
	StringifiedAccumulatorResult acc1 = new StringifiedAccumulatorResult("name1", "type1", "value1");
	StringifiedAccumulatorResult acc2 = new StringifiedAccumulatorResult("name2", "type2", "value2");
	TaskManagerLocation location = new TaskManagerLocation(new ResourceID("hello"), InetAddress.getLocalHost(), 1234);
	AllocationID allocationID = new AllocationID(42L, 43L);
	originalAttempt = new ArchivedExecutionBuilder()
		.setStateTimestamps(new long[]{1, 2, 3, 4, 5, 6, 7, 8, 9})
		.setParallelSubtaskIndex(1)
		.setAttemptNumber(0)
		.setAssignedResourceLocation(location)
		.setAssignedAllocationID(allocationID)
		.setUserAccumulators(new StringifiedAccumulatorResult[]{acc1, acc2})
		.setState(ExecutionState.FINISHED)
		.setFailureCause("attemptException")
		.build();
	// Subtask
	originalSubtask = new ArchivedExecutionVertexBuilder()
		.setSubtaskIndex(originalAttempt.getParallelSubtaskIndex())
		.setTaskNameWithSubtask("hello(1/1)")
		.setCurrentExecution(originalAttempt)
		.build();
	// Task
	originalTask = new ArchivedExecutionJobVertexBuilder()
		.setTaskVertices(new ArchivedExecutionVertex[]{originalSubtask})
		.build();
	// Job
	Map<JobVertexID, ArchivedExecutionJobVertex> tasks = new HashMap<>();
	tasks.put(originalTask.getJobVertexId(), originalTask);
	originalJob = new ArchivedExecutionGraphBuilder()
		.setJobID(new JobID())
		.setTasks(tasks)
		.setFailureCause(new ErrorInfo(new Exception("jobException"), originalAttempt.getStateTimestamp(ExecutionState.FAILED)))
		.setState(JobStatus.FINISHED)
		.setStateTimestamps(new long[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11})
		.setArchivedUserAccumulators(new StringifiedAccumulatorResult[]{acc1, acc2})
		.build();
}
 
Example #22
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedJobGenerationUtils.java    License: Apache License 2.0
public static void compareStringifiedAccumulators(StringifiedAccumulatorResult[] expectedAccs, ArrayNode writtenAccs) {
	assertEquals(expectedAccs.length, writtenAccs.size());
	for (int x = 0; x < expectedAccs.length; x++) {
		JsonNode acc = writtenAccs.get(x);

		assertEquals(expectedAccs[x].getName(), acc.get("name").asText());
		assertEquals(expectedAccs[x].getType(), acc.get("type").asText());
		assertEquals(expectedAccs[x].getValue(), acc.get("value").asText());
	}
}
 
Example #23
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedExecutionJobVertexBuilder.java    License: Apache License 2.0
public ArchivedExecutionJobVertex build() {
	Preconditions.checkNotNull(taskVertices);
	return new ArchivedExecutionJobVertex(
		taskVertices,
		id != null ? id : new JobVertexID(),
		name != null ? name : "task_" + RANDOM.nextInt(),
		parallelism,
		maxParallelism,
		archivedUserAccumulators != null ? archivedUserAccumulators : new StringifiedAccumulatorResult[0]
	);
}
 
Example #24
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedExecutionBuilder.java    License: Apache License 2.0
public ArchivedExecution build() throws UnknownHostException {
	return new ArchivedExecution(
		userAccumulators != null ? userAccumulators : new StringifiedAccumulatorResult[0],
		ioMetrics != null ? ioMetrics : new TestIOMetrics(),
		attemptId != null ? attemptId : new ExecutionAttemptID(),
		attemptNumber,
		state != null ? state : ExecutionState.FINISHED,
		failureCause != null ? failureCause : "(null)",
		assignedResourceLocation != null ? assignedResourceLocation : new TaskManagerLocation(new ResourceID("tm"), InetAddress.getLocalHost(), 1234),
		assignedAllocationID != null ? assignedAllocationID : new AllocationID(0L, 0L),
		parallelSubtaskIndex,
		stateTimestamps != null ? stateTimestamps : new long[]{1, 2, 3, 4, 5, 5, 5, 5}
	);
}
 
Example #25
Source Project: Flink-CEPplus   Author: ljygz   File: ArchivedExecutionGraphTest.java    License: Apache License 2.0
private static void compareStringifiedAccumulators(StringifiedAccumulatorResult[] runtimeAccs, StringifiedAccumulatorResult[] archivedAccs) {
	assertEquals(runtimeAccs.length, archivedAccs.length);

	for (int x = 0; x < runtimeAccs.length; x++) {
		StringifiedAccumulatorResult runtimeResult = runtimeAccs[x];
		StringifiedAccumulatorResult archivedResult = archivedAccs[x];

		assertEquals(runtimeResult.getName(), archivedResult.getName());
		assertEquals(runtimeResult.getType(), archivedResult.getType());
		assertEquals(runtimeResult.getValue(), archivedResult.getValue());
	}
}
 
Example #26
Source Project: flink   Author: flink-tpc-ds   File: SubtasksAllAccumulatorsHandler.java    License: Apache License 2.0
@Override
protected SubtasksAllAccumulatorsInfo handleRequest(HandlerRequest<EmptyRequestBody, JobVertexMessageParameters> request, AccessExecutionJobVertex jobVertex) throws RestHandlerException {
	JobVertexID jobVertexId = jobVertex.getJobVertexId();
	int parallelism = jobVertex.getParallelism();

	final List<SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo> subtaskAccumulatorsInfos = new ArrayList<>();

	for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
		TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
		String locationString = location == null ? "(unassigned)" : location.getHostname();

		StringifiedAccumulatorResult[] accs = vertex.getCurrentExecutionAttempt().getUserAccumulatorsStringified();
		List<UserAccumulator> userAccumulators = new ArrayList<>(accs.length);
		for (StringifiedAccumulatorResult acc : accs) {
			userAccumulators.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
		}

		subtaskAccumulatorsInfos.add(
			new SubtasksAllAccumulatorsInfo.SubtaskAccumulatorsInfo(
				vertex.getCurrentExecutionAttempt().getParallelSubtaskIndex(),
				vertex.getCurrentExecutionAttempt().getAttemptNumber(),
				locationString,
				userAccumulators
			));
	}

	return new SubtasksAllAccumulatorsInfo(jobVertexId, parallelism, subtaskAccumulatorsInfos);
}
 
Example #27
Source Project: flink   Author: flink-tpc-ds   File: SubtaskExecutionAttemptAccumulatorsHandler.java    License: Apache License 2.0
private static SubtaskExecutionAttemptAccumulatorsInfo createAccumulatorInfo(AccessExecution execution) {
	final StringifiedAccumulatorResult[] accs = execution.getUserAccumulatorsStringified();
	final ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(accs.length);

	for (StringifiedAccumulatorResult acc : accs) {
		userAccumulatorList.add(new UserAccumulator(acc.getName(), acc.getType(), acc.getValue()));
	}

	return new SubtaskExecutionAttemptAccumulatorsInfo(
		execution.getParallelSubtaskIndex(),
		execution.getAttemptNumber(),
		execution.getAttemptId().toString(),
		userAccumulatorList);
}
 
Example #28
Source Project: flink   Author: flink-tpc-ds   File: Execution.java    License: Apache License 2.0
@Override
public StringifiedAccumulatorResult[] getUserAccumulatorsStringified() {
	Map<String, OptionalFailure<Accumulator<?, ?>>> accumulators =
		userAccumulators == null ?
			null :
			userAccumulators.entrySet()
				.stream()
				.collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue())));
	return StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulators);
}
 
Example #29
Source Project: flink   Author: flink-tpc-ds   File: ArchivedExecutionGraph.java    License: Apache License 2.0
public ArchivedExecutionGraph(
		JobID jobID,
		String jobName,
		Map<JobVertexID, ArchivedExecutionJobVertex> tasks,
		List<ArchivedExecutionJobVertex> verticesInCreationOrder,
		long[] stateTimestamps,
		JobStatus state,
		@Nullable ErrorInfo failureCause,
		String jsonPlan,
		StringifiedAccumulatorResult[] archivedUserAccumulators,
		Map<String, SerializedValue<OptionalFailure<Object>>> serializedUserAccumulators,
		ArchivedExecutionConfig executionConfig,
		boolean isStoppable,
		@Nullable CheckpointCoordinatorConfiguration jobCheckpointingConfiguration,
		@Nullable CheckpointStatsSnapshot checkpointStatsSnapshot) {

	this.jobID = Preconditions.checkNotNull(jobID);
	this.jobName = Preconditions.checkNotNull(jobName);
	this.tasks = Preconditions.checkNotNull(tasks);
	this.verticesInCreationOrder = Preconditions.checkNotNull(verticesInCreationOrder);
	this.stateTimestamps = Preconditions.checkNotNull(stateTimestamps);
	this.state = Preconditions.checkNotNull(state);
	this.failureCause = failureCause;
	this.jsonPlan = Preconditions.checkNotNull(jsonPlan);
	this.archivedUserAccumulators = Preconditions.checkNotNull(archivedUserAccumulators);
	this.serializedUserAccumulators = Preconditions.checkNotNull(serializedUserAccumulators);
	this.archivedExecutionConfig = Preconditions.checkNotNull(executionConfig);
	this.isStoppable = isStoppable;
	this.jobCheckpointingConfiguration = jobCheckpointingConfiguration;
	this.checkpointStatsSnapshot = checkpointStatsSnapshot;
}
 
Example #30
Source Project: flink   Author: flink-tpc-ds   File: ExecutionJobVertex.java    License: Apache License 2.0
public StringifiedAccumulatorResult[] getAggregatedUserAccumulatorsStringified() {
	Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>();

	for (ExecutionVertex vertex : taskVertices) {
		Map<String, Accumulator<?, ?>> next = vertex.getCurrentExecutionAttempt().getUserAccumulators();
		if (next != null) {
			AccumulatorHelper.mergeInto(userAccumulators, next);
		}
	}

	return StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators);
}