org.apache.flink.runtime.util.EvictingBoundedList Java Examples

The following examples show how to use org.apache.flink.runtime.util.EvictingBoundedList. They are taken from the Flink-CEPplus and Flink projects; the source file each snippet comes from is noted above it.
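Before the examples, here is a minimal usage sketch pieced together from them: the list is constructed with a maximum capacity (or as a copy of another instance) and elements are appended with add(), with the oldest entries being evicted once the capacity is exceeded. The capacity of 16 and the helper method below are illustrative placeholders, not Flink API.

static EvictingBoundedList<ArchivedExecution> buildHistory(List<ArchivedExecution> attempts) {
	// Bounded history: keeps at most 16 entries, evicting the oldest ones beyond that limit.
	EvictingBoundedList<ArchivedExecution> history = new EvictingBoundedList<>(16);
	for (ArchivedExecution attempt : attempts) {
		history.add(attempt);
	}
	// Copy constructor, as used by ExecutionVertex#getCopyOfPriorExecutionsList in Examples #10, #12 and #14.
	return new EvictingBoundedList<>(history);
}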
Example #1
Source File: ArchivedExecutionVertex.java    From Flink-CEPplus with Apache License 2.0
public ArchivedExecutionVertex(
		int subTaskIndex, String taskNameWithSubtask,
		ArchivedExecution currentExecution, EvictingBoundedList<ArchivedExecution> priorExecutions) {
	this.subTaskIndex = subTaskIndex;
	this.taskNameWithSubtask = taskNameWithSubtask;
	this.currentExecution = currentExecution;
	this.priorExecutions = priorExecutions;
}
 
Example #2
Source File: ArchivedExecutionVertexBuilder.java    From Flink-CEPplus with Apache License 2.0
public ArchivedExecutionVertexBuilder setPriorExecutions(List<ArchivedExecution> priorExecutions) {
	this.priorExecutions = new EvictingBoundedList<>(priorExecutions.size());
	for (ArchivedExecution execution : priorExecutions) {
		this.priorExecutions.add(execution);
	}
	return this;
}
 
Example #3
Source File: ArchivedExecutionVertexBuilder.java    From Flink-CEPplus with Apache License 2.0
public ArchivedExecutionVertex build() {
	Preconditions.checkNotNull(currentExecution);
	return new ArchivedExecutionVertex(
		subtaskIndex,
		taskNameWithSubtask != null ? taskNameWithSubtask : "task_" + RANDOM.nextInt() + "_" + subtaskIndex,
		currentExecution,
		priorExecutions != null ? priorExecutions : new EvictingBoundedList<ArchivedExecution>(0)
	);
}
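When no prior executions have been set on the builder, build() falls back to new EvictingBoundedList<ArchivedExecution>(0), i.e. an empty history with zero capacity, so the archived vertex simply carries no prior attempts.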
 
Example #4
Source File: ArchivedExecutionVertex.java    From flink with Apache License 2.0
public ArchivedExecutionVertex(
		int subTaskIndex, String taskNameWithSubtask,
		ArchivedExecution currentExecution, EvictingBoundedList<ArchivedExecution> priorExecutions) {
	this.subTaskIndex = subTaskIndex;
	this.taskNameWithSubtask = taskNameWithSubtask;
	this.currentExecution = currentExecution;
	this.priorExecutions = priorExecutions;
}
 
Example #5
Source File: ArchivedExecutionVertexBuilder.java    From flink with Apache License 2.0
public ArchivedExecutionVertexBuilder setPriorExecutions(List<ArchivedExecution> priorExecutions) {
	this.priorExecutions = new EvictingBoundedList<>(priorExecutions.size());
	for (ArchivedExecution execution : priorExecutions) {
		this.priorExecutions.add(execution);
	}
	return this;
}
 
Example #6
Source File: ArchivedExecutionVertexBuilder.java    From flink with Apache License 2.0
public ArchivedExecutionVertex build() {
	Preconditions.checkNotNull(currentExecution);
	return new ArchivedExecutionVertex(
		subtaskIndex,
		taskNameWithSubtask != null ? taskNameWithSubtask : "task_" + RANDOM.nextInt() + "_" + subtaskIndex,
		currentExecution,
		priorExecutions != null ? priorExecutions : new EvictingBoundedList<ArchivedExecution>(0)
	);
}
 
Example #7
Source File: ArchivedExecutionVertex.java    From flink with Apache License 2.0
public ArchivedExecutionVertex(
		int subTaskIndex, String taskNameWithSubtask,
		ArchivedExecution currentExecution, EvictingBoundedList<ArchivedExecution> priorExecutions) {
	this.subTaskIndex = subTaskIndex;
	this.taskNameWithSubtask = taskNameWithSubtask;
	this.currentExecution = currentExecution;
	this.priorExecutions = priorExecutions;
}
 
Example #8
Source File: JobExceptionsHandlerTest.java    From flink with Apache License 2.0
private static ArchivedExecutionJobVertex createArchivedExecutionJobVertex(JobVertexID jobVertexID) {
	final StringifiedAccumulatorResult[] emptyAccumulators = new StringifiedAccumulatorResult[0];
	final long[] timestamps = new long[ExecutionState.values().length];
	final ExecutionState expectedState = ExecutionState.RUNNING;

	final LocalTaskManagerLocation assignedResourceLocation = new LocalTaskManagerLocation();
	final AllocationID allocationID = new AllocationID();

	final int subtaskIndex = 1;
	final int attempt = 2;
	return new ArchivedExecutionJobVertex(
		new ArchivedExecutionVertex[]{
			new ArchivedExecutionVertex(
				subtaskIndex,
				"test task",
				new ArchivedExecution(
					new StringifiedAccumulatorResult[0],
					null,
					new ExecutionAttemptID(),
					attempt,
					expectedState,
					"error",
					assignedResourceLocation,
					allocationID,
					subtaskIndex,
					timestamps),
				new EvictingBoundedList<>(0)
			)
		},
		jobVertexID,
		jobVertexID.toString(),
		1,
		1,
		ResourceProfile.UNKNOWN,
		emptyAccumulators);
}
 
Example #9
Source File: ExecutionVertex.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates an ExecutionVertex.
 *
 * @param timeout
 *            The RPC timeout to use for deploy / cancel calls
 * @param initialGlobalModVersion
 *            The global modification version to initialize the first Execution with.
 * @param createTimestamp
 *            The timestamp for the vertex creation, used to initialize the first Execution with.
 * @param maxPriorExecutionHistoryLength
 *            The number of prior Executions (= execution attempts) to keep.
 */
public ExecutionVertex(
		ExecutionJobVertex jobVertex,
		int subTaskIndex,
		IntermediateResult[] producedDataSets,
		Time timeout,
		long initialGlobalModVersion,
		long createTimestamp,
		int maxPriorExecutionHistoryLength) {

	this.jobVertex = jobVertex;
	this.subTaskIndex = subTaskIndex;
	this.taskNameWithSubtask = String.format("%s (%d/%d)",
			jobVertex.getJobVertex().getName(), subTaskIndex + 1, jobVertex.getParallelism());

	this.resultPartitions = new LinkedHashMap<>(producedDataSets.length, 1);

	for (IntermediateResult result : producedDataSets) {
		IntermediateResultPartition irp = new IntermediateResultPartition(result, this, subTaskIndex);
		result.setPartition(subTaskIndex, irp);

		resultPartitions.put(irp.getPartitionId(), irp);
	}

	this.inputEdges = new ExecutionEdge[jobVertex.getJobVertex().getInputs().size()][];

	this.priorExecutions = new EvictingBoundedList<>(maxPriorExecutionHistoryLength);

	this.currentExecution = new Execution(
		getExecutionGraph().getFutureExecutor(),
		this,
		0,
		initialGlobalModVersion,
		createTimestamp,
		timeout);

	// create a co-location scheduling hint, if necessary
	CoLocationGroup clg = jobVertex.getCoLocationGroup();
	if (clg != null) {
		this.locationConstraint = clg.getLocationConstraint(subTaskIndex);
	}
	else {
		this.locationConstraint = null;
	}

	getExecutionGraph().registerExecution(currentExecution);

	this.timeout = timeout;
}
 
Example #10
Source File: ExecutionVertex.java    From Flink-CEPplus with Apache License 2.0
EvictingBoundedList<ArchivedExecution> getCopyOfPriorExecutionsList() {
	synchronized (priorExecutions) {
		return new EvictingBoundedList<>(priorExecutions);
	}
}
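The snapshot is taken while holding the monitor of priorExecutions, so it cannot interleave with an attempt being archived concurrently. A hedged sketch of the writer side of that convention (the archiveExecution helper is hypothetical, not taken from the Flink sources):

// Hypothetical writer side of the locking convention used above: append a finished
// attempt under the same lock that getCopyOfPriorExecutionsList() takes.
void archiveExecution(ArchivedExecution finishedAttempt) {
	synchronized (priorExecutions) {
		priorExecutions.add(finishedAttempt);
	}
}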
 
Example #11
Source File: ExecutionVertex.java    From flink with Apache License 2.0
/**
 * Creates an ExecutionVertex.
 *
 * @param timeout
 *            The RPC timeout to use for deploy / cancel calls
 * @param initialGlobalModVersion
 *            The global modification version to initialize the first Execution with.
 * @param createTimestamp
 *            The timestamp for the vertex creation, used to initialize the first Execution with.
 * @param maxPriorExecutionHistoryLength
 *            The number of prior Executions (= execution attempts) to keep.
 */
public ExecutionVertex(
		ExecutionJobVertex jobVertex,
		int subTaskIndex,
		IntermediateResult[] producedDataSets,
		Time timeout,
		long initialGlobalModVersion,
		long createTimestamp,
		int maxPriorExecutionHistoryLength) {

	this.jobVertex = jobVertex;
	this.subTaskIndex = subTaskIndex;
	this.executionVertexId = new ExecutionVertexID(jobVertex.getJobVertexId(), subTaskIndex);
	this.taskNameWithSubtask = String.format("%s (%d/%d)",
			jobVertex.getJobVertex().getName(), subTaskIndex + 1, jobVertex.getParallelism());

	this.resultPartitions = new LinkedHashMap<>(producedDataSets.length, 1);

	for (IntermediateResult result : producedDataSets) {
		IntermediateResultPartition irp = new IntermediateResultPartition(result, this, subTaskIndex);
		result.setPartition(subTaskIndex, irp);

		resultPartitions.put(irp.getPartitionId(), irp);
	}

	this.inputEdges = new ExecutionEdge[jobVertex.getJobVertex().getInputs().size()][];

	this.priorExecutions = new EvictingBoundedList<>(maxPriorExecutionHistoryLength);

	this.currentExecution = new Execution(
		getExecutionGraph().getFutureExecutor(),
		this,
		0,
		initialGlobalModVersion,
		createTimestamp,
		timeout);

	// create a co-location scheduling hint, if necessary
	CoLocationGroup clg = jobVertex.getCoLocationGroup();
	if (clg != null) {
		this.locationConstraint = clg.getLocationConstraint(subTaskIndex);
	}
	else {
		this.locationConstraint = null;
	}

	getExecutionGraph().registerExecution(currentExecution);

	this.timeout = timeout;
	this.inputSplits = new ArrayList<>();
}
 
Example #12
Source File: ExecutionVertex.java    From flink with Apache License 2.0
EvictingBoundedList<ArchivedExecution> getCopyOfPriorExecutionsList() {
	synchronized (priorExecutions) {
		return new EvictingBoundedList<>(priorExecutions);
	}
}
 
Example #13
Source File: ExecutionVertex.java    From flink with Apache License 2.0
/**
 * Creates an ExecutionVertex.
 *
 * @param timeout
 *            The RPC timeout to use for deploy / cancel calls
 * @param initialGlobalModVersion
 *            The global modification version to initialize the first Execution with.
 * @param createTimestamp
 *            The timestamp for the vertex creation, used to initialize the first Execution with.
 * @param maxPriorExecutionHistoryLength
 *            The number of prior Executions (= execution attempts) to keep.
 */
public ExecutionVertex(
		ExecutionJobVertex jobVertex,
		int subTaskIndex,
		IntermediateResult[] producedDataSets,
		Time timeout,
		long initialGlobalModVersion,
		long createTimestamp,
		int maxPriorExecutionHistoryLength) {

	this.jobVertex = jobVertex;
	this.subTaskIndex = subTaskIndex;
	this.executionVertexId = new ExecutionVertexID(jobVertex.getJobVertexId(), subTaskIndex);
	this.taskNameWithSubtask = String.format("%s (%d/%d)",
			jobVertex.getJobVertex().getName(), subTaskIndex + 1, jobVertex.getParallelism());

	this.resultPartitions = new LinkedHashMap<>(producedDataSets.length, 1);

	for (IntermediateResult result : producedDataSets) {
		IntermediateResultPartition irp = new IntermediateResultPartition(result, this, subTaskIndex);
		result.setPartition(subTaskIndex, irp);

		resultPartitions.put(irp.getPartitionId(), irp);
	}

	this.inputEdges = new ExecutionEdge[jobVertex.getJobVertex().getInputs().size()][];

	this.priorExecutions = new EvictingBoundedList<>(maxPriorExecutionHistoryLength);

	this.currentExecution = new Execution(
		getExecutionGraph().getFutureExecutor(),
		this,
		0,
		initialGlobalModVersion,
		createTimestamp,
		timeout);

	// create a co-location scheduling hint, if necessary
	CoLocationGroup clg = jobVertex.getCoLocationGroup();
	if (clg != null) {
		this.locationConstraint = clg.getLocationConstraint(subTaskIndex);
	}
	else {
		this.locationConstraint = null;
	}

	getExecutionGraph().registerExecution(currentExecution);

	this.timeout = timeout;
	this.inputSplits = new ArrayList<>();
}
 
Example #14
Source File: ExecutionVertex.java    From flink with Apache License 2.0
EvictingBoundedList<ArchivedExecution> getCopyOfPriorExecutionsList() {
	synchronized (priorExecutions) {
		return new EvictingBoundedList<>(priorExecutions);
	}
}