Java Code Examples for org.apache.flink.util.ExceptionUtils#stripExecutionException()

The following examples show how to use org.apache.flink.util.ExceptionUtils#stripExecutionException(). They are taken from open source projects; each example notes the original project and source file it comes from.
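
Before the examples, a minimal sketch of the pattern they all share may help: a blocking CompletableFuture.get() wraps any failure in an ExecutionException, and ExceptionUtils.stripExecutionException(...) unwraps it back to the original cause before it is rethrown or asserted on. The class name and the helper waitAndUnwrap below are illustrative only and do not come from any of the listed projects.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.FlinkException;

public class StripExecutionExceptionSketch {

	// Illustrative helper: blocks on the future and rethrows the unwrapped cause,
	// mirroring the catch blocks used throughout the examples below.
	static <T> T waitAndUnwrap(CompletableFuture<T> future) throws FlinkException {
		try {
			return future.get();
		} catch (InterruptedException ie) {
			Thread.currentThread().interrupt();
			throw new FlinkException("Interrupted while waiting for the result.", ie);
		} catch (ExecutionException ee) {
			// stripExecutionException peels off ExecutionException wrappers and
			// returns the underlying cause.
			Throwable cause = ExceptionUtils.stripExecutionException(ee);
			throw new FlinkException("The asynchronous operation failed.", cause);
		}
	}

	public static void main(String[] args) {
		CompletableFuture<String> failed = new CompletableFuture<>();
		failed.completeExceptionally(new IllegalStateException("boom"));
		try {
			waitAndUnwrap(failed);
		} catch (FlinkException e) {
			// Prints the original IllegalStateException, not the ExecutionException wrapper.
			System.out.println("Cause: " + e.getCause());
		}
	}
}

Several of the examples (for instance RocksDBStateDownloader and RocksDBStateUploader) additionally chain ExceptionUtils.stripException(throwable, RuntimeException.class) to peel off a further RuntimeException wrapper before deciding which exception type to rethrow.
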
Example 1
Source File: CliFrontend.java    From flink with Apache License 2.0
/**
 * Sends a SavepointTriggerMessage to the job manager.
 */
private void triggerSavepoint(ClusterClient<?> clusterClient, JobID jobId, String savepointDirectory) throws FlinkException {
	logAndSysout("Triggering savepoint for job " + jobId + '.');

	CompletableFuture<String> savepointPathFuture = clusterClient.triggerSavepoint(jobId, savepointDirectory);

	logAndSysout("Waiting for response...");

	try {
		final String savepointPath = savepointPathFuture.get(clientTimeout.toMillis(), TimeUnit.MILLISECONDS);

		logAndSysout("Savepoint completed. Path: " + savepointPath);
		logAndSysout("You can resume your program from this savepoint with the run command.");
	} catch (Exception e) {
		Throwable cause = ExceptionUtils.stripExecutionException(e);
		throw new FlinkException("Triggering a savepoint for the job " + jobId + " failed.", cause);
	}
}
 
Example 2
Source File: TestingFatalErrorHandler.java    From flink with Apache License 2.0
@Nullable
public synchronized Throwable getException() {
	if (errorFuture.isDone()) {
		Throwable throwable;

		try {
			throwable = errorFuture.get();
		} catch (InterruptedException ie) {
			ExceptionUtils.checkInterrupted(ie);
			throw new FlinkRuntimeException("This should never happen since the future was completed.");
		} catch (ExecutionException e) {
			throwable = ExceptionUtils.stripExecutionException(e);
		}

		return throwable;
	} else {
		return null;
	}
}
 
Example 3
Source File: RocksDBStateDownloader.java    From flink with Apache License 2.0
/**
 * Copies all the files from the given stream state handles to the given path, renaming the files w.r.t. their
 * {@link StateHandleID}.
 */
private void downloadDataForAllStateHandles(
	Map<StateHandleID, StreamStateHandle> stateHandleMap,
	Path restoreInstancePath,
	CloseableRegistry closeableRegistry) throws Exception {

	try {
		List<Runnable> runnables = createDownloadRunnables(stateHandleMap, restoreInstancePath, closeableRegistry);
		List<CompletableFuture<Void>> futures = new ArrayList<>(runnables.size());
		for (Runnable runnable : runnables) {
			futures.add(CompletableFuture.runAsync(runnable, executorService));
		}
		FutureUtils.waitForAll(futures).get();
	} catch (ExecutionException e) {
		Throwable throwable = ExceptionUtils.stripExecutionException(e);
		throwable = ExceptionUtils.stripException(throwable, RuntimeException.class);
		if (throwable instanceof IOException) {
			throw (IOException) throwable;
		} else {
			throw new FlinkRuntimeException("Failed to download data for state handles.", e);
		}
	}
}
 
Example 4
Source File: RestClientTest.java    From flink with Apache License 2.0
@Test
public void testConnectionTimeout() throws Exception {
	final Configuration config = new Configuration();
	config.setLong(RestOptions.CONNECTION_TIMEOUT, 1);
	try (final RestClient restClient = new RestClient(RestClientConfiguration.fromConfiguration(config), Executors.directExecutor())) {
		restClient.sendRequest(
			unroutableIp,
			80,
			new TestMessageHeaders(),
			EmptyMessageParameters.getInstance(),
			EmptyRequestBody.getInstance())
			.get(60, TimeUnit.SECONDS);
	} catch (final ExecutionException e) {
		final Throwable throwable = ExceptionUtils.stripExecutionException(e);
		assertThat(throwable, instanceOf(ConnectTimeoutException.class));
		assertThat(throwable.getMessage(), containsString(unroutableIp));
	}
}
 
Example 5
Source File: RestServerEndpointITCase.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests that requests larger than {@link #TEST_REST_MAX_CONTENT_LENGTH} are rejected.
 */
@Test
public void testShouldRespectMaxContentLengthLimitForRequests() throws Exception {
	testHandler.handlerBody = id -> {
		throw new AssertionError("Request should not arrive at server.");
	};

	try {
		sendRequestToTestHandler(new TestRequest(2, createStringOfSize(TEST_REST_MAX_CONTENT_LENGTH))).get();
		fail("Expected exception not thrown");
	} catch (final ExecutionException e) {
		final Throwable throwable = ExceptionUtils.stripExecutionException(e);
		assertThat(throwable, instanceOf(RestClientException.class));
		assertThat(throwable.getMessage(), containsString("Try to raise"));
	}
}
 
Example 6
Source File: FutureUtilsTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests that a retry future is failed after all retries have been consumed.
 */
@Test(expected = FutureUtils.RetryException.class)
public void testRetryFailure() throws Throwable {
	final int retries = 3;

	CompletableFuture<?> retryFuture = FutureUtils.retry(
		() -> FutureUtils.completedExceptionally(new FlinkException("Test exception")),
		retries,
		TestingUtils.defaultExecutor());

	try {
		retryFuture.get();
	} catch (ExecutionException ee) {
		throw ExceptionUtils.stripExecutionException(ee);
	}
}
 
Example 7
Source File: RocksDBStateUploader.java    From Flink-CEPplus with Apache License 2.0
/**
 * Uploads all the files to the checkpoint file system using the specified number of threads.
 *
 * @param files The files to be uploaded to the checkpoint file system.
 * @param checkpointStreamFactory The checkpoint stream factory used to create the output streams.
 *
 * @throws Exception Thrown if uploading any of the files fails.
 */
public Map<StateHandleID, StreamStateHandle> uploadFilesToCheckpointFs(
	@Nonnull Map<StateHandleID, Path> files,
	CheckpointStreamFactory checkpointStreamFactory,
	CloseableRegistry closeableRegistry) throws Exception {

	Map<StateHandleID, StreamStateHandle> handles = new HashMap<>();

	Map<StateHandleID, CompletableFuture<StreamStateHandle>> futures =
		createUploadFutures(files, checkpointStreamFactory, closeableRegistry);

	try {
		FutureUtils.waitForAll(futures.values()).get();

		for (Map.Entry<StateHandleID, CompletableFuture<StreamStateHandle>> entry : futures.entrySet()) {
			handles.put(entry.getKey(), entry.getValue().get());
		}
	} catch (ExecutionException e) {
		Throwable throwable = ExceptionUtils.stripExecutionException(e);
		throwable = ExceptionUtils.stripException(throwable, RuntimeException.class);
		if (throwable instanceof IOException) {
			throw (IOException) throwable;
		} else {
			throw new FlinkRuntimeException("Failed to download data for state handles.", e);
		}
	}

	return handles;
}
 
Example 8
Source File: RestServerEndpointITCase.java    From flink with Apache License 2.0
/**
 * Tests that responses larger than {@link #TEST_REST_MAX_CONTENT_LENGTH} are rejected.
 */
@Test
public void testShouldRespectMaxContentLengthLimitForResponses() throws Exception {
	testHandler.handlerBody = id -> CompletableFuture.completedFuture(
		new TestResponse(id, createStringOfSize(TEST_REST_MAX_CONTENT_LENGTH)));

	try {
		sendRequestToTestHandler(new TestRequest(1)).get();
		fail("Expected exception not thrown");
	} catch (final ExecutionException e) {
		final Throwable throwable = ExceptionUtils.stripExecutionException(e);
		assertThat(throwable, instanceOf(TooLongFrameException.class));
		assertThat(throwable.getMessage(), containsString("Try to raise"));
	}
}
 
Example 9
Source File: RestServerEndpointITCase.java    From flink with Apache License 2.0
/**
 * Tests that a bad handler request (HandlerRequest cannot be created) is reported as a BAD_REQUEST
 * and not an internal server error.
 *
 * <p>See FLINK-7663
 */
@Test
public void testBadHandlerRequest() throws Exception {
	final FaultyTestParameters parameters = new FaultyTestParameters();

	parameters.faultyJobIDPathParameter.resolve(PATH_JOB_ID);
	((TestParameters) parameters).jobIDQueryParameter.resolve(Collections.singletonList(QUERY_JOB_ID));

	CompletableFuture<TestResponse> response = restClient.sendRequest(
		serverAddress.getHostName(),
		serverAddress.getPort(),
		new TestHeaders(),
		parameters,
		new TestRequest(2));

	try {
		response.get();

		fail("The request should fail with a bad request return code.");
	} catch (ExecutionException ee) {
		Throwable t = ExceptionUtils.stripExecutionException(ee);

		assertTrue(t instanceof RestClientException);

		RestClientException rce = (RestClientException) t;

		assertEquals(HttpResponseStatus.BAD_REQUEST, rce.getHttpResponseStatus());
	}
}
 
Example 10
Source File: StreamExecutionEnvironment.java    From flink with Apache License 2.0
/**
 * Triggers the program execution asynchronously. The environment will execute all parts of
 * the program that have resulted in a "sink" operation. Sink operations are
 * for example printing results or forwarding them to a message queue.
 *
 * @param streamGraph the stream graph representing the transformations
 * @return A {@link JobClient} that can be used to communicate with the submitted job, completed once the submission succeeds.
 * @throws Exception if an error occurs during job execution.
 */
@Internal
public JobClient executeAsync(StreamGraph streamGraph) throws Exception {
	checkNotNull(streamGraph, "StreamGraph cannot be null.");
	checkNotNull(configuration.get(DeploymentOptions.TARGET), "No execution.target specified in your configuration file.");

	final PipelineExecutorFactory executorFactory =
		executorServiceLoader.getExecutorFactory(configuration);

	checkNotNull(
		executorFactory,
		"Cannot find compatible factory for specified execution.target (=%s)",
		configuration.get(DeploymentOptions.TARGET));

	CompletableFuture<JobClient> jobClientFuture = executorFactory
		.getExecutor(configuration)
		.execute(streamGraph, configuration);

	try {
		JobClient jobClient = jobClientFuture.get();
		jobListeners.forEach(jobListener -> jobListener.onJobSubmitted(jobClient, null));
		return jobClient;
	} catch (ExecutionException executionException) {
		final Throwable strippedException = ExceptionUtils.stripExecutionException(executionException);
		jobListeners.forEach(jobListener -> jobListener.onJobSubmitted(null, strippedException));

		throw new FlinkException(
			String.format("Failed to execute job '%s'.", streamGraph.getJobName()),
			strippedException);
	}
}
 
Example 11
Source File: RestServerEndpointITCase.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests that responses larger than {@link #TEST_REST_MAX_CONTENT_LENGTH} are rejected.
 */
@Test
public void testShouldRespectMaxContentLengthLimitForResponses() throws Exception {
	testHandler.handlerBody = id -> CompletableFuture.completedFuture(
		new TestResponse(id, createStringOfSize(TEST_REST_MAX_CONTENT_LENGTH)));

	try {
		sendRequestToTestHandler(new TestRequest(1)).get();
		fail("Expected exception not thrown");
	} catch (final ExecutionException e) {
		final Throwable throwable = ExceptionUtils.stripExecutionException(e);
		assertThat(throwable, instanceOf(TooLongFrameException.class));
		assertThat(throwable.getMessage(), containsString("Try to raise"));
	}
}
 
Example 12
Source File: RocksDBStateUploader.java    From flink with Apache License 2.0
/**
 * Uploads all the files to the checkpoint file system using the specified number of threads.
 *
 * @param files The files to be uploaded to the checkpoint file system.
 * @param checkpointStreamFactory The checkpoint stream factory used to create the output streams.
 *
 * @throws Exception Thrown if uploading any of the files fails.
 */
public Map<StateHandleID, StreamStateHandle> uploadFilesToCheckpointFs(
	@Nonnull Map<StateHandleID, Path> files,
	CheckpointStreamFactory checkpointStreamFactory,
	CloseableRegistry closeableRegistry) throws Exception {

	Map<StateHandleID, StreamStateHandle> handles = new HashMap<>();

	Map<StateHandleID, CompletableFuture<StreamStateHandle>> futures =
		createUploadFutures(files, checkpointStreamFactory, closeableRegistry);

	try {
		FutureUtils.waitForAll(futures.values()).get();

		for (Map.Entry<StateHandleID, CompletableFuture<StreamStateHandle>> entry : futures.entrySet()) {
			handles.put(entry.getKey(), entry.getValue().get());
		}
	} catch (ExecutionException e) {
		Throwable throwable = ExceptionUtils.stripExecutionException(e);
		throwable = ExceptionUtils.stripException(throwable, RuntimeException.class);
		if (throwable instanceof IOException) {
			throw (IOException) throwable;
		} else {
			throw new FlinkRuntimeException("Failed to upload data for state handles.", e);
		}
	}

	return handles;
}
 
Example 13
Source File: AkkaInvocationHandler.java    From flink with Apache License 2.0
/**
 * Invokes an RPC method by sending the RPC invocation details to the RPC endpoint.
 *
 * @param method to call
 * @param args of the method call
 * @return result of the RPC
 * @throws Exception if the RPC invocation fails
 */
private Object invokeRpc(Method method, Object[] args) throws Exception {
	String methodName = method.getName();
	Class<?>[] parameterTypes = method.getParameterTypes();
	Annotation[][] parameterAnnotations = method.getParameterAnnotations();
	Time futureTimeout = extractRpcTimeout(parameterAnnotations, args, timeout);

	final RpcInvocation rpcInvocation = createRpcInvocationMessage(methodName, parameterTypes, args);

	Class<?> returnType = method.getReturnType();

	final Object result;

	if (Objects.equals(returnType, Void.TYPE)) {
		tell(rpcInvocation);

		result = null;
	} else {
		// Capture the call stack. It is significantly faster to do that via an exception than
		// via Thread.getStackTrace(), because exceptions lazily initialize the stack trace, initially only
		// capture a lightweight native pointer, and convert that into the stack trace lazily when needed.
		final Throwable callStackCapture = captureAskCallStack ? new Throwable() : null;

		// execute an asynchronous call
		final CompletableFuture<?> resultFuture = ask(rpcInvocation, futureTimeout);

		final CompletableFuture<Object> completableFuture = new CompletableFuture<>();
		resultFuture.whenComplete((resultValue, failure) -> {
			if (failure != null) {
				completableFuture.completeExceptionally(resolveTimeoutException(failure, callStackCapture, method));
			} else {
				completableFuture.complete(deserializeValueIfNeeded(resultValue, method));
			}
		});

		if (Objects.equals(returnType, CompletableFuture.class)) {
			result = completableFuture;
		} else {
			try {
				result = completableFuture.get(futureTimeout.getSize(), futureTimeout.getUnit());
			} catch (ExecutionException ee) {
				throw new RpcException("Failure while obtaining synchronous RPC result.", ExceptionUtils.stripExecutionException(ee));
			}
		}
	}

	return result;
}
 
Example 14
Source File: FutureUtilsTest.java    From flink with Apache License 2.0
@Test
public void testCompleteAllExceptional() throws Exception {
	final CompletableFuture<String> inputFuture1 = new CompletableFuture<>();
	final CompletableFuture<Integer> inputFuture2 = new CompletableFuture<>();

	final List<CompletableFuture<?>> futuresToComplete = Arrays.asList(inputFuture1, inputFuture2);
	final FutureUtils.ConjunctFuture<Void> completeFuture = FutureUtils.completeAll(futuresToComplete);

	assertThat(completeFuture.isDone(), is(false));
	assertThat(completeFuture.getNumFuturesCompleted(), is(0));
	assertThat(completeFuture.getNumFuturesTotal(), is(futuresToComplete.size()));

	final FlinkException testException1 = new FlinkException("Test exception 1");
	inputFuture1.completeExceptionally(testException1);

	assertThat(completeFuture.isDone(), is(false));
	assertThat(completeFuture.getNumFuturesCompleted(), is(1));

	final FlinkException testException2 = new FlinkException("Test exception 2");
	inputFuture2.completeExceptionally(testException2);

	assertThat(completeFuture.isDone(), is(true));
	assertThat(completeFuture.getNumFuturesCompleted(), is(2));

	try {
		completeFuture.get();
		fail("Expected an exceptional completion");
	} catch (ExecutionException ee) {
		final Throwable actual = ExceptionUtils.stripExecutionException(ee);

		final Throwable[] suppressed = actual.getSuppressed();
		final FlinkException suppressedException;

		if (actual.equals(testException1)) {
			suppressedException = testException2;
		} else {
			suppressedException = testException1;
		}

		assertThat(suppressed, is(not(emptyArray())));
		assertThat(suppressed, arrayContaining(suppressedException));
	}
}
 
Example 15
Source File: FutureUtilsTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCompleteAllExceptional() throws Exception {
	final CompletableFuture<String> inputFuture1 = new CompletableFuture<>();
	final CompletableFuture<Integer> inputFuture2 = new CompletableFuture<>();

	final List<CompletableFuture<?>> futuresToComplete = Arrays.asList(inputFuture1, inputFuture2);
	final FutureUtils.ConjunctFuture<Void> completeFuture = FutureUtils.completeAll(futuresToComplete);

	assertThat(completeFuture.isDone(), is(false));
	assertThat(completeFuture.getNumFuturesCompleted(), is(0));
	assertThat(completeFuture.getNumFuturesTotal(), is(futuresToComplete.size()));

	final FlinkException testException1 = new FlinkException("Test exception 1");
	inputFuture1.completeExceptionally(testException1);

	assertThat(completeFuture.isDone(), is(false));
	assertThat(completeFuture.getNumFuturesCompleted(), is(1));

	final FlinkException testException2 = new FlinkException("Test exception 2");
	inputFuture2.completeExceptionally(testException2);

	assertThat(completeFuture.isDone(), is(true));
	assertThat(completeFuture.getNumFuturesCompleted(), is(2));

	try {
		completeFuture.get();
		fail("Expected an exceptional completion");
	} catch (ExecutionException ee) {
		final Throwable actual = ExceptionUtils.stripExecutionException(ee);

		final Throwable[] suppressed = actual.getSuppressed();
		final FlinkException suppressedException;

		if (actual.equals(testException1)) {
			suppressedException = testException2;
		} else {
			suppressedException = testException1;
		}

		assertThat(suppressed, is(not(emptyArray())));
		assertThat(suppressed, arrayContaining(suppressedException));
	}
}
 
Example 16
Source File: CliFrontend.java    From flink with Apache License 2.0
private <ClusterID> void listJobs(
		ClusterClient<ClusterID> clusterClient,
		boolean showRunning,
		boolean showScheduled,
		boolean showAll) throws FlinkException {
	Collection<JobStatusMessage> jobDetails;
	try {
		CompletableFuture<Collection<JobStatusMessage>> jobDetailsFuture = clusterClient.listJobs();

		logAndSysout("Waiting for response...");
		jobDetails = jobDetailsFuture.get();

	} catch (Exception e) {
		Throwable cause = ExceptionUtils.stripExecutionException(e);
		throw new FlinkException("Failed to retrieve job list.", cause);
	}

	LOG.info("Successfully retrieved list of jobs");

	final List<JobStatusMessage> runningJobs = new ArrayList<>();
	final List<JobStatusMessage> scheduledJobs = new ArrayList<>();
	final List<JobStatusMessage> terminatedJobs = new ArrayList<>();
	jobDetails.forEach(details -> {
		if (details.getJobState() == JobStatus.CREATED) {
			scheduledJobs.add(details);
		} else if (!details.getJobState().isGloballyTerminalState()) {
			runningJobs.add(details);
		} else {
			terminatedJobs.add(details);
		}
	});

	if (showRunning || showAll) {
		if (runningJobs.size() == 0) {
			System.out.println("No running jobs.");
		}
		else {
			System.out.println("------------------ Running/Restarting Jobs -------------------");
			printJobStatusMessages(runningJobs);
			System.out.println("--------------------------------------------------------------");
		}
	}
	if (showScheduled || showAll) {
		if (scheduledJobs.size() == 0) {
			System.out.println("No scheduled jobs.");
		}
		else {
			System.out.println("----------------------- Scheduled Jobs -----------------------");
			printJobStatusMessages(scheduledJobs);
			System.out.println("--------------------------------------------------------------");
		}
	}
	if (showAll) {
		if (terminatedJobs.size() != 0) {
			System.out.println("---------------------- Terminated Jobs -----------------------");
			printJobStatusMessages(terminatedJobs);
			System.out.println("--------------------------------------------------------------");
		}
	}
}
 
Example 17
Source File: JobMasterTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests that the timeout in {@link JobMasterGateway#triggerSavepoint(String, boolean, Time)}
 * is respected.
 */
@Test
public void testTriggerSavepointTimeout() throws Exception {
	final JobMaster jobMaster = new JobMaster(
		rpcService,
		JobMasterConfiguration.fromConfiguration(configuration),
		jmResourceId,
		jobGraph,
		haServices,
		DefaultSlotPoolFactory.fromConfiguration(configuration),
		DefaultSchedulerFactory.fromConfiguration(configuration),
		new TestingJobManagerSharedServicesBuilder().build(),
		heartbeatServices,
		UnregisteredJobManagerJobMetricGroupFactory.INSTANCE,
		new TestingOnCompletionActions(),
		testingFatalErrorHandler,
		JobMasterTest.class.getClassLoader()) {

		@Override
		public CompletableFuture<String> triggerSavepoint(
				@Nullable final String targetDirectory,
				final boolean cancelJob,
				final Time timeout) {
			return new CompletableFuture<>();
		}
	};

	try {
		final CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId);
		startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS);

		final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class);
		final CompletableFuture<String> savepointFutureLowTimeout = jobMasterGateway.triggerSavepoint("/tmp", false, Time.milliseconds(1));
		final CompletableFuture<String> savepointFutureHighTimeout = jobMasterGateway.triggerSavepoint("/tmp", false, RpcUtils.INF_TIMEOUT);

		try {
			savepointFutureLowTimeout.get(testingTimeout.getSize(), testingTimeout.getUnit());
			fail();
		} catch (final ExecutionException e) {
			final Throwable cause = ExceptionUtils.stripExecutionException(e);
			assertThat(cause, instanceOf(TimeoutException.class));
		}

		assertThat(savepointFutureHighTimeout.isDone(), is(equalTo(false)));
	} finally {
		RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
	}
}
 
Example 18
Source File: AkkaInvocationHandler.java    From Flink-CEPplus with Apache License 2.0
/**
 * Invokes an RPC method by sending the RPC invocation details to the RPC endpoint.
 *
 * @param method to call
 * @param args of the method call
 * @return result of the RPC
 * @throws Exception if the RPC invocation fails
 */
private Object invokeRpc(Method method, Object[] args) throws Exception {
	String methodName = method.getName();
	Class<?>[] parameterTypes = method.getParameterTypes();
	Annotation[][] parameterAnnotations = method.getParameterAnnotations();
	Time futureTimeout = extractRpcTimeout(parameterAnnotations, args, timeout);

	final RpcInvocation rpcInvocation = createRpcInvocationMessage(methodName, parameterTypes, args);

	Class<?> returnType = method.getReturnType();

	final Object result;

	if (Objects.equals(returnType, Void.TYPE)) {
		tell(rpcInvocation);

		result = null;
	} else {
		// execute an asynchronous call
		CompletableFuture<?> resultFuture = ask(rpcInvocation, futureTimeout);

		CompletableFuture<?> completableFuture = resultFuture.thenApply((Object o) -> {
			if (o instanceof SerializedValue) {
				try {
					return  ((SerializedValue<?>) o).deserializeValue(getClass().getClassLoader());
				} catch (IOException | ClassNotFoundException e) {
					throw new CompletionException(
						new RpcException("Could not deserialize the serialized payload of RPC method : "
							+ methodName, e));
				}
			} else {
				return o;
			}
		});

		if (Objects.equals(returnType, CompletableFuture.class)) {
			result = completableFuture;
		} else {
			try {
				result = completableFuture.get(futureTimeout.getSize(), futureTimeout.getUnit());
			} catch (ExecutionException ee) {
				throw new RpcException("Failure while obtaining synchronous RPC result.", ExceptionUtils.stripExecutionException(ee));
			}
		}
	}

	return result;
}