org.apache.flink.client.program.ProgramInvocationException Java Examples

The following examples show how to use org.apache.flink.client.program.ProgramInvocationException, the exception Flink's client APIs throw when a packaged user program cannot be built, invoked, or submitted for execution. Each example lists the source file and project it was taken from.
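
Before the individual snippets, here is a minimal, hypothetical sketch of the pattern most of them share: build a PackagedProgram, run it, and translate a ProgramInvocationException into a caller-facing failure. The jar path and entry-point class name are placeholders, not taken from any of the examples below.

import java.io.File;

import org.apache.flink.client.program.PackagedProgram;
import org.apache.flink.client.program.ProgramInvocationException;

public final class PackagedProgramSketch {

	public static void main(String[] args) {
		// placeholder jar and entry point; substitute your own job artifacts
		final File jobJar = new File("/path/to/user-job.jar");

		try {
			final PackagedProgram program = PackagedProgram.newBuilder()
				.setJarFile(jobJar)
				.setEntryPointClassName("com.example.MyFlinkJob")
				.setArguments(args)
				.build();

			// runs the program's main() against whatever execution
			// environment is currently set as context
			program.invokeInteractiveModeForExecution();
		} catch (ProgramInvocationException e) {
			// raised when the program cannot be built, its main() fails,
			// or submission/execution of the resulting job goes wrong
			throw new RuntimeException("Could not execute the packaged program.", e);
		}
	}
}
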
Example #1
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = PackagedProgram.newBuilder()
		.setJarFile(new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH))
		.setArguments(new String[] { checkpointDir.toURI().toString(), outputDir.toURI().toString()})
		.build();

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)),
		Collections.emptyList());

	try {
		program.invokeInteractiveModeForExecution();
		fail("exception should happen");
	} catch (ProgramInvocationException e) {
		assertTrue(ExceptionUtils.findThrowable(e, SuccessException.class).isPresent());
	}
}
 
Example #2
Source File: ClassPathPackagedProgramRetrieverTest.java    From flink with Apache License 2.0
@Test
public void testJobGraphRetrieval() throws IOException, FlinkException, ProgramInvocationException {
	final int parallelism = 42;
	final JobID jobId = new JobID();

	final Configuration configuration = new Configuration();
	configuration.setInteger(CoreOptions.DEFAULT_PARALLELISM, parallelism);
	configuration.set(PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, jobId.toHexString());

	final ClassPathPackagedProgramRetriever retrieverUnderTest =
		ClassPathPackagedProgramRetriever.newBuilder(PROGRAM_ARGUMENTS)
			.setJobClassName(TestJob.class.getCanonicalName())
			.build();

	final JobGraph jobGraph = retrieveJobGraph(retrieverUnderTest, configuration);

	assertThat(jobGraph.getName(), is(equalTo(TestJob.class.getCanonicalName() + "-suffix")));
	assertThat(jobGraph.getSavepointRestoreSettings(), is(equalTo(SavepointRestoreSettings.none())));
	assertThat(jobGraph.getMaximumParallelism(), is(parallelism));
	assertEquals(jobGraph.getJobID(), jobId);
}
 
Example #3
Source File: JarRunHandler.java    From flink with Apache License 2.0
@Override
protected CompletableFuture<JarRunResponseBody> handleRequest(
		@Nonnull final HandlerRequest<JarRunRequestBody, JarRunMessageParameters> request,
		@Nonnull final DispatcherGateway gateway) throws RestHandlerException {

	final Configuration effectiveConfiguration = new Configuration(configuration);
	effectiveConfiguration.set(DeploymentOptions.ATTACHED, false);
	effectiveConfiguration.set(DeploymentOptions.TARGET, EmbeddedExecutor.NAME);

	final JarHandlerContext context = JarHandlerContext.fromRequest(request, jarDir, log);
	context.applyToConfiguration(effectiveConfiguration);
	SavepointRestoreSettings.toConfiguration(getSavepointRestoreSettings(request), effectiveConfiguration);

	final PackagedProgram program = context.toPackagedProgram(effectiveConfiguration);

	return CompletableFuture
			.supplyAsync(() -> applicationRunner.run(gateway, program, effectiveConfiguration), executor)
			.thenApply(jobIds -> {
				if (jobIds.isEmpty()) {
					throw new CompletionException(new ProgramInvocationException("No jobs submitted."));
				}
				return new JarRunResponseBody(jobIds.get(0));
			});
}
 
Example #4
Source File: DetachedApplicationRunner.java    From flink with Apache License 2.0
private List<JobID> tryExecuteJobs(final DispatcherGateway dispatcherGateway, final PackagedProgram program, final Configuration configuration) {
	configuration.set(DeploymentOptions.ATTACHED, false);

	final List<JobID> applicationJobIds = new ArrayList<>();
	final PipelineExecutorServiceLoader executorServiceLoader =
			new WebSubmissionExecutorServiceLoader(applicationJobIds, dispatcherGateway);

	try {
		ClientUtils.executeProgram(executorServiceLoader, configuration, program, enforceSingleJobExecution, true);
	} catch (ProgramInvocationException e) {
		LOG.warn("Could not execute application: ", e);
		throw new FlinkRuntimeException("Could not execute application.", e);
	}

	return applicationJobIds;
}
 
Example #5
Source File: CliFrontend.java    From Flink-CEPplus with Apache License 2.0
protected void executeProgram(PackagedProgram program, ClusterClient<?> client, int parallelism) throws ProgramMissingJobException, ProgramInvocationException {
	logAndSysout("Starting execution of program");

	final JobSubmissionResult result = client.run(program, parallelism);

	if (null == result) {
		throw new ProgramMissingJobException("No JobSubmissionResult returned, please make sure you called " +
			"ExecutionEnvironment.execute()");
	}

	if (result.isJobExecutionResult()) {
		logAndSysout("Program execution finished");
		JobExecutionResult execResult = result.getJobExecutionResult();
		System.out.println("Job with JobID " + execResult.getJobID() + " has finished.");
		System.out.println("Job Runtime: " + execResult.getNetRuntime() + " ms");
		Map<String, Object> accumulatorsResult = execResult.getAllAccumulatorResults();
		if (accumulatorsResult.size() > 0) {
			System.out.println("Accumulator Results: ");
			System.out.println(AccumulatorHelper.getResultsFormatted(accumulatorsResult));
		}
	} else {
		logAndSysout("Job has been submitted with JobID " + result.getJobID());
	}
}
 
Example #6
Source File: JarHandlerUtils.java    From Flink-CEPplus with Apache License 2.0
public JobGraph toJobGraph(Configuration configuration) {
	if (!Files.exists(jarFile)) {
		throw new CompletionException(new RestHandlerException(
			String.format("Jar file %s does not exist", jarFile), HttpResponseStatus.BAD_REQUEST));
	}

	try {
		final PackagedProgram packagedProgram = new PackagedProgram(
			jarFile.toFile(),
			entryClass,
			programArgs.toArray(new String[0]));
		return PackagedProgramUtils.createJobGraph(packagedProgram, configuration, parallelism, jobId);
	} catch (final ProgramInvocationException e) {
		throw new CompletionException(e);
	}
}
 
Example #7
Source File: JarHandlerUtils.java    From flink with Apache License 2.0
public PackagedProgram toPackagedProgram(Configuration configuration) {
	checkNotNull(configuration);

	if (!Files.exists(jarFile)) {
		throw new CompletionException(new RestHandlerException(
				String.format("Jar file %s does not exist", jarFile), HttpResponseStatus.BAD_REQUEST));
	}

	try {
		return PackagedProgram.newBuilder()
				.setJarFile(jarFile.toFile())
				.setEntryPointClassName(entryClass)
				.setConfiguration(configuration)
				.setArguments(programArgs.toArray(new String[0]))
				.build();
	} catch (final ProgramInvocationException e) {
		throw new CompletionException(e);
	}
}
 
Example #8
Source File: ClassLoaderITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testKMeansJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	PackagedProgram kMeansProg = new PackagedProgram(
		new File(KMEANS_JAR_PATH),
		new String[] {
			KMeansData.DATAPOINTS,
			KMeansData.INITIAL_CENTERS,
			"25"
		});

	TestEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(KMEANS_JAR_PATH)),
		Collections.<URL>emptyList());

	kMeansProg.invokeInteractiveModeForExecution();
}
 
Example #9
Source File: ScalaShellRemoteStreamEnvironment.java    From Flink-CEPplus with Apache License 2.0
/**
 * Executes the remote job.
 *
 * @param streamGraph
 *            Stream Graph to execute
 * @param jarFiles
 * 			  List of jar file URLs to ship to the cluster
 * @return The result of the job execution, containing elapsed time and accumulators.
 */
@Override
protected JobExecutionResult executeRemotely(StreamGraph streamGraph, List<URL> jarFiles) throws ProgramInvocationException {
	URL jarUrl;
	try {
		jarUrl = flinkILoop.writeFilesToDisk().getAbsoluteFile().toURI().toURL();
	} catch (MalformedURLException e) {
		throw new ProgramInvocationException("Could not write the user code classes to disk.",
			streamGraph.getJobGraph().getJobID(), e);
	}

	List<URL> allJarFiles = new ArrayList<>(jarFiles.size() + 1);
	allJarFiles.addAll(jarFiles);
	allJarFiles.add(jarUrl);

	return super.executeRemotely(streamGraph, allJarFiles);
}
 
Example #10
Source File: TestUtils.java    From flink with Apache License 2.0
public static JobExecutionResult tryExecute(StreamExecutionEnvironment see, String name) throws Exception {
	try {
		return see.execute(name);
	}
	catch (ProgramInvocationException | JobExecutionException root) {
		Throwable cause = root.getCause();

		// search for nested SuccessExceptions
		int depth = 0;
		while (!(cause instanceof SuccessException)) {
			if (cause == null || depth++ == 20) {
				root.printStackTrace();
				fail("Test failed: " + root.getMessage());
			}
			else {
				cause = cause.getCause();
			}
		}
	}

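	// reaching this line means a nested SuccessException was found, so the run is treated as successful and null is returned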
	return null;
}
 
Example #11
Source File: TestUtils.java    From Flink-CEPplus with Apache License 2.0
public static JobExecutionResult tryExecute(StreamExecutionEnvironment see, String name) throws Exception {
	try {
		return see.execute(name);
	}
	catch (ProgramInvocationException | JobExecutionException root) {
		Throwable cause = root.getCause();

		// search for nested SuccessExceptions
		int depth = 0;
		while (!(cause instanceof SuccessException)) {
			if (cause == null || depth++ == 20) {
				root.printStackTrace();
				fail("Test failed: " + root.getMessage());
			}
			else {
				cause = cause.getCause();
			}
		}
	}

	return null;
}
 
Example #12
Source File: CliFrontend.java    From flink with Apache License 2.0
/**
 * Creates a Packaged program from the given command line options.
 *
 * @return A PackagedProgram (upon success)
 */
PackagedProgram buildProgram(final ProgramOptions runOptions)
		throws FileNotFoundException, ProgramInvocationException, CliArgsException {
	runOptions.validate();

	String[] programArgs = runOptions.getProgramArgs();
	String jarFilePath = runOptions.getJarFilePath();
	List<URL> classpaths = runOptions.getClasspaths();

	// Get assembler class
	String entryPointClass = runOptions.getEntryPointClassName();
	File jarFile = jarFilePath != null ? getJarFile(jarFilePath) : null;

	return PackagedProgram.newBuilder()
		.setJarFile(jarFile)
		.setUserClassPaths(classpaths)
		.setEntryPointClassName(entryPointClass)
		.setConfiguration(configuration)
		.setSavepointRestoreSettings(runOptions.getSavepointRestoreSettings())
		.setArguments(programArgs)
		.build();
}
 
Example #13
Source File: ClassLoaderITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = new PackagedProgram(
		new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH),
		new String[] {
			checkpointDir.toURI().toString(),
			outputDir.toURI().toString()
		});

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)),
		Collections.<URL>emptyList());

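	// the test job signals success by throwing SuccessException, which must surface in the cause chain of the ProgramInvocationException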
	expectedException.expectCause(
		Matchers.<Throwable>hasProperty("cause", isA(SuccessException.class)));

	program.invokeInteractiveModeForExecution();
}
 
Example #14
Source File: KafkaTestBase.java    From Flink-CEPplus with Apache License 2.0
protected static void tryExecutePropagateExceptions(StreamExecutionEnvironment see, String name) throws Exception {
	try {
		see.execute(name);
	}
	catch (ProgramInvocationException | JobExecutionException root) {
		Throwable cause = root.getCause();

		// search for nested SuccessExceptions
		int depth = 0;
		while (!(cause instanceof SuccessException)) {
			if (cause == null || depth++ == 20) {
				throw root;
			}
			else {
				cause = cause.getCause();
			}
		}
	}
}
 
Example #15
Source File: ClassPathPackagedProgramRetrieverTest.java    From flink with Apache License 2.0
@Test
public void testJobGraphRetrievalFailIfDoesNotFindTheEntryClassInTheJobDir() throws IOException, ProgramInvocationException {
	final ClassPathPackagedProgramRetriever retrieverUnderTest =
		ClassPathPackagedProgramRetriever.newBuilder(PROGRAM_ARGUMENTS)
			.setJobClassName(TestJobInfo.JOB_CLASS)
			.setJarsOnClassPath(Collections::emptyList)
			.setUserLibDirectory(userDirHasNotEntryClass)
			.build();
	try {
		retrieveJobGraph(retrieverUnderTest, new Configuration());
		Assert.fail("This case should throw class not found exception!!");
	} catch (FlinkException e) {
		assertTrue(ExceptionUtils
			.findThrowableWithMessage(e, "Could not find the provided job class")
			.isPresent());
	}

}
 
Example #16
Source File: ClassPathPackagedProgramRetrieverTest.java    From flink with Apache License 2.0
@Test
public void testSavepointRestoreSettings() throws FlinkException, IOException, ProgramInvocationException {
	final Configuration configuration = new Configuration();
	final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath("foobar", true);
	final JobID jobId = new JobID();

	configuration.setString(PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID, jobId.toHexString());
	SavepointRestoreSettings.toConfiguration(savepointRestoreSettings, configuration);

	final ClassPathPackagedProgramRetriever retrieverUnderTest =
		ClassPathPackagedProgramRetriever.newBuilder(PROGRAM_ARGUMENTS)
		.setJobClassName(TestJob.class.getCanonicalName())
		.build();

	final JobGraph jobGraph = retrieveJobGraph(retrieverUnderTest, configuration);

	assertThat(jobGraph.getSavepointRestoreSettings(), is(equalTo(savepointRestoreSettings)));
	assertEquals(jobGraph.getJobID(), jobId);
}
 
Example #17
Source File: RemoteStreamEnvironment.java    From Flink-CEPplus with Apache License 2.0
/**
 * Executes the job remotely.
 *
 * <p>This method can be used independent of the {@link StreamExecutionEnvironment} type.
 * @return The result of the job execution, containing elapsed time and accumulators.
 */
@PublicEvolving
public static JobExecutionResult executeRemotely(StreamExecutionEnvironment streamExecutionEnvironment,
	List<URL> jarFiles,
	String host,
	int port,
	Configuration clientConfiguration,
	List<URL> globalClasspaths,
	String jobName,
	SavepointRestoreSettings savepointRestoreSettings
) throws ProgramInvocationException {
	StreamGraph streamGraph = streamExecutionEnvironment.getStreamGraph();
	streamGraph.setJobName(jobName);
	return executeRemotely(streamGraph,
		streamExecutionEnvironment.getClass().getClassLoader(),
		streamExecutionEnvironment.getConfig(),
		jarFiles,
		host,
		port,
		clientConfiguration,
		globalClasspaths,
		savepointRestoreSettings);
}
 
Example #18
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testKMeansJobWithCustomClassLoader() throws ProgramInvocationException {
	PackagedProgram kMeansProg = PackagedProgram.newBuilder()
		.setJarFile(new File(KMEANS_JAR_PATH))
		.setArguments(new String[] {
			KMeansData.DATAPOINTS,
			KMeansData.INITIAL_CENTERS,
			"25"})
		.build();

	TestEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(KMEANS_JAR_PATH)),
		Collections.emptyList());

	kMeansProg.invokeInteractiveModeForExecution();
}
 
Example #19
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testKMeansJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	PackagedProgram kMeansProg = new PackagedProgram(
		new File(KMEANS_JAR_PATH),
		new String[] {
			KMeansData.DATAPOINTS,
			KMeansData.INITIAL_CENTERS,
			"25"
		});

	TestEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(KMEANS_JAR_PATH)),
		Collections.<URL>emptyList());

	kMeansProg.invokeInteractiveModeForExecution();
}
 
Example #20
Source File: KafkaTestBase.java    From flink with Apache License 2.0
protected static void tryExecutePropagateExceptions(StreamExecutionEnvironment see, String name) throws Exception {
	try {
		see.execute(name);
	}
	catch (ProgramInvocationException | JobExecutionException root) {
		Throwable cause = root.getCause();

		// search for nested SuccessExceptions
		int depth = 0;
		while (!(cause instanceof SuccessException)) {
			if (cause == null || depth++ == 20) {
				throw root;
			}
			else {
				cause = cause.getCause();
			}
		}
	}
}
 
Example #21
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = new PackagedProgram(
		new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH),
		new String[] {
			checkpointDir.toURI().toString(),
			outputDir.toURI().toString()
		});

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)),
		Collections.<URL>emptyList());

	expectedException.expectCause(
		Matchers.<Throwable>hasProperty("cause", isA(SuccessException.class)));

	program.invokeInteractiveModeForExecution();
}
 
Example #22
Source File: ClassPathPackagedProgramRetrieverTest.java    From flink with Apache License 2.0
@Test
public void testJobGraphRetrievalJobClassNameHasPrecedenceOverClassPath() throws IOException, FlinkException, ProgramInvocationException {
	final File testJar = new File("non-existing");

	final ClassPathPackagedProgramRetriever retrieverUnderTest =
		ClassPathPackagedProgramRetriever.newBuilder(PROGRAM_ARGUMENTS)
			// Both a class name is specified and a JAR "is" on the class path
			// The class name should have precedence.
		.setJobClassName(TestJob.class.getCanonicalName())
		.setJarsOnClassPath(() -> Collections.singleton(testJar))
		.build();

	final JobGraph jobGraph = retrieveJobGraph(retrieverUnderTest, new Configuration());

	assertThat(jobGraph.getName(), is(equalTo(TestJob.class.getCanonicalName() + "-suffix")));
}
 
Example #23
Source File: ScalaShellRemoteStreamEnvironment.java    From flink with Apache License 2.0
/**
 * Executes the remote job.
 *
 * @param streamGraph
 *            Stream Graph to execute
 * @param jarFiles
 * 			  List of jar file URLs to ship to the cluster
 * @return The result of the job execution, containing elapsed time and accumulators.
 */
@Override
protected JobExecutionResult executeRemotely(StreamGraph streamGraph, List<URL> jarFiles) throws ProgramInvocationException {
	URL jarUrl;
	try {
		jarUrl = flinkILoop.writeFilesToDisk().getAbsoluteFile().toURI().toURL();
	} catch (MalformedURLException e) {
		throw new ProgramInvocationException("Could not write the user code classes to disk.",
			streamGraph.getJobGraph().getJobID(), e);
	}

	List<URL> allJarFiles = new ArrayList<>(jarFiles.size() + 1);
	allJarFiles.addAll(jarFiles);
	allJarFiles.add(jarUrl);

	return super.executeRemotely(streamGraph, allJarFiles);
}
 
Example #24
Source File: JarHandlerUtils.java    From flink with Apache License 2.0
public JobGraph toJobGraph(Configuration configuration) {
	if (!Files.exists(jarFile)) {
		throw new CompletionException(new RestHandlerException(
			String.format("Jar file %s does not exist", jarFile), HttpResponseStatus.BAD_REQUEST));
	}

	try {
		final PackagedProgram packagedProgram = new PackagedProgram(
			jarFile.toFile(),
			entryClass,
			programArgs.toArray(new String[0]));
		return PackagedProgramUtils.createJobGraph(packagedProgram, configuration, parallelism, jobId);
	} catch (final ProgramInvocationException e) {
		throw new CompletionException(e);
	}
}
 
Example #25
Source File: ClassPathPackagedProgramRetrieverTest.java    From flink with Apache License 2.0
@Test
public void testJobGraphRetrievalFailIfJobDirDoesNotHaveEntryClass() throws IOException, ProgramInvocationException {
	final File testJar = TestJob.getTestJobJar();
	final ClassPathPackagedProgramRetriever retrieverUnderTest =
		ClassPathPackagedProgramRetriever.newBuilder(PROGRAM_ARGUMENTS)
			.setJarsOnClassPath(() -> Collections.singleton(testJar))
			.setUserLibDirectory(userDirHasNotEntryClass)
			.build();
	try {
		retrieveJobGraph(retrieverUnderTest, new Configuration());
		Assert.fail("This case should throw exception !");
	} catch (FlinkException e) {
		assertTrue(ExceptionUtils
			.findThrowableWithMessage(e, "Failed to find job JAR on class path")
			.isPresent());
	}
}
 
Example #26
Source File: TestUtils.java    From flink-benchmarks with Apache License 2.0
public static JobExecutionResult tryExecute(StreamExecutionEnvironment see, String name) throws Exception {
    try {
        return see.execute(name);
    }
    catch (ProgramInvocationException | JobExecutionException root) {
        Throwable cause = root.getCause();

        // search for nested SuccessExceptions
        int depth = 0;
        while (!(cause instanceof SuccessException)) {
            if (cause == null || depth++ == 20) {
                root.printStackTrace();
                fail("Test failed: " + root.getMessage());
            }
            else {
                cause = cause.getCause();
            }
        }
    }

    return null;
}
 
Example #27
Source File: StatefulFunctionsJobGraphRetriever.java    From flink-statefun with Apache License 2.0
private PackagedProgram createPackagedProgram() {
  File mainJar = new File(Constants.FLINK_JOB_JAR_PATH);
  if (!mainJar.exists()) {
    throw new IllegalStateException("Unable to locate the launcher jar");
  }
  try {
    return PackagedProgram.newBuilder()
        .setJarFile(mainJar)
        .setUserClassPaths(obtainModuleAdditionalClassPath())
        .setEntryPointClassName(StatefulFunctionsJob.class.getName())
        .setArguments(programArguments)
        .build();
  } catch (ProgramInvocationException e) {
    throw new RuntimeException("Unable to construct a packaged program", e);
  }
}
 
Example #28
Source File: StatefulFunctionsJobGraphRetriever.java    From stateful-functions with Apache License 2.0
private PackagedProgram createPackagedProgram() {
  File mainJar = new File(Constants.FLINK_JOB_JAR_PATH);
  if (!mainJar.exists()) {
    throw new IllegalStateException("Unable to locate the launcher jar");
  }
  try {
    return PackagedProgram.newBuilder()
        .setJarFile(mainJar)
        .setUserClassPaths(obtainModuleAdditionalClassPath())
        .setEntryPointClassName(StatefulFunctionsJob.class.getName())
        .setArguments(programArguments)
        .build();
  } catch (ProgramInvocationException e) {
    throw new RuntimeException("Unable to construct a packaged program", e);
  }
}
 
Example #29
Source File: RemoteStreamEnvironment.java    From flink with Apache License 2.0
/**
 * Executes the job remotely.
 *
 * <p>This method can be used independent of the {@link StreamExecutionEnvironment} type.
 * @return The result of the job execution, containing elapsed time and accumulators.
 */
@PublicEvolving
public static JobExecutionResult executeRemotely(StreamExecutionEnvironment streamExecutionEnvironment,
	List<URL> jarFiles,
	String host,
	int port,
	Configuration clientConfiguration,
	List<URL> globalClasspaths,
	String jobName,
	SavepointRestoreSettings savepointRestoreSettings
) throws ProgramInvocationException {
	StreamGraph streamGraph = streamExecutionEnvironment.getStreamGraph(jobName);
	return executeRemotely(streamGraph,
		streamExecutionEnvironment.getClass().getClassLoader(),
		streamExecutionEnvironment.getConfig(),
		jarFiles,
		host,
		port,
		clientConfiguration,
		globalClasspaths,
		savepointRestoreSettings);
}