Java Code Examples for org.apache.flink.client.program.ProgramInvocationException#printStackTrace()

The following examples show how to use org.apache.flink.client.program.ProgramInvocationException#printStackTrace(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: StreamFaultToleranceTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Executes the checkpointed test program defined in
 * {@link #testProgram(StreamExecutionEnvironment)} and then runs the
 * verification steps in {@link #postSubmit}.
 *
 * <p>A {@link SuccessException} found within the first 20 causes of a
 * {@link ProgramInvocationException} counts as a successful run; anything
 * else fails the test.
 */
@Test
public void runCheckpointedProgram() throws Exception {
	try {
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(PARALLELISM);
		env.enableCheckpointing(500);
		env.getConfig().disableSysoutLogging();
		env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 0L));

		testProgram(env);

		final JobGraph jobGraph = env.getStreamGraph().getJobGraph();
		try {
			cluster.getClusterClient().submitJob(jobGraph, getClass().getClassLoader()).getJobExecutionResult();
		} catch (ProgramInvocationException root) {
			// Search the cause chain (at most 20 levels deep) for a SuccessException.
			Throwable current = root.getCause();
			for (int level = 0; !(current instanceof SuccessException); ) {
				if (current != null && level++ < 20) {
					current = current.getCause();
				} else {
					root.printStackTrace();
					fail("Test failed: " + root.getMessage());
				}
			}
		}

		postSubmit();
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
 
Example 2
Source File: StreamFaultToleranceTestBase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Executes the checkpointed test program defined in
 * {@link #testProgram(StreamExecutionEnvironment)} and then runs the
 * verification steps in {@link #postSubmit}.
 *
 * <p>A {@link SuccessException} found within the first 20 causes of a
 * {@link ProgramInvocationException} counts as a successful run; anything
 * else fails the test.
 */
@Test
public void runCheckpointedProgram() throws Exception {
	try {
		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(PARALLELISM);
		env.enableCheckpointing(500);
		env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 0L));

		testProgram(env);

		final JobGraph jobGraph = env.getStreamGraph().getJobGraph();
		try {
			ClientUtils.submitJobAndWaitForResult(cluster.getClusterClient(), jobGraph, getClass().getClassLoader()).getJobExecutionResult();
		} catch (ProgramInvocationException root) {
			// Search the cause chain (at most 20 levels deep) for a SuccessException.
			Throwable current = root.getCause();
			for (int level = 0; !(current instanceof SuccessException); ) {
				if (current != null && level++ < 20) {
					current = current.getCause();
				} else {
					root.printStackTrace();
					fail("Test failed: " + root.getMessage());
				}
			}
		}

		postSubmit();
	} catch (Exception e) {
		e.printStackTrace();
		Assert.fail(e.getMessage());
	}
}
 
Example 3
Source File: ClassLoaderITCase.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Tests disposal of a savepoint which contains custom user-code KvState.
 *
 * <p>The packaged program is started on a separate thread, a savepoint is
 * triggered once a job reaches RUNNING, and the savepoint is then disposed
 * through the {@link ClusterClient}.
 */
@Test
public void testDisposeSavepointWithCustomKvState() throws Exception {
	ClusterClient<?> clusterClient = new MiniClusterClient(new Configuration(), miniClusterResource.getMiniCluster());

	Deadline deadline = new FiniteDuration(100, TimeUnit.SECONDS).fromNow();

	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = new PackagedProgram(
			new File(CUSTOM_KV_STATE_JAR_PATH),
			new String[] {
					String.valueOf(parallelism),
					checkpointDir.toURI().toString(),
					"5000",
					outputDir.toURI().toString()
			});

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CUSTOM_KV_STATE_JAR_PATH)),
		Collections.<URL>emptyList()
	);

	// Execute detached. Cancellation is the expected way for the program to
	// terminate, so a JobCancellationException cause is deliberately ignored.
	Thread invokeThread = new Thread(new Runnable() {
		@Override
		public void run() {
			try {
				program.invokeInteractiveModeForExecution();
			} catch (ProgramInvocationException ignored) {
				if (ignored.getCause() == null ||
					!(ignored.getCause() instanceof JobCancellationException)) {
					ignored.printStackTrace();
				}
			}
		}
	});

	LOG.info("Starting program invoke thread");
	invokeThread.start();

	// The job ID
	JobID jobId = null;

	LOG.info("Waiting for job status running.");

	// Wait for running job
	while (jobId == null && deadline.hasTimeLeft()) {

		Collection<JobStatusMessage> jobs = clusterClient.listJobs().get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		for (JobStatusMessage job : jobs) {
			if (job.getJobState() == JobStatus.RUNNING) {
				jobId = job.getJobId();
				LOG.info("Job running. ID: " + jobId);
				break;
			}
		}

		// Retry if job is not available yet
		if (jobId == null) {
			Thread.sleep(100L);
		}
	}

	// Fail fast with a clear message rather than passing a null job id to
	// triggerSavepoint below.
	assertNotNull("Job did not reach RUNNING state within the deadline", jobId);

	// Trigger savepoint; retry because this can fail while the operators are
	// not opened yet, but stop as soon as one savepoint succeeds.
	String savepointPath = null;
	for (int i = 0; i < 20 && savepointPath == null; i++) {
		LOG.info("Triggering savepoint (" + (i + 1) + "/20).");
		try {
			savepointPath = clusterClient.triggerSavepoint(jobId, null)
				.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		} catch (Exception cause) {
			LOG.info("Failed to trigger savepoint. Retrying...", cause);
			// This can fail if the operators are not opened yet
			Thread.sleep(500);
		}
	}

	assertNotNull("Failed to trigger savepoint", savepointPath);

	clusterClient.disposeSavepoint(savepointPath).get();

	clusterClient.cancel(jobId);

	// make sure, the execution is finished to not influence other test methods
	invokeThread.join(deadline.timeLeft().toMillis());
	assertFalse("Program invoke thread still running", invokeThread.isAlive());
}
 
Example 4
Source File: CliFrontendPackageProgramTest.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Regression guard: generating the plan of a jar whose main method loads a
 * class that only exists inside the jar itself must not fail with
 * "ProgramInvocationException: The main method caused an error", caused by
 * {@code java.lang.ClassNotFoundException:
 * org.apache.hadoop.hive.ql.io.RCFileInputFormat} thrown out of the user
 * program's main method (as happened historically via
 * CliFrontend.info → Client.getOptimizedPlanAsJson →
 * PackagedProgram.callMainMethod).
 *
 * <p>The test works as follows:
 *
 * <ul>
 *   <li> the CliFrontend is used to invoke a jar file that loads a class which
 *        is only available in the jar file itself (via a custom classloader)
 *   <li> the user-code classloader of the PackagedProgram is replaced with a
 *        special classloader for this test
 *   <li> that classloader accepts the special class (and returns String.class)
 * </ul>
 */
@Test
public void testPlanWithExternalClass() throws Exception {
	// Mutable flag inside a final array so the anonymous classloader below can set it.
	final boolean[] classLoaderCalled = { false };

	try {
		final String[] cliArgs = {
				"--classpath", "file:///tmp/foo",
				"--classpath", "file:///tmp/bar",
				"-c", TEST_JAR_CLASSLOADERTEST_CLASS, getTestJarPath(),
				"true", "arg1", "arg2" };
		final URL[] expectedClasspath = new URL[] { new URL("file:///tmp/foo"), new URL("file:///tmp/bar") };
		final String[] expectedProgramArgs = { "true", "arg1", "arg2" };

		final RunOptions options = CliFrontendParser.parseRunCommand(cliArgs);
		assertEquals(getTestJarPath(), options.getJarFilePath());
		assertArrayEquals(expectedClasspath, options.getClasspaths().toArray());
		assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, options.getEntryPointClassName());
		assertArrayEquals(expectedProgramArgs, options.getProgramArgs());

		final PackagedProgram prog = spy(frontend.buildProgram(options));

		// Resolves the "external" Hive class on purpose (recording the call);
		// every other class is delegated to the program's user-code classloader.
		final ClassLoader testClassLoader = new ClassLoader(prog.getUserCodeClassLoader()) {
			@Override
			public Class<?> loadClass(String name) throws ClassNotFoundException {
				if (!"org.apache.hadoop.hive.ql.io.RCFileInputFormat".equals(name)) {
					return super.loadClass(name);
				}
				classLoaderCalled[0] = true;
				return String.class; // Intentionally return the wrong class.
			}
		};
		when(prog.getUserCodeClassLoader()).thenReturn(testClassLoader);

		assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, prog.getMainClassName());
		assertArrayEquals(expectedProgramArgs, prog.getArguments());

		final Configuration config = new Configuration();
		final Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);

		// we expect this to fail with a "ClassNotFoundException"
		ClusterClient.getOptimizedPlanAsJson(compiler, prog, 666);
		fail("Should have failed with a ClassNotFoundException");
	} catch (ProgramInvocationException e) {
		if (!(e.getCause() instanceof ClassNotFoundException)) {
			e.printStackTrace();
			fail("Program didn't throw ClassNotFoundException");
		}
		assertTrue("Classloader was not called", classLoaderCalled[0]);
	}
}
 
Example 5
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Tests disposal of a savepoint which contains custom user-code KvState.
 *
 * <p>The packaged program is started on a separate thread, a savepoint is
 * triggered once a job reaches RUNNING, and the savepoint is then disposed
 * through the {@link ClusterClient}.
 */
@Test
public void testDisposeSavepointWithCustomKvState() throws Exception {
	ClusterClient<?> clusterClient = new MiniClusterClient(new Configuration(), miniClusterResource.getMiniCluster());

	Deadline deadline = new FiniteDuration(100, TimeUnit.SECONDS).fromNow();

	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = new PackagedProgram(
			new File(CUSTOM_KV_STATE_JAR_PATH),
			new String[] {
					String.valueOf(parallelism),
					checkpointDir.toURI().toString(),
					"5000",
					outputDir.toURI().toString()
			});

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CUSTOM_KV_STATE_JAR_PATH)),
		Collections.<URL>emptyList()
	);

	// Execute detached. Cancellation is the expected way for the program to
	// terminate, so a JobCancellationException cause is deliberately ignored.
	Thread invokeThread = new Thread(new Runnable() {
		@Override
		public void run() {
			try {
				program.invokeInteractiveModeForExecution();
			} catch (ProgramInvocationException ignored) {
				if (ignored.getCause() == null ||
					!(ignored.getCause() instanceof JobCancellationException)) {
					ignored.printStackTrace();
				}
			}
		}
	});

	LOG.info("Starting program invoke thread");
	invokeThread.start();

	// The job ID
	JobID jobId = null;

	LOG.info("Waiting for job status running.");

	// Wait for running job
	while (jobId == null && deadline.hasTimeLeft()) {

		Collection<JobStatusMessage> jobs = clusterClient.listJobs().get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		for (JobStatusMessage job : jobs) {
			if (job.getJobState() == JobStatus.RUNNING) {
				jobId = job.getJobId();
				LOG.info("Job running. ID: " + jobId);
				break;
			}
		}

		// Retry if job is not available yet
		if (jobId == null) {
			Thread.sleep(100L);
		}
	}

	// Fail fast with a clear message rather than passing a null job id to
	// triggerSavepoint below.
	assertNotNull("Job did not reach RUNNING state within the deadline", jobId);

	// Trigger savepoint; retry because this can fail while the operators are
	// not opened yet, but stop as soon as one savepoint succeeds.
	String savepointPath = null;
	for (int i = 0; i < 20 && savepointPath == null; i++) {
		LOG.info("Triggering savepoint (" + (i + 1) + "/20).");
		try {
			savepointPath = clusterClient.triggerSavepoint(jobId, null)
				.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		} catch (Exception cause) {
			LOG.info("Failed to trigger savepoint. Retrying...", cause);
			// This can fail if the operators are not opened yet
			Thread.sleep(500);
		}
	}

	assertNotNull("Failed to trigger savepoint", savepointPath);

	clusterClient.disposeSavepoint(savepointPath).get();

	clusterClient.cancel(jobId);

	// make sure, the execution is finished to not influence other test methods
	invokeThread.join(deadline.timeLeft().toMillis());
	assertFalse("Program invoke thread still running", invokeThread.isAlive());
}
 
Example 6
Source File: CliFrontendPackageProgramTest.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Regression guard: generating the plan of a jar whose main method loads a
 * class that only exists inside the jar itself must not fail with
 * "ProgramInvocationException: The main method caused an error", caused by
 * {@code java.lang.ClassNotFoundException:
 * org.apache.hadoop.hive.ql.io.RCFileInputFormat} thrown out of the user
 * program's main method (as happened historically via
 * CliFrontend.info → Client.getOptimizedPlanAsJson →
 * PackagedProgram.callMainMethod).
 *
 * <p>The test works as follows:
 *
 * <ul>
 *   <li> the CliFrontend is used to invoke a jar file that loads a class which
 *        is only available in the jar file itself (via a custom classloader)
 *   <li> the user-code classloader of the PackagedProgram is replaced with a
 *        special classloader for this test
 *   <li> that classloader accepts the special class (and returns String.class)
 * </ul>
 */
@Test
public void testPlanWithExternalClass() throws Exception {
	// Mutable flag inside a final array so the anonymous classloader below can set it.
	final boolean[] classLoaderCalled = { false };

	try {
		final String[] cliArgs = {
				"--classpath", "file:///tmp/foo",
				"--classpath", "file:///tmp/bar",
				"-c", TEST_JAR_CLASSLOADERTEST_CLASS, getTestJarPath(),
				"true", "arg1", "arg2" };
		final URL[] expectedClasspath = new URL[] { new URL("file:///tmp/foo"), new URL("file:///tmp/bar") };
		final String[] expectedProgramArgs = { "true", "arg1", "arg2" };

		final RunOptions options = CliFrontendParser.parseRunCommand(cliArgs);
		assertEquals(getTestJarPath(), options.getJarFilePath());
		assertArrayEquals(expectedClasspath, options.getClasspaths().toArray());
		assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, options.getEntryPointClassName());
		assertArrayEquals(expectedProgramArgs, options.getProgramArgs());

		final PackagedProgram prog = spy(frontend.buildProgram(options));

		// Resolves the "external" Hive class on purpose (recording the call);
		// every other class is delegated to the program's user-code classloader.
		final ClassLoader testClassLoader = new ClassLoader(prog.getUserCodeClassLoader()) {
			@Override
			public Class<?> loadClass(String name) throws ClassNotFoundException {
				if (!"org.apache.hadoop.hive.ql.io.RCFileInputFormat".equals(name)) {
					return super.loadClass(name);
				}
				classLoaderCalled[0] = true;
				return String.class; // Intentionally return the wrong class.
			}
		};
		when(prog.getUserCodeClassLoader()).thenReturn(testClassLoader);

		assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, prog.getMainClassName());
		assertArrayEquals(expectedProgramArgs, prog.getArguments());

		final Configuration config = new Configuration();
		final Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);

		// we expect this to fail with a "ClassNotFoundException"
		ClusterClient.getOptimizedPlanAsJson(compiler, prog, 666);
		fail("Should have failed with a ClassNotFoundException");
	} catch (ProgramInvocationException e) {
		if (!(e.getCause() instanceof ClassNotFoundException)) {
			e.printStackTrace();
			fail("Program didn't throw ClassNotFoundException");
		}
		assertTrue("Classloader was not called", classLoaderCalled[0]);
	}
}
 
Example 7
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Tests disposal of a savepoint which contains custom user-code KvState.
 *
 * <p>The packaged program is started on a separate thread, a savepoint is
 * triggered once a job reaches RUNNING, and the savepoint is then disposed
 * through the {@link ClusterClient}.
 */
@Test
public void testDisposeSavepointWithCustomKvState() throws Exception {
	ClusterClient<?> clusterClient = new MiniClusterClient(new Configuration(), miniClusterResource.getMiniCluster());

	Deadline deadline = new FiniteDuration(100, TimeUnit.SECONDS).fromNow();

	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = PackagedProgram.newBuilder()
		.setJarFile(new File(CUSTOM_KV_STATE_JAR_PATH))
		.setArguments(new String[] {
			String.valueOf(parallelism),
			checkpointDir.toURI().toString(),
			"5000",
			outputDir.toURI().toString(),
			"false" // Disable unaligned checkpoints as this test is triggering concurrent savepoints/checkpoints
		})
		.build();

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CUSTOM_KV_STATE_JAR_PATH)),
		Collections.emptyList()
	);

	// Execute detached. Cancellation is the expected way for the program to
	// terminate, so a JobCancellationException cause is deliberately ignored.
	Thread invokeThread = new Thread(() -> {
		try {
			program.invokeInteractiveModeForExecution();
		} catch (ProgramInvocationException ex) {
			if (ex.getCause() == null ||
				!(ex.getCause() instanceof JobCancellationException)) {
				ex.printStackTrace();
			}
		}
	});

	LOG.info("Starting program invoke thread");
	invokeThread.start();

	// The job ID
	JobID jobId = null;

	LOG.info("Waiting for job status running.");

	// Wait for running job
	while (jobId == null && deadline.hasTimeLeft()) {

		Collection<JobStatusMessage> jobs = clusterClient.listJobs().get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		for (JobStatusMessage job : jobs) {
			if (job.getJobState() == JobStatus.RUNNING) {
				jobId = job.getJobId();
				LOG.info("Job running. ID: " + jobId);
				break;
			}
		}

		// Retry if job is not available yet
		if (jobId == null) {
			Thread.sleep(100L);
		}
	}

	// Fail fast with a clear message rather than passing a null job id to
	// triggerSavepoint below.
	assertNotNull("Job did not reach RUNNING state within the deadline", jobId);

	// Trigger savepoint; retry because this can fail while the operators are
	// not opened yet, but stop as soon as one savepoint succeeds.
	String savepointPath = null;
	for (int i = 0; i < 20 && savepointPath == null; i++) {
		LOG.info("Triggering savepoint (" + (i + 1) + "/20).");
		try {
			savepointPath = clusterClient.triggerSavepoint(jobId, null)
				.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		} catch (Exception cause) {
			LOG.info("Failed to trigger savepoint. Retrying...", cause);
			// This can fail if the operators are not opened yet
			Thread.sleep(500);
		}
	}

	assertNotNull("Failed to trigger savepoint", savepointPath);

	clusterClient.disposeSavepoint(savepointPath).get();

	clusterClient.cancel(jobId).get();

	// make sure, the execution is finished to not influence other test methods
	invokeThread.join(deadline.timeLeft().toMillis());
	assertFalse("Program invoke thread still running", invokeThread.isAlive());
}
 
Example 8
Source File: CliFrontendPackageProgramTest.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Regression guard: generating the execution plan of a jar whose main method
 * loads a class that only exists inside the jar itself must not fail with
 * "ProgramInvocationException: The main method caused an error", caused by
 * {@code java.lang.ClassNotFoundException:
 * org.apache.hadoop.hive.ql.io.RCFileInputFormat} thrown out of the user
 * program's main method (as happened historically via
 * CliFrontend.info → Client.getOptimizedPlanAsJson →
 * PackagedProgram.callMainMethod).
 *
 * <p>The test works as follows:
 *
 * <ul>
 *   <li> the CliFrontend is used to invoke a jar file that loads a class which
 *        is only available in the jar file itself (via a custom classloader)
 *   <li> the user-code classloader of the PackagedProgram is replaced with a
 *        special classloader for this test
 *   <li> that classloader accepts the special class (and returns String.class)
 * </ul>
 */
@Test
public void testPlanWithExternalClass() throws Exception {
	// Mutable flag inside a final array so the anonymous classloader below can set it.
	final boolean[] classLoaderCalled = { false };

	try {
		final String[] cliArgs = {
				"--classpath", "file:///tmp/foo",
				"--classpath", "file:///tmp/bar",
				"-c", TEST_JAR_CLASSLOADERTEST_CLASS, getTestJarPath(),
				"true", "arg1", "arg2" };
		final URL[] expectedClasspath = new URL[] { new URL("file:///tmp/foo"), new URL("file:///tmp/bar") };
		final String[] expectedProgramArgs = { "true", "arg1", "arg2" };

		final CommandLine commandLine = CliFrontendParser.parse(CliFrontendParser.RUN_OPTIONS, cliArgs, true);
		final ProgramOptions programOptions = ProgramOptions.create(commandLine);

		assertEquals(getTestJarPath(), programOptions.getJarFilePath());
		assertArrayEquals(expectedClasspath, programOptions.getClasspaths().toArray());
		assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, programOptions.getEntryPointClassName());
		assertArrayEquals(expectedProgramArgs, programOptions.getProgramArgs());

		final PackagedProgram prog = spy(frontend.buildProgram(programOptions));

		// Resolves the "external" Hive class on purpose (recording the call);
		// every other class is delegated to the program's user-code classloader.
		final ClassLoader testClassLoader = new ClassLoader(prog.getUserCodeClassLoader()) {
			@Override
			public Class<?> loadClass(String name) throws ClassNotFoundException {
				if (!"org.apache.hadoop.hive.ql.io.RCFileInputFormat".equals(name)) {
					return super.loadClass(name);
				}
				classLoaderCalled[0] = true;
				return String.class; // Intentionally return the wrong class.
			}
		};
		when(prog.getUserCodeClassLoader()).thenReturn(testClassLoader);

		assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, prog.getMainClassName());
		assertArrayEquals(expectedProgramArgs, prog.getArguments());

		final Configuration config = new Configuration();
		final Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);

		// we expect this to fail with a "ClassNotFoundException"
		final Pipeline pipeline = PackagedProgramUtils.getPipelineFromProgram(prog, config, 666, true);
		FlinkPipelineTranslationUtil.translateToJSONExecutionPlan(pipeline);
		fail("Should have failed with a ClassNotFoundException");
	} catch (ProgramInvocationException e) {
		if (!(e.getCause() instanceof ClassNotFoundException)) {
			e.printStackTrace();
			fail("Program didn't throw ClassNotFoundException");
		}
		assertTrue("Classloader was not called", classLoaderCalled[0]);
	}
}