org.apache.flink.streaming.util.TestStreamEnvironment Java Examples

The following examples show how to use org.apache.flink.streaming.util.TestStreamEnvironment. They are taken from open source projects; the originating project and source file are noted above each example.
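Before the individual examples, here is a minimal sketch of the pattern most of them share: start a MiniCluster once per test class, bind it as the default execution context with TestStreamEnvironment.setAsContext(...), and release it again with TestStreamEnvironment.unsetAsContext() when the tests finish. The class and field names below are illustrative only and do not come from any of the listed projects; they follow the usage shown in Examples #17 and #29.

import org.apache.flink.runtime.minicluster.MiniCluster;
import org.apache.flink.runtime.minicluster.MiniClusterConfiguration;
import org.apache.flink.streaming.util.TestStreamEnvironment;
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class MyStreamingITCase {

    private static final int PARALLELISM = 4;

    private static MiniCluster miniCluster;

    @BeforeClass
    public static void setUpCluster() throws Exception {
        miniCluster = new MiniCluster(
            new MiniClusterConfiguration.Builder()
                .setNumTaskManagers(1)
                .setNumSlotsPerTaskManager(PARALLELISM)
                .build());
        miniCluster.start();

        // From now on, StreamExecutionEnvironment.getExecutionEnvironment()
        // returns a TestStreamEnvironment that submits jobs to this MiniCluster.
        TestStreamEnvironment.setAsContext(miniCluster, PARALLELISM);
    }

    @AfterClass
    public static void tearDownCluster() throws Exception {
        // Always unset the context so later tests get a fresh environment.
        TestStreamEnvironment.unsetAsContext();
        miniCluster.close();
    }
}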
Example #1
Source File: PulsarTestBase.java    From pulsar-flink with Apache License 2.0
@AfterClass
public static void shutDownServices() throws Exception {
    log.info("-------------------------------------------------------------------------");
    log.info("    Shut down PulsarTestBase ");
    log.info("-------------------------------------------------------------------------");

    TestStreamEnvironment.unsetAsContext();

    if (pulsarService != null) {
        pulsarService.stop();
    }

    log.info("-------------------------------------------------------------------------");
    log.info("    PulsarTestBase finished");
    log.info("-------------------------------------------------------------------------");
}
 
Example #2
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = new PackagedProgram(
		new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH),
		new String[] {
			checkpointDir.toURI().toString(),
			outputDir.toURI().toString()
		});

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)),
		Collections.<URL>emptyList());

	expectedException.expectCause(
		Matchers.<Throwable>hasProperty("cause", isA(SuccessException.class)));

	program.invokeInteractiveModeForExecution();
}
 
Example #3
Source File: MiniClusterWithClientResource.java    From flink with Apache License 2.0
@Override
public void after() {
	TestStreamEnvironment.unsetAsContext();
	TestEnvironment.unsetAsContext();

	Exception exception = null;

	if (clusterClient != null) {
		try {
			clusterClient.shutdown();
		} catch (Exception e) {
			exception = e;
		}
	}

	clusterClient = null;

	super.after();

	if (exception != null) {
		log.warn("Could not properly shut down the MiniClusterWithClientResource.", exception);
	}
}
 
Example #4
Source File: MiniClusterWithClientResource.java    From flink with Apache License 2.0
@Override
public void after() {
	TestStreamEnvironment.unsetAsContext();
	TestEnvironment.unsetAsContext();

	Exception exception = null;

	if (clusterClient != null) {
		try {
			clusterClient.close();
		} catch (Exception e) {
			exception = e;
		}
	}

	clusterClient = null;

	super.after();

	if (exception != null) {
		log.warn("Could not properly shut down the MiniClusterWithClientResource.", exception);
	}
}
 
Example #5
Source File: ClassLoaderITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = new PackagedProgram(
		new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH),
		new String[] {
			checkpointDir.toURI().toString(),
			outputDir.toURI().toString()
		});

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)),
		Collections.<URL>emptyList());

	expectedException.expectCause(
		Matchers.<Throwable>hasProperty("cause", isA(SuccessException.class)));

	program.invokeInteractiveModeForExecution();
}
 
Example #6
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = PackagedProgram.newBuilder()
		.setJarFile(new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH))
		.setArguments(new String[] { checkpointDir.toURI().toString(), outputDir.toURI().toString()})
		.build();

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)),
		Collections.emptyList());

	try {
		program.invokeInteractiveModeForExecution();
		fail("exception should happen");
	} catch (ProgramInvocationException e) {
		assertTrue(ExceptionUtils.findThrowable(e, SuccessException.class).isPresent());
	}
}
 
Example #7
Source File: MiniClusterWithClientResource.java    From Flink-CEPplus with Apache License 2.0
@Override
public void after() {
	TestStreamEnvironment.unsetAsContext();
	TestEnvironment.unsetAsContext();

	Exception exception = null;

	if (clusterClient != null) {
		try {
			clusterClient.shutdown();
		} catch (Exception e) {
			exception = e;
		}
	}

	clusterClient = null;

	super.after();

	if (exception != null) {
		log.warn("Could not properly shut down the MiniClusterWithClientResource.", exception);
	}
}
 
Example #8
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testCheckpointedStreamingClassloaderJobWithCustomClassLoader() throws ProgramInvocationException {
	// checkpointed streaming job with custom classes for the checkpoint (FLINK-2543)
	// the test also ensures that user specific exceptions are serializable between JobManager <--> JobClient.
	PackagedProgram streamingCheckpointedProg = PackagedProgram.newBuilder()
		.setJarFile(new File(STREAMING_CHECKPOINTED_PROG_JAR_FILE))
		.build();

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_CHECKPOINTED_PROG_JAR_FILE)),
		Collections.emptyList());

	try {
		streamingCheckpointedProg.invokeInteractiveModeForExecution();
	} catch (Exception e) {
		// Program should terminate with a 'SuccessException':
		// the exception class is contained in the user-jar, but is not present on the maven classpath
		// the deserialization of the exception should thus fail here
		Optional<Throwable> exception = ExceptionUtils.findThrowable(e,
			candidate -> candidate.getClass().getName().equals("org.apache.flink.test.classloading.jar.CheckpointedStreamingProgram$SuccessException"));

		if (!exception.isPresent()) {
			// if this is achieved, either we failed due to another exception or the user-specific
			// exception is not serialized between JobManager and JobClient.
			throw e;
		}

		try {
			Class.forName(exception.get().getClass().getName());
			fail("Deserialization of user exception should have failed.");
		} catch (ClassNotFoundException expected) {
			// expected
		}
	}
}
 
Example #9
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testStreamingClassloaderJobWithCustomClassLoader() throws ProgramInvocationException {
	// regular streaming job
	PackagedProgram streamingProg = PackagedProgram.newBuilder().setJarFile(new File(STREAMING_PROG_JAR_FILE)).build();

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_PROG_JAR_FILE)),
		Collections.emptyList());

	streamingProg.invokeInteractiveModeForExecution();
}
 
Example #10
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testStreamingCustomSplitJobWithCustomClassLoader() throws ProgramInvocationException {
	PackagedProgram streamingInputSplitTestProg = PackagedProgram.newBuilder()
		.setJarFile(new File(STREAMING_INPUT_SPLITS_PROG_JAR_FILE))
		.build();

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_INPUT_SPLITS_PROG_JAR_FILE)),
		Collections.emptyList());

	streamingInputSplitTestProg.invokeInteractiveModeForExecution();
}
 
Example #11
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testProgramWithParentFirstClassLoader() throws IOException, ProgramInvocationException {
	// We have two files named test-resource in src/resource (parent classloader classpath) and
	// tmp folders (child classloader classpath) respectively.
	String childResourceDirName = "child1";
	String testResourceName = "test-resource";
	File childResourceDir = FOLDER.newFolder(childResourceDirName);
	File childResource = new File(childResourceDir, testResourceName);
	assertTrue(childResource.createNewFile());

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CLASSLOADING_POLICY_JAR_PATH)),
		Collections.emptyList());

	// parent-first classloading
	Configuration parentFirstConf = new Configuration();
	parentFirstConf.setString("classloader.resolve-order", "parent-first");

	final PackagedProgram parentFirstProgram = PackagedProgram.newBuilder()
		.setJarFile(new File(CLASSLOADING_POLICY_JAR_PATH))
		.setUserClassPaths(Collections.singletonList(childResourceDir.toURI().toURL()))
		.setConfiguration(parentFirstConf)
		.setArguments(testResourceName, "test-classes")
		.build();

	final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
	Thread.currentThread().setContextClassLoader(parentFirstProgram.getUserCodeClassLoader());
	try {
		parentFirstProgram.invokeInteractiveModeForExecution();
	} finally {
		Thread.currentThread().setContextClassLoader(contextClassLoader);
	}
}
 
Example #12
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testProgramWithChildFirstClassLoader() throws IOException, ProgramInvocationException {
	// We have two files named test-resource in src/resource (parent classloader classpath) and
	// tmp folders (child classloader classpath) respectively.
	String childResourceDirName = "child0";
	String testResourceName = "test-resource";
	File childResourceDir = FOLDER.newFolder(childResourceDirName);
	File childResource = new File(childResourceDir, testResourceName);
	assertTrue(childResource.createNewFile());

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CLASSLOADING_POLICY_JAR_PATH)),
		Collections.emptyList());

	// child-first classloading
	Configuration childFirstConf = new Configuration();
	childFirstConf.setString("classloader.resolve-order", "child-first");

	final PackagedProgram childFirstProgram = PackagedProgram.newBuilder()
		.setJarFile(new File(CLASSLOADING_POLICY_JAR_PATH))
		.setUserClassPaths(Collections.singletonList(childResourceDir.toURI().toURL()))
		.setConfiguration(childFirstConf)
		.setArguments(testResourceName, childResourceDirName)
		.build();

	final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
	Thread.currentThread().setContextClassLoader(childFirstProgram.getUserCodeClassLoader());
	try {
		childFirstProgram.invokeInteractiveModeForExecution();
	} finally {
		Thread.currentThread().setContextClassLoader(contextClassLoader);
	}
}
 
Example #13
Source File: MiniClusterWithClientResource.java    From flink with Apache License 2.0
@Override
public void before() throws Exception {
	super.before();

	clusterClient = createMiniClusterClient();

	executionEnvironment = new TestEnvironment(getMiniCluster(), getNumberSlots(), false);
	executionEnvironment.setAsContext();
	TestStreamEnvironment.setAsContext(getMiniCluster(), getNumberSlots());
}
 
Example #14
Source File: KafkaTestBase.java    From flink with Apache License 2.0
@AfterClass
public static void shutDownServices() throws Exception {

	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    Shut down KafkaTestBase ");
	LOG.info("-------------------------------------------------------------------------");

	TestStreamEnvironment.unsetAsContext();

	shutdownClusters();

	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    KafkaTestBase finished");
	LOG.info("-------------------------------------------------------------------------");
}
 
Example #15
Source File: DataSetTestEnvironment.java    From flink-spector with Apache License 2.0
public DataSetTestEnvironment(MiniCluster executor, int parallelism) {
    super(executor, parallelism, false);
    runner = new Runner(executor) {
        @Override
        protected void executeEnvironment() throws Throwable {
            TestStreamEnvironment.setAsContext(executor, parallelism);
            try {
                execute();
            } finally {
                TestStreamEnvironment.unsetAsContext();
            }
        }
    };
}
 
Example #16
Source File: DataStreamTestEnvironment.java    From flink-spector with Apache License 2.0
public DataStreamTestEnvironment(MiniCluster cluster, int parallelism) {
	super(cluster, parallelism);
	runner = new Runner(cluster) {
		@Override
		protected void executeEnvironment() throws Throwable {
			TestStreamEnvironment.setAsContext(cluster, parallelism);
			try {
				execute();
			}
			finally {
				TestStreamEnvironment.unsetAsContext();
			}
		}
	};
}
 
Example #17
Source File: FlinkRequiresStableInputTest.java    From beam with Apache License 2.0
@BeforeClass
public static void beforeClass() throws Exception {
  final int parallelism = 1;

  Configuration config = new Configuration();
  // Avoid port collision in parallel tests
  config.setInteger(RestOptions.PORT, 0);
  config.setString(CheckpointingOptions.STATE_BACKEND, "filesystem");
  // It is necessary to configure the checkpoint directory for the state backend,
  // even though we only create savepoints in this test.
  config.setString(
      CheckpointingOptions.CHECKPOINTS_DIRECTORY,
      "file://" + tempFolder.getRoot().getAbsolutePath());
  // Checkpoints will go into a subdirectory of this directory
  config.setString(
      CheckpointingOptions.SAVEPOINT_DIRECTORY,
      "file://" + tempFolder.getRoot().getAbsolutePath());

  MiniClusterConfiguration clusterConfig =
      new MiniClusterConfiguration.Builder()
          .setConfiguration(config)
          .setNumTaskManagers(1)
          .setNumSlotsPerTaskManager(1)
          .build();

  flinkCluster = new MiniCluster(clusterConfig);
  flinkCluster.start();

  TestStreamEnvironment.setAsContext(flinkCluster, parallelism);
}
 
Example #18
Source File: KafkaTestBase.java    From Flink-CEPplus with Apache License 2.0
@AfterClass
public static void shutDownServices() throws Exception {

	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    Shut down KafkaTestBase ");
	LOG.info("-------------------------------------------------------------------------");

	TestStreamEnvironment.unsetAsContext();

	shutdownClusters();

	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    KafkaTestBase finished");
	LOG.info("-------------------------------------------------------------------------");
}
 
Example #19
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testCheckpointedStreamingClassloaderJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	// checkpointed streaming job with custom classes for the checkpoint (FLINK-2543)
	// the test also ensures that user specific exceptions are serializable between JobManager <--> JobClient.
	PackagedProgram streamingCheckpointedProg = new PackagedProgram(new File(STREAMING_CHECKPOINTED_PROG_JAR_FILE));

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_CHECKPOINTED_PROG_JAR_FILE)),
		Collections.<URL>emptyList());

	try {
		streamingCheckpointedProg.invokeInteractiveModeForExecution();
	} catch (Exception e) {
		// Program should terminate with a 'SuccessException':
		// the exception class is contained in the user-jar, but is not present on the maven classpath
		// the deserialization of the exception should thus fail here
		try {
			Optional<Throwable> exception = ExceptionUtils.findThrowable(e,
				candidate -> candidate.getClass().getCanonicalName().equals("org.apache.flink.test.classloading.jar.CheckpointedStreamingProgram.SuccessException"));

			// if we reach this point we either failed due to another exception,
			// or the deserialization of the user-exception did not fail
			if (!exception.isPresent()) {
				throw e;
			} else {
				Assert.fail("Deserialization of user exception should have failed.");
			}
		} catch (NoClassDefFoundError expected) {
			// expected
		}
	}
}
 
Example #20
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testStreamingClassloaderJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	// regular streaming job
	PackagedProgram streamingProg = new PackagedProgram(new File(STREAMING_PROG_JAR_FILE));

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_PROG_JAR_FILE)),
		Collections.<URL>emptyList());

	streamingProg.invokeInteractiveModeForExecution();
}
 
Example #21
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@Test
public void testStreamingCustomSplitJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	PackagedProgram streamingInputSplitTestProg = new PackagedProgram(new File(STREAMING_INPUT_SPLITS_PROG_JAR_FILE));

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_INPUT_SPLITS_PROG_JAR_FILE)),
		Collections.<URL>emptyList());

	streamingInputSplitTestProg.invokeInteractiveModeForExecution();
}
 
Example #22
Source File: MiniClusterWithClientResource.java    From flink with Apache License 2.0
@Override
public void before() throws Exception {
	super.before();

	clusterClient = createMiniClusterClient();

	executionEnvironment = new TestEnvironment(getMiniCluster(), getNumberSlots(), false);
	executionEnvironment.setAsContext();
	TestStreamEnvironment.setAsContext(getMiniCluster(), getNumberSlots());
}
 
Example #23
Source File: KafkaTestBase.java    From flink with Apache License 2.0
@AfterClass
public static void shutDownServices() throws Exception {

	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    Shut down KafkaTestBase ");
	LOG.info("-------------------------------------------------------------------------");

	TestStreamEnvironment.unsetAsContext();

	shutdownClusters();

	LOG.info("-------------------------------------------------------------------------");
	LOG.info("    KafkaTestBase finished");
	LOG.info("-------------------------------------------------------------------------");
}
 
Example #24
Source File: ClassLoaderITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCheckpointedStreamingClassloaderJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	// checkpointed streaming job with custom classes for the checkpoint (FLINK-2543)
	// the test also ensures that user specific exceptions are serializable between JobManager <--> JobClient.
	PackagedProgram streamingCheckpointedProg = new PackagedProgram(new File(STREAMING_CHECKPOINTED_PROG_JAR_FILE));

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_CHECKPOINTED_PROG_JAR_FILE)),
		Collections.<URL>emptyList());

	try {
		streamingCheckpointedProg.invokeInteractiveModeForExecution();
	} catch (Exception e) {
		// Program should terminate with a 'SuccessException':
		// the exception class is contained in the user-jar, but is not present on the maven classpath
		// the deserialization of the exception should thus fail here
		try {
			Optional<Throwable> exception = ExceptionUtils.findThrowable(e,
				candidate -> candidate.getClass().getCanonicalName().equals("org.apache.flink.test.classloading.jar.CheckpointedStreamingProgram.SuccessException"));

			// if we reach this point we either failed due to another exception,
			// or the deserialization of the user-exception did not fail
			if (!exception.isPresent()) {
				throw e;
			} else {
				Assert.fail("Deserialization of user exception should have failed.");
			}
		} catch (NoClassDefFoundError expected) {
			// expected
		}
	}
}
 
Example #25
Source File: ClassLoaderITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testStreamingClassloaderJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	// regular streaming job
	PackagedProgram streamingProg = new PackagedProgram(new File(STREAMING_PROG_JAR_FILE));

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_PROG_JAR_FILE)),
		Collections.<URL>emptyList());

	streamingProg.invokeInteractiveModeForExecution();
}
 
Example #26
Source File: ClassLoaderITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testStreamingCustomSplitJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
	PackagedProgram streamingInputSplitTestProg = new PackagedProgram(new File(STREAMING_INPUT_SPLITS_PROG_JAR_FILE));

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(STREAMING_INPUT_SPLITS_PROG_JAR_FILE)),
		Collections.<URL>emptyList());

	streamingInputSplitTestProg.invokeInteractiveModeForExecution();
}
 
Example #27
Source File: MiniClusterWithClientResource.java    From Flink-CEPplus with Apache License 2.0
@Override
public void before() throws Exception {
	super.before();

	clusterClient = createMiniClusterClient();

	executionEnvironment = new TestEnvironment(getMiniCluster(), getNumberSlots(), false);
	executionEnvironment.setAsContext();
	TestStreamEnvironment.setAsContext(getMiniCluster(), getNumberSlots());
}
 
Example #28
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
/**
 * Tests disposal of a savepoint, which contains custom user code KvState.
 */
@Test
public void testDisposeSavepointWithCustomKvState() throws Exception {
	ClusterClient<?> clusterClient = new MiniClusterClient(new Configuration(), miniClusterResource.getMiniCluster());

	Deadline deadline = new FiniteDuration(100, TimeUnit.SECONDS).fromNow();

	File checkpointDir = FOLDER.newFolder();
	File outputDir = FOLDER.newFolder();

	final PackagedProgram program = new PackagedProgram(
			new File(CUSTOM_KV_STATE_JAR_PATH),
			new String[] {
					String.valueOf(parallelism),
					checkpointDir.toURI().toString(),
					"5000",
					outputDir.toURI().toString()
			});

	TestStreamEnvironment.setAsContext(
		miniClusterResource.getMiniCluster(),
		parallelism,
		Collections.singleton(new Path(CUSTOM_KV_STATE_JAR_PATH)),
		Collections.<URL>emptyList()
	);

	// Execute detached
	Thread invokeThread = new Thread(new Runnable() {
		@Override
		public void run() {
			try {
				program.invokeInteractiveModeForExecution();
			} catch (ProgramInvocationException ignored) {
				if (ignored.getCause() == null ||
					!(ignored.getCause() instanceof JobCancellationException)) {
					ignored.printStackTrace();
				}
			}
		}
	});

	LOG.info("Starting program invoke thread");
	invokeThread.start();

	// The job ID
	JobID jobId = null;

	LOG.info("Waiting for job status running.");

	// Wait for running job
	while (jobId == null && deadline.hasTimeLeft()) {

		Collection<JobStatusMessage> jobs = clusterClient.listJobs().get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		for (JobStatusMessage job : jobs) {
			if (job.getJobState() == JobStatus.RUNNING) {
				jobId = job.getJobId();
				LOG.info("Job running. ID: " + jobId);
				break;
			}
		}

		// Retry if job is not available yet
		if (jobId == null) {
			Thread.sleep(100L);
		}
	}

	// Trigger savepoint
	String savepointPath = null;
	for (int i = 0; i < 20; i++) {
		LOG.info("Triggering savepoint (" + (i + 1) + "/20).");
		try {
			savepointPath = clusterClient.triggerSavepoint(jobId, null)
				.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
		} catch (Exception cause) {
			LOG.info("Failed to trigger savepoint. Retrying...", cause);
			// This can fail if the operators are not opened yet
			Thread.sleep(500);
		}
	}

	assertNotNull("Failed to trigger savepoint", savepointPath);

	clusterClient.disposeSavepoint(savepointPath).get();

	clusterClient.cancel(jobId);

	// make sure, the execution is finished to not influence other test methods
	invokeThread.join(deadline.timeLeft().toMillis());
	assertFalse("Program invoke thread still running", invokeThread.isAlive());
}
 
Example #29
Source File: FlinkRequiresStableInputTest.java    From beam with Apache License 2.0
@AfterClass
public static void afterClass() throws Exception {
  TestStreamEnvironment.unsetAsContext();
  flinkCluster.close();
  flinkCluster = null;
}
 
Example #30
Source File: ClassLoaderITCase.java    From flink with Apache License 2.0
@After
public void tearDown() {
	TestStreamEnvironment.unsetAsContext();
	TestEnvironment.unsetAsContext();
}