org.apache.flink.core.testutils.CheckedThread Java Examples

The following examples show how to use org.apache.flink.core.testutils.CheckedThread. Each example is taken from an open source project; the source file and project are noted above each snippet.
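CheckedThread is a small testing utility: a Thread subclass whose work goes into an overridden go() method, with any exception thrown there captured and rethrown when the caller invokes sync(). The sketch below illustrates that basic pattern, assuming only the API visible in the examples on this page (an optional thread-name constructor, go(), start(), sync()); the doBackgroundWork() helper is a hypothetical stand-in for whatever concurrent operation a test exercises.

import org.apache.flink.core.testutils.CheckedThread;

public class CheckedThreadSketch {

	public static void main(String[] args) throws Exception {
		// the optional constructor argument only names the thread
		final CheckedThread worker = new CheckedThread("example-worker") {
			@Override
			public void go() throws Exception {
				// an exception thrown here is recorded by the thread ...
				doBackgroundWork();
			}
		};

		worker.start();
		// ... and rethrown here, so the calling (test) thread sees the failure
		worker.sync();
	}

	private static void doBackgroundWork() throws Exception {
		// placeholder for the concurrent work under test
	}
}
 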
Example #1
Source File: BlobServerPutTest.java    From flink with Apache License 2.0
private void testServerContentAddressableGetStorageLocationConcurrent(
		@Nullable final JobID jobId) throws Exception {
	final Configuration config = new Configuration();
	config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

	try (BlobServer server = new BlobServer(config, new VoidBlobStore())) {

		server.start();

		BlobKey key1 = new TransientBlobKey();
		BlobKey key2 = new PermanentBlobKey();
		CheckedThread[] threads = new CheckedThread[] {
			new ContentAddressableGetStorageLocation(server, jobId, key1),
			new ContentAddressableGetStorageLocation(server, jobId, key1),
			new ContentAddressableGetStorageLocation(server, jobId, key1),
			new ContentAddressableGetStorageLocation(server, jobId, key2),
			new ContentAddressableGetStorageLocation(server, jobId, key2),
			new ContentAddressableGetStorageLocation(server, jobId, key2)
		};
		checkedThreadSimpleTest(threads);
	}
}
 
Example #2
Source File: BlobCachePutTest.java    From flink with Apache License 2.0
private void testTransientBlobCacheGetStorageLocationConcurrent(
		@Nullable final JobID jobId) throws Exception {
	final Configuration config = new Configuration();
	config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

	try (BlobServer server = new BlobServer(config, new VoidBlobStore());
		final TransientBlobCache cache = new TransientBlobCache(
			config, new InetSocketAddress("localhost", server.getPort()))) {

		server.start();

		BlobKey key = new TransientBlobKey();
		CheckedThread[] threads = new CheckedThread[] {
			new TransientBlobCacheGetStorageLocation(cache, jobId, key),
			new TransientBlobCacheGetStorageLocation(cache, jobId, key),
			new TransientBlobCacheGetStorageLocation(cache, jobId, key)
		};
		checkedThreadSimpleTest(threads);
	}
}
 
Example #3
Source File: FileUtilsTest.java    From Flink-CEPplus with Apache License 2.0
@Ignore
@Test
public void testDeleteDirectoryConcurrently() throws Exception {
	final File parent = tmp.newFolder();

	generateRandomDirs(parent, 20, 5, 3);

	// start three concurrent threads that delete the contents
	CheckedThread t1 = new Deleter(parent);
	CheckedThread t2 = new Deleter(parent);
	CheckedThread t3 = new Deleter(parent);
	t1.start();
	t2.start();
	t3.start();
	t1.sync();
	t2.sync();
	t3.sync();

	// assert is empty
	assertFalse(parent.exists());
}
 
Example #4
Source File: BlobCachePutTest.java    From flink with Apache License 2.0
/**
 * Tests concurrent calls to {@link PermanentBlobCache#getStorageLocation(JobID, BlobKey)}.
 */
@Test
public void testPermanentBlobCacheGetStorageLocationConcurrentForJob() throws Exception {
	final JobID jobId = new JobID();
	final Configuration config = new Configuration();
	config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

	try (BlobServer server = new BlobServer(config, new VoidBlobStore());
		final PermanentBlobCache cache = new PermanentBlobCache(
			config, new VoidBlobStore(), new InetSocketAddress("localhost", server.getPort())
		)) {

		server.start();

		BlobKey key = new PermanentBlobKey();
		CheckedThread[] threads = new CheckedThread[] {
			new PermanentBlobCacheGetStorageLocation(cache, jobId, key),
			new PermanentBlobCacheGetStorageLocation(cache, jobId, key),
			new PermanentBlobCacheGetStorageLocation(cache, jobId, key)
		};
		checkedThreadSimpleTest(threads);
	}
}
 
Example #5
Source File: BlobCachePutTest.java    From Flink-CEPplus with Apache License 2.0
private void testTransientBlobCacheGetStorageLocationConcurrent(
		@Nullable final JobID jobId) throws Exception {
	final Configuration config = new Configuration();
	config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

	try (BlobServer server = new BlobServer(config, new VoidBlobStore());
		final TransientBlobCache cache = new TransientBlobCache(
			config, new InetSocketAddress("localhost", server.getPort()))) {

		server.start();

		BlobKey key = new TransientBlobKey();
		CheckedThread[] threads = new CheckedThread[] {
			new TransientBlobCacheGetStorageLocation(cache, jobId, key),
			new TransientBlobCacheGetStorageLocation(cache, jobId, key),
			new TransientBlobCacheGetStorageLocation(cache, jobId, key)
		};
		checkedThreadSimpleTest(threads);
	}
}
 
Example #6
Source File: BlobCachePutTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests concurrent calls to {@link PermanentBlobCache#getStorageLocation(JobID, BlobKey)}.
 */
@Test
public void testPermanentBlobCacheGetStorageLocationConcurrentForJob() throws Exception {
	final JobID jobId = new JobID();
	final Configuration config = new Configuration();
	config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

	try (BlobServer server = new BlobServer(config, new VoidBlobStore());
		final PermanentBlobCache cache = new PermanentBlobCache(
			config, new VoidBlobStore(), new InetSocketAddress("localhost", server.getPort())
		)) {

		server.start();

		BlobKey key = new PermanentBlobKey();
		CheckedThread[] threads = new CheckedThread[] {
			new PermanentBlobCacheGetStorageLocation(cache, jobId, key),
			new PermanentBlobCacheGetStorageLocation(cache, jobId, key),
			new PermanentBlobCacheGetStorageLocation(cache, jobId, key)
		};
		checkedThreadSimpleTest(threads);
	}
}
 
Example #7
Source File: FileUtilsTest.java    From flink with Apache License 2.0
@Ignore
@Test
public void testDeleteDirectoryConcurrently() throws Exception {
	final File parent = tmp.newFolder();

	generateRandomDirs(parent, 20, 5, 3);

	// start three concurrent threads that delete the contents
	CheckedThread t1 = new Deleter(parent);
	CheckedThread t2 = new Deleter(parent);
	CheckedThread t3 = new Deleter(parent);
	t1.start();
	t2.start();
	t3.start();
	t1.sync();
	t2.sync();
	t3.sync();

	// assert is empty
	assertFalse(parent.exists());
}
 
Example #8
Source File: BlobServerPutTest.java    From Flink-CEPplus with Apache License 2.0
private void testServerContentAddressableGetStorageLocationConcurrent(
		@Nullable final JobID jobId) throws Exception {
	final Configuration config = new Configuration();
	config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath());

	try (BlobServer server = new BlobServer(config, new VoidBlobStore())) {

		server.start();

		BlobKey key1 = new TransientBlobKey();
		BlobKey key2 = new PermanentBlobKey();
		CheckedThread[] threads = new CheckedThread[] {
			new ContentAddressableGetStorageLocation(server, jobId, key1),
			new ContentAddressableGetStorageLocation(server, jobId, key1),
			new ContentAddressableGetStorageLocation(server, jobId, key1),
			new ContentAddressableGetStorageLocation(server, jobId, key2),
			new ContentAddressableGetStorageLocation(server, jobId, key2),
			new ContentAddressableGetStorageLocation(server, jobId, key2)
		};
		checkedThreadSimpleTest(threads);
	}
}
 
Example #9
Source File: CassandraSinkBaseTest.java    From Flink-CEPplus with Apache License 2.0
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testWaitForPendingUpdatesOnSnapshot() throws Exception {
	final TestCassandraSink casSinkFunc = new TestCassandraSink();

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createOpenedTestHarness(casSinkFunc)) {
		CompletableFuture<ResultSet> completableFuture = new CompletableFuture<>();
		casSinkFunc.enqueueCompletableFuture(completableFuture);

		casSinkFunc.invoke("hello");
		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());

		final CountDownLatch latch = new CountDownLatch(1);
		Thread t = new CheckedThread("Flink-CassandraSinkBaseTest") {
			@Override
			public void go() throws Exception {
				testHarness.snapshot(123L, 123L);
				latch.countDown();
			}
		};
		t.start();
		while (t.getState() != Thread.State.WAITING) {
			Thread.sleep(5);
		}

		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());
		completableFuture.complete(null);
		latch.await();
		Assert.assertEquals(0, casSinkFunc.getAcquiredPermits());
	}
}
 
Example #10
Source File: StateDescriptorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSerializerLazyInitializeInParallel() throws Exception {
	final String name = "testSerializerLazyInitializeInParallel";
	// use PojoTypeInfo which will create a new serializer when createSerializer is invoked.
	final TestStateDescriptor<String> desc =
		new TestStateDescriptor<>(name, new PojoTypeInfo<>(String.class, new ArrayList<>()));
	final int threadNumber = 20;
	final ArrayList<CheckedThread> threads = new ArrayList<>(threadNumber);
	final ExecutionConfig executionConfig = new ExecutionConfig();
	final ConcurrentHashMap<Integer, TypeSerializer<String>> serializers = new ConcurrentHashMap<>();
	for (int i = 0; i < threadNumber; i++) {
		threads.add(new CheckedThread() {
			@Override
			public void go() {
				desc.initializeSerializerUnlessSet(executionConfig);
				TypeSerializer<String> serializer = desc.getOriginalSerializer();
				serializers.put(System.identityHashCode(serializer), serializer);
			}
		});
	}
	threads.forEach(Thread::start);
	for (CheckedThread t : threads) {
		t.sync();
	}
	assertEquals("Should use only one serializer but actually: " + serializers, 1, serializers.size());
	threads.clear();
}
 
Example #11
Source File: NetworkBufferPoolTest.java    From flink with Apache License 2.0
/**
 * Tests {@link NetworkBufferPool#requestMemorySegments()}, verifying it may be aborted in
 * case of a concurrent {@link NetworkBufferPool#destroy()} call.
 */
@Test
public void testRequestMemorySegmentsInterruptable() throws Exception {
	final int numBuffers = 10;

	NetworkBufferPool globalPool = new NetworkBufferPool(numBuffers, 128, 10);
	MemorySegment segment = globalPool.requestMemorySegment();
	assertNotNull(segment);

	final OneShotLatch isRunning = new OneShotLatch();
	CheckedThread asyncRequest = new CheckedThread() {
		@Override
		public void go() throws Exception {
			isRunning.trigger();
			globalPool.requestMemorySegments();
		}
	};
	asyncRequest.start();

	// We want the destroy call inside the blocking part of the globalPool.requestMemorySegments()
	// call above. We cannot guarantee this though but make it highly probable:
	isRunning.await();
	Thread.sleep(10);
	globalPool.destroy();

	segment.free();

	expectedException.expect(IllegalStateException.class);
	expectedException.expectMessage("destroyed");
	try {
		asyncRequest.sync();
	} finally {
		globalPool.destroy();
	}
}
 
Example #12
Source File: AvroSerializerConcurrencyTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testConcurrentUseOfSerializer() throws Exception {
	final AvroSerializer<String> serializer = new AvroSerializer<>(String.class);

	final BlockerSync sync = new BlockerSync();

	final DataOutputView regularOut = new DataOutputSerializer(32);
	final DataOutputView lockingOut = new LockingView(sync);

	// this thread serializes and gets stuck there
	final CheckedThread thread = new CheckedThread("serializer") {
		@Override
		public void go() throws Exception {
			serializer.serialize("a value", lockingOut);
		}
	};

	thread.start();
	sync.awaitBlocker();

	// this should fail with an exception
	try {
		serializer.serialize("value", regularOut);
		fail("should have failed with an exception");
	}
	catch (IllegalStateException e) {
		// expected
	}
	finally {
		// release the thread that serializes
		sync.releaseBlocker();
	}

	// this propagates exceptions from the spawned thread
	thread.sync();
}
 
Example #13
Source File: AbstractFetcherTest.java    From flink with Apache License 2.0
@Test
public void testConcurrentPartitionsDiscoveryAndLoopFetching() throws Exception {
	// test data
	final KafkaTopicPartition testPartition = new KafkaTopicPartition("test", 42);

	// ----- create the test fetcher -----

	SourceContext<String> sourceContext = new TestSourceContext<>();
	Map<KafkaTopicPartition, Long> partitionsWithInitialOffsets =
		Collections.singletonMap(testPartition, KafkaTopicPartitionStateSentinel.GROUP_OFFSET);

	final OneShotLatch fetchLoopWaitLatch = new OneShotLatch();
	final OneShotLatch stateIterationBlockLatch = new OneShotLatch();

	final TestFetcher<String> fetcher = new TestFetcher<>(
		sourceContext,
		partitionsWithInitialOffsets,
		null, /* watermark strategy */
		new TestProcessingTimeService(),
		10,
		fetchLoopWaitLatch,
		stateIterationBlockLatch);

	// ----- run the fetcher -----

	final CheckedThread checkedThread = new CheckedThread() {
		@Override
		public void go() throws Exception {
			fetcher.runFetchLoop();
		}
	};
	checkedThread.start();

	// wait until state iteration begins before adding discovered partitions
	fetchLoopWaitLatch.await();
	fetcher.addDiscoveredPartitions(Collections.singletonList(testPartition));

	stateIterationBlockLatch.trigger();
	checkedThread.sync();
}
 
Example #14
Source File: CassandraSinkBaseTest.java    From flink with Apache License 2.0
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testWaitForPendingUpdatesOnClose() throws Exception {
	TestCassandraSink casSinkFunc = new TestCassandraSink();

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createOpenedTestHarness(casSinkFunc)) {

		CompletableFuture<ResultSet> completableFuture = new CompletableFuture<>();
		casSinkFunc.enqueueCompletableFuture(completableFuture);

		casSinkFunc.invoke("hello");
		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());

		final CountDownLatch latch = new CountDownLatch(1);
		Thread t = new CheckedThread("Flink-CassandraSinkBaseTest") {
			@Override
			public void go() throws Exception {
				testHarness.close();
				latch.countDown();
			}
		};
		t.start();
		while (t.getState() != Thread.State.TIMED_WAITING) {
			Thread.sleep(5);
		}

		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());
		completableFuture.complete(null);
		latch.await();
		Assert.assertEquals(0, casSinkFunc.getAcquiredPermits());
	}
}
 
Example #15
Source File: CassandraSinkBaseTest.java    From flink with Apache License 2.0
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testWaitForPendingUpdatesOnSnapshot() throws Exception {
	final TestCassandraSink casSinkFunc = new TestCassandraSink();

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createOpenedTestHarness(casSinkFunc)) {
		CompletableFuture<ResultSet> completableFuture = new CompletableFuture<>();
		casSinkFunc.enqueueCompletableFuture(completableFuture);

		casSinkFunc.invoke("hello");
		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());

		final CountDownLatch latch = new CountDownLatch(1);
		Thread t = new CheckedThread("Flink-CassandraSinkBaseTest") {
			@Override
			public void go() throws Exception {
				testHarness.snapshot(123L, 123L);
				latch.countDown();
			}
		};
		t.start();
		while (t.getState() != Thread.State.TIMED_WAITING) {
			Thread.sleep(5);
		}

		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());
		completableFuture.complete(null);
		latch.await();
		Assert.assertEquals(0, casSinkFunc.getAcquiredPermits());
	}
}
 
Example #16
Source File: NetworkBufferPoolTest.java    From flink with Apache License 2.0
/**
 * Tests {@link NetworkBufferPool#requestMemorySegments()}, verifying it may be aborted and
 * remains in a defined state even if the waiting is interrupted.
 */
@Test
public void testRequestMemorySegmentsInterruptable2() throws Exception {
	final int numBuffers = 10;

	NetworkBufferPool globalPool = new NetworkBufferPool(numBuffers, 128, 10);
	MemorySegment segment = globalPool.requestMemorySegment();
	assertNotNull(segment);

	final OneShotLatch isRunning = new OneShotLatch();
	CheckedThread asyncRequest = new CheckedThread() {
		@Override
		public void go() throws Exception {
			isRunning.trigger();
			globalPool.requestMemorySegments();
		}
	};
	asyncRequest.start();

	// We want the destroy call inside the blocking part of the globalPool.requestMemorySegments()
	// call above. We cannot guarantee this though but make it highly probable:
	isRunning.await();
	Thread.sleep(10);
	asyncRequest.interrupt();

	globalPool.recycle(segment);

	try {
		asyncRequest.sync();
	} catch (IOException e) {
		assertThat(e, hasProperty("cause", instanceOf(InterruptedException.class)));

		// test indirectly for NetworkBufferPool#numTotalRequiredBuffers being correct:
		// -> creating a new buffer pool should not fail
		globalPool.createBufferPool(10, 10);
	} finally {
		globalPool.destroy();
	}
}
 
Example #17
Source File: KryoSerializerConcurrencyTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testConcurrentUseOfSerializer() throws Exception {
	final KryoSerializer<String> serializer = new KryoSerializer<>(String.class, new ExecutionConfig());

	final BlockerSync sync = new BlockerSync();

	final DataOutputView regularOut = new DataOutputSerializer(32);
	final DataOutputView lockingOut = new LockingView(sync);

	// this thread serializes and gets stuck there
	final CheckedThread thread = new CheckedThread("serializer") {
		@Override
		public void go() throws Exception {
			serializer.serialize("a value", lockingOut);
		}
	};

	thread.start();
	sync.awaitBlocker();

	// this should fail with an exception
	try {
		serializer.serialize("value", regularOut);
		fail("should have failed with an exception");
	}
	catch (IllegalStateException e) {
		// expected
	}
	finally {
		// release the thread that serializes
		sync.releaseBlocker();
	}

	// this propagates exceptions from the spawned thread
	thread.sync();
}
 
Example #18
Source File: CassandraSinkBaseTest.java    From Flink-CEPplus with Apache License 2.0
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testWaitForPendingUpdatesOnClose() throws Exception {
	TestCassandraSink casSinkFunc = new TestCassandraSink();

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createOpenedTestHarness(casSinkFunc)) {

		CompletableFuture<ResultSet> completableFuture = new CompletableFuture<>();
		casSinkFunc.enqueueCompletableFuture(completableFuture);

		casSinkFunc.invoke("hello");
		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());

		final CountDownLatch latch = new CountDownLatch(1);
		Thread t = new CheckedThread("Flink-CassandraSinkBaseTest") {
			@Override
			public void go() throws Exception {
				testHarness.close();
				latch.countDown();
			}
		};
		t.start();
		while (t.getState() != Thread.State.WAITING) {
			Thread.sleep(5);
		}

		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());
		completableFuture.complete(null);
		latch.await();
		Assert.assertEquals(0, casSinkFunc.getAcquiredPermits());
	}
}
 
Example #19
Source File: CheckpointStateOutputStreamTest.java    From flink with Apache License 2.0
/**
 * This test validates that a close operation can happen even while a 'closeAndGetHandle()'
 * call is in progress.
 *
 * <p>That behavior is essential for fast cancellation (concurrent cleanup).
 */
@Test
public void testCloseDoesNotLock() throws Exception {
	final Path folder = new Path(tmp.newFolder().toURI());
	final String fileName = "this-is-ignored-anyways.file";

	final FileSystem fileSystem = spy(new TestFs((path) -> new BlockerStream()));

	final FSDataOutputStream checkpointStream =
		createTestStream(fileSystem, folder, fileName);

	final OneShotLatch sync = new OneShotLatch();

	final CheckedThread thread = new CheckedThread() {

		@Override
		public void go() throws Exception {
			sync.trigger();
			// that call should now block, because it accesses the position
			closeAndGetResult(checkpointStream);
		}
	};
	thread.start();

	sync.await();
	checkpointStream.close();

	// the thread may or may not fail, that depends on the thread race
	// it is not important for this test, important is that the thread does not freeze/lock up
	try {
		thread.sync();
	} catch (IOException ignored) {}
}
 
Example #20
Source File: NetworkBufferPoolTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests {@link NetworkBufferPool#requestMemorySegments(int)}, verifying it may be aborted in
 * case of a concurrent {@link NetworkBufferPool#destroy()} call.
 */
@Test
public void testRequestMemorySegmentsInterruptable() throws Exception {
	final int numBuffers = 10;

	NetworkBufferPool globalPool = new NetworkBufferPool(numBuffers, 128);
	MemorySegment segment = globalPool.requestMemorySegment();
	assertNotNull(segment);

	final OneShotLatch isRunning = new OneShotLatch();
	CheckedThread asyncRequest = new CheckedThread() {
		@Override
		public void go() throws Exception {
			isRunning.trigger();
			globalPool.requestMemorySegments(10);
		}
	};
	asyncRequest.start();

	// We want the destroy call inside the blocking part of the globalPool.requestMemorySegments()
	// call above. We cannot guarantee this though but make it highly probable:
	isRunning.await();
	Thread.sleep(10);
	globalPool.destroy();

	segment.free();

	expectedException.expect(IllegalStateException.class);
	expectedException.expectMessage("destroyed");
	try {
		asyncRequest.sync();
	} finally {
		globalPool.destroy();
	}
}
 
Example #21
Source File: NetworkBufferPoolTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests {@link NetworkBufferPool#requestMemorySegments(int)}, verifying it may be aborted and
 * remains in a defined state even if the waiting is interrupted.
 */
@Test
public void testRequestMemorySegmentsInterruptable2() throws Exception {
	final int numBuffers = 10;

	NetworkBufferPool globalPool = new NetworkBufferPool(numBuffers, 128);
	MemorySegment segment = globalPool.requestMemorySegment();
	assertNotNull(segment);

	final OneShotLatch isRunning = new OneShotLatch();
	CheckedThread asyncRequest = new CheckedThread() {
		@Override
		public void go() throws Exception {
			isRunning.trigger();
			globalPool.requestMemorySegments(10);
		}
	};
	asyncRequest.start();

	// We want the destroy call inside the blocking part of the globalPool.requestMemorySegments()
	// call above. We cannot guarantee this though but make it highly probable:
	isRunning.await();
	Thread.sleep(10);
	asyncRequest.interrupt();

	globalPool.recycle(segment);

	try {
		asyncRequest.sync();
	} catch (IOException e) {
		assertThat(e, hasProperty("cause", instanceOf(InterruptedException.class)));

		// test indirectly for NetworkBufferPool#numTotalRequiredBuffers being correct:
		// -> creating a new buffer pool should not fail
		globalPool.createBufferPool(10, 10);
	} finally {
		globalPool.destroy();
	}
}
 
Example #22
Source File: CassandraSinkBaseTest.java    From flink with Apache License 2.0
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testWaitForPendingUpdatesOnSnapshot() throws Exception {
	final TestCassandraSink casSinkFunc = new TestCassandraSink();

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createOpenedTestHarness(casSinkFunc)) {
		CompletableFuture<ResultSet> completableFuture = new CompletableFuture<>();
		casSinkFunc.enqueueCompletableFuture(completableFuture);

		casSinkFunc.invoke("hello");
		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());

		final CountDownLatch latch = new CountDownLatch(1);
		Thread t = new CheckedThread("Flink-CassandraSinkBaseTest") {
			@Override
			public void go() throws Exception {
				testHarness.snapshot(123L, 123L);
				latch.countDown();
			}
		};
		t.start();
		while (t.getState() != Thread.State.WAITING) {
			Thread.sleep(5);
		}

		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());
		completableFuture.complete(null);
		latch.await();
		Assert.assertEquals(0, casSinkFunc.getAcquiredPermits());
	}
}
 
Example #23
Source File: CassandraSinkBaseTest.java    From flink with Apache License 2.0
@Test(timeout = DEFAULT_TEST_TIMEOUT)
public void testWaitForPendingUpdatesOnClose() throws Exception {
	TestCassandraSink casSinkFunc = new TestCassandraSink();

	try (OneInputStreamOperatorTestHarness<String, Object> testHarness = createOpenedTestHarness(casSinkFunc)) {

		CompletableFuture<ResultSet> completableFuture = new CompletableFuture<>();
		casSinkFunc.enqueueCompletableFuture(completableFuture);

		casSinkFunc.invoke("hello");
		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());

		final CountDownLatch latch = new CountDownLatch(1);
		Thread t = new CheckedThread("Flink-CassandraSinkBaseTest") {
			@Override
			public void go() throws Exception {
				testHarness.close();
				latch.countDown();
			}
		};
		t.start();
		while (t.getState() != Thread.State.WAITING) {
			Thread.sleep(5);
		}

		Assert.assertEquals(1, casSinkFunc.getAcquiredPermits());
		completableFuture.complete(null);
		latch.await();
		Assert.assertEquals(0, casSinkFunc.getAcquiredPermits());
	}
}
 
Example #24
Source File: KryoSerializerConcurrencyTest.java    From flink with Apache License 2.0
@Test
public void testConcurrentUseOfSerializer() throws Exception {
	final KryoSerializer<String> serializer = new KryoSerializer<>(String.class, new ExecutionConfig());

	final BlockerSync sync = new BlockerSync();

	final DataOutputView regularOut = new DataOutputSerializer(32);
	final DataOutputView lockingOut = new LockingView(sync);

	// this thread serializes and gets stuck there
	final CheckedThread thread = new CheckedThread("serializer") {
		@Override
		public void go() throws Exception {
			serializer.serialize("a value", lockingOut);
		}
	};

	thread.start();
	sync.awaitBlocker();

	// this should fail with an exception
	try {
		serializer.serialize("value", regularOut);
		fail("should have failed with an exception");
	}
	catch (IllegalStateException e) {
		// expected
	}
	finally {
		// release the thread that serializes
		sync.releaseBlocker();
	}

	// this propagates exceptions from the spawned thread
	thread.sync();
}
 
Example #25
Source File: JobManagerMetricsITCase.java    From flink with Apache License 2.0
@Before
public void setUp() throws Exception {
	jobExecuteThread = new CheckedThread() {

		@Override
		public void go() throws Exception {
			StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
			env.addSource(new SourceFunction<String>() {

				@Override
				public void run(SourceContext<String> ctx) throws Exception {
					sync.block();
				}

				@Override
				public void cancel() {
					sync.releaseBlocker();
				}

			}).addSink(new PrintSinkFunction());

			env.execute();
		}

	};

	jobExecuteThread.start();
	sync.awaitBlocker();
}
 
Example #26
Source File: AvroSerializerConcurrencyTest.java    From flink with Apache License 2.0
@Test
public void testConcurrentUseOfSerializer() throws Exception {
	final AvroSerializer<String> serializer = new AvroSerializer<>(String.class);

	final BlockerSync sync = new BlockerSync();

	final DataOutputView regularOut = new DataOutputSerializer(32);
	final DataOutputView lockingOut = new LockingView(sync);

	// this thread serializes and gets stuck there
	final CheckedThread thread = new CheckedThread("serializer") {
		@Override
		public void go() throws Exception {
			serializer.serialize("a value", lockingOut);
		}
	};

	thread.start();
	sync.awaitBlocker();

	// this should fail with an exception
	try {
		serializer.serialize("value", regularOut);
		fail("should have failed with an exception");
	}
	catch (IllegalStateException e) {
		// expected
	}
	finally {
		// release the thread that serializes
		sync.releaseBlocker();
	}

	// this propagates exceptions from the spawned thread
	thread.sync();
}
 