Java Code Examples for org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness#getCheckpointLock()

The following examples show how to use org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness#getCheckpointLock(). The examples are taken from open-source projects; the source file and project are noted above each one.
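
All of the examples share one pattern: the source function runs on a background thread, the test blocks until the source has actually started, and only then is snapshot() called while holding the lock returned by getCheckpointLock(), so that the snapshot cannot interleave with elements being emitted. The following is a minimal, self-contained sketch of that pattern; SimpleSource and NoOpContext are hypothetical stand-ins for the project-specific consumers and source contexts used in the real tests below.

import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.operators.StreamSource;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;

public class CheckpointLockPatternSketch {

	// Hypothetical source: emits until cancelled, signals once started.
	static class SimpleSource implements SourceFunction<String> {
		final OneShotLatch started = new OneShotLatch();
		private volatile boolean running = true;

		@Override
		public void run(SourceContext<String> ctx) {
			started.trigger();
			while (running) {
				// legacy sources emit under the checkpoint lock, so a
				// concurrent snapshot cannot interleave with collect()
				synchronized (ctx.getCheckpointLock()) {
					ctx.collect("element");
				}
			}
		}

		@Override
		public void cancel() {
			running = false;
		}
	}

	// Minimal SourceContext so the sketch is self-contained; the tests
	// below use project-specific Dummy/TestSourceContext classes instead.
	static class NoOpContext implements SourceFunction.SourceContext<String> {
		private final Object lock;
		NoOpContext(Object lock) { this.lock = lock; }
		@Override public void collect(String element) {}
		@Override public void collectWithTimestamp(String element, long ts) {}
		@Override public void emitWatermark(Watermark mark) {}
		@Override public void markAsTemporarilyIdle() {}
		@Override public Object getCheckpointLock() { return lock; }
		@Override public void close() {}
	}

	public static void main(String[] args) throws Exception {
		SimpleSource source = new SimpleSource();
		StreamSource<String, SimpleSource> operator = new StreamSource<>(source);

		// maxParallelism = 1, parallelism = 1, subtaskIndex = 0
		AbstractStreamOperatorTestHarness<String> harness =
				new AbstractStreamOperatorTestHarness<>(operator, 1, 1, 0);
		harness.setup();
		harness.open();

		// run the source asynchronously, handing it the harness's lock
		Thread runner = new Thread(() -> {
			try {
				source.run(new NoOpContext(harness.getCheckpointLock()));
			} catch (Throwable t) {
				t.printStackTrace();
			}
		});
		runner.start();

		// block until the source thread is actually running
		source.started.await();

		// take the snapshot while holding the same lock the source uses;
		// it could now be persisted with OperatorSnapshotUtil.writeStateHandle
		final OperatorSubtaskState snapshot;
		synchronized (harness.getCheckpointLock()) {
			snapshot = harness.snapshot(0L, 0L);
		}

		source.cancel();
		runner.join();
		harness.close();
	}
}

The same synchronized-snapshot block appears verbatim in every example below.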
Example 1
Source File: FlinkKafkaConsumerBaseMigrationTest.java    From Flink-CEPplus with Apache License 2.0
private void writeSnapshot(String path, HashMap<KafkaTopicPartition, Long> state) throws Exception {

		final OneShotLatch latch = new OneShotLatch();
		final AbstractFetcher<String, ?> fetcher = mock(AbstractFetcher.class);

		doAnswer(new Answer<Void>() {
			@Override
			public Void answer(InvocationOnMock invocation) throws Throwable {
				latch.trigger();
				return null;
			}
		}).when(fetcher).runFetchLoop();

		when(fetcher.snapshotCurrentState()).thenReturn(state);

		final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

		final DummyFlinkKafkaConsumer<String> consumerFunction =
			new DummyFlinkKafkaConsumer<>(fetcher, TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);

		StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
				new StreamSource<>(consumerFunction);

		final AbstractStreamOperatorTestHarness<String> testHarness =
				new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

		testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

		testHarness.setup();
		testHarness.open();

		final Throwable[] error = new Throwable[1];

		// run the source asynchronously
		Thread runner = new Thread() {
			@Override
			public void run() {
				try {
					consumerFunction.run(new DummySourceContext() {
						@Override
						public void collect(String element) {

						}
					});
				}
				catch (Throwable t) {
					t.printStackTrace();
					error[0] = t;
				}
			}
		};
		runner.start();

		if (!latch.isTriggered()) {
			latch.await();
		}

		// take the snapshot while holding the checkpoint lock, so it cannot
		// race with the source thread
		final OperatorSubtaskState snapshot;
		synchronized (testHarness.getCheckpointLock()) {
			snapshot = testHarness.snapshot(0L, 0L);
		}

		OperatorSnapshotUtil.writeStateHandle(snapshot, path);

		consumerOperator.close();
		runner.join();
	}
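
The file written here is read back by the matching restore test. Below is a minimal sketch of that read side; it assumes OperatorSnapshotUtil.readStateHandle(path) as the counterpart of the writeStateHandle(snapshot, path) call above, and a hypothetical restoredConsumerFunction configured like consumerFunction:

StreamSource<String, DummyFlinkKafkaConsumer<String>> restoredOperator =
		new StreamSource<>(restoredConsumerFunction);

final AbstractStreamOperatorTestHarness<String> restoreHarness =
		new AbstractStreamOperatorTestHarness<>(restoredOperator, 1, 1, 0);

restoreHarness.setup();
// hand the operator its saved state before open(), so open() already
// sees the restored partition offsets
restoreHarness.initializeState(OperatorSnapshotUtil.readStateHandle(path));
restoreHarness.open();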
 
Example 2
Source File: FlinkKinesisConsumerMigrationTest.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
private void writeSnapshot(String path, HashMap<StreamShardMetadata, SequenceNumber> state) throws Exception {
	final TestFetcher<String> fetcher = new TestFetcher<>(
		Collections.singletonList(TEST_STREAM_NAME),
		new TestSourceContext<>(),
		new TestRuntimeContext(true, 1, 0),
		new Properties(),
		new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()),
		state,
		null);

	final DummyFlinkKinesisConsumer<String> consumer = new DummyFlinkKinesisConsumer<>(
		fetcher, new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()));

	StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator = new StreamSource<>(consumer);

	final AbstractStreamOperatorTestHarness<String> testHarness =
			new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();
	testHarness.open();

	final AtomicReference<Throwable> error = new AtomicReference<>();

	// run the source asynchronously
	Thread runner = new Thread() {
		@Override
		public void run() {
			try {
				consumer.run(new TestSourceContext<>());
			} catch (Throwable t) {
				t.printStackTrace();
				error.set(t);
			}
		}
	};
	runner.start();

	fetcher.waitUntilRun();

	final OperatorSubtaskState snapshot;
	synchronized (testHarness.getCheckpointLock()) {
		snapshot = testHarness.snapshot(0L, 0L);
	}

	OperatorSnapshotUtil.writeStateHandle(snapshot, path);

	consumerOperator.close();
	runner.join();
}
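
Here fetcher.waitUntilRun() plays the same role as the OneShotLatch in Example 1: it blocks until the fetcher's run loop has been entered, so the snapshot taken under the checkpoint lock captures a started source rather than an unopened one.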
 
Example 3
Source File: ContinuousFileProcessingMigrationTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Manually run this to write binary snapshot data. Remove @Ignore to run.
 */
@Ignore
@Test
public void writeMonitoringSourceSnapshot() throws Exception {

	File testFolder = tempFolder.newFolder();

	long fileModTime = Long.MIN_VALUE;
	for (int i = 0; i < 1; i++) {
		Tuple2<File, String> file = createFileAndFillWithData(testFolder, "file", i, "This is test line.");
		fileModTime = file.f0.lastModified();
	}

	TextInputFormat format = new TextInputFormat(new Path(testFolder.getAbsolutePath()));

	final ContinuousFileMonitoringFunction<String> monitoringFunction =
		new ContinuousFileMonitoringFunction<>(format, FileProcessingMode.PROCESS_CONTINUOUSLY, 1, INTERVAL);

	StreamSource<TimestampedFileInputSplit, ContinuousFileMonitoringFunction<String>> src =
		new StreamSource<>(monitoringFunction);

	final AbstractStreamOperatorTestHarness<TimestampedFileInputSplit> testHarness =
			new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0);

	testHarness.open();

	final Throwable[] error = new Throwable[1];

	final OneShotLatch latch = new OneShotLatch();

	// run the source asynchronously
	Thread runner = new Thread() {
		@Override
		public void run() {
			try {
				monitoringFunction.run(new DummySourceContext() {
					@Override
					public void collect(TimestampedFileInputSplit element) {
						latch.trigger();
					}

					@Override
					public void markAsTemporarilyIdle() {

					}
				});
			}
			catch (Throwable t) {
				t.printStackTrace();
				error[0] = t;
			}
		}
	};
	runner.start();

	if (!latch.isTriggered()) {
		latch.await();
	}

	final OperatorSubtaskState snapshot;
	synchronized (testHarness.getCheckpointLock()) {
		snapshot = testHarness.snapshot(0L, 0L);
	}

	OperatorSnapshotUtil.writeStateHandle(
			snapshot,
			"src/test/resources/monitoring-function-migration-test-" + fileModTime + "-flink" + flinkGenerateSavepointVersion + "-snapshot");

	monitoringFunction.cancel();
	runner.join();

	testHarness.close();
}
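
The snapshot path embeds both fileModTime and flinkGenerateSavepointVersion, so the matching restore test can locate the binary for the exact version being migrated from. Note also that the monitoring function is cancelled to stop its PROCESS_CONTINUOUSLY loop before the harness is closed.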
 
Example 4
Source File: FlinkKafkaConsumerBaseMigrationTest.java    From flink with Apache License 2.0
private void writeSnapshot(String path, HashMap<KafkaTopicPartition, Long> state) throws Exception {

		final OneShotLatch latch = new OneShotLatch();
		final AbstractFetcher<String, ?> fetcher = mock(AbstractFetcher.class);

		doAnswer(new Answer<Void>() {
			@Override
			public Void answer(InvocationOnMock invocation) throws Throwable {
				latch.trigger();
				return null;
			}
		}).when(fetcher).runFetchLoop();

		when(fetcher.snapshotCurrentState()).thenReturn(state);

		final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

		final DummyFlinkKafkaConsumer<String> consumerFunction =
			new DummyFlinkKafkaConsumer<>(fetcher, TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);

		StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
				new StreamSource<>(consumerFunction);

		final AbstractStreamOperatorTestHarness<String> testHarness =
				new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

		testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

		testHarness.setup();
		testHarness.open();

		final Throwable[] error = new Throwable[1];

		// run the source asynchronously
		Thread runner = new Thread() {
			@Override
			public void run() {
				try {
					consumerFunction.run(new DummySourceContext() {
						@Override
						public void collect(String element) {

						}
					});
				}
				catch (Throwable t) {
					t.printStackTrace();
					error[0] = t;
				}
			}
		};
		runner.start();

		if (!latch.isTriggered()) {
			latch.await();
		}

		final OperatorSubtaskState snapshot;
		synchronized (testHarness.getCheckpointLock()) {
			snapshot = testHarness.snapshot(0L, 0L);
		}

		OperatorSnapshotUtil.writeStateHandle(snapshot, path);

		consumerOperator.close();
		runner.join();
	}
 
Example 5
Source File: FlinkKinesisConsumerMigrationTest.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
private void writeSnapshot(String path, HashMap<StreamShardMetadata, SequenceNumber> state) throws Exception {
	final List<StreamShardHandle> initialDiscoveryShards = new ArrayList<>(state.size());
	for (StreamShardMetadata shardMetadata : state.keySet()) {
		Shard shard = new Shard();
		shard.setShardId(shardMetadata.getShardId());

		SequenceNumberRange sequenceNumberRange = new SequenceNumberRange();
		sequenceNumberRange.withStartingSequenceNumber("1");
		shard.setSequenceNumberRange(sequenceNumberRange);

		initialDiscoveryShards.add(new StreamShardHandle(shardMetadata.getStreamName(), shard));
	}

	final TestFetcher<String> fetcher = new TestFetcher<>(
		Collections.singletonList(TEST_STREAM_NAME),
		new TestSourceContext<>(),
		new TestRuntimeContext(true, 1, 0),
		TestUtils.getStandardProperties(),
		new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()),
		state,
		initialDiscoveryShards);

	final DummyFlinkKinesisConsumer<String> consumer = new DummyFlinkKinesisConsumer<>(
		fetcher, new KinesisDeserializationSchemaWrapper<>(new SimpleStringSchema()));

	StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator = new StreamSource<>(consumer);

	final AbstractStreamOperatorTestHarness<String> testHarness =
			new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();
	testHarness.open();

	final AtomicReference<Throwable> error = new AtomicReference<>();

	// run the source asynchronously
	Thread runner = new Thread() {
		@Override
		public void run() {
			try {
				consumer.run(new TestSourceContext<>());
			} catch (Throwable t) {
				t.printStackTrace();
				error.set(t);
			}
		}
	};
	runner.start();

	fetcher.waitUntilRun();

	final OperatorSubtaskState snapshot;
	synchronized (testHarness.getCheckpointLock()) {
		snapshot = testHarness.snapshot(0L, 0L);
	}

	OperatorSnapshotUtil.writeStateHandle(snapshot, path);

	consumerOperator.close();
	runner.join();
}
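
Compared with Example 2, this variant pre-builds an initialDiscoveryShards list from the shard metadata in the state map and passes TestUtils.getStandardProperties() rather than an empty Properties object, so the TestFetcher is handed a concrete shard discovery result instead of null.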
 
Example 6
Source File: ContinuousFileProcessingMigrationTest.java    From flink with Apache License 2.0
/**
 * Manually run this to write binary snapshot data. Remove @Ignore to run.
 */
@Ignore
@Test
public void writeMonitoringSourceSnapshot() throws Exception {

	File testFolder = tempFolder.newFolder();

	long fileModTime = Long.MIN_VALUE;
	for (int i = 0; i < 1; i++) {
		Tuple2<File, String> file = createFileAndFillWithData(testFolder, "file", i, "This is test line.");
		fileModTime = file.f0.lastModified();
	}

	TextInputFormat format = new TextInputFormat(new Path(testFolder.getAbsolutePath()));

	final ContinuousFileMonitoringFunction<String> monitoringFunction =
		new ContinuousFileMonitoringFunction<>(format, FileProcessingMode.PROCESS_CONTINUOUSLY, 1, INTERVAL);

	StreamSource<TimestampedFileInputSplit, ContinuousFileMonitoringFunction<String>> src =
		new StreamSource<>(monitoringFunction);

	final AbstractStreamOperatorTestHarness<TimestampedFileInputSplit> testHarness =
			new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0);

	testHarness.open();

	final Throwable[] error = new Throwable[1];

	final OneShotLatch latch = new OneShotLatch();

	// run the source asynchronously
	Thread runner = new Thread() {
		@Override
		public void run() {
			try {
				monitoringFunction.run(new DummySourceContext() {
					@Override
					public void collect(TimestampedFileInputSplit element) {
						latch.trigger();
					}

					@Override
					public void markAsTemporarilyIdle() {

					}
				});
			}
			catch (Throwable t) {
				t.printStackTrace();
				error[0] = t;
			}
		}
	};
	runner.start();

	if (!latch.isTriggered()) {
		latch.await();
	}

	final OperatorSubtaskState snapshot;
	synchronized (testHarness.getCheckpointLock()) {
		snapshot = testHarness.snapshot(0L, 0L);
	}

	OperatorSnapshotUtil.writeStateHandle(
			snapshot,
			"src/test/resources/monitoring-function-migration-test-" + fileModTime + "-flink" + flinkGenerateSavepointVersion + "-snapshot");

	monitoringFunction.cancel();
	runner.join();

	testHarness.close();
}
 