org.apache.flink.runtime.state.memory.MemoryStateBackend Java Examples

The following examples show how to use org.apache.flink.runtime.state.memory.MemoryStateBackend. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
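Before the test code, here is a minimal sketch of how the backend is typically wired into a streaming job; the class name, state-size limit, and checkpoint interval are illustrative assumptions rather than values taken from any of the projects below. (Recent Flink releases deprecate MemoryStateBackend in favor of HashMapStateBackend plus JobManagerCheckpointStorage, but the older API shown here matches the examples on this page.)

import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class MemoryStateBackendSketch {

	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// Working state lives on the task heap; checkpoints are shipped to the JobManager heap.
		// Arguments: maximum state size per checkpoint (bytes) and whether snapshots run asynchronously.
		env.setStateBackend((StateBackend) new MemoryStateBackend(5 * 1024 * 1024, true));

		// Checkpoint every 10 seconds so the backend is actually exercised.
		env.enableCheckpointing(10_000L);

		// ... add sources, transformations, and sinks here, then call env.execute(...).
	}
}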
Example #1
Source File: OperatorStateBackendTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testSnapshotEmpty() throws Exception {
	final AbstractStateBackend abstractStateBackend = new MemoryStateBackend(4096);
	CloseableRegistry cancelStreamRegistry = new CloseableRegistry();

	final OperatorStateBackend operatorStateBackend =
			abstractStateBackend.createOperatorStateBackend(createMockEnvironment(), "testOperator", emptyStateHandles, cancelStreamRegistry);

	CheckpointStreamFactory streamFactory = new MemCheckpointStreamFactory(4096);

	RunnableFuture<SnapshotResult<OperatorStateHandle>> snapshot =
			operatorStateBackend.snapshot(0L, 0L, streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation());

	SnapshotResult<OperatorStateHandle> snapshotResult = FutureUtils.runIfNotDoneAndGet(snapshot);
	OperatorStateHandle stateHandle = snapshotResult.getJobManagerOwnedSnapshot();
	assertNull(stateHandle);
}
 
Example #2
Source File: StateBackendLoadingTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Validates taking the application-defined memory state backend and adding additional
 * parameters from the cluster configuration, but giving precedence to application-defined
 * parameters over configuration-defined parameters.
 */
@Test
public void testConfigureMemoryStateBackendMixed() throws Exception {
	final String appCheckpointDir = new Path(tmp.newFolder().toURI()).toString();
	final String checkpointDir = new Path(tmp.newFolder().toURI()).toString();
	final String savepointDir = new Path(tmp.newFolder().toURI()).toString();

	final Path expectedCheckpointPath = new Path(appCheckpointDir);
	final Path expectedSavepointPath = new Path(savepointDir);

	final MemoryStateBackend backend = new MemoryStateBackend(appCheckpointDir, null);

	final Configuration config = new Configuration();
	config.setString(backendKey, "filesystem"); // check that this is not accidentally picked up
	config.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir); // this parameter should not be picked up
	config.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir);

	StateBackend loadedBackend = StateBackendLoader.fromApplicationOrConfigOrDefault(backend, config, cl, null);
	assertTrue(loadedBackend instanceof MemoryStateBackend);

	final MemoryStateBackend memBackend = (MemoryStateBackend) loadedBackend;
	assertEquals(expectedCheckpointPath, memBackend.getCheckpointPath());
	assertEquals(expectedSavepointPath, memBackend.getSavepointPath());
}
 
Example #3
Source File: AbstractOperatorRestoreTestBase.java    From flink with Apache License 2.0
private JobGraph createJobGraph(ExecutionMode mode) {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.enableCheckpointing(500, CheckpointingMode.EXACTLY_ONCE);
	env.setRestartStrategy(RestartStrategies.noRestart());
	env.setStateBackend((StateBackend) new MemoryStateBackend());

	switch (mode) {
		case MIGRATE:
			createMigrationJob(env);
			break;
		case RESTORE:
			createRestoredJob(env);
			break;
	}

	return StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
}
 
Example #4
Source File: SavepointTest.java    From flink with Apache License 2.0
@Test(expected = IllegalArgumentException.class)
public void testNewSavepointEnforceUniqueUIDs() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(10);

	DataSource<Integer> input = env.fromElements(0);

	BootstrapTransformation<Integer> transformation = OperatorTransformation
		.bootstrapWith(input)
		.transform(new ExampleStateBootstrapFunction());

	SavepointMetadata metadata = new SavepointMetadata(1, Collections.emptyList(), Collections.emptyList());

	new NewSavepoint(metadata, new MemoryStateBackend())
		.withOperator(UID, transformation)
		.withOperator(UID, transformation);
}
 
Example #5
Source File: SavepointTest.java    From flink with Apache License 2.0
@Test(expected = IllegalArgumentException.class)
public void testExistingSavepointEnforceUniqueUIDs() throws IOException {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(10);

	DataSource<Integer> input = env.fromElements(0);

	BootstrapTransformation<Integer> transformation = OperatorTransformation
		.bootstrapWith(input)
		.transform(new ExampleStateBootstrapFunction());

	Collection<OperatorState> operatorStates = Collections.singletonList(new OperatorState(
		OperatorIDGenerator.fromUid(UID), 1, 4));

	SavepointMetadata metadata = new SavepointMetadata(4, Collections.emptyList(), operatorStates);

	new ExistingSavepoint(env, metadata, new MemoryStateBackend())
		.withOperator(UID, transformation)
		.withOperator(UID, transformation);
}
 
Example #6
Source File: OperatorStateBackendTest.java    From flink with Apache License 2.0
@Test
public void testSnapshotEmpty() throws Exception {
	final AbstractStateBackend abstractStateBackend = new MemoryStateBackend(4096);
	CloseableRegistry cancelStreamRegistry = new CloseableRegistry();

	final OperatorStateBackend operatorStateBackend =
			abstractStateBackend.createOperatorStateBackend(createMockEnvironment(), "testOperator", emptyStateHandles, cancelStreamRegistry);

	CheckpointStreamFactory streamFactory = new MemCheckpointStreamFactory(4096);

	RunnableFuture<SnapshotResult<OperatorStateHandle>> snapshot =
			operatorStateBackend.snapshot(0L, 0L, streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation());

	SnapshotResult<OperatorStateHandle> snapshotResult = FutureUtils.runIfNotDoneAndGet(snapshot);
	OperatorStateHandle stateHandle = snapshotResult.getJobManagerOwnedSnapshot();
	assertNull(stateHandle);
}
 
Example #7
Source File: JobCheckpointingSettingsTest.java    From flink with Apache License 2.0
/**
 * Tests that the settings are actually serializable.
 */
@Test
public void testIsJavaSerializable() throws Exception {
	JobCheckpointingSettings settings = new JobCheckpointingSettings(
		Arrays.asList(new JobVertexID(), new JobVertexID()),
		Arrays.asList(new JobVertexID(), new JobVertexID()),
		Arrays.asList(new JobVertexID(), new JobVertexID()),
		new CheckpointCoordinatorConfiguration(
			1231231,
			1231,
			112,
			12,
			CheckpointRetentionPolicy.RETAIN_ON_FAILURE,
			false,
			false,
			0),
		new SerializedValue<>(new MemoryStateBackend()));

	JobCheckpointingSettings copy = CommonTestUtils.createCopySerializable(settings);
	assertEquals(settings.getVerticesToAcknowledge(), copy.getVerticesToAcknowledge());
	assertEquals(settings.getVerticesToConfirm(), copy.getVerticesToConfirm());
	assertEquals(settings.getVerticesToTrigger(), copy.getVerticesToTrigger());
	assertEquals(settings.getCheckpointCoordinatorConfiguration(), copy.getCheckpointCoordinatorConfiguration());
	assertNotNull(copy.getDefaultStateBackend());
	assertTrue(copy.getDefaultStateBackend().deserializeValue(this.getClass().getClassLoader()).getClass() == MemoryStateBackend.class);
}
 
Example #8
Source File: SourceOperatorEventTimeTest.java    From flink with Apache License 2.0
private static <T> SourceOperator<T, MockSourceSplit> createTestOperator(
		SourceReader<T, MockSourceSplit> reader,
		WatermarkStrategy<T> watermarkStrategy,
		ProcessingTimeService timeService) throws Exception {

	final OperatorStateStore operatorStateStore =
			new MemoryStateBackend().createOperatorStateBackend(
					new MockEnvironmentBuilder().build(),
					"test-operator",
					Collections.emptyList(),
					new CloseableRegistry());

	final StateInitializationContext stateContext = new StateInitializationContextImpl(
		false, operatorStateStore, null, null, null);

	final SourceOperator<T, MockSourceSplit> sourceOperator =
			new TestingSourceOperator<>(reader, watermarkStrategy, timeService);
	sourceOperator.initializeState(stateContext);
	sourceOperator.open();

	return sourceOperator;
}
 
Example #9
Source File: CheckpointCoordinatorMasterHooksTest.java    From flink with Apache License 2.0
private static CheckpointCoordinator instantiateCheckpointCoordinator(JobID jid, ExecutionVertex... ackVertices) {
	CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration(
		10000000L,
		600000L,
		0L,
		1,
		CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION,
		true,
		false,
		0);
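	// the MemoryStateBackend passed below is used by the coordinator to resolve checkpoint storage for this test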
	return new CheckpointCoordinator(
			jid,
			chkConfig,
			new ExecutionVertex[0],
			ackVertices,
			new ExecutionVertex[0],
			new StandaloneCheckpointIDCounter(),
			new StandaloneCompletedCheckpointStore(10),
			new MemoryStateBackend(),
			Executors.directExecutor(),
			SharedStateRegistry.DEFAULT_FACTORY,
			new CheckpointFailureManager(
				0,
				NoOpFailJobCall.INSTANCE));
}
 
Example #10
Source File: StreamSourceOperatorLatencyMetricsTest.java    From Flink-CEPplus with Apache License 2.0
private static <T> void setupSourceOperator(
		StreamSource<T, ?> operator,
		ExecutionConfig executionConfig,
		Environment env,
		ProcessingTimeService timeProvider) {

	StreamConfig cfg = new StreamConfig(new Configuration());
	cfg.setStateBackend(new MemoryStateBackend());

	cfg.setTimeCharacteristic(TimeCharacteristic.EventTime);
	cfg.setOperatorID(new OperatorID());

	try {
		MockStreamTask mockTask = new MockStreamTaskBuilder(env)
			.setConfig(cfg)
			.setExecutionConfig(executionConfig)
			.setProcessingTimeService(timeProvider)
			.build();

		operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
	} catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Example #11
Source File: CEPRescalingTest.java    From flink with Apache License 2.0
private KeyedOneInputStreamOperatorTestHarness<Integer, Event, Map<String, List<Event>>> getTestHarness(
		int maxParallelism,
		int taskParallelism,
		int subtaskIdx) throws Exception {

	KeySelector<Event, Integer> keySelector = new TestKeySelector();
	KeyedOneInputStreamOperatorTestHarness<Integer, Event, Map<String, List<Event>>> harness =
			new KeyedOneInputStreamOperatorTestHarness<>(
					getKeyedCepOpearator(
							false,
							new NFAFactory()),
					keySelector,
					BasicTypeInfo.INT_TYPE_INFO,
					maxParallelism,
					taskParallelism,
					subtaskIdx);
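	// RocksDB holds the keyed state; the wrapped MemoryStateBackend supplies checkpoint stream storage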
	harness.setStateBackend(new RocksDBStateBackend(new MemoryStateBackend()));
	return harness;
}
 
Example #12
Source File: BootstrapTransformationTest.java    From flink with Apache License 2.0
@Test
public void testDefaultParallelismRespectedWhenLessThanMaxParallelism() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(4);

	DataSource<Integer> input = env.fromElements(0);

	BootstrapTransformation<Integer> transformation = OperatorTransformation
		.bootstrapWith(input)
		.transform(new ExampleStateBootstrapFunction());

	int maxParallelism = transformation.getMaxParallelism(10);
	DataSet<TaggedOperatorSubtaskState> result = transformation.writeOperatorSubtaskStates(
		OperatorIDGenerator.fromUid("uid"),
		new MemoryStateBackend(),
		new Path(),
		maxParallelism
	);

	Assert.assertEquals(
		"The parallelism of a data set should not change when less than the max parallelism of the savepoint",
		ExecutionConfig.PARALLELISM_DEFAULT,
		getParallelism(result));
}
 
Example #13
Source File: AbstractOperatorRestoreTestBase.java    From flink with Apache License 2.0
private JobGraph createJobGraph(ExecutionMode mode) {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.enableCheckpointing(500, CheckpointingMode.EXACTLY_ONCE);
	env.setRestartStrategy(RestartStrategies.noRestart());
	env.setStateBackend((StateBackend) new MemoryStateBackend());

	switch (mode) {
		case MIGRATE:
			createMigrationJob(env);
			break;
		case RESTORE:
			createRestoredJob(env);
			break;
	}

	return StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
}
 
Example #14
Source File: KeyedStateInputFormatTest.java    From flink with Apache License 2.0
@Test(expected = IOException.class)
public void testInvalidProcessReaderFunctionFails() throws Exception {
	OperatorID operatorID = OperatorIDGenerator.fromUid("uid");

	OperatorSubtaskState state = createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));
	OperatorState operatorState = new OperatorState(operatorID, 1, 128);
	operatorState.putState(0, state);

	KeyedStateInputFormat<?, ?> format = new KeyedStateInputFormat<>(operatorState, new MemoryStateBackend(), Types.INT, new ReaderFunction());
	KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

	KeyedStateReaderFunction<Integer, Integer> userFunction = new InvalidReaderFunction();

	readInputSplit(split, userFunction);

	Assert.fail("KeyedStateReaderFunction did not fail on invalid RuntimeContext use");
}
 
Example #15
Source File: KeyedStateInputFormatTest.java    From flink with Apache License 2.0
@Test
public void testReadState() throws Exception {
	OperatorID operatorID = OperatorIDGenerator.fromUid("uid");

	OperatorSubtaskState state = createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));
	OperatorState operatorState = new OperatorState(operatorID, 1, 128);
	operatorState.putState(0, state);

	KeyedStateInputFormat<?, ?> format = new KeyedStateInputFormat<>(operatorState, new MemoryStateBackend(), Types.INT, new ReaderFunction());
	KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

	KeyedStateReaderFunction<Integer, Integer> userFunction = new ReaderFunction();

	List<Integer> data = readInputSplit(split, userFunction);

	Assert.assertEquals("Incorrect data read from input split", Arrays.asList(1, 2, 3), data);
}
 
Example #16
Source File: BootstrapTransformationTest.java    From flink with Apache License 2.0
@Test
public void testOperatorSpecificMaxParallelismRespected() {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(4);

	DataSource<Integer> input = env.fromElements(0);

	BootstrapTransformation<Integer> transformation = OperatorTransformation
		.bootstrapWith(input)
		.setMaxParallelism(1)
		.transform(new ExampleStateBootstrapFunction());

	int maxParallelism = transformation.getMaxParallelism(4);
	DataSet<TaggedOperatorSubtaskState> result = transformation.writeOperatorSubtaskStates(
		OperatorIDGenerator.fromUid("uid"),
		new MemoryStateBackend(),
		new Path(),
		maxParallelism
	);

	Assert.assertEquals("The parallelism of a data set should be constrained my the savepoint max parallelism", 1, getParallelism(result));
}
 
Example #17
Source File: CEPRescalingTest.java    From flink with Apache License 2.0
private KeyedOneInputStreamOperatorTestHarness<Integer, Event, Map<String, List<Event>>> getTestHarness(
		int maxParallelism,
		int taskParallelism,
		int subtaskIdx) throws Exception {

	KeySelector<Event, Integer> keySelector = new TestKeySelector();
	KeyedOneInputStreamOperatorTestHarness<Integer, Event, Map<String, List<Event>>> harness =
			new KeyedOneInputStreamOperatorTestHarness<>(
					getKeyedCepOpearator(
							false,
							new NFAFactory()),
					keySelector,
					BasicTypeInfo.INT_TYPE_INFO,
					maxParallelism,
					taskParallelism,
					subtaskIdx);
	harness.setStateBackend(new RocksDBStateBackend(new MemoryStateBackend()));
	return harness;
}
 
Example #18
Source File: SavepointTest.java    From flink with Apache License 2.0
@Test(expected = IllegalArgumentException.class)
public void testExistingSavepointEnforceUniqueUIDs() throws IOException {
	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(10);

	DataSource<Integer> input = env.fromElements(0);

	BootstrapTransformation<Integer> transformation = OperatorTransformation
		.bootstrapWith(input)
		.transform(new ExampleStateBootstrapFunction());

	Collection<OperatorState> operatorStates = Collections.singletonList(new OperatorState(
		OperatorIDGenerator.fromUid(UID), 1, 4));

	SavepointMetadata metadata = new SavepointMetadata(4, Collections.emptyList(), operatorStates);

	new ExistingSavepoint(env, metadata, new MemoryStateBackend())
		.withOperator(UID, transformation)
		.withOperator(UID, transformation);
}
 
Example #19
Source File: StateBackendLoadingTest.java    From flink with Apache License 2.0
/**
 * Validates taking the application-defined memory state backend and adding additional
 * parameters from the cluster configuration.
 */
@Test
public void testConfigureMemoryStateBackend() throws Exception {
	final String checkpointDir = new Path(tmp.newFolder().toURI()).toString();
	final String savepointDir = new Path(tmp.newFolder().toURI()).toString();
	final Path expectedCheckpointPath = new Path(checkpointDir);
	final Path expectedSavepointPath = new Path(savepointDir);

	final int maxSize = 100;
	final boolean async = !CheckpointingOptions.ASYNC_SNAPSHOTS.defaultValue();

	final MemoryStateBackend backend = new MemoryStateBackend(maxSize, async);

	final Configuration config = new Configuration();
	config.setString(backendKey, "filesystem"); // check that this is not accidentally picked up
	config.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir);
	config.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir);
	config.setBoolean(CheckpointingOptions.ASYNC_SNAPSHOTS, !async);

	StateBackend loadedBackend = StateBackendLoader.fromApplicationOrConfigOrDefault(backend, config, cl, null);
	assertTrue(loadedBackend instanceof MemoryStateBackend);

	final MemoryStateBackend memBackend = (MemoryStateBackend) loadedBackend;
	assertEquals(expectedCheckpointPath, memBackend.getCheckpointPath());
	assertEquals(expectedSavepointPath, memBackend.getSavepointPath());
	assertEquals(maxSize, memBackend.getMaxStateSize());
	assertEquals(async, memBackend.isUsingAsynchronousSnapshots());
}
 
Example #20
Source File: HeapSyncSnapshotTtlStateTest.java    From Flink-CEPplus with Apache License 2.0
@Override
protected StateBackendTestContext createStateBackendTestContext(TtlTimeProvider timeProvider) {
	return new StateBackendTestContext(timeProvider) {
		@Override
		protected StateBackend createStateBackend() {
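			// the boolean flag toggles asynchronous snapshots; false forces synchronous heap snapshots for this test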
			return new MemoryStateBackend(false);
		}
	};
}
 
Example #21
Source File: SavepointReaderITTestBase.java    From flink with Apache License 2.0
private void verifyListState(String path, ExecutionEnvironment batchEnv) throws Exception {
	ExistingSavepoint savepoint = Savepoint.load(batchEnv, path, new MemoryStateBackend());
	List<Integer> listResult = readListState(savepoint).collect();
	listResult.sort(Comparator.naturalOrder());

	Assert.assertEquals("Unexpected elements read from list state", SavepointSource.getElements(), listResult);
}
 
Example #22
Source File: HeapAsyncSnapshotTtlStateTest.java    From flink with Apache License 2.0
@Override
protected StateBackendTestContext createStateBackendTestContext(TtlTimeProvider timeProvider) {
	return new StateBackendTestContext(timeProvider) {
		@Override
		protected StateBackend createStateBackend() {
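			// true enables asynchronous snapshots, the variant exercised by this TTL test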
			return new MemoryStateBackend(true);
		}
	};
}
 
Example #23
Source File: StateBackendLoadingTest.java    From flink with Apache License 2.0
/**
 * Validates loading a memory state backend with additional parameters from the cluster configuration.
 */
@Test
public void testLoadMemoryStateWithParameters() throws Exception {
	final String checkpointDir = new Path(tmp.newFolder().toURI()).toString();
	final String savepointDir = new Path(tmp.newFolder().toURI()).toString();
	final Path expectedCheckpointPath = new Path(checkpointDir);
	final Path expectedSavepointPath = new Path(savepointDir);

	final boolean async = !CheckpointingOptions.ASYNC_SNAPSHOTS.defaultValue();

	// we configure with the explicit string (rather than AbstractStateBackend#X_STATE_BACKEND_NAME)
	// to guard against config-breaking changes of the name

	final Configuration config1 = new Configuration();
	config1.setString(backendKey, "jobmanager");
	config1.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir);
	config1.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir);
	config1.setBoolean(CheckpointingOptions.ASYNC_SNAPSHOTS, async);

	final Configuration config2 = new Configuration();
	config2.setString(backendKey, MemoryStateBackendFactory.class.getName());
	config2.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir);
	config2.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir);
	config2.setBoolean(CheckpointingOptions.ASYNC_SNAPSHOTS, async);

	MemoryStateBackend backend1 = (MemoryStateBackend)
			StateBackendLoader.loadStateBackendFromConfig(config1, cl, null);
	MemoryStateBackend backend2 = (MemoryStateBackend)
			StateBackendLoader.loadStateBackendFromConfig(config2, cl, null);

	assertNotNull(backend1);
	assertNotNull(backend2);

	assertEquals(expectedCheckpointPath, backend1.getCheckpointPath());
	assertEquals(expectedCheckpointPath, backend2.getCheckpointPath());
	assertEquals(expectedSavepointPath, backend1.getSavepointPath());
	assertEquals(expectedSavepointPath, backend2.getSavepointPath());
	assertEquals(async, backend1.isUsingAsynchronousSnapshots());
	assertEquals(async, backend2.isUsingAsynchronousSnapshots());
}
 
Example #24
Source File: SavepointReaderITTestBase.java    From flink with Apache License 2.0
private void verifyListState(String path, ExecutionEnvironment batchEnv) throws Exception {
	ExistingSavepoint savepoint = Savepoint.load(batchEnv, path, new MemoryStateBackend());
	List<Integer> listResult = readListState(savepoint).collect();
	listResult.sort(Comparator.naturalOrder());

	Assert.assertEquals("Unexpected elements read from list state", SavepointSource.getElements(), listResult);
}
 
Example #25
Source File: StreamingRuntimeContextTest.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
private static AbstractStreamOperator<?> createMapPlainMockOp() throws Exception {

	AbstractStreamOperator<?> operatorMock = mock(AbstractStreamOperator.class);
	ExecutionConfig config = new ExecutionConfig();

	KeyedStateBackend keyedStateBackend = mock(KeyedStateBackend.class);

	DefaultKeyedStateStore keyedStateStore = new DefaultKeyedStateStore(keyedStateBackend, config);

	when(operatorMock.getExecutionConfig()).thenReturn(config);

	doAnswer(new Answer<MapState<Integer, String>>() {

		@Override
		public MapState<Integer, String> answer(InvocationOnMock invocationOnMock) throws Throwable {
			MapStateDescriptor<Integer, String> descr =
					(MapStateDescriptor<Integer, String>) invocationOnMock.getArguments()[2];

			AbstractKeyedStateBackend<Integer> backend = new MemoryStateBackend().createKeyedStateBackend(
				new DummyEnvironment("test_task", 1, 0),
				new JobID(),
				"test_op",
				IntSerializer.INSTANCE,
				1,
				new KeyGroupRange(0, 0),
				new KvStateRegistry().createTaskRegistry(new JobID(), new JobVertexID()),
				TtlTimeProvider.DEFAULT,
				new UnregisteredMetricsGroup(),
				Collections.emptyList(),
				new CloseableRegistry());
			backend.setCurrentKey(0);
			return backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, descr);
		}
	}).when(keyedStateBackend).getPartitionedState(Matchers.any(), any(TypeSerializer.class), any(MapStateDescriptor.class));

	when(operatorMock.getKeyedStateStore()).thenReturn(keyedStateStore);
	when(operatorMock.getOperatorID()).thenReturn(new OperatorID());
	return operatorMock;
}
 
Example #26
Source File: StateBackendLoadingTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Validates loading a memory state backend with additional parameters from the cluster configuration.
 */
@Test
public void testLoadMemoryStateWithParameters() throws Exception {
	final String checkpointDir = new Path(tmp.newFolder().toURI()).toString();
	final String savepointDir = new Path(tmp.newFolder().toURI()).toString();
	final Path expectedCheckpointPath = new Path(checkpointDir);
	final Path expectedSavepointPath = new Path(savepointDir);

	final boolean async = !CheckpointingOptions.ASYNC_SNAPSHOTS.defaultValue();

	// we configure with the explicit string (rather than AbstractStateBackend#X_STATE_BACKEND_NAME)
	// to guard against config-breaking changes of the name

	final Configuration config1 = new Configuration();
	config1.setString(backendKey, "jobmanager");
	config1.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir);
	config1.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir);
	config1.setBoolean(CheckpointingOptions.ASYNC_SNAPSHOTS, async);

	final Configuration config2 = new Configuration();
	config2.setString(backendKey, MemoryStateBackendFactory.class.getName());
	config2.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir);
	config2.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir);
	config2.setBoolean(CheckpointingOptions.ASYNC_SNAPSHOTS, async);

	MemoryStateBackend backend1 = (MemoryStateBackend)
			StateBackendLoader.loadStateBackendFromConfig(config1, cl, null);
	MemoryStateBackend backend2 = (MemoryStateBackend)
			StateBackendLoader.loadStateBackendFromConfig(config2, cl, null);

	assertNotNull(backend1);
	assertNotNull(backend2);

	assertEquals(expectedCheckpointPath, backend1.getCheckpointPath());
	assertEquals(expectedCheckpointPath, backend2.getCheckpointPath());
	assertEquals(expectedSavepointPath, backend1.getSavepointPath());
	assertEquals(expectedSavepointPath, backend2.getSavepointPath());
	assertEquals(async, backend1.isUsingAsynchronousSnapshots());
	assertEquals(async, backend2.isUsingAsynchronousSnapshots());
}
 
Example #27
Source File: ExecutionGraphCheckpointCoordinatorTest.java    From flink with Apache License 2.0
private ExecutionGraph createExecutionGraphAndEnableCheckpointing(
		CheckpointIDCounter counter,
		CompletedCheckpointStore store) throws Exception {
	final Time timeout = Time.days(1L);

	JobVertex jobVertex = new JobVertex("MockVertex");
	jobVertex.setInvokableClass(AbstractInvokable.class);

	final ExecutionGraph executionGraph = new ExecutionGraphTestUtils.TestingExecutionGraphBuilder(jobVertex)
		.setRpcTimeout(timeout)
		.setAllocationTimeout(timeout)
		.allowQueuedScheduling()
		.build();

	executionGraph.start(ComponentMainThreadExecutorServiceAdapter.forMainThread());

	CheckpointCoordinatorConfiguration chkConfig = new CheckpointCoordinatorConfiguration(
		100,
		100,
		100,
		1,
		CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION,
		true,
		false,
		0);

	executionGraph.enableCheckpointing(
			chkConfig,
			Collections.emptyList(),
			Collections.emptyList(),
			Collections.emptyList(),
			Collections.emptyList(),
			counter,
			store,
			new MemoryStateBackend(),
			CheckpointStatsTrackerTest.createTestTracker());

	return executionGraph;
}
 
Example #28
Source File: HeapKeyedStateBackendAsyncByDefaultTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testMemoryStateBackendDefaultsToAsync() throws Exception {
	MemoryStateBackend backend = new MemoryStateBackend();
	assertTrue(backend.isUsingAsynchronousSnapshots());

	validateSupportForAsyncSnapshots(backend);
}
 
Example #29
Source File: HeapAsyncSnapshotTtlStateTest.java    From Flink-CEPplus with Apache License 2.0
@Override
protected StateBackendTestContext createStateBackendTestContext(TtlTimeProvider timeProvider) {
	return new StateBackendTestContext(timeProvider) {
		@Override
		protected StateBackend createStateBackend() {
			return new MemoryStateBackend(true);
		}
	};
}
 
Example #30
Source File: StateBackendLoadingTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testInstantiateMemoryBackendByDefault() throws Exception {
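	// no application-defined backend and an empty configuration: the loader should fall back to the memory state backend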
	StateBackend backend =
			StateBackendLoader.fromApplicationOrConfigOrDefault(null, new Configuration(), cl, null);

	assertTrue(backend instanceof MemoryStateBackend);
}