org.apache.flink.api.common.state.ValueState Java Examples

The following examples show how to use org.apache.flink.api.common.state.ValueState. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
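Before the project examples, here is a minimal sketch of the usual ValueState pattern on a keyed stream: declare a ValueStateDescriptor, obtain the state from the runtime context in open(), then read and update it per key in processElement(). The class and state names (CountingFunction, "count") are illustrative and do not come from the projects below.

import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

// Illustrative only: counts events per key using ValueState.
public class CountingFunction extends KeyedProcessFunction<String, String, Long> {

	private transient ValueState<Long> countState;

	@Override
	public void open(Configuration parameters) {
		// Register keyed state; the descriptor name must be unique within the operator.
		countState = getRuntimeContext().getState(
				new ValueStateDescriptor<>("count", Types.LONG));
	}

	@Override
	public void processElement(String value, Context ctx, Collector<Long> out) throws Exception {
		// value() returns null until update() has been called for the current key
		Long current = countState.value();
		long updated = (current == null ? 0L : current) + 1;
		countState.update(updated);
		out.collect(updated);
	}
}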
Example #1
Source File: OngoingRidesExercise.java    From flink-training-exercises with Apache License 2.0
@Override
public void processBroadcastElement(String msg, Context ctx, Collector<TaxiRide> out) throws Exception {
	DateTimeFormatter timeFormatter =
			DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withLocale(Locale.US).withZoneUTC();

	Long thresholdInMinutes = Long.valueOf(msg);
	Long wm = ctx.currentWatermark();
	System.out.println("QUERY: " + thresholdInMinutes + " minutes at " + timeFormatter.print(wm));

	// Collect to the output all ongoing rides that started at least thresholdInMinutes ago.
	ctx.applyToKeyedState(taxiDescriptor, new KeyedStateFunction<Long, ValueState<TaxiRide>>() {
		@Override
		public void process(Long taxiId, ValueState<TaxiRide> taxiState) throws Exception {
			throw new MissingSolutionException();
		}
	});
}
 
Example #2
Source File: EventTimeTriggers.java    From flink with Apache License 2.0
@Override
public boolean onEventTime(long time, W window) throws Exception {
	ValueState<Boolean> hasFiredState = ctx.getPartitionedState(hasFiredOnTimeStateDesc);
	Boolean hasFired = hasFiredState.value();
	if (hasFired != null && hasFired) {
		// late fire
		return lateTrigger != null && lateTrigger.onEventTime(time, window);
	} else {
		if (time == window.maxTimestamp()) {
			// fire on time and update state
			hasFiredState.update(true);
			return true;
		} else {
			// early fire
			return earlyTrigger != null && earlyTrigger.onEventTime(time, window);
		}
	}
}
 
Example #3
Source File: CepOperator.java    From Flink-CEPplus with Apache License 2.0
private void migrateOldState() throws Exception {
	getKeyedStateBackend().applyToAllKeys(
		VoidNamespace.INSTANCE,
		VoidNamespaceSerializer.INSTANCE,
		new ValueStateDescriptor<>(
			"nfaOperatorStateName",
			new NFA.NFASerializer<>(inputSerializer)
		),
		new KeyedStateFunction<Object, ValueState<MigratedNFA<IN>>>() {
			@Override
			public void process(Object key, ValueState<MigratedNFA<IN>> state) throws Exception {
				MigratedNFA<IN> oldState = state.value();
				computationStates.update(new NFAState(oldState.getComputationStates()));
				org.apache.flink.cep.nfa.SharedBuffer<IN> sharedBuffer = oldState.getSharedBuffer();
				partialMatches.init(sharedBuffer.getEventsBuffer(), sharedBuffer.getPages());
				state.clear();
			}
		}
	);
}
 
Example #4
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
@Test
public void testCopyDefaultValue() throws Exception {
	final AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	ValueStateDescriptor<IntValue> kvId = new ValueStateDescriptor<>("id", IntValue.class, new IntValue(-1));

	ValueState<IntValue> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	IntValue default1 = state.value();

	backend.setCurrentKey(2);
	IntValue default2 = state.value();

	assertNotNull(default1);
	assertNotNull(default2);
	assertEquals(default1, default2);
	assertFalse(default1 == default2);

	backend.dispose();
}
 
Example #5
Source File: CepOperator.java    From flink with Apache License 2.0
private void migrateOldState() throws Exception {
	getKeyedStateBackend().applyToAllKeys(
		VoidNamespace.INSTANCE,
		VoidNamespaceSerializer.INSTANCE,
		new ValueStateDescriptor<>(
			"nfaOperatorStateName",
			new NFA.NFASerializer<>(inputSerializer)
		),
		new KeyedStateFunction<Object, ValueState<MigratedNFA<IN>>>() {
			@Override
			public void process(Object key, ValueState<MigratedNFA<IN>> state) throws Exception {
				MigratedNFA<IN> oldState = state.value();
				computationStates.update(new NFAState(oldState.getComputationStates()));
				org.apache.flink.cep.nfa.SharedBuffer<IN> sharedBuffer = oldState.getSharedBuffer();
				partialMatches.init(sharedBuffer.getEventsBuffer(), sharedBuffer.getPages());
				state.clear();
			}
		}
	);
}
 
Example #6
Source File: StateBackendTestBase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCopyDefaultValue() throws Exception {
	final AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	ValueStateDescriptor<IntValue> kvId = new ValueStateDescriptor<>("id", IntValue.class, new IntValue(-1));

	ValueState<IntValue> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	IntValue default1 = state.value();

	backend.setCurrentKey(2);
	IntValue default2 = state.value();

	assertNotNull(default1);
	assertNotNull(default2);
	assertEquals(default1, default2);
	assertFalse(default1 == default2);

	backend.dispose();
}
 
Example #7
Source File: KeyedProcessOperatorTest.java    From flink with Apache License 2.0
@Override
public void processElement(Integer value, Context ctx, Collector<String> out) throws Exception {
	final TimerService timerService = ctx.timerService();
	final ValueState<Integer> state = getRuntimeContext().getState(this.state);
	if (state.value() == null) {
		out.collect("INPUT:" + value);
		state.update(value);
		if (expectedTimeDomain.equals(TimeDomain.EVENT_TIME)) {
			timerService.registerEventTimeTimer(timerService.currentWatermark() + 5);
		} else {
			timerService.registerProcessingTimeTimer(timerService.currentProcessingTime() + 5);
		}
	} else {
		state.clear();
		if (expectedTimeDomain.equals(TimeDomain.EVENT_TIME)) {
			timerService.deleteEventTimeTimer(timerService.currentWatermark() + 4);
		} else {
			timerService.deleteProcessingTimeTimer(timerService.currentProcessingTime() + 4);
		}
	}
}
 
Example #8
Source File: WindowOperator.java    From Flink-CEPplus with Apache License 2.0
@Override
public <S extends Serializable> ValueState<S> getKeyValueState(String name,
	Class<S> stateType,
	S defaultState) {
	checkNotNull(stateType, "The state type class must not be null");

	TypeInformation<S> typeInfo;
	try {
		typeInfo = TypeExtractor.getForClass(stateType);
	}
	catch (Exception e) {
		throw new RuntimeException("Cannot analyze type '" + stateType.getName() +
			"' from the class alone, due to generic type parameters. " +
			"Please specify the TypeInformation directly.", e);
	}

	return getKeyValueState(name, typeInfo, defaultState);
}
 
Example #9
Source File: KeyedProcessOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Override
public void processElement(Integer value, Context ctx, Collector<String> out) throws Exception {
	final TimerService timerService = ctx.timerService();
	final ValueState<Integer> state = getRuntimeContext().getState(this.state);
	if (state.value() == null) {
		out.collect("INPUT:" + value);
		state.update(value);
		if (expectedTimeDomain.equals(TimeDomain.EVENT_TIME)) {
			timerService.registerEventTimeTimer(timerService.currentWatermark() + 5);
		} else {
			timerService.registerProcessingTimeTimer(timerService.currentProcessingTime() + 5);
		}
	} else {
		state.clear();
		if (expectedTimeDomain.equals(TimeDomain.EVENT_TIME)) {
			timerService.deleteEventTimeTimer(timerService.currentWatermark() + 4);
		} else {
			timerService.deleteProcessingTimeTimer(timerService.currentProcessingTime() + 4);
		}
	}
}
 
Example #10
Source File: OngoingRidesSolution.java    From flink-training-exercises with Apache License 2.0
@Override
public void processBroadcastElement(String msg, Context ctx, Collector<TaxiRide> out) throws Exception {
	DateTimeFormatter timeFormatter =
			DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withLocale(Locale.US).withZoneUTC();

	Long thresholdInMinutes = Long.valueOf(msg);
	Long wm = ctx.currentWatermark();
	System.out.println("QUERY: " + thresholdInMinutes + " minutes at " + timeFormatter.print(wm));

	// Collect to the output all ongoing rides that started at least thresholdInMinutes ago.
	ctx.applyToKeyedState(taxiDescriptor, new KeyedStateFunction<Long, ValueState<TaxiRide>>() {
		@Override
		public void process(Long taxiId, ValueState<TaxiRide> taxiState) throws Exception {
			TaxiRide ride = taxiState.value();
			if (ride.isStart) {
				long minutes = (wm - ride.getEventTime()) / 60000;
				if (minutes >= thresholdInMinutes) {
					out.collect(ride);
				}
			}
		}
	});
}
 
Example #11
Source File: DeduplicateFunctionHelper.java    From flink with Apache License 2.0
/**
 * Processes an element to deduplicate on keys: sends the current row downstream as the last row and retracts
 * the previous row if needed.
 *
 * @param currentRow latest row received by the deduplicate function
 * @param generateRetraction whether a retraction message needs to be sent downstream
 * @param state state of the deduplicate function
 * @param out underlying collector
 * @throws Exception
 */
static void processLastRow(BaseRow currentRow, boolean generateRetraction, ValueState<BaseRow> state,
		Collector<BaseRow> out) throws Exception {
	// check that the incoming row is an accumulate message
	Preconditions.checkArgument(BaseRowUtil.isAccumulateMsg(currentRow));
	if (generateRetraction) {
		// state stores complete row if generateRetraction is true
		BaseRow preRow = state.value();
		state.update(currentRow);
		if (preRow != null) {
			preRow.setHeader(BaseRowUtil.RETRACT_MSG);
			out.collect(preRow);
		}
	}
	out.collect(currentRow);
}
 
Example #12
Source File: CleanupState.java    From flink with Apache License 2.0
default void registerProcessingCleanupTimer(
		ValueState<Long> cleanupTimeState,
		long currentTime,
		long minRetentionTime,
		long maxRetentionTime,
		TimerService timerService) throws Exception {

	// last registered timer
	Long curCleanupTime = cleanupTimeState.value();

	// check if a cleanup timer is registered and
	// that the current cleanup timer won't delete state we need to keep
	if (curCleanupTime == null || (currentTime + minRetentionTime) > curCleanupTime) {
		// we need to register a new (later) timer
		long cleanupTime = currentTime + maxRetentionTime;
		// register timer and remember clean-up time
		timerService.registerProcessingTimeTimer(cleanupTime);
		// delete expired timer
		if (curCleanupTime != null) {
			timerService.deleteProcessingTimeTimer(curCleanupTime);
		}
		cleanupTimeState.update(cleanupTime);
	}
}
 
Example #13
Source File: TypeSerializerSnapshotMigrationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public Tuple2<Long, Long> map(Tuple2<Long, Long> value) throws Exception {
	ValueState<Long> state = getRuntimeContext().getState(
		new ValueStateDescriptor<>("testState", new TestSerializer()));

	state.update(value.f1);
	return value;
}
 
Example #14
Source File: LegacyStatefulJobSavepointMigrationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public void flatMap(Tuple2<Long, Long> value, Collector<Tuple2<Long, Long>> out) throws Exception {
	out.collect(value);

	ValueState<Long> state = getRuntimeContext().getState(stateDescriptor);
	if (state == null) {
		throw new RuntimeException("Missing key value state for " + value);
	}

	assertEquals(value.f1, state.value());
	getRuntimeContext().getAccumulator(SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR).add(1);
}
 
Example #15
Source File: LegacyStatefulJobSavepointMigrationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public void onEventTime(InternalTimer<Long, Long> timer) throws Exception {
	ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
		timer.getNamespace(),
		LongSerializer.INSTANCE,
		stateDescriptor);

	assertEquals(state.value(), timer.getNamespace());
	getRuntimeContext().getAccumulator(SUCCESSFUL_EVENT_TIME_CHECK_ACCUMULATOR).add(1);
}
 
Example #16
Source File: LegacyStatefulJobSavepointMigrationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public void onProcessingTime(InternalTimer<Long, Long> timer) throws Exception {
	ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
		timer.getNamespace(),
		LongSerializer.INSTANCE,
		stateDescriptor);

	assertEquals(state.value(), timer.getNamespace());
	getRuntimeContext().getAccumulator(SUCCESSFUL_PROCESSING_TIME_CHECK_ACCUMULATOR).add(1);
}
 
Example #17
Source File: ProcessingTimeoutTrigger.java    From flink with Apache License 2.0
@Override
public void clear(W window, TriggerContext ctx) throws Exception {
	ValueState<Long> timeoutTimestampState = ctx.getPartitionedState(this.timeoutStateDesc);
	Long timeoutTimestamp = timeoutTimestampState.value();
	if (timeoutTimestamp != null) {
		ctx.deleteProcessingTimeTimer(timeoutTimestamp);
		timeoutTimestampState.clear();
	}
	this.nestedTrigger.clear(window, ctx);
}
 
Example #18
Source File: StatefulJobSavepointMigrationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public void processElement(StreamRecord<Tuple2<Long, Long>> element) throws Exception {
	ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
		element.getValue().f0,
		LongSerializer.INSTANCE,
		stateDescriptor);

	assertEquals(state.value(), element.getValue().f1);
	getRuntimeContext().getAccumulator(SUCCESSFUL_PROCESS_CHECK_ACCUMULATOR).add(1);

	output.collect(element);
}
 
Example #19
Source File: StatefulJobSavepointMigrationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public void onEventTime(InternalTimer<Long, Long> timer) throws Exception {
	ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
		timer.getNamespace(),
		LongSerializer.INSTANCE,
		stateDescriptor);

	assertEquals(state.value(), timer.getNamespace());
	getRuntimeContext().getAccumulator(SUCCESSFUL_EVENT_TIME_CHECK_ACCUMULATOR).add(1);
}
 
Example #20
Source File: CEPOperatorTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testKeyedCEPOperatorNFAUpdateTimes() throws Exception {
	CepOperator<Event, Integer, Map<String, List<Event>>> operator = CepOperatorTestUtilities.getKeyedCepOpearator(
		true,
		new SimpleNFAFactory());
	OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);

	try {
		harness.open();

		final ValueState nfaOperatorState = (ValueState) Whitebox.<ValueState>getInternalState(operator, "computationStates");
		final ValueState nfaOperatorStateSpy = Mockito.spy(nfaOperatorState);
		Whitebox.setInternalState(operator, "computationStates", nfaOperatorStateSpy);

		Event startEvent = new Event(42, "c", 1.0);
		SubEvent middleEvent = new SubEvent(42, "a", 1.0, 10.0);
		Event endEvent = new Event(42, "b", 1.0);

		harness.processElement(new StreamRecord<>(startEvent, 1L));
		harness.processElement(new StreamRecord<>(new Event(42, "d", 1.0), 4L));
		harness.processElement(new StreamRecord<Event>(middleEvent, 4L));
		harness.processElement(new StreamRecord<>(endEvent, 4L));

		// verify the number of invocations NFA is updated
		Mockito.verify(nfaOperatorStateSpy, Mockito.times(3)).update(Mockito.any());

		// get and verify the output
		Queue<Object> result = harness.getOutput();

		assertEquals(1, result.size());

		verifyPattern(result.poll(), startEvent, middleEvent, endEvent);
	} finally {
		harness.close();
	}
}
 
Example #21
Source File: LegacyStatefulJobSavepointMigrationITCase.java    From flink with Apache License 2.0
@Override
public void processElement(StreamRecord<Tuple2<Long, Long>> element) throws Exception {
	ValueState<Long> state = getKeyedStateBackend().getPartitionedState(
		element.getValue().f0,
		LongSerializer.INSTANCE,
		stateDescriptor);

	assertEquals(state.value(), element.getValue().f1);
	getRuntimeContext().getAccumulator(SUCCESSFUL_PROCESS_CHECK_ACCUMULATOR).add(1);

	output.collect(element);
}
 
Example #22
Source File: WindowOperator.java    From flink with Apache License 2.0
@Override
public <S extends Serializable> ValueState<S> getKeyValueState(String name,
	TypeInformation<S> stateType,
	S defaultState) {

	checkNotNull(name, "The name of the state must not be null");
	checkNotNull(stateType, "The state type information must not be null");

	ValueStateDescriptor<S> stateDesc = new ValueStateDescriptor<>(name, stateType.createSerializer(getExecutionConfig()), defaultState);
	return getPartitionedState(stateDesc);
}
 
Example #23
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
@Test
public void testCheckConcurrencyProblemWhenPerformingCheckpointAsync() throws Exception {

	CheckpointStreamFactory streamFactory = createStreamFactory();
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE, env);

	ExecutorService executorService = Executors.newScheduledThreadPool(1);
	try {
		long checkpointID = 0;
		List<Future> futureList = new ArrayList<>();
		for (int i = 0; i < 10; ++i) {
			ValueStateDescriptor<Integer> kvId = new ValueStateDescriptor<>("id" + i, IntSerializer.INSTANCE);
			ValueState<Integer> state = backend.getOrCreateKeyedState(VoidNamespaceSerializer.INSTANCE, kvId);
			((InternalValueState) state).setCurrentNamespace(VoidNamespace.INSTANCE);
			backend.setCurrentKey(i);
			state.update(i);

			futureList.add(runSnapshotAsync(executorService,
				backend.snapshot(checkpointID++, System.currentTimeMillis(), streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation())));
		}

		for (Future future : futureList) {
			future.get(20, TimeUnit.SECONDS);
		}
	} catch (Exception e) {
		fail();
	} finally {
		backend.dispose();
		executorService.shutdown();
	}
}
 
Example #24
Source File: DefaultKeyedStateStore.java    From Flink-CEPplus with Apache License 2.0
@Override
public <T> ValueState<T> getState(ValueStateDescriptor<T> stateProperties) {
	requireNonNull(stateProperties, "The state properties must not be null");
	try {
		stateProperties.initializeSerializerUnlessSet(executionConfig);
		return getPartitionedState(stateProperties);
	} catch (Exception e) {
		throw new RuntimeException("Error while getting state", e);
	}
}
 
Example #25
Source File: AbstractStreamOperatorTestHarnessTest.java    From flink with Apache License 2.0
@Test
public void testSetTtlTimeProvider() throws Exception {
	AbstractStreamOperator<Integer> operator = new AbstractStreamOperator<Integer>() {};
	try (AbstractStreamOperatorTestHarness<Integer> result = new AbstractStreamOperatorTestHarness<>(
			operator,
			1,
			1,
			0)) {

		result.config.setStateKeySerializer(IntSerializer.INSTANCE);

		Time timeToLive = Time.hours(1);
		result.initializeState(new OperatorSubtaskState());
		result.open();

		ValueStateDescriptor<Integer> stateDescriptor = new ValueStateDescriptor<>("test", IntSerializer.INSTANCE);
		stateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(timeToLive).build());
		KeyedStateBackend<Integer> keyedStateBackend = operator.getKeyedStateBackend();
		ValueState<Integer> state = keyedStateBackend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, stateDescriptor);

		int expectedValue = 42;
		keyedStateBackend.setCurrentKey(1);
		result.setStateTtlProcessingTime(0L);
		state.update(expectedValue);
		Assert.assertEquals(expectedValue, (int) state.value());
		result.setStateTtlProcessingTime(timeToLive.toMilliseconds() + 1);
		Assert.assertNull(state.value());
	}
}
 
Example #26
Source File: StatefulStreamingJob.java    From Flink-CEPplus with Apache License 2.0
private static <T> void touchState(ValueState<T> state, Supplier<T> elements) throws IOException {
	T elem = state.value();
	if (elem == null) {
		elem = elements.get();
	}
	state.update(elem);
}
 
Example #27
Source File: LegacyStatefulJobSavepointMigrationITCase.java    From flink with Apache License 2.0
@Override
public void flatMap(Tuple2<Long, Long> value, Collector<Tuple2<Long, Long>> out) throws Exception {
	out.collect(value);

	ValueState<Long> state = getRuntimeContext().getState(stateDescriptor);
	if (state == null) {
		throw new RuntimeException("Missing key value state for " + value);
	}

	assertEquals(value.f1, state.value());
	getRuntimeContext().getAccumulator(SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR).add(1);
}