org.apache.flink.api.common.state.ValueStateDescriptor Java Examples

The following examples show how to use org.apache.flink.api.common.state.ValueStateDescriptor. Each example is taken from an open-source project; the source file, originating project, and license are noted above it.
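
As a point of reference before the project examples, the sketch below shows the basic pattern they all build on: create a ValueStateDescriptor in open(), obtain a ValueState from the RuntimeContext, and read, update, or clear it per key. It follows the standard keyed-state API; the class name CountWindowAverage and the two-element averaging logic are illustrative choices, not taken from any example below.

import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;

public class CountWindowAverage extends RichFlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {

	// running (count, sum) for the current key
	private transient ValueState<Tuple2<Long, Long>> sum;

	@Override
	public void open(Configuration parameters) {
		ValueStateDescriptor<Tuple2<Long, Long>> descriptor = new ValueStateDescriptor<>(
				"average",                                                  // state name
				TypeInformation.of(new TypeHint<Tuple2<Long, Long>>() {})); // type information
		sum = getRuntimeContext().getState(descriptor);
	}

	@Override
	public void flatMap(Tuple2<Long, Long> input, Collector<Tuple2<Long, Long>> out) throws Exception {
		Tuple2<Long, Long> current = sum.value();
		if (current == null) {
			// an empty state without a default value yields null
			current = Tuple2.of(0L, 0L);
		}
		current.f0 += 1;
		current.f1 += input.f1;
		sum.update(current);

		// emit the per-key average every two elements, then start over
		if (current.f0 >= 2) {
			out.collect(Tuple2.of(input.f0, current.f1 / current.f0));
			sum.clear();
		}
	}
}

The function is applied to a keyed stream, e.g. stream.keyBy(t -> t.f0).flatMap(new CountWindowAverage()).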
Example #1
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
/**
 * Verify that an empty {@code ValueState} will yield the default value.
 */
@Test
public void testValueStateDefaultValue() throws Exception {
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

	ValueStateDescriptor<String> kvId = new ValueStateDescriptor<>("id", String.class, "Hello");

	ValueState<String> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

	backend.setCurrentKey(1);
	assertEquals("Hello", state.value());

	state.update("Ciao");
	assertEquals("Ciao", state.value());

	state.clear();
	assertEquals("Hello", state.value());

	backend.dispose();
}
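
Note that the descriptor constructors that take a default value, as used above, are deprecated in more recent Flink releases. A common replacement is to omit the default and handle the null that an empty state returns; a minimal sketch of that pattern (the class name DefaultingMapper and the mapping logic are hypothetical):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;

public class DefaultingMapper extends RichMapFunction<String, String> {

	private transient ValueState<String> lastValue;

	@Override
	public void open(Configuration parameters) {
		// no default value on the descriptor
		lastValue = getRuntimeContext().getState(new ValueStateDescriptor<>("id", String.class));
	}

	@Override
	public String map(String in) throws Exception {
		String previous = lastValue.value();
		if (previous == null) {
			previous = "Hello"; // explicit fallback replaces the descriptor default
		}
		lastValue.update(in);
		return previous;
	}
}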
 
Example #2
Source File: KeyedStateDeduplication.java    From flink-learning with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    ValueStateDescriptor<Boolean> keyedStateDuplicated =
            new ValueStateDescriptor<>("KeyedStateDeduplication",
                    TypeInformation.of(new TypeHint<Boolean>() {}));
    // State TTL configuration: the expiration time is set to 36 hours
    StateTtlConfig ttlConfig = StateTtlConfig
            .newBuilder(Time.hours(36))
            .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
            .setStateVisibility(
                    StateTtlConfig.StateVisibility.NeverReturnExpired)
            .cleanupInRocksdbCompactFilter(50000000L)
            .build();
    // Enable TTL
    keyedStateDuplicated.enableTimeToLive(ttlConfig);
    // Restore the state from the state backend
    isExist = getRuntimeContext().getState(keyedStateDuplicated);
}
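
For context, the isExist flag initialized above is typically consulted once per key in the processing method: forward an element the first time its key is seen and drop later occurrences until the TTL expires. A hypothetical companion sketch (the KeyedProcessFunction signature and String element type are assumptions, not taken from the flink-learning source):

@Override
public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
    // a null value means this key has not been seen yet (or its TTL expired)
    if (isExist.value() == null) {
        out.collect(value);
        isExist.update(true);
    }
}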
 
Example #3
Source File: RowTimeSortOperator.java    From flink with Apache License 2.0
@Override
public void open() throws Exception {
	super.open();

	LOG.info("Opening RowTimeSortOperator");
	if (gComparator != null) {
		comparator = gComparator.newInstance(getContainingTask().getUserCodeClassLoader());
		gComparator = null;
	}

	BasicTypeInfo<Long> keyTypeInfo = BasicTypeInfo.LONG_TYPE_INFO;
	ListTypeInfo<RowData> valueTypeInfo = new ListTypeInfo<>(inputRowType);
	MapStateDescriptor<Long, List<RowData>> mapStateDescriptor = new MapStateDescriptor<>(
			"dataState", keyTypeInfo, valueTypeInfo);
	dataState = getRuntimeContext().getMapState(mapStateDescriptor);

	ValueStateDescriptor<Long> lastTriggeringTsDescriptor = new ValueStateDescriptor<>("lastTriggeringTsState",
			Long.class);
	lastTriggeringTsState = getRuntimeContext().getState(lastTriggeringTsDescriptor);
}
 
Example #4
Source File: StatefulOperatorChainedTaskTest.java    From flink with Apache License 2.0
@Override
public void initializeState(StateInitializationContext context) throws Exception {
	super.initializeState(context);

	counterState = context
		.getKeyedStateStore()
		.getState(new ValueStateDescriptor<>(prefix + "counter-state", LongSerializer.INSTANCE));

	// set the key manually so that RocksDBListState gets the serialized key.
	setCurrentKey("10");

	if (context.isRestored()) {
		counter =  counterState.value();
		assertEquals(snapshotOutData, counter);
		counterState.clear();
	}
}
 
Example #5
Source File: StreamingRuntimeContextTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testValueStateInstantiation() throws Exception {

	final ExecutionConfig config = new ExecutionConfig();
	config.registerKryoType(Path.class);

	final AtomicReference<Object> descriptorCapture = new AtomicReference<>();

	StreamingRuntimeContext context = new StreamingRuntimeContext(
			createDescriptorCapturingMockOp(descriptorCapture, config),
			createMockEnvironment(),
			Collections.<String, Accumulator<?, ?>>emptyMap());

	ValueStateDescriptor<TaskInfo> descr = new ValueStateDescriptor<>("name", TaskInfo.class);
	context.getState(descr);

	StateDescriptor<?, ?> descrIntercepted = (StateDescriptor<?, ?>) descriptorCapture.get();
	TypeSerializer<?> serializer = descrIntercepted.getSerializer();

	// check that the Path class is really registered, i.e., the execution config was applied
	assertTrue(serializer instanceof KryoSerializer);
	assertTrue(((KryoSerializer<?>) serializer).getKryo().getRegistration(Path.class).getId() > 0);
}
 
Example #6
Source File: StatefulOperatorChainedTaskTest.java    From Flink-CEPplus with Apache License 2.0
@Override
public void initializeState(StateInitializationContext context) throws Exception {
	super.initializeState(context);

	counterState = context
		.getKeyedStateStore()
		.getState(new ValueStateDescriptor<>(prefix + "counter-state", LongSerializer.INSTANCE));

	// set the key manually so that RocksDBListState gets the serialized key.
	setCurrentKey("10");

	if (context.isRestored()) {
		counter =  counterState.value();
		assertEquals(snapshotOutData, counter);
		counterState.clear();
	}
}
 
Example #7
Source File: CepOperator.java    From Flink-CEPplus with Apache License 2.0
@Override
public void initializeState(StateInitializationContext context) throws Exception {
	super.initializeState(context);

	// initializeState through the provided context
	computationStates = context.getKeyedStateStore().getState(
		new ValueStateDescriptor<>(
			NFA_STATE_NAME,
			new NFAStateSerializer()));

	partialMatches = new SharedBuffer<>(context.getKeyedStateStore(), inputSerializer);

	elementQueueState = context.getKeyedStateStore().getMapState(
			new MapStateDescriptor<>(
					EVENT_QUEUE_STATE_NAME,
					LongSerializer.INSTANCE,
					new ListSerializer<>(inputSerializer)));

	migrateOldState();
}
 
Example #8
Source File: ProcTimeRangeBoundedPrecedingFunction.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	function = genAggsHandler.newInstance(getRuntimeContext().getUserCodeClassLoader());
	function.open(new PerKeyStateDataViewStore(getRuntimeContext()));

	output = new JoinedRow();

	// input elements are all binary rows, as they come from the network
	BaseRowTypeInfo inputType = new BaseRowTypeInfo(inputFieldTypes);
	// we keep the elements received in a map state indexed based on their ingestion time
	ListTypeInfo<BaseRow> rowListTypeInfo = new ListTypeInfo<BaseRow>(inputType);
	MapStateDescriptor<Long, List<BaseRow>> mapStateDescriptor = new MapStateDescriptor<Long, List<BaseRow>>(
		"inputState", BasicTypeInfo.LONG_TYPE_INFO, rowListTypeInfo);
	inputState = getRuntimeContext().getMapState(mapStateDescriptor);

	BaseRowTypeInfo accTypeInfo = new BaseRowTypeInfo(accTypes);
	ValueStateDescriptor<BaseRow> stateDescriptor =
		new ValueStateDescriptor<BaseRow>("accState", accTypeInfo);
	accState = getRuntimeContext().getState(stateDescriptor);

	initCleanupTimeState("ProcTimeBoundedRangeOverCleanupTime");
}
 
Example #9
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
@Test
public void testValueStateWorkWithTtl() throws Exception {
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);
	try {
		ValueStateDescriptor<MutableLong> kvId = new ValueStateDescriptor<>("id", MutableLong.class);
		kvId.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(1)).build());

		ValueState<MutableLong> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);
		backend.setCurrentKey(1);
		state.update(new MutableLong());
		state.value();
	} finally {
		backend.close();
		backend.dispose();
	}
}
 
Example #10
Source File: PerWindowStateDataViewStore.java    From flink with Apache License 2.0
@Override
public <N, UK, UV> StateMapView<N, UK, UV> getStateMapView(String stateName, MapViewTypeInfo<UK, UV> mapViewTypeInfo) throws Exception {
	MapStateDescriptor<UK, UV> mapStateDescriptor = new MapStateDescriptor<>(
		stateName,
		mapViewTypeInfo.getKeyType(),
		mapViewTypeInfo.getValueType());

	MapState<UK, UV> mapState = keyedStateBackend.getOrCreateKeyedState(windowSerializer, mapStateDescriptor);
	// explicit cast to internal state
	InternalMapState<?, N, UK, UV> internalMapState = (InternalMapState<?, N, UK, UV>) mapState;

	if (mapViewTypeInfo.isNullAware()) {
		ValueStateDescriptor<UV> nullStateDescriptor = new ValueStateDescriptor<>(
			stateName + NULL_STATE_POSTFIX,
			mapViewTypeInfo.getValueType());
		ValueState<UV> nullState = keyedStateBackend.getOrCreateKeyedState(windowSerializer, nullStateDescriptor);
		// explicit cast to internal state
		InternalValueState<?, N, UV> internalNullState = (InternalValueState<?, N, UV>) nullState;
		return new StateMapView.NamespacedStateMapViewWithKeysNullable<>(internalMapState, internalNullState);
	} else {
		return new StateMapView.NamespacedStateMapViewWithKeysNotNull<>(internalMapState);
	}
}
 
Example #11
Source File: StreamingRuntimeContextTest.java    From flink with Apache License 2.0
@Test
public void testValueStateInstantiation() throws Exception {

	final ExecutionConfig config = new ExecutionConfig();
	config.registerKryoType(Path.class);

	final AtomicReference<Object> descriptorCapture = new AtomicReference<>();

	StreamingRuntimeContext context = new StreamingRuntimeContext(
			createDescriptorCapturingMockOp(descriptorCapture, config),
			createMockEnvironment(),
			Collections.<String, Accumulator<?, ?>>emptyMap());

	ValueStateDescriptor<TaskInfo> descr = new ValueStateDescriptor<>("name", TaskInfo.class);
	context.getState(descr);

	StateDescriptor<?, ?> descrIntercepted = (StateDescriptor<?, ?>) descriptorCapture.get();
	TypeSerializer<?> serializer = descrIntercepted.getSerializer();

	// check that the Path class is really registered, i.e., the execution config was applied
	assertTrue(serializer instanceof KryoSerializer);
	assertTrue(((KryoSerializer<?>) serializer).getKryo().getRegistration(Path.class).getId() > 0);
}
 
Example #12
Source File: StreamingRuntimeContextTest.java    From flink with Apache License 2.0
@Test
public void testValueStateInstantiation() throws Exception {

	final ExecutionConfig config = new ExecutionConfig();
	config.registerKryoType(Path.class);

	final AtomicReference<Object> descriptorCapture = new AtomicReference<>();

	StreamingRuntimeContext context = createRuntimeContext(descriptorCapture, config);
	ValueStateDescriptor<TaskInfo> descr = new ValueStateDescriptor<>("name", TaskInfo.class);
	context.getState(descr);

	StateDescriptor<?, ?> descrIntercepted = (StateDescriptor<?, ?>) descriptorCapture.get();
	TypeSerializer<?> serializer = descrIntercepted.getSerializer();

	// check that the Path class is really registered, i.e., the execution config was applied
	assertTrue(serializer instanceof KryoSerializer);
	assertTrue(((KryoSerializer<?>) serializer).getKryo().getRegistration(Path.class).getId() > 0);
}
 
Example #13
Source File: PojoSerializerUpgradeTest.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
	pojoClass = getRuntimeContext().getUserCodeClassLoader().loadClass(POJO_NAME);

	fieldA = pojoClass.getDeclaredField("a");
	fieldA.setAccessible(true);

	if (hasBField) {
		fieldB = pojoClass.getDeclaredField("b");
		fieldB.setAccessible(true);
	}

	if (keyed) {
		keyedValueState = context.getKeyedStateStore().getState(
			new ValueStateDescriptor<>("keyedValueState", (Class<Object>) pojoClass));
		keyedListState = context.getKeyedStateStore().getListState(
			new ListStateDescriptor<>("keyedListState", (Class<Object>) pojoClass));

		ReduceFunction<Object> reduceFunction = new FirstValueReducer<>();
		keyedReducingState = context.getKeyedStateStore().getReducingState(
			new ReducingStateDescriptor<>("keyedReducingState", reduceFunction, (Class<Object>) pojoClass));
	} else {
		partitionableListState = context.getOperatorStateStore().getListState(
			new ListStateDescriptor<>("partitionableListState", (Class<Object>) pojoClass));
		unionListState = context.getOperatorStateStore().getUnionListState(
			new ListStateDescriptor<>("unionListState", (Class<Object>) pojoClass));
	}
}
 
Example #14
Source File: AbstractQueryableStateTestBase.java    From flink with Apache License 2.0
/**
 * Tests simple value state queryable state instance. Each source emits
 * (subtaskIndex, 0)..(subtaskIndex, numElements) tuples, which are then
 * queried. The test succeeds after each subtask index is queried with
 * value numElements (the latest element updated the state).
 */
@Test
public void testValueState() throws Exception {
	final Deadline deadline = Deadline.now().plus(TEST_TIMEOUT);
	final long numElements = 1024L;

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setStateBackend(stateBackend);
	env.setParallelism(maxParallelism);
	// Very important, because the cluster is shared between tests and we
	// don't explicitly check that all slots are available before
	// submitting.
	env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 1000L));

	DataStream<Tuple2<Integer, Long>> source = env.addSource(new TestAscendingValueSource(numElements));

	// Value state
	ValueStateDescriptor<Tuple2<Integer, Long>> valueState = new ValueStateDescriptor<>("any", source.getType());

	source.keyBy(new KeySelector<Tuple2<Integer, Long>, Integer>() {
		private static final long serialVersionUID = 7662520075515707428L;

		@Override
		public Integer getKey(Tuple2<Integer, Long> value) {
			return value.f0;
		}
	}).asQueryableState("hakuna", valueState);

	try (AutoCancellableJob autoCancellableJob = new AutoCancellableJob(deadline, clusterClient, env)) {

		final JobID jobId = autoCancellableJob.getJobId();
		final JobGraph jobGraph = autoCancellableJob.getJobGraph();

		ClientUtils.submitJob(clusterClient, jobGraph);
		executeValueQuery(deadline, client, jobId, "hakuna", valueState, numElements);
	}
}
 
Example #15
Source File: StateBackendTestBase.java    From flink with Apache License 2.0
@Test
public void testCheckConcurrencyProblemWhenPerformingCheckpointAsync() throws Exception {

	CheckpointStreamFactory streamFactory = createStreamFactory();
	Environment env = new DummyEnvironment();
	AbstractKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE, env);

	ExecutorService executorService = Executors.newScheduledThreadPool(1);
	try {
		long checkpointID = 0;
		List<Future> futureList = new ArrayList<>();
		for (int i = 0; i < 10; ++i) {
			ValueStateDescriptor<Integer> kvId = new ValueStateDescriptor<>("id" + i, IntSerializer.INSTANCE);
			ValueState<Integer> state = backend.getOrCreateKeyedState(VoidNamespaceSerializer.INSTANCE, kvId);
			((InternalValueState) state).setCurrentNamespace(VoidNamespace.INSTANCE);
			backend.setCurrentKey(i);
			state.update(i);

			futureList.add(runSnapshotAsync(executorService,
				backend.snapshot(checkpointID++, System.currentTimeMillis(), streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation())));
		}

		for (Future future : futureList) {
			future.get(20, TimeUnit.SECONDS);
		}
	} catch (Exception e) {
		fail();
	} finally {
		backend.dispose();
		executorService.shutdown();
	}
}
 
Example #16
Source File: SemanticsCheckMapper.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) {
	ValueStateDescriptor<Long> sequenceStateDescriptor =
		new ValueStateDescriptor<>("sequenceState", Long.class);

	sequenceValue = getRuntimeContext().getState(sequenceStateDescriptor);
}
 
Example #17
Source File: WorkingTimeMonitor.java    From infoworld-post with Apache License 2.0
@Override
public void open(Configuration conf) {
    // register state handle
    shiftStart = getRuntimeContext().getState(
            new ValueStateDescriptor<>("shiftStart", Types.LONG));
    // initialize time formatter
    this.formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
}
 
Example #18
Source File: FlinkStateInternals.java    From beam with Apache License 2.0
@Override
public <InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT> bindCombining(
    String id,
    StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
    Coder<AccumT> accumCoder,
    Combine.CombineFn<InputT, AccumT, OutputT> combineFn) {
  try {
    keyedStateBackend.getOrCreateKeyedState(
        StringSerializer.INSTANCE,
        new ValueStateDescriptor<>(id, new CoderTypeSerializer<>(accumCoder)));
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  return null;
}
 
Example #19
Source File: BaseTwoInputStreamOperatorWithStateRetention.java    From flink with Apache License 2.0
@Override
public void open() throws Exception {
	initializeTimerService();

	if (stateCleaningEnabled) {
		ValueStateDescriptor<Long> cleanupStateDescriptor =
			new ValueStateDescriptor<>(CLEANUP_TIMESTAMP, Types.LONG);
		latestRegisteredCleanupTimer = getRuntimeContext().getState(cleanupStateDescriptor);
	}
}
 
Example #20
Source File: KeyedStream.java    From flink with Apache License 2.0
/**
 * Publishes the keyed stream as a queryable ValueState instance.
 *
 * @param queryableStateName Name under which to publish the queryable state instance
 * @return Queryable state instance
 */
@PublicEvolving
public QueryableStateStream<KEY, T> asQueryableState(String queryableStateName) {
	ValueStateDescriptor<T> valueStateDescriptor = new ValueStateDescriptor<T>(
			UUID.randomUUID().toString(),
			getType());

	return asQueryableState(queryableStateName, valueStateDescriptor);
}
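
The counterpart to publishing queryable state is querying it from outside the job with QueryableStateClient, along the lines of Flink's documented queryable-state client API. A sketch under assumed values for the proxy host and port, job id, key, queryable state name, and descriptor (here a Long-valued state on a stream keyed by String):

import java.util.concurrent.CompletableFuture;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.queryablestate.client.QueryableStateClient;

public class QueryExample {
	public static void main(String[] args) throws Exception {
		// host and port of the queryable state proxy (assumed values)
		QueryableStateClient client = new QueryableStateClient("proxy-host", 9069);

		JobID jobId = JobID.fromHexString(args[0]);
		ValueStateDescriptor<Long> descriptor = new ValueStateDescriptor<>("any-name", Long.class);

		CompletableFuture<ValueState<Long>> result = client.getKvState(
				jobId,
				"query-name",   // name passed to asQueryableState(...)
				"some-key",     // key to look up
				BasicTypeInfo.STRING_TYPE_INFO,
				descriptor);

		result.thenAccept(state -> {
			try {
				System.out.println("value = " + state.value());
			} catch (Exception e) {
				e.printStackTrace();
			}
		});

		result.get(); // wait for the asynchronous lookup to complete
		client.shutdownAndWait();
	}
}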
 
Example #21
Source File: RowTimeRangeBoundedPrecedingFunction.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	function = genAggsHandler.newInstance(getRuntimeContext().getUserCodeClassLoader());
	function.open(new PerKeyStateDataViewStore(getRuntimeContext()));

	output = new JoinedRow();

	ValueStateDescriptor<Long> lastTriggeringTsDescriptor = new ValueStateDescriptor<Long>(
		"lastTriggeringTsState",
		Types.LONG);
	lastTriggeringTsState = getRuntimeContext().getState(lastTriggeringTsDescriptor);

	BaseRowTypeInfo accTypeInfo = new BaseRowTypeInfo(accTypes);
	ValueStateDescriptor<BaseRow> accStateDesc = new ValueStateDescriptor<BaseRow>("accState", accTypeInfo);
	accState = getRuntimeContext().getState(accStateDesc);

	// input elements are all binary rows, as they come from the network
	BaseRowTypeInfo inputType = new BaseRowTypeInfo(inputFieldTypes);
	ListTypeInfo<BaseRow> rowListTypeInfo = new ListTypeInfo<BaseRow>(inputType);
	MapStateDescriptor<Long, List<BaseRow>> inputStateDesc = new MapStateDescriptor<Long, List<BaseRow>>(
		"inputState",
		Types.LONG,
		rowListTypeInfo);
	inputState = getRuntimeContext().getMapState(inputStateDesc);

	initCleanupTimeState("RowTimeBoundedRangeOverCleanupTime");
}
 
Example #22
Source File: StateBackendMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testKeyedValueStateSerializerReconfiguration() throws Exception {
	final String stateName = "test-name";

	testKeyedValueStateUpgrade(
		new ValueStateDescriptor<>(
			stateName,
			new TestType.V1TestTypeSerializer()),
		new ValueStateDescriptor<>(
			stateName,
			// the test fails if this serializer is used instead of a reconfigured new serializer
			new TestType.ReconfigurationRequiringTestTypeSerializer()));
}
 
Example #23
Source File: TypeSerializerSnapshotMigrationITCase.java    From Flink-CEPplus with Apache License 2.0
@Override
public Tuple2<Long, Long> map(Tuple2<Long, Long> value) throws Exception {
	ValueState<Long> state = getRuntimeContext().getState(
		new ValueStateDescriptor<>("testState", new TestSerializer()));

	state.update(value.f1);
	return value;
}
 
Example #24
Source File: EventStateMachineMapper.java    From pravega-samples with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	ValueStateDescriptor<EventStateMachine.State> descriptor = new ValueStateDescriptor<>(
			"state", // the state name
			TypeInformation.of(EventStateMachine.State.class)); // type information
	state = getRuntimeContext().getState(descriptor);
}
 
Example #25
Source File: KeyedHTMInferenceOperator.java    From flink-htm with GNU Affero General Public License v3.0
@Override
public void open() throws Exception {
    super.open();

    if (networkState == null) {
        networkState = getPartitionedState(new ValueStateDescriptor<Network>(
                HTM_INFERENCE_OPERATOR_STATE_NAME,
                new KryoSerializer<Network>((Class<Network>) (Class<?>) Network.class, getExecutionConfig())
        ));
    }

    initInputFunction();
}
 
Example #26
Source File: StateSnapshotTransformerTest.java    From flink with Apache License 2.0
private TestValueState() throws Exception {
	this.state = backend.createInternalState(
		VoidNamespaceSerializer.INSTANCE,
		new ValueStateDescriptor<>("TestValueState", StringSerializer.INSTANCE),
		snapshotTransformFactory);
	state.setCurrentNamespace(VoidNamespace.INSTANCE);
}
 
Example #27
Source File: TimeBoundedStreamJoin.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	LOGGER.debug("Instantiating JoinFunction: {} \n\n Code:\n{}", genJoinFunc.getClassName(),
			genJoinFunc.getCode());
	joinFunction = genJoinFunc.newInstance(getRuntimeContext().getUserCodeClassLoader());
	genJoinFunc = null;

	joinCollector = new EmitAwareCollector();

	// Initialize the data caches.
	ListTypeInfo<Tuple2<BaseRow, Boolean>> leftRowListTypeInfo = new ListTypeInfo<>(
			new TupleTypeInfo<>(leftType, BasicTypeInfo.BOOLEAN_TYPE_INFO));
	MapStateDescriptor<Long, List<Tuple2<BaseRow, Boolean>>> leftMapStateDescriptor = new MapStateDescriptor<>(
			"WindowJoinLeftCache",
			BasicTypeInfo.LONG_TYPE_INFO,
			leftRowListTypeInfo);
	leftCache = getRuntimeContext().getMapState(leftMapStateDescriptor);

	ListTypeInfo<Tuple2<BaseRow, Boolean>> rightRowListTypeInfo = new ListTypeInfo<>(
			new TupleTypeInfo<>(rightType, BasicTypeInfo.BOOLEAN_TYPE_INFO));
	MapStateDescriptor<Long, List<Tuple2<BaseRow, Boolean>>> rightMapStateDescriptor = new MapStateDescriptor<>(
			"WindowJoinRightCache",
			BasicTypeInfo.LONG_TYPE_INFO,
			rightRowListTypeInfo);
	rightCache = getRuntimeContext().getMapState(rightMapStateDescriptor);

	// Initialize the timer states.
	ValueStateDescriptor<Long> leftValueStateDescriptor = new ValueStateDescriptor<>(
			"WindowJoinLeftTimerState",
			Long.class);
	leftTimerState = getRuntimeContext().getState(leftValueStateDescriptor);

	ValueStateDescriptor<Long> rightValueStateDescriptor = new ValueStateDescriptor<>(
			"WindowJoinRightTimerState",
			Long.class);
	rightTimerState = getRuntimeContext().getState(rightValueStateDescriptor);

	paddingUtil = new OuterJoinPaddingUtil(leftType.getArity(), rightType.getArity());
}
 
Example #28
Source File: PojoSerializerUpgradeTest.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
	pojoClass = getRuntimeContext().getUserCodeClassLoader().loadClass(POJO_NAME);

	fieldA = pojoClass.getDeclaredField("a");
	fieldA.setAccessible(true);

	if (hasBField) {
		fieldB = pojoClass.getDeclaredField("b");
		fieldB.setAccessible(true);
	}

	if (keyed) {
		keyedValueState = context.getKeyedStateStore().getState(
			new ValueStateDescriptor<>("keyedValueState", (Class<Object>) pojoClass));
		keyedListState = context.getKeyedStateStore().getListState(
			new ListStateDescriptor<>("keyedListState", (Class<Object>) pojoClass));

		ReduceFunction<Object> reduceFunction = new FirstValueReducer<>();
		keyedReducingState = context.getKeyedStateStore().getReducingState(
			new ReducingStateDescriptor<>("keyedReducingState", reduceFunction, (Class<Object>) pojoClass));
	} else {
		partitionableListState = context.getOperatorStateStore().getListState(
			new ListStateDescriptor<>("partitionableListState", (Class<Object>) pojoClass));
		unionListState = context.getOperatorStateStore().getUnionListState(
			new ListStateDescriptor<>("unionListState", (Class<Object>) pojoClass));
	}
}
 
Example #29
Source File: WindowOperator.java    From Flink-CEPplus with Apache License 2.0
@Override
public <S extends Serializable> ValueState<S> getKeyValueState(String name,
	TypeInformation<S> stateType,
	S defaultState) {

	checkNotNull(name, "The name of the state must not be null");
	checkNotNull(stateType, "The state type information must not be null");

	ValueStateDescriptor<S> stateDesc = new ValueStateDescriptor<>(name, stateType.createSerializer(getExecutionConfig()), defaultState);
	return getPartitionedState(stateDesc);
}
 
Example #30
Source File: SlidingWindowCheckMapper.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) {
	ValueStateDescriptor<List<Tuple2<Event, Integer>>> previousWindowDescriptor =
		new ValueStateDescriptor<>("eventsSeenSoFar",
			new ListTypeInfo<>(new TupleTypeInfo<>(TypeInformation.of(Event.class), BasicTypeInfo.INT_TYPE_INFO)));

	eventsSeenSoFar = getRuntimeContext().getState(previousWindowDescriptor);

	ValueStateDescriptor<Long> lastSequenceNumberDescriptor =
		new ValueStateDescriptor<>("lastSequenceNumber", BasicTypeInfo.LONG_TYPE_INFO);

	lastSequenceNumber = getRuntimeContext().getState(lastSequenceNumberDescriptor);
}