Java Code Examples for org.apache.flink.core.memory.DataInputDeserializer#setBuffer()

The following examples show how to use org.apache.flink.core.memory.DataInputDeserializer#setBuffer(). They are taken from open-source projects; the source file and originating project are noted above each example.
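DataInputDeserializer is a reusable, array-backed implementation of Flink's DataInputView. Calling setBuffer() re-points the same instance at a new byte array, either the whole array or an offset/length slice, so hot code paths can deserialize many records without allocating a new reader each time. The following sketch is not part of any example on this page and uses illustrative names; it shows the typical round trip together with DataOutputSerializer:

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

import java.io.IOException;

public class SetBufferRoundTrip {

	public static void main(String[] args) throws IOException {
		final DataOutputSerializer out = new DataOutputSerializer(32);
		final DataInputDeserializer in = new DataInputDeserializer();

		// Write a few values into the reusable output buffer.
		out.writeInt(42);
		out.writeUTF("hello");

		// Point the reusable deserializer at a copy of the written bytes and read them back.
		in.setBuffer(out.getCopyOfBuffer());
		int number = in.readInt();   // 42
		String text = in.readUTF();  // "hello"

		// The three-argument overload reads only a slice of a larger array;
		// here we skip the 4-byte int and read just the string.
		byte[] raw = out.getCopyOfBuffer();
		in.setBuffer(raw, 4, raw.length - 4);
		String textAgain = in.readUTF();

		System.out.println(number + " " + text + " " + textAgain);
	}
}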
Example 1
Source File: RocksDBKeySerializationUtilsTest.java    From flink with Apache License 2.0
@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgNamespace, deserializedNamespace);

		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgNamespace, deserializedNamespace);
	}
}
 
Example 2
Source File: RocksDBKeySerializationUtilsTest.java    From flink with Apache License 2.0
@Test
public void testKeySerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	// test for key
	for (int orgKey = 0; orgKey < 100; ++orgKey) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgKey, deserializedKey);

		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgKey, deserializedKey);
	}
}
 
Example 3
Source File: RocksDBKeySerializationUtilsTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testKeySerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	// test for key
	for (int orgKey = 0; orgKey < 100; ++orgKey) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgKey, deserializedKey);

		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgKey, deserializedKey);
	}
}
 
Example 4
Source File: RocksDBKeySerializationUtilsTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgNamespace, deserializedNamespace);

		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgNamespace, deserializedNamespace);
	}
}
 
Example 5
Source File: RocksDBMapState.java    From flink with Apache License 2.0
private static <UV> UV deserializeUserValue(
	DataInputDeserializer dataInputView,
	byte[] rawValueBytes,
	TypeSerializer<UV> valueSerializer) throws IOException {

	dataInputView.setBuffer(rawValueBytes);

	boolean isNull = dataInputView.readBoolean();

	return isNull ? null : valueSerializer.deserialize(dataInputView);
}
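For context, the boolean read first in deserializeUserValue() is a null marker written by the corresponding serialization path. The following write-side sketch, a hypothetical helper rather than the actual RocksDBMapState code, produces bytes this method can read:

private static <UV> byte[] serializeUserValue(
	DataOutputSerializer dataOutputView,
	UV userValue,
	TypeSerializer<UV> valueSerializer) throws IOException {

	dataOutputView.clear();
	// Null marker that deserializeUserValue() reads back with readBoolean().
	dataOutputView.writeBoolean(userValue == null);
	if (userValue != null) {
		valueSerializer.serialize(userValue, dataOutputView);
	}
	return dataOutputView.getCopyOfBuffer();
}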
 
Example 6
Source File: RocksDBMapState.java    From flink with Apache License 2.0
private static <UK> UK deserializeUserKey(
	DataInputDeserializer dataInputView,
	int userKeyOffset,
	byte[] rawKeyBytes,
	TypeSerializer<UK> keySerializer) throws IOException {
	dataInputView.setBuffer(rawKeyBytes, userKeyOffset, rawKeyBytes.length - userKeyOffset);
	return keySerializer.deserialize(dataInputView);
}
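deserializeUserKey() relies on the three-argument overload setBuffer(byte[], int, int) to skip the serialized key-group/key prefix and hand only the trailing user-key bytes to the serializer; the RocksStateKeysIterator examples further down apply the same slicing. A self-contained sketch of that pattern, where the prefix length and the use of StringSerializer are illustrative assumptions rather than part of the example above:

private static String readSuffixAsString(byte[] rawBytes, int prefixLength) throws IOException {
	final DataInputDeserializer in = new DataInputDeserializer();
	// Wrap only the slice after the prefix; setBuffer() does not copy the array.
	in.setBuffer(rawBytes, prefixLength, rawBytes.length - prefixLength);
	return StringSerializer.INSTANCE.deserialize(in);
}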
 
Example 7
Source File: RocksDBSerializedCompositeKeyBuilderTest.java    From flink with Apache License 2.0
private <K> void testSetKeyInternal(TypeSerializer<K> serializer, Collection<K> testKeys, int maxParallelism) throws IOException {
	final int prefixBytes = maxParallelism > Byte.MAX_VALUE ? 2 : 1;
	RocksDBSerializedCompositeKeyBuilder<K> keyBuilder =
		createRocksDBSerializedCompositeKeyBuilder(serializer, prefixBytes);

	final DataInputDeserializer deserializer = new DataInputDeserializer();
	for (K testKey : testKeys) {
		int keyGroup = setKeyAndReturnKeyGroup(keyBuilder, testKey, maxParallelism);
		byte[] result = dataOutputSerializer.getCopyOfBuffer();
		deserializer.setBuffer(result);
		assertKeyKeyGroupBytes(testKey, keyGroup, prefixBytes, serializer, deserializer, false);
		Assert.assertEquals(0, deserializer.available());
	}
}
 
Example 8
Source File: RocksDBMapState.java    From Flink-CEPplus with Apache License 2.0
private static <UK> UK deserializeUserKey(
	DataInputDeserializer dataInputView,
	int userKeyOffset,
	byte[] rawKeyBytes,
	TypeSerializer<UK> keySerializer) throws IOException {
	dataInputView.setBuffer(rawKeyBytes, userKeyOffset, rawKeyBytes.length - userKeyOffset);
	return keySerializer.deserialize(dataInputView);
}
 
Example 9
Source File: RocksStateKeysIterator.java    From flink with Apache License 2.0
private K deserializeKey(byte[] keyBytes, DataInputDeserializer readView) throws IOException {
	readView.setBuffer(keyBytes, keyGroupPrefixBytes, keyBytes.length - keyGroupPrefixBytes);
	return RocksDBKeySerializationUtils.readKey(
		keySerializer,
		readView,
		ambiguousKeyPossible);
}
 
Example 10
Source File: RocksDBSerializedCompositeKeyBuilderTest.java    From Flink-CEPplus with Apache License 2.0
private <K> void testSetKeyInternal(TypeSerializer<K> serializer, Collection<K> testKeys, int maxParallelism) throws IOException {
	final int prefixBytes = maxParallelism > Byte.MAX_VALUE ? 2 : 1;
	RocksDBSerializedCompositeKeyBuilder<K> keyBuilder =
		createRocksDBSerializedCompositeKeyBuilder(serializer, prefixBytes);

	final DataInputDeserializer deserializer = new DataInputDeserializer();
	for (K testKey : testKeys) {
		int keyGroup = setKeyAndReturnKeyGroup(keyBuilder, testKey, maxParallelism);
		byte[] result = dataOutputSerializer.getCopyOfBuffer();
		deserializer.setBuffer(result);
		assertKeyKeyGroupBytes(testKey, keyGroup, prefixBytes, serializer, deserializer, false);
		Assert.assertEquals(0, deserializer.available());
	}
}
 
Example 11
Source File: RocksStateKeysIterator.java    From Flink-CEPplus with Apache License 2.0
private K deserializeKey(byte[] keyBytes, DataInputDeserializer readView) throws IOException {
	readView.setBuffer(keyBytes, keyGroupPrefixBytes, keyBytes.length - keyGroupPrefixBytes);
	return RocksDBKeySerializationUtils.readKey(
		keySerializer,
		readView,
		ambiguousKeyPossible);
}
 
Example 12
Source File: RocksDBMapState.java    From Flink-CEPplus with Apache License 2.0
private static <UV> UV deserializeUserValue(
	DataInputDeserializer dataInputView,
	byte[] rawValueBytes,
	TypeSerializer<UV> valueSerializer) throws IOException {

	dataInputView.setBuffer(rawValueBytes);

	boolean isNull = dataInputView.readBoolean();

	return isNull ? null : valueSerializer.deserialize(dataInputView);
}
 
Example 13
Source File: RocksDBSerializedCompositeKeyBuilderTest.java    From Flink-CEPplus with Apache License 2.0
private <K, N, U> void testSetKeyNamespaceUserKeyInternal(
	TypeSerializer<K> keySerializer,
	TypeSerializer<N> namespaceSerializer,
	TypeSerializer<U> userKeySerializer,
	Collection<K> testKeys,
	Collection<N> testNamespaces,
	Collection<U> testUserKeys,
	int maxParallelism) throws IOException {
	final int prefixBytes = maxParallelism > Byte.MAX_VALUE ? 2 : 1;

	RocksDBSerializedCompositeKeyBuilder<K> keyBuilder =
		createRocksDBSerializedCompositeKeyBuilder(keySerializer, prefixBytes);

	final DataInputDeserializer deserializer = new DataInputDeserializer();

	final boolean ambiguousPossible = keyBuilder.isAmbiguousCompositeKeyPossible(namespaceSerializer);

	for (K testKey : testKeys) {
		int keyGroup = setKeyAndReturnKeyGroup(keyBuilder, testKey, maxParallelism);
		for (N testNamespace : testNamespaces) {
			for (U testUserKey : testUserKeys) {
				byte[] compositeBytes = keyBuilder.buildCompositeKeyNamesSpaceUserKey(
					testNamespace,
					namespaceSerializer,
					testUserKey,
					userKeySerializer);

				deserializer.setBuffer(compositeBytes);
				assertKeyGroupKeyNamespaceUserKeyBytes(
					testKey,
					keyGroup,
					prefixBytes,
					keySerializer,
					testNamespace,
					namespaceSerializer,
					testUserKey,
					userKeySerializer,
					deserializer,
					ambiguousPossible);

				Assert.assertEquals(0, deserializer.available());
			}
		}
	}
}
 
Example 14
Source File: RocksDBKeyedStateBackend.java    From flink with Apache License 2.0
/**
 * Migrate only the state value, that is the "value" that is stored in RocksDB. We don't migrate
 * the key here, which is made up of key group, key, namespace and map key
 * (in case of MapState).
 */
@SuppressWarnings("unchecked")
private <N, S extends State, SV> void migrateStateValues(
	StateDescriptor<S, SV> stateDesc,
	Tuple2<ColumnFamilyHandle, RegisteredKeyValueStateBackendMetaInfo<N, SV>> stateMetaInfo) throws Exception {

	if (stateDesc.getType() == StateDescriptor.Type.MAP) {
		TypeSerializerSnapshot<SV> previousSerializerSnapshot = stateMetaInfo.f1.getPreviousStateSerializerSnapshot();
		checkState(previousSerializerSnapshot != null, "the previous serializer snapshot should exist.");
		checkState(previousSerializerSnapshot instanceof MapSerializerSnapshot, "previous serializer snapshot should be a MapSerializerSnapshot.");

		TypeSerializer<SV> newSerializer = stateMetaInfo.f1.getStateSerializer();
		checkState(newSerializer instanceof MapSerializer, "new serializer should be a MapSerializer.");

		MapSerializer<?, ?> mapSerializer = (MapSerializer<?, ?>) newSerializer;
		MapSerializerSnapshot<?, ?> mapSerializerSnapshot = (MapSerializerSnapshot<?, ?>) previousSerializerSnapshot;
		if (!checkMapStateKeySchemaCompatibility(mapSerializerSnapshot, mapSerializer)) {
			throw new StateMigrationException(
				"The new serializer for a MapState requires state migration in order for the job to proceed, since the key schema has changed. However, migration for MapState currently only allows value schema evolutions.");
		}
	}

	LOG.info(
		"Performing state migration for state {} because the state serializer's schema, i.e. serialization format, has changed.",
		stateDesc);

	// we need to get an actual state instance because migration is different
	// for different state types. For example, ListState needs to deal with
	// individual elements
	StateFactory stateFactory = STATE_FACTORIES.get(stateDesc.getClass());
	if (stateFactory == null) {
		String message = String.format("State %s is not supported by %s",
			stateDesc.getClass(), this.getClass());
		throw new FlinkRuntimeException(message);
	}
	State state = stateFactory.createState(
		stateDesc,
		stateMetaInfo,
		RocksDBKeyedStateBackend.this);
	if (!(state instanceof AbstractRocksDBState)) {
		throw new FlinkRuntimeException(
			"State should be an AbstractRocksDBState but is " + state);
	}

	@SuppressWarnings("unchecked")
	AbstractRocksDBState<?, ?, SV> rocksDBState = (AbstractRocksDBState<?, ?, SV>) state;

	Snapshot rocksDBSnapshot = db.getSnapshot();
	try (
		RocksIteratorWrapper iterator = RocksDBOperationUtils.getRocksIterator(db, stateMetaInfo.f0, readOptions);
		RocksDBWriteBatchWrapper batchWriter = new RocksDBWriteBatchWrapper(db, getWriteOptions(), getWriteBatchSize())
	) {
		iterator.seekToFirst();

		DataInputDeserializer serializedValueInput = new DataInputDeserializer();
		DataOutputSerializer migratedSerializedValueOutput = new DataOutputSerializer(512);
		while (iterator.isValid()) {
			serializedValueInput.setBuffer(iterator.value());

			rocksDBState.migrateSerializedValue(
				serializedValueInput,
				migratedSerializedValueOutput,
				stateMetaInfo.f1.getPreviousStateSerializer(),
				stateMetaInfo.f1.getStateSerializer());

			batchWriter.put(stateMetaInfo.f0, iterator.key(), migratedSerializedValueOutput.getCopyOfBuffer());

			migratedSerializedValueOutput.clear();
			iterator.next();
		}
	} finally {
		db.releaseSnapshot(rocksDBSnapshot);
		rocksDBSnapshot.close();
	}
}
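The migration loop above illustrates the intended reuse pattern for both classes: a single DataInputDeserializer and DataOutputSerializer are created once, and setBuffer() / clear() re-point and reset them for every record. Below is a stripped-down sketch of that pattern outside of RocksDB; the rewriteAll helper and its serializer parameters are hypothetical, not Flink API:

private static <T> List<byte[]> rewriteAll(
	List<byte[]> oldRecords,
	TypeSerializer<T> oldSerializer,
	TypeSerializer<T> newSerializer) throws IOException {

	final DataInputDeserializer in = new DataInputDeserializer();
	final DataOutputSerializer out = new DataOutputSerializer(64);
	final List<byte[]> rewritten = new ArrayList<>(oldRecords.size());

	for (byte[] record : oldRecords) {
		in.setBuffer(record);                    // re-point the reader, no new allocation
		T value = oldSerializer.deserialize(in); // read with the old schema
		newSerializer.serialize(value, out);     // write with the new schema
		rewritten.add(out.getCopyOfBuffer());
		out.clear();                             // reset the writer for the next record
	}
	return rewritten;
}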
 
Example 15
Source File: RocksDBSerializedCompositeKeyBuilderTest.java    From flink with Apache License 2.0
private <K, N, U> void testSetKeyNamespaceUserKeyInternal(
	TypeSerializer<K> keySerializer,
	TypeSerializer<N> namespaceSerializer,
	TypeSerializer<U> userKeySerializer,
	Collection<K> testKeys,
	Collection<N> testNamespaces,
	Collection<U> testUserKeys,
	int maxParallelism) throws IOException {
	final int prefixBytes = maxParallelism > Byte.MAX_VALUE ? 2 : 1;

	RocksDBSerializedCompositeKeyBuilder<K> keyBuilder =
		createRocksDBSerializedCompositeKeyBuilder(keySerializer, prefixBytes);

	final DataInputDeserializer deserializer = new DataInputDeserializer();

	final boolean ambiguousPossible = keyBuilder.isAmbiguousCompositeKeyPossible(namespaceSerializer);

	for (K testKey : testKeys) {
		int keyGroup = setKeyAndReturnKeyGroup(keyBuilder, testKey, maxParallelism);
		for (N testNamespace : testNamespaces) {
			for (U testUserKey : testUserKeys) {
				byte[] compositeBytes = keyBuilder.buildCompositeKeyNamesSpaceUserKey(
					testNamespace,
					namespaceSerializer,
					testUserKey,
					userKeySerializer);

				deserializer.setBuffer(compositeBytes);
				assertKeyGroupKeyNamespaceUserKeyBytes(
					testKey,
					keyGroup,
					prefixBytes,
					keySerializer,
					testNamespace,
					namespaceSerializer,
					testUserKey,
					userKeySerializer,
					deserializer,
					ambiguousPossible);

				Assert.assertEquals(0, deserializer.available());
			}
		}
	}
}
 
Example 16
Source File: RocksDBKeyedStateBackend.java    From Flink-CEPplus with Apache License 2.0
/**
 * Migrate only the state value, that is the "value" that is stored in RocksDB. We don't migrate
 * the key here, which is made up of key group, key, namespace and map key
 * (in case of MapState).
 */
private <N, S extends State, SV> void migrateStateValues(
	StateDescriptor<S, SV> stateDesc,
	Tuple2<ColumnFamilyHandle, RegisteredKeyValueStateBackendMetaInfo<N, SV>> stateMetaInfo) throws Exception {

	if (stateDesc.getType() == StateDescriptor.Type.MAP) {
		throw new StateMigrationException("The new serializer for a MapState requires state migration in order for the job to proceed." +
			" However, migration for MapState currently isn't supported.");
	}

	LOG.info(
		"Performing state migration for state {} because the state serializer's schema, i.e. serialization format, has changed.",
		stateDesc);

	// we need to get an actual state instance because migration is different
	// for different state types. For example, ListState needs to deal with
	// individual elements
	StateFactory stateFactory = STATE_FACTORIES.get(stateDesc.getClass());
	if (stateFactory == null) {
		String message = String.format("State %s is not supported by %s",
			stateDesc.getClass(), this.getClass());
		throw new FlinkRuntimeException(message);
	}
	State state = stateFactory.createState(
		stateDesc,
		stateMetaInfo,
		RocksDBKeyedStateBackend.this);
	if (!(state instanceof AbstractRocksDBState)) {
		throw new FlinkRuntimeException(
			"State should be an AbstractRocksDBState but is " + state);
	}

	@SuppressWarnings("unchecked")
	AbstractRocksDBState<?, ?, SV> rocksDBState = (AbstractRocksDBState<?, ?, SV>) state;

	Snapshot rocksDBSnapshot = db.getSnapshot();
	try (
		RocksIteratorWrapper iterator = RocksDBOperationUtils.getRocksIterator(db, stateMetaInfo.f0);
		RocksDBWriteBatchWrapper batchWriter = new RocksDBWriteBatchWrapper(db, getWriteOptions())
	) {
		iterator.seekToFirst();

		DataInputDeserializer serializedValueInput = new DataInputDeserializer();
		DataOutputSerializer migratedSerializedValueOutput = new DataOutputSerializer(512);
		while (iterator.isValid()) {
			serializedValueInput.setBuffer(iterator.value());

			rocksDBState.migrateSerializedValue(
				serializedValueInput,
				migratedSerializedValueOutput,
				stateMetaInfo.f1.getPreviousStateSerializer(),
				stateMetaInfo.f1.getStateSerializer());

			batchWriter.put(stateMetaInfo.f0, iterator.key(), migratedSerializedValueOutput.getCopyOfBuffer());

			migratedSerializedValueOutput.clear();
			iterator.next();
		}
	} finally {
		db.releaseSnapshot(rocksDBSnapshot);
		rocksDBSnapshot.close();
	}
}