org.apache.flink.core.memory.DataOutputSerializer Java Examples

The following examples show how to use org.apache.flink.core.memory.DataOutputSerializer. They are drawn from open source projects; the source file and project are noted above each example.
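As a quick orientation before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the write/read round trip that recurs throughout: write to a DataOutputSerializer, copy out the bytes with getCopyOfBuffer(), and read them back through a DataInputDeserializer. The class name is illustrative only.

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

import java.io.IOException;

public class DataOutputSerializerRoundTrip {

	public static void main(String[] args) throws IOException {
		// The constructor argument is only the initial buffer size; the buffer grows as needed.
		DataOutputSerializer out = new DataOutputSerializer(64);
		out.writeInt(42);
		out.writeUTF("hello");

		// getCopyOfBuffer() returns a copy of exactly the bytes written so far.
		byte[] bytes = out.getCopyOfBuffer();

		// DataInputDeserializer reads the same bytes back as a DataInputView.
		DataInputDeserializer in = new DataInputDeserializer(bytes);
		System.out.println(in.readInt());  // prints 42
		System.out.println(in.readUTF());  // prints hello

		// clear() resets the write position so the same instance can be reused.
		out.clear();
	}
}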
Example #1
Source File: RawType.java    From flink with Apache License 2.0
/**
 * Returns the serialized {@link TypeSerializerSnapshot} in Base64 encoding of this raw type.
 */
public String getSerializerString() {
	if (serializerString == null) {
		final DataOutputSerializer outputSerializer = new DataOutputSerializer(128);
		try {
			TypeSerializerSnapshot.writeVersionedSnapshot(outputSerializer, serializer.snapshotConfiguration());
			serializerString = EncodingUtils.encodeBytesToBase64(outputSerializer.getCopyOfBuffer());
			return serializerString;
		} catch (Exception e) {
			throw new TableException(String.format(
				"Unable to generate a string representation of the serializer snapshot of '%s' " +
					"describing the class '%s' for the RAW type.",
				serializer.getClass().getName(),
				clazz.toString()), e);
		}
	}
	return serializerString;
}
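The reverse direction is not shown in this example; a hedged sketch of it follows. It reuses TypeSerializerSnapshot.readVersionedSnapshot (which appears in the composite serializer snapshot tests further below) and assumes EncodingUtils provides a decodeBase64ToBytes counterpart to the encodeBytesToBase64 call above; the helper name restoreSerializerSnapshot and the required imports are illustrative.

// Hedged sketch (not from RawType.java): restore the snapshot from the Base64 string.
// Assumes EncodingUtils.decodeBase64ToBytes is the counterpart of encodeBytesToBase64.
private static TypeSerializerSnapshot<?> restoreSerializerSnapshot(String serializerString, ClassLoader classLoader) throws IOException {
	final byte[] snapshotBytes = EncodingUtils.decodeBase64ToBytes(serializerString);
	final DataInputDeserializer input = new DataInputDeserializer(snapshotBytes);
	return TypeSerializerSnapshot.readVersionedSnapshot(input, classLoader);
}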
 
Example #2
Source File: KvStateSerializer.java    From flink with Apache License 2.0
/**
 * Serializes all values of the Iterable with the given serializer.
 *
 * @param entries         Key-value pairs to serialize
 * @param keySerializer   Serializer for UK
 * @param valueSerializer Serializer for UV
 * @param <UK>            Type of the keys
 * @param <UV>            Type of the values
 * @return Serialized values or <code>null</code> if values <code>null</code> or empty
 * @throws IOException On failure during serialization
 */
public static <UK, UV> byte[] serializeMap(Iterable<Map.Entry<UK, UV>> entries, TypeSerializer<UK> keySerializer, TypeSerializer<UV> valueSerializer) throws IOException {
	if (entries != null) {
		// Serialize
		DataOutputSerializer dos = new DataOutputSerializer(32);

		for (Map.Entry<UK, UV> entry : entries) {
			keySerializer.serialize(entry.getKey(), dos);

			if (entry.getValue() == null) {
				dos.writeBoolean(true);
			} else {
				dos.writeBoolean(false);
				valueSerializer.serialize(entry.getValue(), dos);
			}
		}

		return dos.getCopyOfBuffer();
	} else {
		return null;
	}
}
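A small usage sketch for the method above (not taken from the Flink sources): serializing a map with nullable values using Flink's built-in StringSerializer and LongSerializer. The wrapping method and its name are illustrative; the usual java.util and org.apache.flink.api.common.typeutils.base imports are assumed.

// Illustrative call of serializeMap with String keys and nullable Long values.
static byte[] serializeCounts() throws IOException {
	Map<String, Long> counts = new HashMap<>();
	counts.put("a", 1L);
	counts.put("b", null); // null values are allowed; they are flagged with a preceding boolean

	return serializeMap(counts.entrySet(), StringSerializer.INSTANCE, LongSerializer.INSTANCE);
}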
 
Example #3
Source File: RocksDBMapState.java    From flink with Apache License 2.0
@Override
public void migrateSerializedValue(
	DataInputDeserializer serializedOldValueInput,
	DataOutputSerializer serializedMigratedValueOutput,
	TypeSerializer<Map<UK, UV>> priorSerializer,
	TypeSerializer<Map<UK, UV>> newSerializer) throws StateMigrationException {

	checkArgument(priorSerializer instanceof MapSerializer);
	checkArgument(newSerializer instanceof MapSerializer);

	TypeSerializer<UV> priorMapValueSerializer = ((MapSerializer<UK, UV>) priorSerializer).getValueSerializer();
	TypeSerializer<UV> newMapValueSerializer = ((MapSerializer<UK, UV>) newSerializer).getValueSerializer();

	try {
		boolean isNull = serializedOldValueInput.readBoolean();
		UV mapUserValue = null;
		if (!isNull) {
			mapUserValue = priorMapValueSerializer.deserialize(serializedOldValueInput);
		}
		serializedMigratedValueOutput.writeBoolean(mapUserValue == null);
		newMapValueSerializer.serialize(mapUserValue, serializedMigratedValueOutput);
	} catch (Exception e) {
		throw new StateMigrationException("Error while trying to migrate RocksDB map state.", e);
	}
}
 
Example #4
Source File: KeyGroupPartitionedPriorityQueueWithRocksDBStoreTest.java    From flink with Apache License 2.0
private KeyGroupPartitionedPriorityQueue.PartitionQueueSetFactory<
	TestElement, RocksDBCachingPriorityQueueSet<TestElement>> newFactory() {

	return (keyGroupId, numKeyGroups, keyExtractorFunction, elementComparator) -> {
		DataOutputSerializer outputStreamWithPos = new DataOutputSerializer(128);
		DataInputDeserializer inputStreamWithPos = new DataInputDeserializer();
		int keyGroupPrefixBytes = RocksDBKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(numKeyGroups);
		TreeOrderedSetCache orderedSetCache = new TreeOrderedSetCache(32);
		return new RocksDBCachingPriorityQueueSet<>(
			keyGroupId,
			keyGroupPrefixBytes,
			rocksDBResource.getRocksDB(),
			rocksDBResource.getReadOptions(),
			rocksDBResource.getDefaultColumnFamily(),
			TestElementSerializer.INSTANCE,
			outputStreamWithPos,
			inputStreamWithPos,
			rocksDBResource.getBatchWrapper(),
			orderedSetCache);
	};
}
 
Example #5
Source File: AbstractRocksDBState.java    From flink with Apache License 2.0
/**
 * Creates a new RocksDB backed state.
 *
 * @param columnFamily The RocksDB column family that this state is associated to.
 * @param namespaceSerializer The serializer for the namespace.
 * @param valueSerializer The serializer for the state.
 * @param defaultValue The default value for the state.
 * @param backend The backend that this state is bound to.
 */
protected AbstractRocksDBState(
		ColumnFamilyHandle columnFamily,
		TypeSerializer<N> namespaceSerializer,
		TypeSerializer<V> valueSerializer,
		V defaultValue,
		RocksDBKeyedStateBackend<K> backend) {

	this.namespaceSerializer = namespaceSerializer;
	this.backend = backend;

	this.columnFamily = columnFamily;

	this.writeOptions = backend.getWriteOptions();
	this.valueSerializer = Preconditions.checkNotNull(valueSerializer, "State value serializer");
	this.defaultValue = defaultValue;

	this.dataOutputView = new DataOutputSerializer(128);
	this.dataInputView = new DataInputDeserializer();
	this.sharedKeyNamespaceSerializer = backend.getSharedRocksKeyBuilder();
}
 
Example #6
Source File: SerializedCheckpointData.java    From Flink-CEPplus with Apache License 2.0
/**
 * Converts a list of checkpoints into an array of SerializedCheckpointData.
 *
 * @param checkpoints The checkpoints to be converted into SerializedCheckpointData.
 * @param serializer The serializer to serialize the IDs.
 * @param outputBuffer The reusable serialization buffer.
 * @param <T> The type of the ID.
 * @return An array of serializable SerializedCheckpointData, one per entry in the queue.
 *
 * @throws IOException Thrown, if the serialization fails.
 */
public static <T> SerializedCheckpointData[] fromDeque(
		ArrayDeque<Tuple2<Long, Set<T>>> checkpoints,
		TypeSerializer<T> serializer,
		DataOutputSerializer outputBuffer) throws IOException {

	SerializedCheckpointData[] serializedCheckpoints = new SerializedCheckpointData[checkpoints.size()];

	int pos = 0;
	for (Tuple2<Long, Set<T>> checkpoint : checkpoints) {
		outputBuffer.clear();
		Set<T> checkpointIds = checkpoint.f1;

		for (T id : checkpointIds) {
			serializer.serialize(id, outputBuffer);
		}

		serializedCheckpoints[pos++] = new SerializedCheckpointData(
				checkpoint.f0, outputBuffer.getCopyOfBuffer(), checkpointIds.size());
	}

	return serializedCheckpoints;
}
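A hedged usage sketch for fromDeque (not part of SerializedCheckpointData.java): two checkpointed sets of Long IDs are serialized with LongSerializer and a reusable buffer. The method name and buffer size are illustrative, and the usual java.util, Tuple2, and LongSerializer imports are assumed.

// Illustrative call of fromDeque with Long IDs and a reusable output buffer.
static SerializedCheckpointData[] snapshotPendingIds() throws IOException {
	ArrayDeque<Tuple2<Long, Set<Long>>> checkpoints = new ArrayDeque<>();
	checkpoints.add(Tuple2.of(1L, new HashSet<>(Arrays.asList(10L, 11L))));
	checkpoints.add(Tuple2.of(2L, Collections.singleton(12L)));

	DataOutputSerializer outputBuffer = new DataOutputSerializer(4096);
	return fromDeque(checkpoints, LongSerializer.INSTANCE, outputBuffer);
}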
 
Example #7
Source File: AbstractRocksDBState.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new RocksDB backed state.
 *
 * @param columnFamily The RocksDB column family that this state is associated to.
 * @param namespaceSerializer The serializer for the namespace.
 * @param valueSerializer The serializer for the state.
 * @param defaultValue The default value for the state.
 * @param backend The backend that this state is bound to.
 */
protected AbstractRocksDBState(
		ColumnFamilyHandle columnFamily,
		TypeSerializer<N> namespaceSerializer,
		TypeSerializer<V> valueSerializer,
		V defaultValue,
		RocksDBKeyedStateBackend<K> backend) {

	this.namespaceSerializer = namespaceSerializer;
	this.backend = backend;

	this.columnFamily = columnFamily;

	this.writeOptions = backend.getWriteOptions();
	this.valueSerializer = Preconditions.checkNotNull(valueSerializer, "State value serializer");
	this.defaultValue = defaultValue;

	this.dataOutputView = new DataOutputSerializer(128);
	this.dataInputView = new DataInputDeserializer();
	this.sharedKeyNamespaceSerializer = backend.getSharedRocksKeyBuilder();
}
 
Example #8
Source File: RocksDBCachingPriorityQueueSet.java    From flink with Apache License 2.0
RocksDBCachingPriorityQueueSet(
	@Nonnegative int keyGroupId,
	@Nonnegative int keyGroupPrefixBytes,
	@Nonnull RocksDB db,
	@Nonnull ReadOptions readOptions,
	@Nonnull ColumnFamilyHandle columnFamilyHandle,
	@Nonnull TypeSerializer<E> byteOrderProducingSerializer,
	@Nonnull DataOutputSerializer outputStream,
	@Nonnull DataInputDeserializer inputStream,
	@Nonnull RocksDBWriteBatchWrapper batchWrapper,
	@Nonnull OrderedByteArraySetCache orderedByteArraySetCache) {
	this.db = db;
	this.readOptions = readOptions;
	this.columnFamilyHandle = columnFamilyHandle;
	this.byteOrderProducingSerializer = byteOrderProducingSerializer;
	this.batchWrapper = batchWrapper;
	this.outputView = outputStream;
	this.inputView = inputStream;
	this.orderedCache = orderedByteArraySetCache;
	this.allElementsInCache = false;
	this.groupPrefixBytes = createKeyGroupBytes(keyGroupId, keyGroupPrefixBytes);
	this.seekHint = groupPrefixBytes;
	this.internalIndex = HeapPriorityQueueElement.NOT_CONTAINED;
}
 
Example #9
Source File: RocksDBPriorityQueueSetFactory.java    From flink with Apache License 2.0
RocksDBPriorityQueueSetFactory(
	KeyGroupRange keyGroupRange,
	int keyGroupPrefixBytes,
	int numberOfKeyGroups,
	Map<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
	RocksDB db,
	RocksDBWriteBatchWrapper writeBatchWrapper,
	RocksDBNativeMetricMonitor nativeMetricMonitor,
	Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory) {
	this.keyGroupRange = keyGroupRange;
	this.keyGroupPrefixBytes = keyGroupPrefixBytes;
	this.numberOfKeyGroups = numberOfKeyGroups;
	this.kvStateInformation = kvStateInformation;
	this.db = db;
	this.writeBatchWrapper = writeBatchWrapper;
	this.nativeMetricMonitor = nativeMetricMonitor;
	this.columnFamilyOptionsFactory = columnFamilyOptionsFactory;
	this.sharedElementOutView = new DataOutputSerializer(128);
	this.sharedElementInView = new DataInputDeserializer();
}
 
Example #10
Source File: RocksDBKeySerializationUtilsTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgNamespace, deserializedNamespace);

		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgNamespace, deserializedNamespace);
	}
}
 
Example #11
Source File: RocksDBKeySerializationUtilsTest.java    From flink with Apache License 2.0
@Test
public void testKeySerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	// test for key
	for (int orgKey = 0; orgKey < 100; ++orgKey) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgKey, deserializedKey);

		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgKey, deserializedKey);
	}
}
 
Example #12
Source File: RocksDBKeySerializationUtilsTest.java    From flink with Apache License 2.0
@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgNamespace, deserializedNamespace);

		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgNamespace, deserializedNamespace);
	}
}
 
Example #13
Source File: KeyGroupPartitionedPriorityQueueWithRocksDBStoreTest.java    From flink with Apache License 2.0
private KeyGroupPartitionedPriorityQueue.PartitionQueueSetFactory<
	TestElement, RocksDBCachingPriorityQueueSet<TestElement>> newFactory() {

	return (keyGroupId, numKeyGroups, keyExtractorFunction, elementComparator) -> {
		DataOutputSerializer outputStreamWithPos = new DataOutputSerializer(128);
		DataInputDeserializer inputStreamWithPos = new DataInputDeserializer();
		int keyGroupPrefixBytes = RocksDBKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(numKeyGroups);
		TreeOrderedSetCache orderedSetCache = new TreeOrderedSetCache(32);
		return new RocksDBCachingPriorityQueueSet<>(
			keyGroupId,
			keyGroupPrefixBytes,
			rocksDBResource.getRocksDB(),
			rocksDBResource.getDefaultColumnFamily(),
			TestElementSerializer.INSTANCE,
			outputStreamWithPos,
			inputStreamWithPos,
			rocksDBResource.getBatchWrapper(),
			orderedSetCache);
	};
}
 
Example #14
Source File: KvStateSerializer.java    From Flink-CEPplus with Apache License 2.0
/**
 * Serializes all values of the Iterable with the given serializer.
 *
 * @param entries         Key-value pairs to serialize
 * @param keySerializer   Serializer for UK
 * @param valueSerializer Serializer for UV
 * @param <UK>            Type of the keys
 * @param <UV>            Type of the values
 * @return Serialized values or <code>null</code> if values <code>null</code> or empty
 * @throws IOException On failure during serialization
 */
public static <UK, UV> byte[] serializeMap(Iterable<Map.Entry<UK, UV>> entries, TypeSerializer<UK> keySerializer, TypeSerializer<UV> valueSerializer) throws IOException {
	if (entries != null) {
		// Serialize
		DataOutputSerializer dos = new DataOutputSerializer(32);

		for (Map.Entry<UK, UV> entry : entries) {
			keySerializer.serialize(entry.getKey(), dos);

			if (entry.getValue() == null) {
				dos.writeBoolean(true);
			} else {
				dos.writeBoolean(false);
				valueSerializer.serialize(entry.getValue(), dos);
			}
		}

		return dos.getCopyOfBuffer();
	} else {
		return null;
	}
}
 
Example #15
Source File: AnyType.java    From flink with Apache License 2.0
private String getOrCreateSerializerString() {
	if (serializerString == null) {
		final DataOutputSerializer outputSerializer = new DataOutputSerializer(128);
		try {
			TypeSerializerSnapshot.writeVersionedSnapshot(outputSerializer, serializer.snapshotConfiguration());
			serializerString = EncodingUtils.encodeBytesToBase64(outputSerializer.getCopyOfBuffer());
			return serializerString;
		} catch (Exception e) {
			throw new TableException(String.format(
				"Unable to generate a string representation of the serializer snapshot of '%s' " +
					"describing the class '%s' for the ANY type.",
				serializer.getClass().getName(),
				clazz.toString()), e);
		}
	}
	return serializerString;
}
 
Example #16
Source File: RocksDBCachingPriorityQueueSet.java    From flink with Apache License 2.0
RocksDBCachingPriorityQueueSet(
	@Nonnegative int keyGroupId,
	@Nonnegative int keyGroupPrefixBytes,
	@Nonnull RocksDB db,
	@Nonnull ColumnFamilyHandle columnFamilyHandle,
	@Nonnull TypeSerializer<E> byteOrderProducingSerializer,
	@Nonnull DataOutputSerializer outputStream,
	@Nonnull DataInputDeserializer inputStream,
	@Nonnull RocksDBWriteBatchWrapper batchWrapper,
	@Nonnull OrderedByteArraySetCache orderedByteArraySetCache) {
	this.db = db;
	this.columnFamilyHandle = columnFamilyHandle;
	this.byteOrderProducingSerializer = byteOrderProducingSerializer;
	this.batchWrapper = batchWrapper;
	this.outputView = outputStream;
	this.inputView = inputStream;
	this.orderedCache = orderedByteArraySetCache;
	this.allElementsInCache = false;
	this.groupPrefixBytes = createKeyGroupBytes(keyGroupId, keyGroupPrefixBytes);
	this.seekHint = groupPrefixBytes;
	this.internalIndex = HeapPriorityQueueElement.NOT_CONTAINED;
}
 
Example #17
Source File: SerializedCheckpointData.java    From flink with Apache License 2.0
/**
 * Converts a list of checkpoints into an array of SerializedCheckpointData.
 *
 * @param checkpoints The checkpoints to be converted into SerializedCheckpointData.
 * @param serializer The serializer to serialize the IDs.
 * @param outputBuffer The reusable serialization buffer.
 * @param <T> The type of the ID.
 * @return An array of serializable SerializedCheckpointData, one per entry in the queue.
 *
 * @throws IOException Thrown, if the serialization fails.
 */
public static <T> SerializedCheckpointData[] fromDeque(
		ArrayDeque<Tuple2<Long, Set<T>>> checkpoints,
		TypeSerializer<T> serializer,
		DataOutputSerializer outputBuffer) throws IOException {

	SerializedCheckpointData[] serializedCheckpoints = new SerializedCheckpointData[checkpoints.size()];

	int pos = 0;
	for (Tuple2<Long, Set<T>> checkpoint : checkpoints) {
		outputBuffer.clear();
		Set<T> checkpointIds = checkpoint.f1;

		for (T id : checkpointIds) {
			serializer.serialize(id, outputBuffer);
		}

		serializedCheckpoints[pos++] = new SerializedCheckpointData(
				checkpoint.f0, outputBuffer.getCopyOfBuffer(), checkpointIds.size());
	}

	return serializedCheckpoints;
}
 
Example #18
Source File: CompositeTypeSerializerSnapshotTest.java    From Flink-CEPplus with Apache License 2.0
private TypeSerializerSchemaCompatibility<String> snapshotCompositeSerializerAndGetSchemaCompatibilityAfterRestore(
		TypeSerializer<?>[] initialNestedSerializers,
		TypeSerializer<?>[] newNestedSerializer,
		String initialOuterConfiguration,
		String newOuterConfiguration) throws IOException {
	TestCompositeTypeSerializer testSerializer =
		new TestCompositeTypeSerializer(initialOuterConfiguration, initialNestedSerializers);

	TypeSerializerSnapshot<String> testSerializerSnapshot = testSerializer.snapshotConfiguration();

	DataOutputSerializer out = new DataOutputSerializer(128);
	TypeSerializerSnapshot.writeVersionedSnapshot(out, testSerializerSnapshot);

	DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
	testSerializerSnapshot = TypeSerializerSnapshot.readVersionedSnapshot(
		in, Thread.currentThread().getContextClassLoader());

	TestCompositeTypeSerializer newTestSerializer =
		new TestCompositeTypeSerializer(newOuterConfiguration, newNestedSerializer);
	return testSerializerSnapshot.resolveSchemaCompatibility(newTestSerializer);
}
 
Example #19
Source File: CompositeTypeSerializerSnapshotTest.java    From flink with Apache License 2.0
private TypeSerializerSchemaCompatibility<String> snapshotCompositeSerializerAndGetSchemaCompatibilityAfterRestore(
		TypeSerializer<?>[] initialNestedSerializers,
		TypeSerializer<?>[] newNestedSerializer,
		String initialOuterConfiguration,
		String newOuterConfiguration) throws IOException {
	TestCompositeTypeSerializer testSerializer =
		new TestCompositeTypeSerializer(initialOuterConfiguration, initialNestedSerializers);

	TypeSerializerSnapshot<String> testSerializerSnapshot = testSerializer.snapshotConfiguration();

	DataOutputSerializer out = new DataOutputSerializer(128);
	TypeSerializerSnapshot.writeVersionedSnapshot(out, testSerializerSnapshot);

	DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
	testSerializerSnapshot = TypeSerializerSnapshot.readVersionedSnapshot(
		in, Thread.currentThread().getContextClassLoader());

	TestCompositeTypeSerializer newTestSerializer =
		new TestCompositeTypeSerializer(newOuterConfiguration, newNestedSerializer);
	return testSerializerSnapshot.resolveSchemaCompatibility(newTestSerializer);
}
 
Example #20
Source File: RocksDBSerializedCompositeKeyBuilder.java    From flink with Apache License 2.0
@VisibleForTesting
RocksDBSerializedCompositeKeyBuilder(
	@Nonnull TypeSerializer<K> keySerializer,
	@Nonnull DataOutputSerializer keyOutView,
	@Nonnegative int keyGroupPrefixBytes,
	boolean keySerializerTypeVariableSized,
	@Nonnegative int afterKeyMark) {
	this.keySerializer = keySerializer;
	this.keyOutView = keyOutView;
	this.keyGroupPrefixBytes = keyGroupPrefixBytes;
	this.keySerializerTypeVariableSized = keySerializerTypeVariableSized;
	this.afterKeyMark = afterKeyMark;
}
 
Example #21
Source File: TypeSerializerSnapshotMigrationTestBase.java    From flink with Apache License 2.0
private TypeSerializerSnapshot<ElementT> writeAndThenReadTheSnapshot(
	TypeSerializer<ElementT> serializer,
	TypeSerializerSnapshot<ElementT> newSnapshot) throws IOException {

	DataOutputSerializer out = new DataOutputSerializer(128);
	TypeSerializerSnapshotSerializationUtil.writeSerializerSnapshot(out, newSnapshot, serializer);

	DataInputView in = new DataInputDeserializer(out.wrapAsByteBuffer());
	return readSnapshot(in);
}
 
Example #22
Source File: RocksDBListState.java    From flink with Apache License 2.0
@Override
public void migrateSerializedValue(
		DataInputDeserializer serializedOldValueInput,
		DataOutputSerializer serializedMigratedValueOutput,
		TypeSerializer<List<V>> priorSerializer,
		TypeSerializer<List<V>> newSerializer) throws StateMigrationException {

	Preconditions.checkArgument(priorSerializer instanceof ListSerializer);
	Preconditions.checkArgument(newSerializer instanceof ListSerializer);

	TypeSerializer<V> priorElementSerializer =
		((ListSerializer<V>) priorSerializer).getElementSerializer();

	TypeSerializer<V> newElementSerializer =
		((ListSerializer<V>) newSerializer).getElementSerializer();

	try {
		while (serializedOldValueInput.available() > 0) {
			V element = deserializeNextElement(serializedOldValueInput, priorElementSerializer);
			newElementSerializer.serialize(element, serializedMigratedValueOutput);
			if (serializedOldValueInput.available() > 0) {
				serializedMigratedValueOutput.write(DELIMITER);
			}
		}
	} catch (Exception e) {
		throw new StateMigrationException("Error while trying to migrate RocksDB list state.", e);
	}
}
 
Example #23
Source File: SimpleVersionedSerializationTest.java    From flink with Apache License 2.0
@Test
public void testSerializeEmpty() throws IOException {
	final String testString = "beeeep!";

	SimpleVersionedSerializer<String> emptySerializer = new SimpleVersionedSerializer<String>() {

		@Override
		public int getVersion() {
			return 42;
		}

		@Override
		public byte[] serialize(String obj) throws IOException {
			return new byte[0];
		}

		@Override
		public String deserialize(int version, byte[] serialized) throws IOException {
			assertEquals(42, version);
			assertEquals(0, serialized.length);
			return testString;
		}
	};

	final DataOutputSerializer out = new DataOutputSerializer(32);
	SimpleVersionedSerialization.writeVersionAndSerialize(emptySerializer, "abc", out);
	final byte[] outBytes = out.getCopyOfBuffer();

	final byte[] bytes = SimpleVersionedSerialization.writeVersionAndSerialize(emptySerializer, "abc");
	assertArrayEquals(bytes, outBytes);

	final DataInputDeserializer in = new DataInputDeserializer(bytes);
	final String deserialized = SimpleVersionedSerialization.readVersionAndDeSerialize(emptySerializer, in);
	final String deserializedFromBytes = SimpleVersionedSerialization.readVersionAndDeSerialize(emptySerializer, outBytes);
	assertEquals(testString, deserialized);
	assertEquals(testString, deserializedFromBytes);
}
 
Example #24
Source File: SimpleVersionedSerializationTest.java    From flink with Apache License 2.0
@Test
public void testSerializationRoundTrip() throws IOException {
	final SimpleVersionedSerializer<String> utfEncoder = new SimpleVersionedSerializer<String>() {

		private static final int VERSION = Integer.MAX_VALUE / 2; // version should occupy many bytes

		@Override
		public int getVersion() {
			return VERSION;
		}

		@Override
		public byte[] serialize(String str) throws IOException {
			return str.getBytes(StandardCharsets.UTF_8);
		}

		@Override
		public String deserialize(int version, byte[] serialized) throws IOException {
			assertEquals(VERSION, version);
			return new String(serialized, StandardCharsets.UTF_8);
		}
	};

	final String testString = "dugfakgs";
	final DataOutputSerializer out = new DataOutputSerializer(32);
	SimpleVersionedSerialization.writeVersionAndSerialize(utfEncoder, testString, out);
	final byte[] outBytes = out.getCopyOfBuffer();

	final byte[] bytes = SimpleVersionedSerialization.writeVersionAndSerialize(utfEncoder, testString);
	assertArrayEquals(bytes, outBytes);

	final DataInputDeserializer in = new DataInputDeserializer(bytes);
	final String deserialized = SimpleVersionedSerialization.readVersionAndDeSerialize(utfEncoder, in);
	final String deserializedFromBytes = SimpleVersionedSerialization.readVersionAndDeSerialize(utfEncoder, outBytes);
	assertEquals(testString, deserialized);
	assertEquals(testString, deserializedFromBytes);
}
 
Example #25
Source File: CompositeTypeSerializerSnapshotTest.java    From flink with Apache License 2.0
@Test
public void testRestoreCompositeTypeSerializer() throws IOException {
	// the target compatibilities of the nested serializers don't matter,
	// because we're only testing the restore serializer
	TypeSerializer<?>[] testNestedSerializers = {
		new NestedSerializer(TargetCompatibility.COMPATIBLE_AS_IS),
		new NestedSerializer(TargetCompatibility.INCOMPATIBLE),
		new NestedSerializer(TargetCompatibility.COMPATIBLE_AFTER_MIGRATION)
	};

	TestCompositeTypeSerializer testSerializer = new TestCompositeTypeSerializer("outer-config", testNestedSerializers);

	TypeSerializerSnapshot<String> testSerializerSnapshot = testSerializer.snapshotConfiguration();

	DataOutputSerializer out = new DataOutputSerializer(128);
	TypeSerializerSnapshot.writeVersionedSnapshot(out, testSerializerSnapshot);

	DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
	testSerializerSnapshot = TypeSerializerSnapshot.readVersionedSnapshot(
		in, Thread.currentThread().getContextClassLoader());

	// now, restore the composite type serializer;
	// the restored nested serializer should be a RestoredNestedSerializer
	testSerializer = (TestCompositeTypeSerializer) testSerializerSnapshot.restoreSerializer();
	Assert.assertTrue(testSerializer.getNestedSerializers()[0].getClass() == RestoredNestedSerializer.class);
	Assert.assertTrue(testSerializer.getNestedSerializers()[1].getClass() == RestoredNestedSerializer.class);
	Assert.assertTrue(testSerializer.getNestedSerializers()[2].getClass() == RestoredNestedSerializer.class);
}
 
Example #26
Source File: AvroSerializerConcurrencyTest.java    From flink with Apache License 2.0
@Test
public void testConcurrentUseOfSerializer() throws Exception {
	final AvroSerializer<String> serializer = new AvroSerializer<>(String.class);

	final BlockerSync sync = new BlockerSync();

	final DataOutputView regularOut = new DataOutputSerializer(32);
	final DataOutputView lockingOut = new LockingView(sync);

	// this thread serializes and gets stuck there
	final CheckedThread thread = new CheckedThread("serializer") {
		@Override
		public void go() throws Exception {
			serializer.serialize("a value", lockingOut);
		}
	};

	thread.start();
	sync.awaitBlocker();

	// this should fail with an exception
	try {
		serializer.serialize("value", regularOut);
		fail("should have failed with an exception");
	}
	catch (IllegalStateException e) {
		// expected
	}
	finally {
		// release the thread that serializes
		sync.releaseBlocker();
	}

	// this propagates exceptions from the spawned thread
	thread.sync();
}