Java Code Examples for org.apache.flink.core.memory.DataOutputSerializer

The following examples show how to use org.apache.flink.core.memory.DataOutputSerializer. The examples are extracted from open source projects; where known, the source project, source file, and license are listed above each example.
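Most of the examples below follow the same basic round trip: values are written through the DataOutput methods of a DataOutputSerializer, the written bytes are obtained with getCopyOfBuffer(), and they are read back with a DataInputDeserializer. The following minimal sketch illustrates that pattern using only methods that appear in the examples on this page; the class name and the sample values are made up for illustration.

import java.io.IOException;

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class DataOutputSerializerRoundTrip {

	public static void main(String[] args) throws IOException {
		// The constructor argument is the initial buffer size; the buffer grows as more data is written.
		DataOutputSerializer out = new DataOutputSerializer(64);
		out.writeInt(42);
		out.writeUTF("hello");

		// getCopyOfBuffer() returns a copy of exactly the bytes written so far.
		byte[] bytes = out.getCopyOfBuffer();

		DataInputDeserializer in = new DataInputDeserializer(bytes);
		System.out.println(in.readInt());  // prints 42
		System.out.println(in.readUTF());  // prints hello

		// clear() resets the write position so the same serializer instance can be reused.
		out.clear();
	}
}
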
Example 1
private TypeSerializerSchemaCompatibility<String> snapshotCompositeSerializerAndGetSchemaCompatibilityAfterRestore(
		TypeSerializer<?>[] initialNestedSerializers,
		TypeSerializer<?>[] newNestedSerializer,
		String initialOuterConfiguration,
		String newOuterConfiguration) throws IOException {
	TestCompositeTypeSerializer testSerializer =
		new TestCompositeTypeSerializer(initialOuterConfiguration, initialNestedSerializers);

	TypeSerializerSnapshot<String> testSerializerSnapshot = testSerializer.snapshotConfiguration();

	DataOutputSerializer out = new DataOutputSerializer(128);
	TypeSerializerSnapshot.writeVersionedSnapshot(out, testSerializerSnapshot);

	DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
	testSerializerSnapshot = TypeSerializerSnapshot.readVersionedSnapshot(
		in, Thread.currentThread().getContextClassLoader());

	TestCompositeTypeSerializer newTestSerializer =
		new TestCompositeTypeSerializer(newOuterConfiguration, newNestedSerializer);
	return testSerializerSnapshot.resolveSchemaCompatibility(newTestSerializer);
}
 
Example 2
Source Project: Flink-CEPplus   Source File: AbstractRocksDBState.java    License: Apache License 2.0
/**
 * Creates a new RocksDB backed state.
 *
 * @param columnFamily The RocksDB column family that this state is associated with.
 * @param namespaceSerializer The serializer for the namespace.
 * @param valueSerializer The serializer for the state.
 * @param defaultValue The default value for the state.
 * @param backend The backend that this state belongs to.
 */
protected AbstractRocksDBState(
		ColumnFamilyHandle columnFamily,
		TypeSerializer<N> namespaceSerializer,
		TypeSerializer<V> valueSerializer,
		V defaultValue,
		RocksDBKeyedStateBackend<K> backend) {

	this.namespaceSerializer = namespaceSerializer;
	this.backend = backend;

	this.columnFamily = columnFamily;

	this.writeOptions = backend.getWriteOptions();
	this.valueSerializer = Preconditions.checkNotNull(valueSerializer, "State value serializer");
	this.defaultValue = defaultValue;

	this.dataOutputView = new DataOutputSerializer(128);
	this.dataInputView = new DataInputDeserializer();
	this.sharedKeyNamespaceSerializer = backend.getSharedRocksKeyBuilder();
}
 
Example 3
@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgNamespace, deserializedNamespace);

		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgNamespace, deserializedNamespace);
	}
}
 
Example 4
Source Project: flink   Source File: RocksDBKeySerializationUtilsTest.java    License: Apache License 2.0
@Test
public void testKeySerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	// test for key
	for (int orgKey = 0; orgKey < 100; ++orgKey) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgKey, deserializedKey);

		RocksDBKeySerializationUtils.writeKey(orgKey, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedKey = RocksDBKeySerializationUtils.readKey(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgKey, deserializedKey);
	}
}
 
Example 5
Source Project: flink   Source File: RocksDBMapState.java    License: Apache License 2.0
@Override
public void migrateSerializedValue(
	DataInputDeserializer serializedOldValueInput,
	DataOutputSerializer serializedMigratedValueOutput,
	TypeSerializer<Map<UK, UV>> priorSerializer,
	TypeSerializer<Map<UK, UV>> newSerializer) throws StateMigrationException {

	checkArgument(priorSerializer instanceof MapSerializer);
	checkArgument(newSerializer instanceof MapSerializer);

	TypeSerializer<UV> priorMapValueSerializer = ((MapSerializer<UK, UV>) priorSerializer).getValueSerializer();
	TypeSerializer<UV> newMapValueSerializer = ((MapSerializer<UK, UV>) newSerializer).getValueSerializer();

	try {
		boolean isNull = serializedOldValueInput.readBoolean();
		UV mapUserValue = null;
		if (!isNull) {
			mapUserValue = priorMapValueSerializer.deserialize(serializedOldValueInput);
		}
		serializedMigratedValueOutput.writeBoolean(mapUserValue == null);
		newMapValueSerializer.serialize(mapUserValue, serializedMigratedValueOutput);
	} catch (Exception e) {
		throw new StateMigrationException("Error while trying to migrate RocksDB map state.", e);
	}
}
 
Example 6
Source Project: Flink-CEPplus   Source File: SerializedCheckpointData.java    License: Apache License 2.0
/**
 * Converts a list of checkpoints into an array of SerializedCheckpointData.
 *
 * @param checkpoints The checkpoints to be converted into SerializedCheckpointData.
 * @param serializer The serializer to serialize the IDs.
 * @param outputBuffer The reusable serialization buffer.
 * @param <T> The type of the ID.
 * @return An array of serializable SerializedCheckpointData, one per entry in the queue.
 *
 * @throws IOException Thrown, if the serialization fails.
 */
public static <T> SerializedCheckpointData[] fromDeque(
		ArrayDeque<Tuple2<Long, Set<T>>> checkpoints,
		TypeSerializer<T> serializer,
		DataOutputSerializer outputBuffer) throws IOException {

	SerializedCheckpointData[] serializedCheckpoints = new SerializedCheckpointData[checkpoints.size()];

	int pos = 0;
	for (Tuple2<Long, Set<T>> checkpoint : checkpoints) {
		outputBuffer.clear();
		Set<T> checkpointIds = checkpoint.f1;

		for (T id : checkpointIds) {
			serializer.serialize(id, outputBuffer);
		}

		serializedCheckpoints[pos++] = new SerializedCheckpointData(
				checkpoint.f0, outputBuffer.getCopyOfBuffer(), checkpointIds.size());
	}

	return serializedCheckpoints;
}
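
The outputBuffer argument above is the reusable buffer mentioned in the Javadoc: the method clears it before each checkpoint and copies its contents out with getCopyOfBuffer(). A hedged call-site sketch follows; the deque contents and the choice of LongSerializer are made up for illustration, and imports for SerializedCheckpointData and Tuple2 are omitted because their packages are not shown in the snippet above.

	ArrayDeque<Tuple2<Long, Set<Long>>> pendingCheckpoints = new ArrayDeque<>();
	pendingCheckpoints.add(Tuple2.of(1L, Collections.singleton(42L)));

	// A single DataOutputSerializer is shared across all checkpoints; fromDeque() clears it per entry.
	DataOutputSerializer reusableBuffer = new DataOutputSerializer(64);
	SerializedCheckpointData[] snapshots =
			SerializedCheckpointData.fromDeque(pendingCheckpoints, LongSerializer.INSTANCE, reusableBuffer);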
 
Example 7
Source Project: flink   Source File: RawType.java    License: Apache License 2.0
/**
 * Returns the serialized {@link TypeSerializerSnapshot} in Base64 encoding of this raw type.
 */
public String getSerializerString() {
	if (serializerString == null) {
		final DataOutputSerializer outputSerializer = new DataOutputSerializer(128);
		try {
			TypeSerializerSnapshot.writeVersionedSnapshot(outputSerializer, serializer.snapshotConfiguration());
			serializerString = EncodingUtils.encodeBytesToBase64(outputSerializer.getCopyOfBuffer());
			return serializerString;
		} catch (Exception e) {
			throw new TableException(String.format(
				"Unable to generate a string representation of the serializer snapshot of '%s' " +
					"describing the class '%s' for the RAW type.",
				serializer.getClass().getName(),
				clazz.toString()), e);
		}
	}
	return serializerString;
}
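
The reverse direction is not part of this snippet, but it can be pieced together from APIs used elsewhere on this page: decode the Base64 string and hand the bytes to TypeSerializerSnapshot.readVersionedSnapshot(). The sketch below is illustrative, not the RawType implementation; it assumes the string is plain (non-URL-safe) Base64 and uses the JDK decoder instead of EncodingUtils.

import java.io.IOException;
import java.util.Base64;

import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.core.memory.DataInputDeserializer;

public class SerializerStringReader {

	public static TypeSerializerSnapshot<?> readSnapshot(String serializerString) throws IOException {
		// Assumes standard Base64 encoding (the example above encodes via EncodingUtils.encodeBytesToBase64).
		byte[] snapshotBytes = Base64.getDecoder().decode(serializerString);
		DataInputDeserializer in = new DataInputDeserializer(snapshotBytes);
		return TypeSerializerSnapshot.readVersionedSnapshot(
				in, Thread.currentThread().getContextClassLoader());
	}
}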
 
Example 8
Source Project: Flink-CEPplus   Source File: KvStateSerializer.java    License: Apache License 2.0
/**
 * Serializes all entries of the Iterable with the given key and value serializers.
 *
 * @param entries         Key-value pairs to serialize
 * @param keySerializer   Serializer for UK
 * @param valueSerializer Serializer for UV
 * @param <UK>            Type of the keys
 * @param <UV>            Type of the values
 * @return Serialized entries, or <code>null</code> if <code>entries</code> is <code>null</code>
 * @throws IOException On failure during serialization
 */
public static <UK, UV> byte[] serializeMap(Iterable<Map.Entry<UK, UV>> entries, TypeSerializer<UK> keySerializer, TypeSerializer<UV> valueSerializer) throws IOException {
	if (entries != null) {
		// Serialize
		DataOutputSerializer dos = new DataOutputSerializer(32);

		for (Map.Entry<UK, UV> entry : entries) {
			keySerializer.serialize(entry.getKey(), dos);

			if (entry.getValue() == null) {
				dos.writeBoolean(true);
			} else {
				dos.writeBoolean(false);
				valueSerializer.serialize(entry.getValue(), dos);
			}
		}

		return dos.getCopyOfBuffer();
	} else {
		return null;
	}
}
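
The matching read path exists in KvStateSerializer as well; the sketch below is an illustrative reconstruction based only on the write format shown above (key, then a null flag, then the value when the flag is false), not a copy of the actual method.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;

public class MapBytesReader {

	public static <UK, UV> Map<UK, UV> readMap(
			byte[] serializedEntries,
			TypeSerializer<UK> keySerializer,
			TypeSerializer<UV> valueSerializer) throws IOException {

		if (serializedEntries == null) {
			return null;
		}

		DataInputDeserializer in = new DataInputDeserializer(serializedEntries);
		Map<UK, UV> result = new HashMap<>();

		// Mirror the write format: key, null flag, then the value if the flag is false.
		while (in.available() > 0) {
			UK key = keySerializer.deserialize(in);
			boolean valueIsNull = in.readBoolean();
			UV value = valueIsNull ? null : valueSerializer.deserialize(in);
			result.put(key, value);
		}
		return result;
	}
}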
 
Example 9
Source Project: flink   Source File: AnyType.java    License: Apache License 2.0
private String getOrCreateSerializerString() {
	if (serializerString == null) {
		final DataOutputSerializer outputSerializer = new DataOutputSerializer(128);
		try {
			TypeSerializerSnapshot.writeVersionedSnapshot(outputSerializer, serializer.snapshotConfiguration());
			serializerString = EncodingUtils.encodeBytesToBase64(outputSerializer.getCopyOfBuffer());
			return serializerString;
		} catch (Exception e) {
			throw new TableException(String.format(
				"Unable to generate a string representation of the serializer snapshot of '%s' " +
					"describing the class '%s' for the ANY type.",
				serializer.getClass().getName(),
				clazz.toString()), e);
		}
	}
	return serializerString;
}
 
Example 10
Source Project: flink   Source File: SerializedCheckpointData.java    License: Apache License 2.0
/**
 * Converts a list of checkpoints into an array of SerializedCheckpointData.
 *
 * @param checkpoints The checkpoints to be converted into SerializedCheckpointData.
 * @param serializer The serializer to serialize the IDs.
 * @param outputBuffer The reusable serialization buffer.
 * @param <T> The type of the ID.
 * @return An array of serializable SerializedCheckpointData, one per entry in the queue.
 *
 * @throws IOException Thrown, if the serialization fails.
 */
public static <T> SerializedCheckpointData[] fromDeque(
		ArrayDeque<Tuple2<Long, Set<T>>> checkpoints,
		TypeSerializer<T> serializer,
		DataOutputSerializer outputBuffer) throws IOException {

	SerializedCheckpointData[] serializedCheckpoints = new SerializedCheckpointData[checkpoints.size()];

	int pos = 0;
	for (Tuple2<Long, Set<T>> checkpoint : checkpoints) {
		outputBuffer.clear();
		Set<T> checkpointIds = checkpoint.f1;

		for (T id : checkpointIds) {
			serializer.serialize(id, outputBuffer);
		}

		serializedCheckpoints[pos++] = new SerializedCheckpointData(
				checkpoint.f0, outputBuffer.getCopyOfBuffer(), checkpointIds.size());
	}

	return serializedCheckpoints;
}
 
Example 11
Source Project: flink   Source File: CompositeTypeSerializerSnapshotTest.java    License: Apache License 2.0
private TypeSerializerSchemaCompatibility<String> snapshotCompositeSerializerAndGetSchemaCompatibilityAfterRestore(
		TypeSerializer<?>[] initialNestedSerializers,
		TypeSerializer<?>[] newNestedSerializer,
		String initialOuterConfiguration,
		String newOuterConfiguration) throws IOException {
	TestCompositeTypeSerializer testSerializer =
		new TestCompositeTypeSerializer(initialOuterConfiguration, initialNestedSerializers);

	TypeSerializerSnapshot<String> testSerializerSnapshot = testSerializer.snapshotConfiguration();

	DataOutputSerializer out = new DataOutputSerializer(128);
	TypeSerializerSnapshot.writeVersionedSnapshot(out, testSerializerSnapshot);

	DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
	testSerializerSnapshot = TypeSerializerSnapshot.readVersionedSnapshot(
		in, Thread.currentThread().getContextClassLoader());

	TestCompositeTypeSerializer newTestSerializer =
		new TestCompositeTypeSerializer(newOuterConfiguration, newNestedSerializer);
	return testSerializerSnapshot.resolveSchemaCompatibility(newTestSerializer);
}
 
Example 12
Source Project: flink   Source File: AbstractRocksDBState.java    License: Apache License 2.0
/**
 * Creates a new RocksDB backed state.
 *
 * @param columnFamily The RocksDB column family that this state is associated with.
 * @param namespaceSerializer The serializer for the namespace.
 * @param valueSerializer The serializer for the state.
 * @param defaultValue The default value for the state.
 * @param backend The backend that this state belongs to.
 */
protected AbstractRocksDBState(
		ColumnFamilyHandle columnFamily,
		TypeSerializer<N> namespaceSerializer,
		TypeSerializer<V> valueSerializer,
		V defaultValue,
		RocksDBKeyedStateBackend<K> backend) {

	this.namespaceSerializer = namespaceSerializer;
	this.backend = backend;

	this.columnFamily = columnFamily;

	this.writeOptions = backend.getWriteOptions();
	this.valueSerializer = Preconditions.checkNotNull(valueSerializer, "State value serializer");
	this.defaultValue = defaultValue;

	this.dataOutputView = new DataOutputSerializer(128);
	this.dataInputView = new DataInputDeserializer();
	this.sharedKeyNamespaceSerializer = backend.getSharedRocksKeyBuilder();
}
 
Example 13
Source Project: flink   Source File: RocksDBPriorityQueueSetFactory.java    License: Apache License 2.0
RocksDBPriorityQueueSetFactory(
	KeyGroupRange keyGroupRange,
	int keyGroupPrefixBytes,
	int numberOfKeyGroups,
	Map<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
	RocksDB db,
	RocksDBWriteBatchWrapper writeBatchWrapper,
	RocksDBNativeMetricMonitor nativeMetricMonitor,
	Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory) {
	this.keyGroupRange = keyGroupRange;
	this.keyGroupPrefixBytes = keyGroupPrefixBytes;
	this.numberOfKeyGroups = numberOfKeyGroups;
	this.kvStateInformation = kvStateInformation;
	this.db = db;
	this.writeBatchWrapper = writeBatchWrapper;
	this.nativeMetricMonitor = nativeMetricMonitor;
	this.columnFamilyOptionsFactory = columnFamilyOptionsFactory;
	this.sharedElementOutView = new DataOutputSerializer(128);
	this.sharedElementInView = new DataInputDeserializer();
}
 
Example 14
Source Project: flink   Source File: KvStateSerializer.java    License: Apache License 2.0
/**
 * Serializes all entries of the Iterable with the given key and value serializers.
 *
 * @param entries         Key-value pairs to serialize
 * @param keySerializer   Serializer for UK
 * @param valueSerializer Serializer for UV
 * @param <UK>            Type of the keys
 * @param <UV>            Type of the values
 * @return Serialized entries, or <code>null</code> if <code>entries</code> is <code>null</code>
 * @throws IOException On failure during serialization
 */
public static <UK, UV> byte[] serializeMap(Iterable<Map.Entry<UK, UV>> entries, TypeSerializer<UK> keySerializer, TypeSerializer<UV> valueSerializer) throws IOException {
	if (entries != null) {
		// Serialize
		DataOutputSerializer dos = new DataOutputSerializer(32);

		for (Map.Entry<UK, UV> entry : entries) {
			keySerializer.serialize(entry.getKey(), dos);

			if (entry.getValue() == null) {
				dos.writeBoolean(true);
			} else {
				dos.writeBoolean(false);
				valueSerializer.serialize(entry.getValue(), dos);
			}
		}

		return dos.getCopyOfBuffer();
	} else {
		return null;
	}
}
 
Example 15
Source Project: flink   Source File: RocksDBCachingPriorityQueueSet.java    License: Apache License 2.0
RocksDBCachingPriorityQueueSet(
	@Nonnegative int keyGroupId,
	@Nonnegative int keyGroupPrefixBytes,
	@Nonnull RocksDB db,
	@Nonnull ColumnFamilyHandle columnFamilyHandle,
	@Nonnull TypeSerializer<E> byteOrderProducingSerializer,
	@Nonnull DataOutputSerializer outputStream,
	@Nonnull DataInputDeserializer inputStream,
	@Nonnull RocksDBWriteBatchWrapper batchWrapper,
	@Nonnull OrderedByteArraySetCache orderedByteArraySetCache) {
	this.db = db;
	this.columnFamilyHandle = columnFamilyHandle;
	this.byteOrderProducingSerializer = byteOrderProducingSerializer;
	this.batchWrapper = batchWrapper;
	this.outputView = outputStream;
	this.inputView = inputStream;
	this.orderedCache = orderedByteArraySetCache;
	this.allElementsInCache = false;
	this.groupPrefixBytes = createKeyGroupBytes(keyGroupId, keyGroupPrefixBytes);
	this.seekHint = groupPrefixBytes;
	this.internalIndex = HeapPriorityQueueElement.NOT_CONTAINED;
}
 
Example 16
private KeyGroupPartitionedPriorityQueue.PartitionQueueSetFactory<
	TestElement, RocksDBCachingPriorityQueueSet<TestElement>> newFactory() {

	return (keyGroupId, numKeyGroups, keyExtractorFunction, elementComparator) -> {
		DataOutputSerializer outputStreamWithPos = new DataOutputSerializer(128);
		DataInputDeserializer inputStreamWithPos = new DataInputDeserializer();
		int keyGroupPrefixBytes = RocksDBKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(numKeyGroups);
		TreeOrderedSetCache orderedSetCache = new TreeOrderedSetCache(32);
		return new RocksDBCachingPriorityQueueSet<>(
			keyGroupId,
			keyGroupPrefixBytes,
			rocksDBResource.getRocksDB(),
			rocksDBResource.getReadOptions(),
			rocksDBResource.getDefaultColumnFamily(),
			TestElementSerializer.INSTANCE,
			outputStreamWithPos,
			inputStreamWithPos,
			rocksDBResource.getBatchWrapper(),
			orderedSetCache);
	};
}
 
Example 17
Source Project: flink   Source File: RocksDBCachingPriorityQueueSet.java    License: Apache License 2.0
RocksDBCachingPriorityQueueSet(
	@Nonnegative int keyGroupId,
	@Nonnegative int keyGroupPrefixBytes,
	@Nonnull RocksDB db,
	@Nonnull ReadOptions readOptions,
	@Nonnull ColumnFamilyHandle columnFamilyHandle,
	@Nonnull TypeSerializer<E> byteOrderProducingSerializer,
	@Nonnull DataOutputSerializer outputStream,
	@Nonnull DataInputDeserializer inputStream,
	@Nonnull RocksDBWriteBatchWrapper batchWrapper,
	@Nonnull OrderedByteArraySetCache orderedByteArraySetCache) {
	this.db = db;
	this.readOptions = readOptions;
	this.columnFamilyHandle = columnFamilyHandle;
	this.byteOrderProducingSerializer = byteOrderProducingSerializer;
	this.batchWrapper = batchWrapper;
	this.outputView = outputStream;
	this.inputView = inputStream;
	this.orderedCache = orderedByteArraySetCache;
	this.allElementsInCache = false;
	this.groupPrefixBytes = createKeyGroupBytes(keyGroupId, keyGroupPrefixBytes);
	this.seekHint = groupPrefixBytes;
	this.internalIndex = HeapPriorityQueueElement.NOT_CONTAINED;
}
 
Example 18
Source Project: flink   Source File: RocksDBKeySerializationUtilsTest.java    License: Apache License 2.0
@Test
public void testNamespaceSerializationAndDeserialization() throws Exception {
	final DataOutputSerializer outputView = new DataOutputSerializer(8);
	final DataInputDeserializer inputView = new DataInputDeserializer();

	for (int orgNamespace = 0; orgNamespace < 100; ++orgNamespace) {
		outputView.clear();
		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, false);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		int deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, false);
		Assert.assertEquals(orgNamespace, deserializedNamespace);

		RocksDBKeySerializationUtils.writeNameSpace(orgNamespace, IntSerializer.INSTANCE, outputView, true);
		inputView.setBuffer(outputView.getCopyOfBuffer());
		deserializedNamespace = RocksDBKeySerializationUtils.readNamespace(IntSerializer.INSTANCE, inputView, true);
		Assert.assertEquals(orgNamespace, deserializedNamespace);
	}
}
 
Example 19
private KeyGroupPartitionedPriorityQueue.PartitionQueueSetFactory<
	TestElement, RocksDBCachingPriorityQueueSet<TestElement>> newFactory() {

	return (keyGroupId, numKeyGroups, keyExtractorFunction, elementComparator) -> {
		DataOutputSerializer outputStreamWithPos = new DataOutputSerializer(128);
		DataInputDeserializer inputStreamWithPos = new DataInputDeserializer();
		int keyGroupPrefixBytes = RocksDBKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(numKeyGroups);
		TreeOrderedSetCache orderedSetCache = new TreeOrderedSetCache(32);
		return new RocksDBCachingPriorityQueueSet<>(
			keyGroupId,
			keyGroupPrefixBytes,
			rocksDBResource.getRocksDB(),
			rocksDBResource.getDefaultColumnFamily(),
			TestElementSerializer.INSTANCE,
			outputStreamWithPos,
			inputStreamWithPos,
			rocksDBResource.getBatchWrapper(),
			orderedSetCache);
	};
}
 
Example 20
@Test
public void testConcurrentUseOfSerializer() throws Exception {
	final AvroSerializer<String> serializer = new AvroSerializer<>(String.class);

	final BlockerSync sync = new BlockerSync();

	final DataOutputView regularOut = new DataOutputSerializer(32);
	final DataOutputView lockingOut = new LockingView(sync);

	// this thread serializes and gets stuck there
	final CheckedThread thread = new CheckedThread("serializer") {
		@Override
		public void go() throws Exception {
			serializer.serialize("a value", lockingOut);
		}
	};

	thread.start();
	sync.awaitBlocker();

	// this should fail with an exception
	try {
		serializer.serialize("value", regularOut);
		fail("should have failed with an exception");
	}
	catch (IllegalStateException e) {
		// expected
	}
	finally {
		// release the thread that serializes
		sync.releaseBlocker();
	}

	// this propagates exceptions from the spawned thread
	thread.sync();
}
 
Example 21
Source Project: flink   Source File: BucketStateSerializer.java    License: Apache License 2.0
@Override
public byte[] serialize(BucketState<BucketID> state) throws IOException {
	DataOutputSerializer out = new DataOutputSerializer(256);
	out.writeInt(MAGIC_NUMBER);
	serializeV2(state, out);
	return out.getCopyOfBuffer();
}
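
A matching read side would check the magic number before dispatching to the versioned reader. The bare-method sketch below follows the style of the example above; deserializeV2 is a hypothetical counterpart to the serializeV2 call shown, and MAGIC_NUMBER is the same constant.

public BucketState<BucketID> deserializeSketch(byte[] serialized) throws IOException {
	DataInputDeserializer in = new DataInputDeserializer(serialized);
	int magicNumber = in.readInt();
	if (magicNumber != MAGIC_NUMBER) {
		throw new IOException("Corrupt data: unexpected magic number " + magicNumber);
	}
	// deserializeV2 is assumed to mirror serializeV2 from the example above.
	return deserializeV2(in);
}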
 
Example 22
Source Project: flink   Source File: TypeSerializerUpgradeTestBase.java    License: Apache License 2.0
private static <T> TypeSerializerSnapshot<T> writeAndThenReadSerializerSnapshot(
		TypeSerializer<T> serializer) throws IOException {

	DataOutputSerializer out = new DataOutputSerializer(INITIAL_OUTPUT_BUFFER_SIZE);
	writeSerializerSnapshotCurrentFormat(out, serializer);

	DataInputDeserializer in = new DataInputDeserializer(out.wrapAsByteBuffer());
	return readSerializerSnapshotCurrentFormat(
			in,
			Thread.currentThread().getContextClassLoader());
}
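
Note that this helper reads through out.wrapAsByteBuffer() rather than out.getCopyOfBuffer(); DataInputDeserializer accepts a ByteBuffer as well as a byte array. A small hedged comparison, assuming (as the method name suggests) that wrapAsByteBuffer() exposes the written bytes without copying:

	DataOutputSerializer out = new DataOutputSerializer(32);
	out.writeInt(7);

	// Copies the written bytes into a fresh array; safe to keep after the serializer is reused.
	DataInputDeserializer viaCopy = new DataInputDeserializer(out.getCopyOfBuffer());

	// Wraps the written bytes as a ByteBuffer, presumably without copying, so it should be
	// consumed before the serializer is cleared or written to again.
	DataInputDeserializer viaWrap = new DataInputDeserializer(out.wrapAsByteBuffer());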
 
Example 23
@Test
public void testRestoreCompositeTypeSerializer() throws IOException {
	// the target compatibilities of the nested serializers don't matter,
	// because we're only testing the restore serializer
	TypeSerializer<?>[] testNestedSerializers = {
		new NestedSerializer(TargetCompatibility.COMPATIBLE_AS_IS),
		new NestedSerializer(TargetCompatibility.INCOMPATIBLE),
		new NestedSerializer(TargetCompatibility.COMPATIBLE_AFTER_MIGRATION)
	};

	TestCompositeTypeSerializer testSerializer = new TestCompositeTypeSerializer("outer-config", testNestedSerializers);

	TypeSerializerSnapshot<String> testSerializerSnapshot = testSerializer.snapshotConfiguration();

	DataOutputSerializer out = new DataOutputSerializer(128);
	TypeSerializerSnapshot.writeVersionedSnapshot(out, testSerializerSnapshot);

	DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
	testSerializerSnapshot = TypeSerializerSnapshot.readVersionedSnapshot(
		in, Thread.currentThread().getContextClassLoader());

	// now, restore the composite type serializer;
	// the restored nested serializer should be a RestoredNestedSerializer
	testSerializer = (TestCompositeTypeSerializer) testSerializerSnapshot.restoreSerializer();
	Assert.assertTrue(testSerializer.getNestedSerializers()[0].getClass() == RestoredNestedSerializer.class);
	Assert.assertTrue(testSerializer.getNestedSerializers()[1].getClass() == RestoredNestedSerializer.class);
	Assert.assertTrue(testSerializer.getNestedSerializers()[2].getClass() == RestoredNestedSerializer.class);
}
 
Example 24
Source Project: flink   Source File: TypeSerializerSnapshotTest.java    License: Apache License 2.0
@Test
public void testSerializeConfigWhenSerializerMissing() throws Exception {
	TestSerializer ser = new TestSerializer();
	TypeSerializerConfigSnapshot<Object> snap = (TypeSerializerConfigSnapshot<Object>) ser.snapshotConfiguration();

	try {
		TypeSerializerSnapshot.writeVersionedSnapshot(new DataOutputSerializer(64), snap);
		fail("exception expected");
	}
	catch (IllegalStateException e) {
		// expected
	}
}
 
Example 25
@Test
public void testSerializationRoundTrip() throws IOException {
	final SimpleVersionedSerializer<String> utfEncoder = new SimpleVersionedSerializer<String>() {

		private static final int VERSION = Integer.MAX_VALUE / 2; // version should occupy many bytes

		@Override
		public int getVersion() {
			return VERSION;
		}

		@Override
		public byte[] serialize(String str) throws IOException {
			return str.getBytes(StandardCharsets.UTF_8);
		}

		@Override
		public String deserialize(int version, byte[] serialized) throws IOException {
			assertEquals(VERSION, version);
			return new String(serialized, StandardCharsets.UTF_8);
		}
	};

	final String testString = "dugfakgs";
	final DataOutputSerializer out = new DataOutputSerializer(32);
	SimpleVersionedSerialization.writeVersionAndSerialize(utfEncoder, testString, out);
	final byte[] outBytes = out.getCopyOfBuffer();

	final byte[] bytes = SimpleVersionedSerialization.writeVersionAndSerialize(utfEncoder, testString);
	assertArrayEquals(bytes, outBytes);

	final DataInputDeserializer in = new DataInputDeserializer(bytes);
	final String deserialized = SimpleVersionedSerialization.readVersionAndDeSerialize(utfEncoder, in);
	final String deserializedFromBytes = SimpleVersionedSerialization.readVersionAndDeSerialize(utfEncoder, outBytes);
	assertEquals(testString, deserialized);
	assertEquals(testString, deserializedFromBytes);
}
 
Example 26
@Test
public void testSerializeEmpty() throws IOException {
	final String testString = "beeeep!";

	SimpleVersionedSerializer<String> emptySerializer = new SimpleVersionedSerializer<String>() {

		@Override
		public int getVersion() {
			return 42;
		}

		@Override
		public byte[] serialize(String obj) throws IOException {
			return new byte[0];
		}

		@Override
		public String deserialize(int version, byte[] serialized) throws IOException {
			assertEquals(42, version);
			assertEquals(0, serialized.length);
			return testString;
		}
	};

	final DataOutputSerializer out = new DataOutputSerializer(32);
	SimpleVersionedSerialization.writeVersionAndSerialize(emptySerializer, "abc", out);
	final byte[] outBytes = out.getCopyOfBuffer();

	final byte[] bytes = SimpleVersionedSerialization.writeVersionAndSerialize(emptySerializer, "abc");
	assertArrayEquals(bytes, outBytes);

	final DataInputDeserializer in = new DataInputDeserializer(bytes);
	final String deserialized = SimpleVersionedSerialization.readVersionAndDeSerialize(emptySerializer, in);
	final String deserializedFromBytes = SimpleVersionedSerialization.readVersionAndDeSerialize(emptySerializer, outBytes);
	assertEquals(testString, deserialized);
	assertEquals(testString, deserializedFromBytes);
}