org.apache.flink.api.common.typeutils.TypeSerializer Java Examples

The following examples show how to use org.apache.flink.api.common.typeutils.TypeSerializer. They are taken from open source projects; the source file and originating project are noted in the header of each example.
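Before the project examples, here is a minimal round-trip sketch (not taken from any of the projects below) showing the basic TypeSerializer contract: write a value to a DataOutputView, then read it back from a DataInputView. It uses Flink's built-in StringSerializer together with the in-memory DataOutputSerializer and DataInputDeserializer views.

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

import java.io.IOException;

public class TypeSerializerRoundTrip {
	public static void main(String[] args) throws IOException {
		TypeSerializer<String> serializer = StringSerializer.INSTANCE;

		// serialize the value into an in-memory DataOutputView
		DataOutputSerializer out = new DataOutputSerializer(64);
		serializer.serialize("hello", out);

		// deserialize it back from a DataInputView over the same bytes
		DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
		String copy = serializer.deserialize(in);

		System.out.println(copy); // prints "hello"
	}
}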
Example #1
Source File: OperatorStateRestoreOperation.java    From Flink-CEPplus with Apache License 2.0
private <K, V> void deserializeBroadcastStateValues(
	final BackendWritableBroadcastState<K, V> broadcastStateForName,
	final FSDataInputStream in,
	final OperatorStateHandle.StateMetaInfo metaInfo) throws Exception {

	if (metaInfo != null) {
		long[] offsets = metaInfo.getOffsets();
		if (offsets != null) {

			TypeSerializer<K> keySerializer = broadcastStateForName.getStateMetaInfo().getKeySerializer();
			TypeSerializer<V> valueSerializer = broadcastStateForName.getStateMetaInfo().getValueSerializer();

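			// the broadcast state entries were written as a single block; seek to its start offset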
			in.seek(offsets[0]);

			DataInputView div = new DataInputViewStreamWrapper(in);
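			// the entry count is written first, followed by the serialized key/value pairs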
			int size = div.readInt();
			for (int i = 0; i < size; i++) {
				broadcastStateForName.put(keySerializer.deserialize(div), valueSerializer.deserialize(div));
			}
		}
	}
}
 
Example #2
Source File: RocksDBSerializedCompositeKeyBuilderTest.java    From flink with Apache License 2.0
private <K, N, U> void assertKeyGroupKeyNamespaceUserKeyBytes(
	K key,
	int keyGroup,
	int prefixBytes,
	TypeSerializer<K> keySerializer,
	N namespace,
	TypeSerializer<N> namespaceSerializer,
	U userKey,
	TypeSerializer<U> userKeySerializer,
	DataInputDeserializer deserializer,
	boolean ambiguousCompositeKeyPossible) throws IOException {
	assertKeyGroupKeyNamespaceBytes(
		key,
		keyGroup,
		prefixBytes,
		keySerializer,
		namespace,
		namespaceSerializer,
		deserializer,
		ambiguousCompositeKeyPossible);
	Assert.assertEquals(userKey, userKeySerializer.deserialize(deserializer));
}
 
Example #3
Source File: ReusingMergeOuterJoinIterator.java    From flink with Apache License 2.0
public ReusingMergeOuterJoinIterator(
		OuterJoinType outerJoinType,
		MutableObjectIterator<T1> input1,
		MutableObjectIterator<T2> input2,
		TypeSerializer<T1> serializer1, TypeComparator<T1> comparator1,
		TypeSerializer<T2> serializer2, TypeComparator<T2> comparator2,
		TypePairComparator<T1, T2> pairComparator,
		MemoryManager memoryManager,
		IOManager ioManager,
		int numMemoryPages,
		AbstractInvokable parentTask)
		throws MemoryAllocationException {
	super(outerJoinType, input1, input2, serializer1, comparator1, serializer2, comparator2, pairComparator, memoryManager, ioManager, numMemoryPages, parentTask);

	this.copy1 = serializer1.createInstance();
	this.spillHeadCopy = serializer1.createInstance();
	this.copy2 = serializer2.createInstance();
	this.blockHeadCopy = serializer2.createInstance();
}
 
Example #4
Source File: PojoSerializerSnapshotTest.java    From flink with Apache License 2.0
@Test
public void testRestoreSerializerWithSameFields() {
	final PojoSerializerSnapshot<TestPojo> testSnapshot = buildTestSnapshot(Arrays.asList(
		ID_FIELD,
		NAME_FIELD,
		HEIGHT_FIELD
	));

	final TypeSerializer<TestPojo> restoredSerializer = testSnapshot.restoreSerializer();
	assertSame(restoredSerializer.getClass(), PojoSerializer.class);
	final PojoSerializer<TestPojo> restoredPojoSerializer = (PojoSerializer<TestPojo>) restoredSerializer;

	final Field[] restoredFields = restoredPojoSerializer.getFields();
	assertArrayEquals(
		new Field[] { ID_FIELD.field, NAME_FIELD.field, HEIGHT_FIELD.field },
		restoredFields);

	final TypeSerializer<?>[] restoredFieldSerializers = restoredPojoSerializer.getFieldSerializers();
	assertArrayEquals(
		new TypeSerializer[] { IntSerializer.INSTANCE, StringSerializer.INSTANCE, DoubleSerializer.INSTANCE },
		restoredFieldSerializers);
}
 
Example #5
Source File: SerializationProxiesTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testKeyedStateMetaInfoSerialization() throws Exception {

	String name = "test";
	TypeSerializer<?> namespaceSerializer = LongSerializer.INSTANCE;
	TypeSerializer<?> stateSerializer = DoubleSerializer.INSTANCE;

	StateMetaInfoSnapshot metaInfo = new RegisteredKeyValueStateBackendMetaInfo<>(
		StateDescriptor.Type.VALUE, name, namespaceSerializer, stateSerializer).snapshot();

	byte[] serialized;
	try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
		StateMetaInfoSnapshotReadersWriters.getWriter().
			writeStateMetaInfoSnapshot(metaInfo, new DataOutputViewStreamWrapper(out));
		serialized = out.toByteArray();
	}

	try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
		final StateMetaInfoReader reader = StateMetaInfoSnapshotReadersWriters.getReader(
			CURRENT_STATE_META_INFO_SNAPSHOT_VERSION, StateMetaInfoSnapshotReadersWriters.StateTypeHint.KEYED_STATE);
		metaInfo = reader.readStateMetaInfoSnapshot(
			new DataInputViewStreamWrapper(in), Thread.currentThread().getContextClassLoader());
	}

	Assert.assertEquals(name, metaInfo.getName());
}
 
Example #6
Source File: VectorTypesTest.java    From Alink with Apache License 2.0
@Test
public void testVectorsSerDeser() throws IOException {
	// Prepare data
	SparseVector sparseVector = new SparseVector(10, new HashMap<Integer, Double>() {{
		ThreadLocalRandom rand = ThreadLocalRandom.current();
		for (int i = 0; i < 10; i += 2) {
			this.put(i, rand.nextDouble());
		}
	}});
	DenseVector denseVector = DenseVector.rand(10);

	// Prepare serializer
	ExecutionConfig config = new ExecutionConfig();
	TypeSerializer<Vector> vecSer = VectorTypes.VECTOR.createSerializer(config);
	TypeSerializer<SparseVector> sparseSer = VectorTypes.SPARSE_VECTOR.createSerializer(config);
	TypeSerializer<DenseVector> denseSer = VectorTypes.DENSE_VECTOR.createSerializer(config);

	// Do tests.
	doVectorSerDeserTest(vecSer, sparseVector);
	doVectorSerDeserTest(vecSer, denseVector);
	doVectorSerDeserTest(sparseSer, sparseVector);
	doVectorSerDeserTest(denseSer, denseVector);
}
 
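The doVectorSerDeserTest helper is not part of the excerpt above. A minimal version along these lines (hypothetical; the helper body and the equality assertion are assumptions) would round-trip a vector through the given serializer using Flink's in-memory views and compare the result:

private static <V> void doVectorSerDeserTest(TypeSerializer<V> serializer, V vector) throws IOException {
	// write the vector with the serializer under test
	DataOutputSerializer out = new DataOutputSerializer(1024);
	serializer.serialize(vector, out);

	// read it back and check that the round trip preserves the value
	DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
	V deserialized = serializer.deserialize(in);
	Assert.assertEquals(vector, deserialized);
}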
Example #7
Source File: StreamConfig.java    From Flink-CEPplus with Apache License 2.0
private void setTypeSerializer(String key, TypeSerializer<?> typeWrapper) {
	try {
		InstantiationUtil.writeObjectToConfig(typeWrapper, this.config, key);
	} catch (IOException e) {
		throw new StreamTaskException("Could not serialize type serializer.", e);
	}
}
 
Example #8
Source File: CopyOnWriteStateMap.java    From flink with Apache License 2.0
/**
 * Constructs a new {@code StateMap} instance with the specified capacity.
 *
 * @param capacity      the initial capacity of this hash map.
 * @param stateSerializer the serializer of the state values.
 * @throws IllegalArgumentException when the capacity is less than zero.
 */
@SuppressWarnings("unchecked")
private CopyOnWriteStateMap(
	int capacity, TypeSerializer<S> stateSerializer) {
	this.stateSerializer = Preconditions.checkNotNull(stateSerializer);

	// initialized maps to EMPTY_TABLE.
	this.primaryTable = (StateMapEntry<K, N, S>[]) EMPTY_TABLE;
	this.incrementalRehashTable = (StateMapEntry<K, N, S>[]) EMPTY_TABLE;

	// initialize sizes to 0.
	this.primaryTableSize = 0;
	this.incrementalRehashTableSize = 0;

	this.rehashIndex = 0;
	this.stateMapVersion = 0;
	this.highestRequiredSnapshotVersion = 0;
	this.snapshotVersions = new TreeSet<>();

	if (capacity < 0) {
		throw new IllegalArgumentException("Capacity: " + capacity);
	}

	if (capacity == 0) {
		threshold = -1;
		return;
	}

	if (capacity < MINIMUM_CAPACITY) {
		capacity = MINIMUM_CAPACITY;
	} else if (capacity > MAXIMUM_CAPACITY) {
		capacity = MAXIMUM_CAPACITY;
	} else {
		capacity = MathUtils.roundUpToPowerOfTwo(capacity);
	}
	primaryTable = makeTable(capacity);
}
 
Example #9
Source File: PojoSerializer.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates an array of serializers for provided list of registered subclasses.
 * Order of returned serializers will correspond to order of provided subclasses.
 */
private static TypeSerializer<?>[] createRegisteredSubclassSerializers(
		LinkedHashSet<Class<?>> registeredSubclasses,
		ExecutionConfig executionConfig) {

	final TypeSerializer<?>[] subclassSerializers = new TypeSerializer[registeredSubclasses.size()];

	int i = 0;
	for (Class<?> registeredClass : registeredSubclasses) {
		subclassSerializers[i] = TypeExtractor.createTypeInfo(registeredClass).createSerializer(executionConfig);
		i++;
	}

	return subclassSerializers;
}
 
Example #10
Source File: StreamConfig.java    From flink with Apache License 2.0
public <T> TypeSerializer<T> getTypeSerializerOut(ClassLoader cl) {
	try {
		return InstantiationUtil.readObjectFromConfig(this.config, TYPE_SERIALIZER_OUT_1, cl);
	} catch (Exception e) {
		throw new StreamTaskException("Could not instantiate serializer.", e);
	}
}
 
Example #11
Source File: TaggedBootstrapDataTypeInfo.java    From flink-statefun with Apache License 2.0
@Override
public TypeSerializer<TaggedBootstrapData> createSerializer(ExecutionConfig executionConfig) {
  final List<TypeSerializer<?>> payloadSerializers =
      payloadTypeInfos.stream()
          .map(typeInfo -> typeInfo.createSerializer(executionConfig))
          .collect(Collectors.toList());

  return new TaggedBootstrapDataSerializer(payloadSerializers);
}
 
Example #12
Source File: ValueWithTs.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
protected Serializer createOuterSerializerWithNestedSerializers(TypeSerializer<?>[] nestedSerializers) {
	TypeSerializer<?> valueSerializer = nestedSerializers[0];
	TypeSerializer<Long> timestampSerializer = (TypeSerializer<Long>) nestedSerializers[1];
	return new Serializer(valueSerializer, timestampSerializer);
}
 
Example #13
Source File: TimerSerializer.java    From Flink-CEPplus with Apache License 2.0
private static TypeSerializer<?>[] init(
	@Nonnull TypeSerializer<?> keySerializer,
	@Nonnull TypeSerializer<?> namespaceSerializer) {
	TypeSerializer<?>[] timerSerializers = new TypeSerializer[2];
	timerSerializers[KEY_SERIALIZER_SNAPSHOT_INDEX] = keySerializer;
	timerSerializers[NAMESPACE_SERIALIZER_SNAPSHOT_INDEX] = namespaceSerializer;
	return timerSerializers;
}
 
Example #14
Source File: ReusingBlockResettableIterator.java    From flink with Apache License 2.0
public ReusingBlockResettableIterator(MemoryManager memoryManager, Iterator<T> input,
		TypeSerializer<T> serializer, int numPages,
		AbstractInvokable ownerTask)
throws MemoryAllocationException
{
	this(memoryManager, serializer, numPages, ownerTask);
	this.input = input;
}
 
Example #15
Source File: TtlMapState.java    From flink with Apache License 2.0
@Nullable
@Override
public Map<UK, TtlValue<UV>> getUnexpiredOrNull(@Nonnull Map<UK, TtlValue<UV>> ttlValue) {
	Map<UK, TtlValue<UV>> unexpired = new HashMap<>();
	TypeSerializer<TtlValue<UV>> valueSerializer =
		((MapSerializer<UK, TtlValue<UV>>) original.getValueSerializer()).getValueSerializer();
	for (Map.Entry<UK, TtlValue<UV>> e : ttlValue.entrySet()) {
		if (!expired(e.getValue())) {
			// we have to do the defensive copy to update the value
			unexpired.put(e.getKey(), valueSerializer.copy(e.getValue()));
		}
	}
	return ttlValue.size() == unexpired.size() ? ttlValue : unexpired;
}
 
Example #16
Source File: StateBackendMigrationTestBase.java    From flink with Apache License 2.0
private <K> AbstractKeyedStateBackend<K> createKeyedBackend(TypeSerializer<K> keySerializer, Environment env) throws Exception {
	return createKeyedBackend(
		keySerializer,
		10,
		new KeyGroupRange(0, 9),
		env);
}
 
Example #17
Source File: TupleComparatorILD2Test.java    From Flink-CEPplus with Apache License 2.0
@Override
protected TupleComparator<Tuple3<Integer, Long, Double>> createComparator(boolean ascending) {
	return new TupleComparator<Tuple3<Integer, Long, Double>>(
			new int[]{0, 1},
			new TypeComparator[]{
				new IntComparator(ascending),
				new LongComparator(ascending)
			},
			new TypeSerializer[]{ IntSerializer.INSTANCE, LongSerializer.INSTANCE });
}
 
Example #18
Source File: CoGroupedStreams.java    From flink with Apache License 2.0
@Override
public TypeSerializerSchemaCompatibility<TaggedUnion<T1, T2>> resolveSchemaCompatibility(TypeSerializer<TaggedUnion<T1, T2>> newSerializer) {
	List<Tuple2<TypeSerializer<?>, TypeSerializerSnapshot<?>>> nestedSerializersAndConfigs = getNestedSerializersAndConfigs();

	return CompositeTypeSerializerUtil.delegateCompatibilityCheckToNewSnapshot(
		newSerializer,
		new UnionSerializerSnapshot<>(),
		nestedSerializersAndConfigs.get(0).f1,
		nestedSerializersAndConfigs.get(1).f1
	);
}
 
Example #19
Source File: ReusingBlockResettableIterator.java    From Flink-CEPplus with Apache License 2.0
public ReusingBlockResettableIterator(MemoryManager memoryManager, TypeSerializer<T>
		serializer, int numPages, AbstractInvokable ownerTask)
throws MemoryAllocationException
{
	super(memoryManager, serializer, numPages, ownerTask);
	
	this.reuseElement = serializer.createInstance();
}
 
Example #20
Source File: HeapValueState.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
static <K, N, SV, S extends State, IS extends S> IS create(
	StateDescriptor<S, SV> stateDesc,
	StateTable<K, N, SV> stateTable,
	TypeSerializer<K> keySerializer) {
	return (IS) new HeapValueState<>(
		stateTable,
		keySerializer,
		stateTable.getStateSerializer(),
		stateTable.getNamespaceSerializer(),
		stateDesc.getDefaultValue());
}
 
Example #21
Source File: InternalTimersSnapshotReaderWriters.java    From Flink-CEPplus with Apache License 2.0
public AbstractInternalTimersSnapshotWriter(
		InternalTimersSnapshot<K, N> timersSnapshot,
		TypeSerializer<K> keySerializer,
		TypeSerializer<N> namespaceSerializer) {
	this.timersSnapshot = checkNotNull(timersSnapshot);
	this.keySerializer = checkNotNull(keySerializer);
	this.namespaceSerializer = checkNotNull(namespaceSerializer);
}
 
Example #22
Source File: TupleComparatorISD3Test.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
@Override
protected TupleSerializer<Tuple3<Integer, String, Double>> createSerializer() {
	return new TupleSerializer<Tuple3<Integer, String, Double>>(
			(Class<Tuple3<Integer, String, Double>>) (Class<?>) Tuple3.class,
			new TypeSerializer[]{
				new IntSerializer(),
				new StringSerializer(),
				new DoubleSerializer()});
}
 
Example #23
Source File: TimerSerializerSnapshot.java    From flink with Apache License 2.0
@Override
protected TimerSerializer<K, N> createOuterSerializerWithNestedSerializers(TypeSerializer<?>[] nestedSerializers) {
	@SuppressWarnings("unchecked")
	final TypeSerializer<K> keySerializer = (TypeSerializer<K>) nestedSerializers[0];

	@SuppressWarnings("unchecked")
	final TypeSerializer<N> namespaceSerializer = (TypeSerializer<N>) nestedSerializers[1];

	return new TimerSerializer<>(keySerializer, namespaceSerializer);
}
 
Example #24
Source File: ReusingKeyGroupedIterator.java    From flink with Apache License 2.0
/**
 * Initializes the KeyGroupedIterator. It requires an iterator which returns its result
 * sorted by the key fields.
 * 
 * @param iterator An iterator over records that are sorted by the key fields, in either ascending or descending order.
 * @param serializer The serializer for the data type iterated over.
 * @param comparator The comparator for the data type iterated over.
 */
public ReusingKeyGroupedIterator(MutableObjectIterator<E> iterator, TypeSerializer<E>
		serializer, TypeComparator<E> comparator)
{
	if (iterator == null || serializer == null || comparator == null) {
		throw new NullPointerException();
	}
	
	this.iterator = iterator;
	this.serializer = serializer;
	this.comparator = comparator;
	this.reuse = this.serializer.createInstance();
}
 
Example #25
Source File: ArrayDataSerializer.java    From flink with Apache License 2.0
@Override
public TypeSerializerSchemaCompatibility<ArrayData> resolveSchemaCompatibility(TypeSerializer<ArrayData> newSerializer) {
	if (!(newSerializer instanceof ArrayDataSerializer)) {
		return TypeSerializerSchemaCompatibility.incompatible();
	}

	ArrayDataSerializer newArrayDataSerializer = (ArrayDataSerializer) newSerializer;
	if (!previousType.equals(newArrayDataSerializer.eleType) ||
		!previousEleSer.equals(newArrayDataSerializer.eleSer)) {
		return TypeSerializerSchemaCompatibility.incompatible();
	} else {
		return TypeSerializerSchemaCompatibility.compatibleAsIs();
	}
}
 
Example #26
Source File: WindowOperator.java    From flink with Apache License 2.0
WindowOperator(
		WindowAssigner<W> windowAssigner,
		Trigger<W> trigger,
		TypeSerializer<W> windowSerializer,
		LogicalType[] inputFieldTypes,
		LogicalType[] accumulatorTypes,
		LogicalType[] aggResultTypes,
		LogicalType[] windowPropertyTypes,
		int rowtimeIndex,
		boolean produceUpdates,
		long allowedLateness) {
	checkArgument(allowedLateness >= 0);
	this.windowAssigner = checkNotNull(windowAssigner);
	this.trigger = checkNotNull(trigger);
	this.windowSerializer = checkNotNull(windowSerializer);
	this.inputFieldTypes = checkNotNull(inputFieldTypes);
	this.accumulatorTypes = checkNotNull(accumulatorTypes);
	this.aggResultTypes = checkNotNull(aggResultTypes);
	this.windowPropertyTypes = checkNotNull(windowPropertyTypes);
	this.allowedLateness = allowedLateness;
	this.produceUpdates = produceUpdates;

	// rowtime index should be >= 0 when in event time mode
	checkArgument(!windowAssigner.isEventTime() || rowtimeIndex >= 0);
	this.rowtimeIndex = rowtimeIndex;

	setChainingStrategy(ChainingStrategy.ALWAYS);
}
 
Example #27
Source File: AbstractStreamOperatorTestHarness.java    From flink with Apache License 2.0
/**
 * Calls {@link SetupableStreamOperator#setup(StreamTask, StreamConfig, Output)}.
 */
public void setup(TypeSerializer<OUT> outputSerializer) {
	if (!setupCalled) {
		streamTaskStateInitializer =
			createStreamTaskStateManager(environment, stateBackend, processingTimeService);
		mockTask.setStreamTaskStateInitializer(streamTaskStateInitializer);
		if (operator instanceof SetupableStreamOperator) {
			((SetupableStreamOperator) operator).setup(mockTask, config, new MockOutput(outputSerializer));
		}
		setupCalled = true;
	}
}
 
Example #28
Source File: TwoPhaseCommitSinkFunction.java    From flink with Apache License 2.0
@VisibleForTesting
TwoPhaseCommitSinkFunction(
	TypeSerializer<TXN> transactionSerializer,
	TypeSerializer<CONTEXT> contextSerializer,
	Clock clock) {
	this.stateDescriptor =
		new ListStateDescriptor<>(
			"state",
			new StateSerializer<>(transactionSerializer, contextSerializer));
	this.clock = clock;
}
 
Example #29
Source File: UnboundedFeedbackLogger.java    From flink-statefun with Apache License 2.0
public UnboundedFeedbackLogger(
    Supplier<KeyGroupStream<T>> supplier,
    ToIntFunction<T> keyGroupAssigner,
    CheckpointedStreamOperations ops,
    TypeSerializer<T> serializer) {
  this.supplier = Objects.requireNonNull(supplier);
  this.keyGroupAssigner = Objects.requireNonNull(keyGroupAssigner);
  this.serializer = Objects.requireNonNull(serializer);
  this.keyGroupStreams = new TreeMap<>();
  this.checkpointedStreamOperations = Objects.requireNonNull(ops);
}
 
Example #30
Source File: RowTypeInfo.java    From flink with Apache License 2.0
@Override
public TypeSerializer<Row> createSerializer(ExecutionConfig config) {
	int len = getArity();
	TypeSerializer<?>[] fieldSerializers = new TypeSerializer[len];
	for (int i = 0; i < len; i++) {
		fieldSerializers[i] = types[i].createSerializer(config);
	}
	return new RowSerializer(fieldSerializers);
}