Java Code Examples for org.apache.flink.api.common.typeutils.TypeSerializer#deserialize()
The following examples show how to use org.apache.flink.api.common.typeutils.TypeSerializer#deserialize().
The examples are extracted from open source projects; the source file, originating project, and license are noted above each example.
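Before the project examples, here is a minimal, self-contained round-trip sketch (not taken from any of the projects below) showing how serialize() and deserialize() pair up. It uses Flink's StringSerializer together with DataOutputSerializer and DataInputDeserializer; the initial buffer size passed to DataOutputSerializer is an arbitrary choice for this sketch.

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class TypeSerializerRoundTrip {

    public static void main(String[] args) throws Exception {
        TypeSerializer<String> serializer = StringSerializer.INSTANCE;

        // write the value into an in-memory DataOutputView
        DataOutputSerializer out = new DataOutputSerializer(32); // initial buffer size is arbitrary here
        serializer.serialize("hello flink", out);

        // read it back from a DataInputView over the written bytes
        DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
        String copy = serializer.deserialize(in);

        System.out.println(copy); // prints: hello flink
    }
}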
Example 1
Source File: StateTableByKeyGroupReaders.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
public void readMappingsInKeyGroup(@Nonnull DataInputView inView, @Nonnegative int keyGroupId) throws IOException {
    if (inView.readByte() == 0) {
        return;
    }

    final TypeSerializer<K> keySerializer = stateTable.keyContext.getKeySerializer();
    final TypeSerializer<N> namespaceSerializer = stateTable.getNamespaceSerializer();
    final TypeSerializer<S> stateSerializer = stateTable.getStateSerializer();

    // V1 uses kind of namespace compressing format
    int numNamespaces = inView.readInt();
    for (int k = 0; k < numNamespaces; k++) {
        N namespace = namespaceSerializer.deserialize(inView);
        int numEntries = inView.readInt();
        for (int l = 0; l < numEntries; l++) {
            K key = keySerializer.deserialize(inView);
            S state = stateSerializer.deserialize(inView);
            stateTable.put(key, keyGroupId, namespace, state);
        }
    }
}
Example 2
Source File: RocksDBMapState.java From flink with Apache License 2.0 | 6 votes |
@Override
public void migrateSerializedValue(
        DataInputDeserializer serializedOldValueInput,
        DataOutputSerializer serializedMigratedValueOutput,
        TypeSerializer<Map<UK, UV>> priorSerializer,
        TypeSerializer<Map<UK, UV>> newSerializer) throws StateMigrationException {

    checkArgument(priorSerializer instanceof MapSerializer);
    checkArgument(newSerializer instanceof MapSerializer);

    TypeSerializer<UV> priorMapValueSerializer = ((MapSerializer<UK, UV>) priorSerializer).getValueSerializer();
    TypeSerializer<UV> newMapValueSerializer = ((MapSerializer<UK, UV>) newSerializer).getValueSerializer();

    try {
        boolean isNull = serializedOldValueInput.readBoolean();
        UV mapUserValue = null;
        if (!isNull) {
            mapUserValue = priorMapValueSerializer.deserialize(serializedOldValueInput);
        }
        serializedMigratedValueOutput.writeBoolean(mapUserValue == null);
        newMapValueSerializer.serialize(mapUserValue, serializedMigratedValueOutput);
    } catch (Exception e) {
        throw new StateMigrationException("Error while trying to migrate RocksDB map state.", e);
    }
}
Example 3
Source File: InstantiationUtil.java From flink with Apache License 2.0 | 5 votes |
public static <T> T deserializeFromByteArray(TypeSerializer<T> serializer, byte[] buf) throws IOException {
    if (buf == null) {
        throw new NullPointerException("Byte array to deserialize from must not be null.");
    }

    DataInputViewStreamWrapper inputViewWrapper = new DataInputViewStreamWrapper(new ByteArrayInputStream(buf));
    return serializer.deserialize(inputViewWrapper);
}
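For context, a typical caller pairs this helper with InstantiationUtil.serializeToByteArray(...) to round-trip a value through a byte[]. The following is a hedged usage sketch, not code from the Flink project; the value and serializer are placeholders.

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.util.InstantiationUtil;

public class ByteArrayRoundTrip {

    public static void main(String[] args) throws Exception {
        TypeSerializer<Long> serializer = LongSerializer.INSTANCE;

        // serialize to a byte[] and read it back with the same serializer
        byte[] bytes = InstantiationUtil.serializeToByteArray(serializer, 42L);
        Long restored = InstantiationUtil.deserializeFromByteArray(serializer, bytes);

        System.out.println(restored); // prints: 42
    }
}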
Example 4
Source File: MigrationUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Skips bytes corresponding to serialized states. In flink 1.6+ the states are no longer kept in state.
 */
static void skipSerializedStates(DataInputView in) throws IOException {
    TypeSerializer<String> nameSerializer = StringSerializer.INSTANCE;
    TypeSerializer<State.StateType> stateTypeSerializer = new EnumSerializer<>(State.StateType.class);
    TypeSerializer<StateTransitionAction> actionSerializer = new EnumSerializer<>(StateTransitionAction.class);

    final int noOfStates = in.readInt();

    for (int i = 0; i < noOfStates; i++) {
        nameSerializer.deserialize(in);
        stateTypeSerializer.deserialize(in);
    }

    for (int i = 0; i < noOfStates; i++) {
        String srcName = nameSerializer.deserialize(in);

        int noOfTransitions = in.readInt();
        for (int j = 0; j < noOfTransitions; j++) {
            String src = nameSerializer.deserialize(in);
            Preconditions.checkState(src.equals(srcName),
                    "Source Edge names do not match (" + srcName + " - " + src + ").");

            nameSerializer.deserialize(in);
            actionSerializer.deserialize(in);

            try {
                skipCondition(in);
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            }
        }
    }
}
Example 5
Source File: RocksDBListState.java From flink with Apache License 2.0 | 5 votes |
private static <V> V deserializeNextElement(DataInputDeserializer in, TypeSerializer<V> elementSerializer) {
    try {
        if (in.available() > 0) {
            V element = elementSerializer.deserialize(in);
            if (in.available() > 0) {
                in.readByte();
            }
            return element;
        }
    } catch (IOException e) {
        throw new FlinkRuntimeException("Unexpected list element deserialization failure", e);
    }
    return null;
}
Example 6
Source File: KeyGroupStream.java From stateful-functions with Apache License 2.0 | 5 votes |
static <T> void readFrom(
        DataInputView source, TypeSerializer<T> serializer, FeedbackConsumer<T> consumer) throws Exception {

    final int elementCount = source.readInt();
    for (int i = 0; i < elementCount; i++) {
        T envelope = serializer.deserialize(source);
        consumer.processFeedback(envelope);
    }
}
Example 7
Source File: SharedBuffer.java From flink with Apache License 2.0 | 5 votes |
public static <V> ValueTimeWrapper<V> deserialize(
        final TypeSerializer<V> valueSerializer,
        final DataInputView source) throws IOException {

    final V value = valueSerializer.deserialize(source);
    final long timestamp = source.readLong();
    final int counter = source.readInt();

    return new ValueTimeWrapper<>(value, timestamp, counter);
}
Example 8
Source File: MigrationUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Skips bytes corresponding to serialized states. In flink 1.6+ the states are no longer kept in state.
 */
static void skipSerializedStates(DataInputView in) throws IOException {
    TypeSerializer<String> nameSerializer = StringSerializer.INSTANCE;
    TypeSerializer<State.StateType> stateTypeSerializer = new EnumSerializer<>(State.StateType.class);
    TypeSerializer<StateTransitionAction> actionSerializer = new EnumSerializer<>(StateTransitionAction.class);

    final int noOfStates = in.readInt();

    for (int i = 0; i < noOfStates; i++) {
        nameSerializer.deserialize(in);
        stateTypeSerializer.deserialize(in);
    }

    for (int i = 0; i < noOfStates; i++) {
        String srcName = nameSerializer.deserialize(in);

        int noOfTransitions = in.readInt();
        for (int j = 0; j < noOfTransitions; j++) {
            String src = nameSerializer.deserialize(in);
            Preconditions.checkState(src.equals(srcName),
                    "Source Edge names do not match (" + srcName + " - " + src + ").");

            nameSerializer.deserialize(in);
            actionSerializer.deserialize(in);

            try {
                skipCondition(in);
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            }
        }
    }
}
Example 9
Source File: MigrationUtils.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
static <T> Queue<ComputationState> deserializeComputationStates(
        org.apache.flink.cep.nfa.SharedBuffer<T> sharedBuffer,
        TypeSerializer<T> eventSerializer,
        DataInputView source) throws IOException {

    Queue<ComputationState> computationStates = new LinkedList<>();
    StringSerializer stateNameSerializer = StringSerializer.INSTANCE;
    LongSerializer timestampSerializer = LongSerializer.INSTANCE;
    DeweyNumber.DeweyNumberSerializer versionSerializer = DeweyNumber.DeweyNumberSerializer.INSTANCE;

    int computationStateNo = source.readInt();
    for (int i = 0; i < computationStateNo; i++) {
        String state = stateNameSerializer.deserialize(source);
        String prevState = stateNameSerializer.deserialize(source);
        long timestamp = timestampSerializer.deserialize(source);
        DeweyNumber version = versionSerializer.deserialize(source);
        long startTimestamp = timestampSerializer.deserialize(source);
        int counter = source.readInt();

        T event = null;
        if (source.readBoolean()) {
            event = eventSerializer.deserialize(source);
        }

        NodeId nodeId;
        EventId startEventId;
        if (prevState != null) {
            nodeId = sharedBuffer.getNodeId(prevState, timestamp, counter, event);
            startEventId = sharedBuffer.getStartEventId(version.getRun());
        } else {
            nodeId = null;
            startEventId = null;
        }

        computationStates.add(ComputationState.createState(state, nodeId, version, startTimestamp, startEventId));
    }
    return computationStates;
}
Example 10
Source File: MigrationUtils.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Skips bytes corresponding to serialized states. In flink 1.6+ the states are no longer kept in state.
 */
static void skipSerializedStates(DataInputView in) throws IOException {
    TypeSerializer<String> nameSerializer = StringSerializer.INSTANCE;
    TypeSerializer<State.StateType> stateTypeSerializer = new EnumSerializer<>(State.StateType.class);
    TypeSerializer<StateTransitionAction> actionSerializer = new EnumSerializer<>(StateTransitionAction.class);

    final int noOfStates = in.readInt();

    for (int i = 0; i < noOfStates; i++) {
        nameSerializer.deserialize(in);
        stateTypeSerializer.deserialize(in);
    }

    for (int i = 0; i < noOfStates; i++) {
        String srcName = nameSerializer.deserialize(in);

        int noOfTransitions = in.readInt();
        for (int j = 0; j < noOfTransitions; j++) {
            String src = nameSerializer.deserialize(in);
            Preconditions.checkState(src.equals(srcName),
                    "Source Edge names do not match (" + srcName + " - " + src + ").");

            nameSerializer.deserialize(in);
            actionSerializer.deserialize(in);

            try {
                skipCondition(in);
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            }
        }
    }
}
Example 11
Source File: InstantiationUtil.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
public static <T> T deserializeFromByteArray(TypeSerializer<T> serializer, T reuse, byte[] buf) throws IOException {
    if (buf == null) {
        throw new NullPointerException("Byte array to deserialize from must not be null.");
    }

    DataInputViewStreamWrapper inputViewWrapper = new DataInputViewStreamWrapper(new ByteArrayInputStream(buf));
    return serializer.deserialize(reuse, inputViewWrapper);
}
Example 12
Source File: StateDescriptor.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
private void readObject(final ObjectInputStream in) throws IOException, ClassNotFoundException {
    // read the non-transient fields
    in.defaultReadObject();

    // read the default value field
    boolean hasDefaultValue = in.readBoolean();
    if (hasDefaultValue) {
        TypeSerializer<T> serializer = serializerAtomicReference.get();
        checkNotNull(serializer, "Serializer not initialized.");

        int size = in.readInt();

        byte[] buffer = new byte[size];

        in.readFully(buffer);

        try (ByteArrayInputStream bais = new ByteArrayInputStream(buffer);
                DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(bais)) {

            defaultValue = serializer.deserialize(inView);
        } catch (Exception e) {
            throw new IOException("Unable to deserialize default value.", e);
        }
    } else {
        defaultValue = null;
    }
}
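The byte layout consumed above (presence flag, length, then the serialized bytes) implies a mirror-image write path. The following is only a hedged sketch of what that write side could look like; it is not the actual StateDescriptor#writeObject, and the method name writeDefaultValue is hypothetical. It assumes org.apache.flink.core.memory.DataOutputViewStreamWrapper and java.io.ByteArrayOutputStream are imported.

// Hedged sketch (hypothetical helper, not Flink source): serialize the default value
// into a byte[] and write presence flag + length + bytes, matching the readObject above.
private void writeDefaultValue(ObjectOutputStream out, TypeSerializer<T> serializer, T defaultValue) throws IOException {
    out.writeBoolean(defaultValue != null);
    if (defaultValue != null) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(baos);

        serializer.serialize(defaultValue, outView);
        outView.flush();

        byte[] serializedDefault = baos.toByteArray();
        out.writeInt(serializedDefault.length);
        out.write(serializedDefault);
    }
}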
Example 13
Source File: SerializedListAccumulator.java From flink with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") public static <T> List<T> deserializeList(ArrayList<byte[]> data, TypeSerializer<T> serializer) throws IOException, ClassNotFoundException { List<T> result = new ArrayList<T>(data.size()); for (byte[] bytes : data) { ByteArrayInputStream inStream = new ByteArrayInputStream(bytes); DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(inStream); T val = serializer.deserialize(in); result.add(val); } return result; }
Example 14
Source File: RocksDBUtils.java From bravo with Apache License 2.0 | 5 votes |
public static <K> K readKey(
        TypeSerializer<K> keySerializer,
        ByteArrayInputStreamWithPos inputStream,
        DataInputView inputView,
        boolean ambiguousKeyPossible) throws IOException {

    int beforeRead = inputStream.getPosition();
    K key = keySerializer.deserialize(inputView);
    if (ambiguousKeyPossible) {
        int length = inputStream.getPosition() - beforeRead;
        readVariableIntBytes(inputView, length);
    }
    return key;
}
Example 15
Source File: AvroSerializerSnapshotTest.java From flink with Apache License 2.0 | 4 votes |
private static <T> T deserialize(TypeSerializer<T> serializer, ByteBuffer serializedRecord) throws IOException {
    DataInputView in = new DataInputDeserializer(serializedRecord);
    return serializer.deserialize(in);
}
Example 16
Source File: ChannelViewsTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testWriteAndReadLongRecords() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LONG_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());

    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();

    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);

        final int k1 = rec.f0;
        final String v1 = rec.f1;
        final int k2 = readRec.f0;
        final String v2 = readRec.f1;

        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }

    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Example 17
Source File: ChannelViewsTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@Test
public void testReadTooMany() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());

    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();

    // read and re-generate all records and compare them
    try {
        final Tuple2<Integer, String> readRec = new Tuple2<>();
        for (int i = 0; i < NUM_PAIRS_SHORT + 1; i++) {
            generator.next(rec);
            serializer.deserialize(readRec, inView);

            final int k1 = rec.f0;
            final String v1 = rec.f1;
            final int k2 = readRec.f0;
            final String v2 = readRec.f1;

            Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
        }
        Assert.fail("Expected an EOFException which did not occur.");
    } catch (EOFException eofex) {
        // expected
    } catch (Throwable t) {
        // unexpected
        Assert.fail("Unexpected Exception: " + t.getMessage());
    }

    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Example 18
Source File: ChannelViewsTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testReadTooMany() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());

    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();

    // read and re-generate all records and compare them
    try {
        final Tuple2<Integer, String> readRec = new Tuple2<>();
        for (int i = 0; i < NUM_PAIRS_SHORT + 1; i++) {
            generator.next(rec);
            serializer.deserialize(readRec, inView);

            final int k1 = rec.f0;
            final String v1 = rec.f1;
            final int k2 = readRec.f0;
            final String v2 = readRec.f1;

            Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
        }
        Assert.fail("Expected an EOFException which did not occur.");
    } catch (EOFException eofex) {
        // expected
    } catch (Throwable t) {
        // unexpected
        Assert.fail("Unexpected Exception: " + t.getMessage());
    }

    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Example 19
Source File: ChannelViewsTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@Test
public void testWriteReadSmallRecords() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());

    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();

    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);

        int k1 = rec.f0;
        String v1 = rec.f1;

        int k2 = readRec.f0;
        String v2 = readRec.f1;

        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }

    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}
Example 20
Source File: ChannelViewsTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@Test
public void testWriteAndReadLongRecords() throws Exception {
    final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LONG_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
    final FileIOChannel.ID channel = this.ioManager.createChannel();
    final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

    // create the writer output view
    List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
    final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

    // write a number of pairs
    final Tuple2<Integer, String> rec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.serialize(rec, outView);
    }
    this.memoryManager.release(outView.close());

    // create the reader input view
    memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
    final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
    final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
    generator.reset();

    // read and re-generate all records and compare them
    final Tuple2<Integer, String> readRec = new Tuple2<>();
    for (int i = 0; i < NUM_PAIRS_LONG; i++) {
        generator.next(rec);
        serializer.deserialize(readRec, inView);

        final int k1 = rec.f0;
        final String v1 = rec.f1;
        final int k2 = readRec.f0;
        final String v2 = readRec.f1;

        Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
    }

    this.memoryManager.release(inView.close());
    reader.deleteChannel();
}