Java Code Examples for org.apache.flink.util.InstantiationUtil#serializeObject()

The following examples show how to use org.apache.flink.util.InstantiationUtil#serializeObject(). They are taken from open-source projects; the source file, the originating project, and that project's license are noted above each example.
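
As a quick orientation before the examples, a minimal round trip with this utility could look like the sketch below. The class name and payload are placeholders, not taken from the examples that follow; the only assumption is that Flink's flink-core module is on the classpath.

import org.apache.flink.util.InstantiationUtil;

public class SerializeRoundTrip {

	public static void main(String[] args) throws Exception {
		// Any java.io.Serializable value can be serialized to a byte[] ...
		String payload = "hello";
		byte[] bytes = InstantiationUtil.serializeObject(payload);

		// ... and read back with a class loader that can see the payload's class.
		String copy = InstantiationUtil.deserializeObject(bytes, SerializeRoundTrip.class.getClassLoader());
		System.out.println(copy.equals(payload)); // prints: true
	}
}
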
Example 1
Source File: FileCacheReadsFromBlobTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testFileDownloadedFromBlob() throws Exception {
	JobID jobID = new JobID();
	ExecutionAttemptID attemptID = new ExecutionAttemptID();

	final String fileName = "test_file";
	// copy / create the file
	final DistributedCache.DistributedCacheEntry entry = new DistributedCache.DistributedCacheEntry(
		fileName,
		false,
		InstantiationUtil.serializeObject(permanentBlobKey));
	Future<Path> copyResult = fileCache.createTmpFile(fileName, entry, jobID, attemptID);

	final Path dstPath = copyResult.get();
	final String actualContent = Files.toString(new File(dstPath.toUri()), StandardCharsets.UTF_8);
	assertTrue(dstPath.getFileSystem().exists(dstPath));
	assertEquals(testFileContent, actualContent);
}
 
Example 2
Source File: FileCacheReadsFromBlobTest.java    From flink with Apache License 2.0
@Test
public void testFileDownloadedFromBlob() throws Exception {
	JobID jobID = new JobID();
	ExecutionAttemptID attemptID = new ExecutionAttemptID();

	final String fileName = "test_file";
	// copy / create the file
	final DistributedCache.DistributedCacheEntry entry = new DistributedCache.DistributedCacheEntry(
		fileName,
		false,
		InstantiationUtil.serializeObject(permanentBlobKey));
	Future<Path> copyResult = fileCache.createTmpFile(fileName, entry, jobID, attemptID);

	final Path dstPath = copyResult.get();
	final String actualContent = Files.toString(new File(dstPath.toUri()), StandardCharsets.UTF_8);
	assertTrue(dstPath.getFileSystem().exists(dstPath));
	assertEquals(testFileContent, actualContent);
}
 
Example 3
Source File: BaseRowSerializer.java    From flink with Apache License 2.0
@Override
public void writeSnapshot(DataOutputView out) throws IOException {
	out.writeInt(previousTypes.length);
	DataOutputViewStream stream = new DataOutputViewStream(out);
	for (LogicalType previousType : previousTypes) {
		InstantiationUtil.serializeObject(stream, previousType);
	}
	nestedSerializersSnapshotDelegate.writeNestedSerializerSnapshots(out);
}
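
For context, a matching read side could look roughly like the following sketch. It is illustrative only: the method name, the DataInputView parameter, and the class loader argument are assumptions, not the actual BaseRowSerializer code.

private static LogicalType[] readPreviousTypes(DataInputView in, ClassLoader classLoader)
		throws IOException, ClassNotFoundException {
	// Read the count written by writeSnapshot, then one serialized LogicalType per entry.
	int length = in.readInt();
	LogicalType[] types = new LogicalType[length];
	DataInputViewStream stream = new DataInputViewStream(in);
	for (int i = 0; i < length; i++) {
		types[i] = InstantiationUtil.deserializeObject(stream, classLoader);
	}
	return types;
}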
 
Example 4
Source File: CheckpointCoordinatorTest.java    From Flink-CEPplus with Apache License 2.0
public static Tuple2<byte[], List<long[]>> serializeTogetherAndTrackOffsets(
		List<List<? extends Serializable>> serializables) throws IOException {

	List<long[]> offsets = new ArrayList<>(serializables.size());
	List<byte[]> serializedGroupValues = new ArrayList<>();

	int runningGroupsOffset = 0;
	for (List<? extends Serializable> list : serializables) {

		long[] currentOffsets = new long[list.size()];
		offsets.add(currentOffsets);

		for (int i = 0; i < list.size(); ++i) {
			currentOffsets[i] = runningGroupsOffset;
			byte[] serializedValue = InstantiationUtil.serializeObject(list.get(i));
			serializedGroupValues.add(serializedValue);
			runningGroupsOffset += serializedValue.length;
		}
	}

	// write all generated values into a single byte array, indexed by the offsets tracked above
	byte[] allSerializedValuesConcatenated = new byte[runningGroupsOffset];
	runningGroupsOffset = 0;
	for (byte[] serializedGroupValue : serializedGroupValues) {
		System.arraycopy(
				serializedGroupValue,
				0,
				allSerializedValuesConcatenated,
				runningGroupsOffset,
				serializedGroupValue.length);
		runningGroupsOffset += serializedGroupValue.length;
	}
	return new Tuple2<>(allSerializedValuesConcatenated, offsets);
}
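
Because each element was serialized into its own self-contained stream, a single value can later be read straight from its tracked offset, for example with a sketch like the one below (this helper is illustrative and not part of the test):

static Object deserializeAt(byte[] allSerializedValues, long offset, ClassLoader classLoader)
		throws IOException, ClassNotFoundException {
	// Each chunk starts with its own serialization header, so deserialization
	// stops after exactly one object and the remaining bytes are ignored.
	ByteArrayInputStream in = new ByteArrayInputStream(
			allSerializedValues, (int) offset, allSerializedValues.length - (int) offset);
	return InstantiationUtil.deserializeObject(in, classLoader);
}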
 
Example 5
Source File: ZooKeeperStateHandleStore.java    From flink with Apache License 2.0
/**
 * Replaces a state handle in ZooKeeper and discards the old state handle.
 *
 * @param pathInZooKeeper Destination path in ZooKeeper (expected to exist and start with a '/')
 * @param expectedVersion Expected version of the node to replace
 * @param state           The new state to replace the old one
 * @throws Exception If a ZooKeeper or state handle operation fails
 */
public void replace(String pathInZooKeeper, int expectedVersion, T state) throws Exception {
	checkNotNull(pathInZooKeeper, "Path in ZooKeeper");
	checkNotNull(state, "State");

	final String path = normalizePath(pathInZooKeeper);

	RetrievableStateHandle<T> oldStateHandle = get(path, false);

	RetrievableStateHandle<T> newStateHandle = storage.store(state);

	boolean success = false;

	try {
		// Serialize the new state handle. This writes the state to the backend.
		byte[] serializedStateHandle = InstantiationUtil.serializeObject(newStateHandle);

		// Replace state handle in ZooKeeper.
		client.setData()
				.withVersion(expectedVersion)
				.forPath(path, serializedStateHandle);
		success = true;
	} catch (KeeperException.NoNodeException e) {
		throw new ConcurrentModificationException("ZooKeeper unexpectedly modified", e);
	} finally {
		if (success) {
			oldStateHandle.discardState();
		} else {
			newStateHandle.discardState();
		}
	}

}
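
For the reverse direction, the bytes stored under the ZooKeeper node are turned back into a state handle by deserializing them, roughly as in the sketch below (illustrative only; the real get() implementation differs in details such as locking and error handling):

byte[] data = client.getData().forPath(path);
RetrievableStateHandle<T> stateHandle =
		InstantiationUtil.deserializeObject(data, Thread.currentThread().getContextClassLoader());
T state = stateHandle.retrieveState();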
 
Example 6
Source File: ZooKeeperStateHandleStore.java    From Flink-CEPplus with Apache License 2.0
/**
 * Replaces a state handle in ZooKeeper and discards the old state handle.
 *
 * @param pathInZooKeeper Destination path in ZooKeeper (expected to exist and start with a '/')
 * @param expectedVersion Expected version of the node to replace
 * @param state           The new state to replace the old one
 * @throws Exception If a ZooKeeper or state handle operation fails
 */
public void replace(String pathInZooKeeper, int expectedVersion, T state) throws Exception {
	checkNotNull(pathInZooKeeper, "Path in ZooKeeper");
	checkNotNull(state, "State");

	final String path = normalizePath(pathInZooKeeper);

	RetrievableStateHandle<T> oldStateHandle = get(path, false);

	RetrievableStateHandle<T> newStateHandle = storage.store(state);

	boolean success = false;

	try {
		// Serialize the new state handle. This writes the state to the backend.
		byte[] serializedStateHandle = InstantiationUtil.serializeObject(newStateHandle);

		// Replace state handle in ZooKeeper.
		client.setData()
				.withVersion(expectedVersion)
				.forPath(path, serializedStateHandle);
		success = true;
	} catch (KeeperException.NoNodeException e) {
		throw new ConcurrentModificationException("ZooKeeper unexpectedly modified", e);
	} finally {
		if (success) {
			oldStateHandle.discardState();
		} else {
			newStateHandle.discardState();
		}
	}

}
 
Example 7
Source File: EncodingUtils.java    From flink with Apache License 2.0
public static String encodeObjectToString(Serializable obj) {
	try {
		final byte[] bytes = InstantiationUtil.serializeObject(obj);
		return new String(BASE64_ENCODER.encode(bytes), UTF_8);
	} catch (Exception e) {
		throw new ValidationException(
			"Unable to serialize object '" + obj.toString() + "' of class '" + obj.getClass().getName() + "'.");
	}
}
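
A hedged sketch of the reverse operation, decoding the Base64 string back into an object, might look as follows. The method name and signature are illustrative and may differ from the real EncodingUtils#decodeStringToObject.

public static <T extends Serializable> T decodeStringToObject(String base64String, ClassLoader classLoader) {
	try {
		// Undo the Base64 encoding, then restore the object with the given class loader.
		final byte[] bytes = java.util.Base64.getDecoder().decode(base64String.getBytes(UTF_8));
		return InstantiationUtil.deserializeObject(bytes, classLoader);
	} catch (Exception e) {
		throw new ValidationException(
			"Unable to deserialize string '" + base64String + "'.");
	}
}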
 
Example 8
Source File: JobGraph.java    From flink with Apache License 2.0
public void setUserArtifactBlobKey(String entryName, PermanentBlobKey blobKey) throws IOException {
	byte[] serializedBlobKey;
	serializedBlobKey = InstantiationUtil.serializeObject(blobKey);

	userArtifacts.computeIfPresent(entryName, (key, originalEntry) -> new DistributedCache.DistributedCacheEntry(
		originalEntry.filePath,
		originalEntry.isExecutable,
		serializedBlobKey,
		originalEntry.isZipped
	));
}
 
Example 9
Source File: RowDataSerializer.java    From flink with Apache License 2.0
@Override
public void writeSnapshot(DataOutputView out) throws IOException {
	out.writeInt(previousTypes.length);
	DataOutputViewStream stream = new DataOutputViewStream(out);
	for (LogicalType previousType : previousTypes) {
		InstantiationUtil.serializeObject(stream, previousType);
	}
	nestedSerializersSnapshotDelegate.writeNestedSerializerSnapshots(out);
}
 
Example 10
Source File: CheckpointCoordinatorTestingUtils.java    From flink with Apache License 2.0
static Tuple2<byte[], List<long[]>> serializeTogetherAndTrackOffsets(
	List<List<? extends Serializable>> serializables) throws IOException {

	List<long[]> offsets = new ArrayList<>(serializables.size());
	List<byte[]> serializedGroupValues = new ArrayList<>();

	int runningGroupsOffset = 0;
	for (List<? extends Serializable> list : serializables) {

		long[] currentOffsets = new long[list.size()];
		offsets.add(currentOffsets);

		for (int i = 0; i < list.size(); ++i) {
			currentOffsets[i] = runningGroupsOffset;
			byte[] serializedValue = InstantiationUtil.serializeObject(list.get(i));
			serializedGroupValues.add(serializedValue);
			runningGroupsOffset += serializedValue.length;
		}
	}

	// write all generated values into a single byte array, indexed by the offsets tracked above
	byte[] allSerializedValuesConcatenated = new byte[runningGroupsOffset];
	runningGroupsOffset = 0;
	for (byte[] serializedGroupValue : serializedGroupValues) {
		System.arraycopy(
			serializedGroupValue,
			0,
			allSerializedValuesConcatenated,
			runningGroupsOffset,
			serializedGroupValue.length);
		runningGroupsOffset += serializedGroupValue.length;
	}
	return new Tuple2<>(allSerializedValuesConcatenated, offsets);
}
 
Example 11
Source File: AbstractParameterToolTest.java    From flink with Apache License 2.0
protected void validate(AbstractParameterTool parameter) {
	ClosureCleaner.ensureSerializable(parameter);
	internalValidate(parameter);

	// -------- test behaviour after serialization ------------
	try {
		byte[] b = InstantiationUtil.serializeObject(parameter);
		final AbstractParameterTool copy = InstantiationUtil.deserializeObject(b, getClass().getClassLoader());
		internalValidate(copy);
	} catch (IOException | ClassNotFoundException e) {
		throw new RuntimeException(e);
	}
}
 
Example 12
Source File: SortedMapSerializerSnapshot.java    From flink with Apache License 2.0
@Override
public void writeSnapshot(DataOutputView out) throws IOException {
	checkState(comparator != null, "Comparator cannot be null.");
	InstantiationUtil.serializeObject(new DataOutputViewStream(out), comparator);
	nestedSerializersSnapshotDelegate.writeNestedSerializerSnapshots(out);
}
 
Example 13
Source File: JobMasterTest.java    From flink with Apache License 2.0
/**
 * Tests the updateGlobalAggregate functionality.
 */
@Test
public void testJobMasterAggregatesValuesCorrectly() throws Exception {
	final JobMaster jobMaster = createJobMaster(
		configuration,
		jobGraph,
		haServices,
		new TestingJobManagerSharedServicesBuilder().build(),
		heartbeatServices);

	CompletableFuture<Acknowledge> startFuture = jobMaster.start(jobMasterId);
	final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class);

	try {
		// wait for the start to complete
		startFuture.get(testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS);

		CompletableFuture<Object> updateAggregateFuture;

		AggregateFunction<Integer, Integer, Integer> aggregateFunction = createAggregateFunction();

		ClosureCleaner.clean(aggregateFunction, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
		byte[] serializedAggregateFunction = InstantiationUtil.serializeObject(aggregateFunction);

		updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 1, serializedAggregateFunction);
		assertThat(updateAggregateFuture.get(), equalTo(1));

		updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 2, serializedAggregateFunction);
		assertThat(updateAggregateFuture.get(), equalTo(3));

		updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 3, serializedAggregateFunction);
		assertThat(updateAggregateFuture.get(), equalTo(6));

		updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 4, serializedAggregateFunction);
		assertThat(updateAggregateFuture.get(), equalTo(10));

		updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg2", 10, serializedAggregateFunction);
		assertThat(updateAggregateFuture.get(), equalTo(10));

		updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg2", 23, serializedAggregateFunction);
		assertThat(updateAggregateFuture.get(), equalTo(33));

	} finally {
		RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
	}
}
 
Example 14
Source File: ArrayDataSerializer.java    From flink with Apache License 2.0
@Override
public void writeSnapshot(DataOutputView out) throws IOException {
	DataOutputViewStream outStream = new DataOutputViewStream(out);
	InstantiationUtil.serializeObject(outStream, previousType);
	InstantiationUtil.serializeObject(outStream, previousEleSer);
}
 
Example 15
Source File: MockSplitEnumeratorCheckpointSerializer.java    From flink with Apache License 2.0
@Override
public byte[] serialize(Set<MockSourceSplit> obj) throws IOException {
	return InstantiationUtil.serializeObject(new ArrayList<>(obj));
}
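
A matching deserialize method could be sketched as follows. This is illustrative only; the actual serializer in Flink may handle versions and exceptions differently.

@Override
public Set<MockSourceSplit> deserialize(int version, byte[] serialized) throws IOException {
	try {
		// Restore the ArrayList written by serialize() and expose it as a set again.
		List<MockSourceSplit> splits =
				InstantiationUtil.deserializeObject(serialized, getClass().getClassLoader());
		return new HashSet<>(splits);
	} catch (ClassNotFoundException e) {
		throw new IOException("Failed to deserialize the enumerator checkpoint.", e);
	}
}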
 
Example 16
Source File: BlobServerConnection.java    From Flink-CEPplus with Apache License 2.0
/**
 * Writes to the output stream the error return code, and the given exception in serialized form.
 *
 * @param out The output stream to write to.
 * @param t The exception to send.
 * @throws IOException Thrown, if the output stream could not be written to.
 */
private static void writeErrorToStream(OutputStream out, Throwable t) throws IOException {
	byte[] bytes = InstantiationUtil.serializeObject(t);
	out.write(RETURN_ERROR);
	writeLength(bytes.length, out);
	out.write(bytes);
}