Java Code Examples for org.apache.flink.util.IOUtils#copyBytes()

The following examples show how to use org.apache.flink.util.IOUtils#copyBytes(). They are drawn from open-source projects; the source file and license are noted above each example.
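Before the individual examples, here is a minimal, self-contained sketch of the three copyBytes() overloads that appear below: (InputStream, OutputStream, int bufferSize, boolean close), (InputStream, OutputStream, boolean close), and (InputStream, OutputStream). The CopyBytesSketch class and the file names are illustrative placeholders only, not part of any of the projects quoted here.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.flink.util.IOUtils;

public class CopyBytesSketch {

	public static void main(String[] args) throws IOException {
		// Four-argument overload: explicit buffer size plus a flag deciding whether
		// copyBytes() closes the streams. Passing false leaves closing to the caller,
		// here the try-with-resources block (the BlobClient and ClassLoaderUtilsTest
		// examples below use this form).
		try (FileInputStream in = new FileInputStream("input.bin");
				FileOutputStream out = new FileOutputStream("copy.bin")) {
			IOUtils.copyBytes(in, out, 4096, false);
		}

		// Three-argument overload: default buffer size, explicit close flag
		// (true here, so both streams are closed once the copy finishes).
		IOUtils.copyBytes(new FileInputStream("copy.bin"),
				new FileOutputStream("copy2.bin"), true);

		// Two-argument overload: default buffer size; both streams are closed
		// when the copy completes.
		IOUtils.copyBytes(new FileInputStream("copy2.bin"),
				new FileOutputStream("copy3.bin"));
	}
}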
Example 1
Source File: BlobClient.java    From Flink-CEPplus with Apache License 2.0
/**
 * Uploads data from the given input stream to the BLOB server.
 *
 * @param jobId
 * 		the ID of the job the BLOB belongs to (or <tt>null</tt> if job-unrelated)
 * @param inputStream
 * 		the input stream to read the data from
 * @param blobType
 * 		whether the BLOB should become permanent or transient
 *
 * @return the computed BLOB key of the uploaded BLOB
 *
 * @throws IOException
 * 		thrown if an I/O error occurs while uploading the data to the BLOB server
 */
BlobKey putInputStream(@Nullable JobID jobId, InputStream inputStream, BlobKey.BlobType blobType)
		throws IOException {

	if (this.socket.isClosed()) {
		throw new IllegalStateException("BLOB Client is not connected. " +
				"Client has been shut down or encountered an error before.");
	}
	checkNotNull(inputStream);

	if (LOG.isDebugEnabled()) {
		LOG.debug("PUT BLOB stream to {}.", socket.getLocalSocketAddress());
	}

	try (BlobOutputStream os = new BlobOutputStream(jobId, blobType, socket)) {
		IOUtils.copyBytes(inputStream, os, BUFFER_SIZE, false);
		return os.finish();
	} catch (Throwable t) {
		BlobUtils.closeSilently(socket, LOG);
		throw new IOException("PUT operation failed: " + t.getMessage(), t);
	}
}
 
Example 2
Source File: FsJobArchivist.java    From Flink-CEPplus with Apache License 2.0
/**
 * Reads the given archive file and returns a {@link Collection} of contained {@link ArchivedJson}.
 *
 * @param file archive to extract
 * @return collection of archived jsons
 * @throws IOException if the file can't be opened, read or doesn't contain valid json
 */
public static Collection<ArchivedJson> getArchivedJsons(Path file) throws IOException {
	try (FSDataInputStream input = file.getFileSystem().open(file);
		ByteArrayOutputStream output = new ByteArrayOutputStream()) {
		IOUtils.copyBytes(input, output);

		JsonNode archive = mapper.readTree(output.toByteArray());

		Collection<ArchivedJson> archives = new ArrayList<>();
		for (JsonNode archivePart : archive.get(ARCHIVE)) {
			String path = archivePart.get(PATH).asText();
			String json = archivePart.get(JSON).asText();
			archives.add(new ArchivedJson(path, json));
		}
		return archives;
	}
}
 
Example 3
Source File: BlobClient.java    From flink with Apache License 2.0
/**
 * Uploads data from the given input stream to the BLOB server.
 *
 * @param jobId
 * 		the ID of the job the BLOB belongs to (or <tt>null</tt> if job-unrelated)
 * @param inputStream
 * 		the input stream to read the data from
 * @param blobType
 * 		whether the BLOB should become permanent or transient
 *
 * @return the computed BLOB key of the uploaded BLOB
 *
 * @throws IOException
 * 		thrown if an I/O error occurs while uploading the data to the BLOB server
 */
BlobKey putInputStream(@Nullable JobID jobId, InputStream inputStream, BlobKey.BlobType blobType)
		throws IOException {

	if (this.socket.isClosed()) {
		throw new IllegalStateException("BLOB Client is not connected. " +
				"Client has been shut down or encountered an error before.");
	}
	checkNotNull(inputStream);

	if (LOG.isDebugEnabled()) {
		LOG.debug("PUT BLOB stream to {}.", socket.getLocalSocketAddress());
	}

	try (BlobOutputStream os = new BlobOutputStream(jobId, blobType, socket)) {
		IOUtils.copyBytes(inputStream, os, BUFFER_SIZE, false);
		return os.finish();
	} catch (Throwable t) {
		BlobUtils.closeSilently(socket, LOG);
		throw new IOException("PUT operation failed: " + t.getMessage(), t);
	}
}
 
Example 4
Source File: FsJobArchivist.java    From flink with Apache License 2.0
/**
 * Reads the given archive file and returns a {@link Collection} of contained {@link ArchivedJson}.
 *
 * @param file archive to extract
 * @return collection of archived jsons
 * @throws IOException if the file can't be opened, read or doesn't contain valid json
 */
public static Collection<ArchivedJson> getArchivedJsons(Path file) throws IOException {
	try (FSDataInputStream input = file.getFileSystem().open(file);
		ByteArrayOutputStream output = new ByteArrayOutputStream()) {
		IOUtils.copyBytes(input, output);

		JsonNode archive = mapper.readTree(output.toByteArray());

		Collection<ArchivedJson> archives = new ArrayList<>();
		for (JsonNode archivePart : archive.get(ARCHIVE)) {
			String path = archivePart.get(PATH).asText();
			String json = archivePart.get(JSON).asText();
			archives.add(new ArchivedJson(path, json));
		}
		return archives;
	}
}
 
Example 5
Source File: FsJobArchivist.java    From flink with Apache License 2.0
/**
 * Reads the given archive file and returns a {@link Collection} of contained {@link ArchivedJson}.
 *
 * @param file archive to extract
 * @return collection of archived jsons
 * @throws IOException if the file can't be opened, read or doesn't contain valid json
 */
public static Collection<ArchivedJson> getArchivedJsons(Path file) throws IOException {
	try (FSDataInputStream input = file.getFileSystem().open(file);
		ByteArrayOutputStream output = new ByteArrayOutputStream()) {
		IOUtils.copyBytes(input, output);

		try {
			JsonNode archive = mapper.readTree(output.toByteArray());

			Collection<ArchivedJson> archives = new ArrayList<>();
			for (JsonNode archivePart : archive.get(ARCHIVE)) {
				String path = archivePart.get(PATH).asText();
				String json = archivePart.get(JSON).asText();
				archives.add(new ArchivedJson(path, json));
			}
			return archives;
		} catch (NullPointerException npe) {
			// occurs if the archive is empty or any of the expected fields are not present
			throw new IOException("Job archive (" + file.getPath() + ") did not conform to expected format.");
		}
	}
}
 
Example 6
Source File: PythonPlanBinder.java    From Flink-CEPplus with Apache License 2.0
private static void unzipPythonLibrary(Path targetDir) throws IOException {
	FileSystem targetFs = targetDir.getFileSystem();
	ClassLoader classLoader = PythonPlanBinder.class.getClassLoader();
	try (ZipInputStream zis = new ZipInputStream(classLoader.getResourceAsStream("python-source.zip"))) {
		ZipEntry entry = zis.getNextEntry();
		while (entry != null) {
			String fileName = entry.getName();
			Path newFile = new Path(targetDir, fileName);
			if (entry.isDirectory()) {
				targetFs.mkdirs(newFile);
			} else {
				try {
					LOG.debug("Unzipping to {}.", newFile);
					FSDataOutputStream fsDataOutputStream = targetFs.create(newFile, FileSystem.WriteMode.NO_OVERWRITE);
					IOUtils.copyBytes(zis, fsDataOutputStream, false);
				} catch (Exception e) {
					zis.closeEntry();
					throw new IOException("Failed to unzip flink python library.", e);
				}
			}

			zis.closeEntry();
			entry = zis.getNextEntry();
		}
		zis.closeEntry();
	}
}
 
Example 7
Source File: OrcColumnarRowSplitReaderTest.java    From flink with Apache License 2.0
private String getPath(String fileName) {
	try {
		File file = TEMPORARY_FOLDER.newFile();
		IOUtils.copyBytes(
				getClass().getClassLoader().getResource(fileName).openStream(),
				new FileOutputStream(file),
				true);
		return file.getPath();
	} catch (IOException e) {
		throw new RuntimeException(e);
	}
}
 
Example 8
Source File: ClassLoaderUtilsTest.java    From Flink-CEPplus with Apache License 2.0
private static void copyClassFileToJar(final Class<?> clazz, final JarOutputStream jarOutputStream) throws IOException {
	try (InputStream classInputStream = clazz.getResourceAsStream(clazz.getSimpleName() + ".class")) {
		IOUtils.copyBytes(classInputStream, jarOutputStream, 128, false);
	}
	jarOutputStream.closeEntry();
}
 
Example 9
Source File: ClassLoaderUtilsTest.java    From flink with Apache License 2.0
private static void copyClassFileToJar(final Class<?> clazz, final JarOutputStream jarOutputStream) throws IOException {
	try (InputStream classInputStream = clazz.getResourceAsStream(clazz.getSimpleName() + ".class")) {
		IOUtils.copyBytes(classInputStream, jarOutputStream, 128, false);
	}
	jarOutputStream.closeEntry();
}
 