Java Code Examples for org.apache.flink.core.fs.FileSystem#mkdirs()

The following examples show how to use org.apache.flink.core.fs.FileSystem#mkdirs(), which creates the given directory together with any missing parent directories. Each example is taken from an open-source project; the source file and the project it comes from are noted above the code.
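Before the project examples, here is a minimal, self-contained sketch of the call itself. It is an illustration only: the class name, the main method, and the path "file:///tmp/flink-example-dir" are assumptions made for this sketch and do not come from any of the projects below.

import java.io.IOException;

import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;

public class MkdirsExample {

	public static void main(String[] args) throws IOException {
		// Illustrative target path; any scheme with a registered Flink FileSystem (file://, hdfs://, ...) works.
		Path dir = new Path("file:///tmp/flink-example-dir");

		// Resolve the FileSystem implementation that backs the path's scheme.
		FileSystem fs = dir.getFileSystem();

		// Create the directory, including any missing parent directories.
		boolean created = fs.mkdirs(dir);
		System.out.println("Directory ready: " + created);
	}
}

As the examples below show, the same call pattern is used against local temporary directories as well as HDFS paths obtained from a MiniDFSCluster.
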
Example 1
Source File: DistributedCacheDfsTest.java    From Flink-CEPplus with Apache License 2.0
@BeforeClass
public static void setup() throws Exception {
	File dataDir = TEMP_FOLDER.newFolder();

	conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dataDir.getAbsolutePath());
	MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
	hdfsCluster = builder.build();

	String hdfsURI = "hdfs://"
		+ NetUtils.hostAndPortToUrlString(hdfsCluster.getURI().getHost(), hdfsCluster.getNameNodePort())
		+ "/";

	FileSystem dfs = FileSystem.get(new URI(hdfsURI));
	testFile = writeFile(dfs, dfs.getHomeDirectory(), "testFile");

	testDir = new Path(dfs.getHomeDirectory(), "testDir");
	dfs.mkdirs(testDir);
	writeFile(dfs, testDir, "testFile1");
	writeFile(dfs, testDir, "testFile2");
}
 
Example 2
Source File: DistributedCacheDfsTest.java    From flink with Apache License 2.0
@BeforeClass
public static void setup() throws Exception {
	File dataDir = TEMP_FOLDER.newFolder();

	conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dataDir.getAbsolutePath());
	MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
	hdfsCluster = builder.build();

	String hdfsURI = "hdfs://"
		+ NetUtils.hostAndPortToUrlString(hdfsCluster.getURI().getHost(), hdfsCluster.getNameNodePort())
		+ "/";

	FileSystem dfs = FileSystem.get(new URI(hdfsURI));
	testFile = writeFile(dfs, dfs.getHomeDirectory(), "testFile");

	testDir = new Path(dfs.getHomeDirectory(), "testDir");
	dfs.mkdirs(testDir);
	writeFile(dfs, testDir, "testFile1");
	writeFile(dfs, testDir, "testFile2");
}
 
Example 3
Source File: FileUtils.java    From flink with Apache License 2.0
public static Path expandDirectory(Path file, Path targetDirectory) throws IOException {
	FileSystem sourceFs = file.getFileSystem();
	FileSystem targetFs = targetDirectory.getFileSystem();
	Path rootDir = null;
	try (ZipInputStream zis = new ZipInputStream(sourceFs.open(file))) {
		ZipEntry entry;
		while ((entry = zis.getNextEntry()) != null) {
			Path relativePath = new Path(entry.getName());
			if (rootDir == null) {
				// the first entry contains the name of the original directory that was zipped
				rootDir = relativePath;
			}

			Path newFile = new Path(targetDirectory, relativePath);
			if (entry.isDirectory()) {
				targetFs.mkdirs(newFile);
			} else {
				try (FSDataOutputStream fileStream = targetFs.create(newFile, FileSystem.WriteMode.NO_OVERWRITE)) {
					// do not close the streams here as it prevents access to further zip entries
					IOUtils.copyBytes(zis, fileStream, false);
				}
			}
			zis.closeEntry();
		}
	}
	return new Path(targetDirectory, rootDir);
}
 
Example 4
Source File: FileSystemBlobStore.java    From flink with Apache License 2.0
public FileSystemBlobStore(FileSystem fileSystem, String storagePath) throws IOException {
	this.fileSystem = checkNotNull(fileSystem);
	this.basePath = checkNotNull(storagePath) + "/" + BLOB_PATH_NAME;

	LOG.info("Creating highly available BLOB storage directory at {}", basePath);

	fileSystem.mkdirs(new Path(basePath));
	LOG.debug("Created highly available BLOB storage directory at {}", basePath);
}
 
Example 5
Source File: HDFSTest.java    From flink with Apache License 2.0
/**
 * Test that {@link FileUtils#deletePathIfEmpty(FileSystem, Path)} deletes the path if it is
 * empty. A path can only be empty if it is a directory which does not contain any
 * files/directories.
 */
@Test
public void testDeletePathIfEmpty() throws IOException {
	final Path basePath = new Path(hdfsURI);
	final Path directory = new Path(basePath, UUID.randomUUID().toString());
	final Path directoryFile = new Path(directory, UUID.randomUUID().toString());
	final Path singleFile = new Path(basePath, UUID.randomUUID().toString());

	FileSystem fs = basePath.getFileSystem();

	fs.mkdirs(directory);

	byte[] data = "HDFSTest#testDeletePathIfEmpty".getBytes(ConfigConstants.DEFAULT_CHARSET);

	for (Path file: Arrays.asList(singleFile, directoryFile)) {
		org.apache.flink.core.fs.FSDataOutputStream outputStream = fs.create(file, FileSystem.WriteMode.OVERWRITE);
		outputStream.write(data);
		outputStream.close();
	}

	// verify that the files have been created
	assertTrue(fs.exists(singleFile));
	assertTrue(fs.exists(directoryFile));

	// delete the single file
	assertFalse(FileUtils.deletePathIfEmpty(fs, singleFile));
	assertTrue(fs.exists(singleFile));

	// try to delete the non-empty directory
	assertFalse(FileUtils.deletePathIfEmpty(fs, directory));
	assertTrue(fs.exists(directory));

	// delete the file contained in the directory
	assertTrue(fs.delete(directoryFile, false));

	// now the deletion should work
	assertTrue(FileUtils.deletePathIfEmpty(fs, directory));
	assertFalse(fs.exists(directory));
}
 
Example 6
Source File: FileUtils.java    From flink with Apache License 2.0
private static void internalCopyDirectory(Path sourcePath, Path targetPath, boolean executable, FileSystem sFS, FileSystem tFS) throws IOException {
	tFS.mkdirs(targetPath);
	FileStatus[] contents = sFS.listStatus(sourcePath);
	for (FileStatus content : contents) {
		String distPath = content.getPath().toString();
		if (content.isDir()) {
			if (distPath.endsWith("/")) {
				distPath = distPath.substring(0, distPath.length() - 1);
			}
		}
		String localPath = targetPath + distPath.substring(distPath.lastIndexOf("/"));
		copy(content.getPath(), new Path(localPath), executable);
	}
}
 
Example 7
Source File: FileSystemBlobStore.java    From flink with Apache License 2.0
public FileSystemBlobStore(FileSystem fileSystem, String storagePath) throws IOException {
	this.fileSystem = checkNotNull(fileSystem);
	this.basePath = checkNotNull(storagePath) + "/blob";

	LOG.info("Creating highly available BLOB storage directory at {}", basePath);

	fileSystem.mkdirs(new Path(basePath));
	LOG.debug("Created highly available BLOB storage directory at {}", basePath);
}
 
Example 8
Source File: FileUtils.java    From Flink-CEPplus with Apache License 2.0
private static void internalCopyDirectory(Path sourcePath, Path targetPath, boolean executable, FileSystem sFS, FileSystem tFS) throws IOException {
	tFS.mkdirs(targetPath);
	FileStatus[] contents = sFS.listStatus(sourcePath);
	for (FileStatus content : contents) {
		String distPath = content.getPath().toString();
		if (content.isDir()) {
			if (distPath.endsWith("/")) {
				distPath = distPath.substring(0, distPath.length() - 1);
			}
		}
		String localPath = targetPath + distPath.substring(distPath.lastIndexOf("/"));
		copy(content.getPath(), new Path(localPath), executable);
	}
}
 
Example 9
Source File: FileSystemBlobStore.java    From Flink-CEPplus with Apache License 2.0
public FileSystemBlobStore(FileSystem fileSystem, String storagePath) throws IOException {
	this.fileSystem = checkNotNull(fileSystem);
	this.basePath = checkNotNull(storagePath) + "/blob";

	LOG.info("Creating highly available BLOB storage directory at {}", basePath);

	fileSystem.mkdirs(new Path(basePath));
	LOG.debug("Created highly available BLOB storage directory at {}", basePath);
}
 
Example 10
Source File: HDFSTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Test that {@link FileUtils#deletePathIfEmpty(FileSystem, Path)} deletes the path if it is
 * empty. A path can only be empty if it is a directory which does not contain any
 * files/directories.
 */
@Test
public void testDeletePathIfEmpty() throws IOException {
	final Path basePath = new Path(hdfsURI);
	final Path directory = new Path(basePath, UUID.randomUUID().toString());
	final Path directoryFile = new Path(directory, UUID.randomUUID().toString());
	final Path singleFile = new Path(basePath, UUID.randomUUID().toString());

	FileSystem fs = basePath.getFileSystem();

	fs.mkdirs(directory);

	byte[] data = "HDFSTest#testDeletePathIfEmpty".getBytes(ConfigConstants.DEFAULT_CHARSET);

	for (Path file: Arrays.asList(singleFile, directoryFile)) {
		org.apache.flink.core.fs.FSDataOutputStream outputStream = fs.create(file, FileSystem.WriteMode.OVERWRITE);
		outputStream.write(data);
		outputStream.close();
	}

	// verify that the files have been created
	assertTrue(fs.exists(singleFile));
	assertTrue(fs.exists(directoryFile));

	// delete the single file
	assertFalse(FileUtils.deletePathIfEmpty(fs, singleFile));
	assertTrue(fs.exists(singleFile));

	// try to delete the non-empty directory
	assertFalse(FileUtils.deletePathIfEmpty(fs, directory));
	assertTrue(fs.exists(directory));

	// delete the file contained in the directory
	assertTrue(fs.delete(directoryFile, false));

	// now the deletion should work
	assertTrue(FileUtils.deletePathIfEmpty(fs, directory));
	assertFalse(fs.exists(directory));
}
 
Example 11
Source File: PythonPlanBinder.java    From Flink-CEPplus with Apache License 2.0
private static void unzipPythonLibrary(Path targetDir) throws IOException {
	FileSystem targetFs = targetDir.getFileSystem();
	ClassLoader classLoader = PythonPlanBinder.class.getClassLoader();
	try (ZipInputStream zis = new ZipInputStream(classLoader.getResourceAsStream("python-source.zip"))) {
		ZipEntry entry = zis.getNextEntry();
		while (entry != null) {
			String fileName = entry.getName();
			Path newFile = new Path(targetDir, fileName);
			if (entry.isDirectory()) {
				targetFs.mkdirs(newFile);
			} else {
				try {
					LOG.debug("Unzipping to {}.", newFile);
					FSDataOutputStream fsDataOutputStream = targetFs.create(newFile, FileSystem.WriteMode.NO_OVERWRITE);
					IOUtils.copyBytes(zis, fsDataOutputStream, false);
				} catch (Exception e) {
					zis.closeEntry();
					throw new IOException("Failed to unzip flink python library.", e);
				}
			}

			zis.closeEntry();
			entry = zis.getNextEntry();
		}
		zis.closeEntry();
	}
}
 
Example 12
Source File: FileUtils.java    From Flink-CEPplus with Apache License 2.0
public static Path expandDirectory(Path file, Path targetDirectory) throws IOException {
	FileSystem sourceFs = file.getFileSystem();
	FileSystem targetFs = targetDirectory.getFileSystem();
	Path rootDir = null;
	try (ZipInputStream zis = new ZipInputStream(sourceFs.open(file))) {
		ZipEntry entry;
		while ((entry = zis.getNextEntry()) != null) {
			Path relativePath = new Path(entry.getName());
			if (rootDir == null) {
				// the first entry contains the name of the original directory that was zipped
				rootDir = relativePath;
			}

			Path newFile = new Path(targetDirectory, relativePath);
			if (entry.isDirectory()) {
				targetFs.mkdirs(newFile);
			} else {
				try (FSDataOutputStream fileStream = targetFs.create(newFile, FileSystem.WriteMode.NO_OVERWRITE)) {
					// do not close the streams here as it prevents access to further zip entries
					IOUtils.copyBytes(zis, fileStream, false);
				}
			}
			zis.closeEntry();
		}
	}
	return new Path(targetDirectory, rootDir);
}
 
Example 13
Source File: PythonEnvUtils.java    From flink with Apache License 2.0
/**
 * Prepares PythonEnvironment to start python process.
 *
 * @param pythonLibFiles The dependent Python files.
 * @return PythonEnvironment the Python environment which will be executed in Python process.
 */
public static PythonEnvironment preparePythonEnvironment(List<Path> pythonLibFiles) throws IOException {
	PythonEnvironment env = new PythonEnvironment();

	// 1. setup temporary local directory for the user files
	String tmpDir = System.getProperty("java.io.tmpdir") +
		File.separator + "pyflink" + File.separator + UUID.randomUUID();

	Path tmpDirPath = new Path(tmpDir);
	FileSystem fs = tmpDirPath.getFileSystem();
	if (fs.exists(tmpDirPath)) {
		fs.delete(tmpDirPath, true);
	}
	fs.mkdirs(tmpDirPath);

	env.workingDirectory = tmpDirPath.toString();

	StringBuilder pythonPathEnv = new StringBuilder();

	pythonPathEnv.append(env.workingDirectory);

	// 2. create symbolic links in the working directory for the pyflink dependency libs.
	List<java.nio.file.Path> pythonLibs = getLibFiles(FLINK_OPT_DIR_PYTHON);
	for (java.nio.file.Path libPath : pythonLibs) {
		java.nio.file.Path symbolicLinkFilePath = FileSystems.getDefault().getPath(env.workingDirectory,
			libPath.getFileName().toString());
		createSymbolicLinkForPyflinkLib(libPath, symbolicLinkFilePath);
		pythonPathEnv.append(File.pathSeparator);
		pythonPathEnv.append(symbolicLinkFilePath.toString());
	}

	// 3. copy relevant python files to tmp dir and set them in PYTHONPATH.
	for (Path pythonFile : pythonLibFiles) {
		String sourceFileName = pythonFile.getName();
		Path targetPath = new Path(tmpDirPath, sourceFileName);
		FileUtils.copy(pythonFile, targetPath, true);
		String targetFileNames = Files.walk(Paths.get(targetPath.toString()))
			.filter(Files::isRegularFile)
			.filter(f -> !f.toString().endsWith(".py"))
			.map(java.nio.file.Path::toString)
			.collect(Collectors.joining(File.pathSeparator));
		pythonPathEnv.append(File.pathSeparator);
		pythonPathEnv.append(targetFileNames);
	}

	// 4. add the parent directory to PYTHONPATH for files suffixed with .py
	String pyFileParents = Files.walk(Paths.get(tmpDirPath.toString()))
		.filter(file -> file.toString().endsWith(".py"))
		.map(java.nio.file.Path::getParent)
		.distinct()
		.map(java.nio.file.Path::toString)
		.collect(Collectors.joining(File.pathSeparator));
	if (!StringUtils.isNullOrWhitespaceOnly(pyFileParents)) {
		pythonPathEnv.append(File.pathSeparator);
		pythonPathEnv.append(pyFileParents);
	}

	env.pythonPath = pythonPathEnv.toString();
	return env;
}