Java Code Examples for org.apache.hadoop.fs.FileSystem.getHomeDirectory()

The following are Java code examples showing how to use the getHomeDirectory() method of the org.apache.hadoop.fs.FileSystem class. You can vote up the examples you find useful; your votes help us surface the best examples in our system.
Example 1
Project: TensorFlowOnYARN   File: Utils.java   Source Code and License Vote up 6 votes
/**
 * Copies a local file into a per-application directory under the user's
 * home directory on the given file system.
 *
 * @param fs          destination file system
 * @param appId       application id used to namespace the destination
 * @param srcPath     local source path
 * @param dstFileName file name to create at the destination
 * @return the destination path on the DFS
 * @throws IOException if the copy fails
 */
public static Path copyLocalFileToDfs(FileSystem fs, String appId,
    Path srcPath, String dstFileName) throws IOException {
  String relative = Constants.DEFAULT_APP_NAME + Path.SEPARATOR + appId
      + Path.SEPARATOR + dstFileName;
  Path dstPath = new Path(fs.getHomeDirectory(), relative);
  LOG.info("Copying " + srcPath + " to " + dstPath);
  fs.copyFromLocalFile(srcPath, dstPath);
  return dstPath;
}
 
Example 2
Project: monarch   File: HDFSQuasiService.java   Source Code and License Vote up 6 votes
/**
 * Counts the files directly under {@code <home>/<storeBaseDir>/<tableName>}.
 * Each file name is printed as it is encountered. Listing is non-recursive.
 *
 * @param storeBaseDir store base directory, relative to the user's home
 * @param tableName    table directory name under the store base
 * @return number of files found, or 0 if the path is absent or an I/O error occurs
 */
public int getFilesCount(String storeBaseDir, String tableName) {
  int count = 0;
  try {
    FileSystem fs = FileSystem.get(conf);
    Path tablePath =
        new Path(new Path(fs.getHomeDirectory(), storeBaseDir), tableName);
    if (fs.exists(tablePath)) {
      RemoteIterator<LocatedFileStatus> it = fs.listFiles(tablePath, false);
      while (it.hasNext()) {
        count++;
        System.out.println("File name is " + it.next().getPath());
      }
    }
  } catch (IOException e) {
    // Best-effort helper: on failure, report whatever was counted (0).
    e.printStackTrace();
  }
  return count;
}
 
Example 3
Project: monarch   File: HDFSQuasiService.java   Source Code and License Vote up 6 votes
/**
 * Reads all ORC records from the files directly under
 * {@code <home>/<storeBaseDir>/<tableName>} (non-recursive listing).
 *
 * @param storeBaseDir store base directory, relative to the user's home
 * @param tableName    table directory name under the store base
 * @return all records read; empty if the path is absent or an I/O error occurs
 * @throws IOException declared for API compatibility; I/O errors are currently
 *                     caught and printed, returning the partial result
 */
public List<OrcStruct> getORCRecords(String storeBaseDir, String tableName) throws IOException {
  List<OrcStruct> orcrecords = new ArrayList<>();
  try {
    FileSystem fs = FileSystem.get(conf);
    Path storeBasePath = new Path(fs.getHomeDirectory(), storeBaseDir);
    Path tablePath = new Path(storeBasePath, tableName);
    if (fs.exists(tablePath)) {
      RemoteIterator<LocatedFileStatus> locatedFileStatusRemoteIterator =
          fs.listFiles(tablePath, false);
      while (locatedFileStatusRemoteIterator.hasNext()) {
        LocatedFileStatus next = locatedFileStatusRemoteIterator.next();
        final org.apache.hadoop.hive.ql.io.orc.Reader fis =
            OrcFile.createReader(next.getPath(), OrcFile.readerOptions(conf));
        // FIX: the RecordReader was never closed, leaking file handles for
        // every ORC file read; close it even if iteration throws.
        RecordReader rows = fis.rows();
        try {
          while (rows.hasNext()) {
            orcrecords.add((OrcStruct) rows.next(null));
          }
        } finally {
          rows.close();
        }
        System.out.println("File name is " + next.getPath());
      }
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
  return orcrecords;
}
 
Example 4
Project: hadoop   File: TestLocalDFS.java   Source Code and License Vote up 6 votes
/**
 * Verifies that {@link FileSystem#getHomeDirectory()} honors the configured
 * DFS user-home prefix for several prefix values.
 */
@Test(timeout=30000)
public void testHomeDirectory() throws IOException {
  final String[] prefixes = {"/home", "/home/user"};
  Configuration conf = new HdfsConfiguration();
  for (final String prefix : prefixes) {
    conf.set(DFSConfigKeys.DFS_USER_HOME_DIR_PREFIX_KEY, prefix);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
    FileSystem fileSys = cluster.getFileSystem();
    try {
      // Expected home is <prefix>/<current user>, fully qualified.
      Path expected = fileSys.makeQualified(
          new Path(prefix + "/" + getUserName(fileSys)));
      Path actual = fileSys.getHomeDirectory();
      assertEquals(expected, actual);
    } finally {
      fileSys.close();
      cluster.shutdown();
    }
  }
}
 
Example 5
Project: hadoop-oss   File: Client.java   Source Code and License Vote up 5 votes
/**
 * Stages a resource at {@code <home>/prkeyrotation/<appId>/<fileDstPath>} on
 * the DFS and registers it in the local-resources map under {@code fileDstPath}.
 * If {@code fileSrcPath} is null, the {@code resources} string is written (UTF)
 * as the file's contents; otherwise the local file is copied up.
 *
 * @param fs             file system to stage the resource on
 * @param fileSrcPath    local source path, or null to write {@code resources}
 * @param fileDstPath    destination file name and local-resource key
 * @param appId          application id used to namespace the destination
 * @param localResources map receiving the registered resource
 * @param resources      contents to write when no source file is given
 * @throws IOException if staging or stat-ing the file fails
 */
private void addToLocalResources(FileSystem fs, String fileSrcPath,
    String fileDstPath, String appId, Map<String, LocalResource> localResources,
    String resources) throws IOException {
  Path dst = new Path(fs.getHomeDirectory(),
      "prkeyrotation" + "/" + appId + "/" + fileDstPath);
  if (fileSrcPath != null) {
    fs.copyFromLocalFile(new Path(fileSrcPath), dst);
  } else {
    // No local source: persist the raw resources string directly.
    FSDataOutputStream ostream = null;
    try {
      ostream = FileSystem.create(fs, dst, new FsPermission((short) 0710));
      ostream.writeUTF(resources);
    } finally {
      IOUtils.closeQuietly(ostream);
    }
  }
  FileStatus scFileStatus = fs.getFileStatus(dst);
  localResources.put(fileDstPath,
      LocalResource.newInstance(
          ConverterUtils.getYarnUrlFromPath(dst),
          LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
          scFileStatus.getLen(), scFileStatus.getModificationTime()));
}
 
Example 6
Project: alluxio   File: HdfsAndAlluxioUtils_update.java   Source Code and License Vote up 5 votes
/**
 * Returns the home directory of the given file system.
 *
 * @param fileSystemInfo
 *            information describing the target file system
 * @return the file system's home directory
 */
public static Path getHomeDirectory(FileSystemInfo fileSystemInfo) {
	FileSystem fs = getFileSystem(fileSystemInfo);
	try {
		// Close the file system even if getHomeDirectory() throws.
		return fs.getHomeDirectory();
	} finally {
		closeFileSystem(fs);
	}
}
 
Example 7
Project: monarch   File: HDFSQuasiService.java   Source Code and License Vote up 5 votes
/**
 * Checks whether {@code <home>/<storeBaseDir>/<tableName>} exists, after
 * logging in to the (secured) cluster from the given keytab.
 *
 * @param user         Kerberos principal to log in as
 * @param keytab       path to the principal's keytab file
 * @param storeBaseDir store base directory, relative to the user's home
 * @param tableName    table directory name under the store base
 * @return true if the path exists; false if it does not or an I/O error occurs
 */
public boolean checkFileExistsSecured(final String user, final String keytab, String storeBaseDir,
    String tableName) {
  try {
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(user, keytab);
    FileSystem fs = FileSystem.get(conf);
    Path tablePath =
        new Path(new Path(fs.getHomeDirectory(), storeBaseDir), tableName);
    return fs.exists(tablePath);
  } catch (IOException e) {
    e.printStackTrace();
    return false;
  }
}
 
Example 8
Project: monarch   File: HDFSQuasiService.java   Source Code and License Vote up 5 votes
/**
 * Checks whether {@code <home>/<storeBaseDir>/<tableName>} exists.
 *
 * @param storeBaseDir store base directory, relative to the user's home
 * @param tableName    table directory name under the store base
 * @return true if the path exists; false if it does not or an I/O error occurs
 */
public boolean checkFileExists(String storeBaseDir, String tableName) {
  try {
    FileSystem fs = FileSystem.get(conf);
    Path tablePath =
        new Path(new Path(fs.getHomeDirectory(), storeBaseDir), tableName);
    return fs.exists(tablePath);
  } catch (IOException e) {
    e.printStackTrace();
    return false;
  }
}
 
Example 9
Project: hadoop   File: Client.java   Source Code and License Vote up 5 votes
/**
 * Stages a resource at {@code <home>/<appName>/<appId>/<fileDstPath>} on the
 * DFS and registers it in the local-resources map under {@code fileDstPath}.
 * If {@code fileSrcPath} is null, the {@code resources} string is written (UTF)
 * as the file's contents; otherwise the local file is copied up.
 *
 * @param fs             file system to stage the resource on
 * @param fileSrcPath    local source path, or null to write {@code resources}
 * @param fileDstPath    destination file name and local-resource key
 * @param appId          application id used to namespace the destination
 * @param localResources map receiving the registered resource
 * @param resources      contents to write when no source file is given
 * @throws IOException if staging or stat-ing the file fails
 */
private void addToLocalResources(FileSystem fs, String fileSrcPath,
    String fileDstPath, String appId, Map<String, LocalResource> localResources,
    String resources) throws IOException {
  Path dst = new Path(fs.getHomeDirectory(),
      appName + "/" + appId + "/" + fileDstPath);
  if (fileSrcPath != null) {
    fs.copyFromLocalFile(new Path(fileSrcPath), dst);
  } else {
    // No local source: persist the raw resources string directly.
    FSDataOutputStream ostream = null;
    try {
      ostream = FileSystem.create(fs, dst, new FsPermission((short) 0710));
      ostream.writeUTF(resources);
    } finally {
      IOUtils.closeQuietly(ostream);
    }
  }
  FileStatus scFileStatus = fs.getFileStatus(dst);
  localResources.put(fileDstPath,
      LocalResource.newInstance(
          ConverterUtils.getYarnUrlFromURI(dst.toUri()),
          LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
          scFileStatus.getLen(), scFileStatus.getModificationTime()));
}
 
Example 10
Project: hadoop   File: TestLocalDFS.java   Source Code and License Vote up 5 votes
/**
 * Tests get/set working directory in DFS: relative paths must resolve against
 * the current working directory, and getHomeDirectory() must return the
 * default home prefix plus the current user name.
 */
@Test
public void testWorkingDirectory() throws IOException {
  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  FileSystem fileSys = cluster.getFileSystem();
  try {
    // The initial working directory must be absolute.
    Path orig_path = fileSys.getWorkingDirectory();
    assertTrue(orig_path.isAbsolute());
    // A relative path resolves against the current working directory.
    Path file1 = new Path("somewhat/random.txt");
    writeFile(fileSys, file1);
    assertTrue(fileSys.exists(new Path(orig_path, file1.toString())));
    fileSys.delete(file1, true);
    // Change to an absolute working directory; relative writes now land there.
    Path subdir1 = new Path("/somewhere");
    fileSys.setWorkingDirectory(subdir1);
    writeFile(fileSys, file1);
    cleanupFile(fileSys, new Path(subdir1, file1.toString()));
    // A RELATIVE working directory resolves against the previous one,
    // giving /somewhere/else as the effective working directory.
    Path subdir2 = new Path("else");
    fileSys.setWorkingDirectory(subdir2);
    writeFile(fileSys, file1);
    readFile(fileSys, file1);
    cleanupFile(fileSys, new Path(new Path(subdir1, subdir2.toString()),
                                  file1.toString()));

    // test home directory: default prefix + current user, fully qualified
    Path home = 
      fileSys.makeQualified(
          new Path(DFSConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT
              + "/" + getUserName(fileSys))); 
    Path fsHome = fileSys.getHomeDirectory();
    assertEquals(home, fsHome);

  } finally {
    // Always release the client and tear down the mini cluster.
    fileSys.close();
    cluster.shutdown();
  }
}
 
Example 11
Project: hadoop   File: FSOperations.java   Source Code and License Vote up 3 votes
/**
 * Executes the filesystem operation.
 *
 * @param fs filesystem instance to use.
 *
 * @return a JSON object with the user home directory.
 *
 * @throws IOException thrown if an IO error occurred.
 */
@Override
@SuppressWarnings("unchecked")
public JSONObject execute(FileSystem fs) throws IOException {
  JSONObject json = new JSONObject();
  // Report only the path component, not the full scheme://authority URI.
  json.put(HttpFSFileSystem.HOME_DIR_JSON,
      fs.getHomeDirectory().toUri().getPath());
  return json;
}