Java Code Examples for org.apache.hadoop.hdfs.protocol.DirectoryListing#getPartialListing()

The following examples show how to use org.apache.hadoop.hdfs.protocol.DirectoryListing#getPartialListing(). You can vote up the examples you like or vote down the ones you don't, and follow the links above each example to go to the original project or source file. You may check out the related API usage in the sidebar.
Example 1
Source File: TestDFSUpgradeFromImage.java    From hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Recursively recovers the lease on every file under {@code path}.
 *
 * <p>For a regular file the lease is recovered directly; for a directory,
 * the listing is paged through with {@link DirectoryListing#getLastName()}
 * as the continuation cursor and each entry is processed recursively.
 *
 * @param dfs  client used to look up file status and recover leases
 * @param path file or directory to process
 * @throws IOException if the path does not exist or an RPC fails
 */
static void recoverAllLeases(DFSClient dfs, 
    Path path) throws IOException {
  String pathStr = path.toString();
  HdfsFileStatus status = dfs.getFileInfo(pathStr);
  // getFileInfo() returns null for a non-existent path; fail fast with a
  // descriptive error instead of an NPE on status.isDir() below.
  if (status == null) {
    throw new IOException("File does not exist: " + pathStr);
  }
  if (!status.isDir()) {
    dfs.recoverLease(pathStr);
    return;
  }
  // Page through the directory: EMPTY_NAME starts at the beginning,
  // getLastName() is the cursor for the next batch.
  byte[] prev = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing dirList;
  do {
    dirList = dfs.listPaths(pathStr, prev);
    for (HdfsFileStatus f : dirList.getPartialListing()) {
      recoverAllLeases(dfs, f.getFullPath(path));
    }
    prev = dirList.getLastName();
  } while (dirList.hasMore());
}
 
Example 2
Source File: TestDFSUpgradeFromImage.java    From big-c with Apache License 2.0 6 votes vote down vote up
/**
 * Walks the namespace rooted at {@code path} and recovers the lease on
 * every regular file found. Directory listings are consumed page by page.
 *
 * @param dfs  client used for status lookups and lease recovery
 * @param path starting file or directory
 * @throws IOException on any RPC failure
 */
static void recoverAllLeases(DFSClient dfs, 
    Path path) throws IOException {
  final String pathStr = path.toString();
  final HdfsFileStatus status = dfs.getFileInfo(pathStr);
  if (status.isDir()) {
    // Directory: iterate over each page of children and recurse.
    byte[] cursor = HdfsFileStatus.EMPTY_NAME;
    DirectoryListing page;
    do {
      page = dfs.listPaths(pathStr, cursor);
      for (HdfsFileStatus entry : page.getPartialListing()) {
        recoverAllLeases(dfs, entry.getFullPath(path));
      }
      cursor = page.getLastName();
    } while (page.hasMore());
  } else {
    // Regular file: recover its lease directly.
    dfs.recoverLease(pathStr);
  }
}
 
Example 3
Source File: TerrapinUtil.java    From terrapin with Apache License 2.0 6 votes vote down vote up
/**
 * Retrieve list of files under @hdfsDir for @hdfsClient.
 *
 * @param hdfsClient client used to list the directory
 * @param hdfsDir    directory whose entries are collected
 * @return all entries of the directory, accumulated across listing pages
 * @throws IOException if listing the directory fails
 */
public static List<HdfsFileStatus> getHdfsFileList(DFSClient hdfsClient,
                                                   String hdfsDir)
    throws IOException {
  List<HdfsFileStatus> fileList = Lists.newArrayList();
  // HDFS component names are raw bytes. Keep the pagination cursor as
  // byte[] rather than round-tripping through String.getBytes()/new String(),
  // which uses the platform default charset and corrupts non-ASCII names.
  byte[] continuation = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing listing;
  do {
    listing = hdfsClient.listPaths(hdfsDir, continuation);
    for (HdfsFileStatus fileStatus : listing.getPartialListing()) {
      fileList.add(fileStatus);
    }
    // Advance the cursor to the last entry of this page.
    continuation = listing.getLastName();
  } while (listing.hasMore());
  return fileList;
}
 
Example 4
Source File: TestStorageMover.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Recursively verifies every regular file under {@code status}.
 * Directories are descended into; symlinks are skipped.
 *
 * @param parent parent directory, or {@code null} for the root
 * @param status entry to verify
 */
private void verifyRecursively(final Path parent,
    final HdfsFileStatus status) throws Exception {
  if (!status.isDir()) {
    if (!status.isSymlink()) { // regular file
      verifyFile(parent, status, null);
    }
    return;
  }
  // Resolve this directory's absolute path; a null parent means the root.
  final Path fullPath =
      parent == null ? new Path("/") : status.getFullPath(parent);
  final DirectoryListing children = dfs.getClient().listPaths(
      fullPath.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus child : children.getPartialListing()) {
    verifyRecursively(fullPath, child);
  }
}
 
Example 5
Source File: TestStorageMover.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Looks up {@code file} among its parent's listing and verifies it
 * against {@code expectedPolicyId}; fails the test if it is absent.
 */
void verifyFile(final Path file, final Byte expectedPolicyId)
    throws Exception {
  final Path parent = file.getParent();
  final DirectoryListing siblings = dfs.getClient().listPaths(
      parent.toString(), HdfsFileStatus.EMPTY_NAME, true);
  final String target = file.getName();
  for (HdfsFileStatus candidate : siblings.getPartialListing()) {
    if (candidate.getLocalName().equals(target)) {
      verifyFile(parent, candidate, expectedPolicyId);
      return;
    }
  }
  Assert.fail("File " + file + " not found.");
}
 
Example 6
Source File: TestStorageMover.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Depth-first verification: descends into directories and checks each
 * regular file via {@code verifyFile}; symlinks are ignored.
 *
 * @param parent parent directory, or {@code null} for the root
 * @param status entry being visited
 */
private void verifyRecursively(final Path parent,
    final HdfsFileStatus status) throws Exception {
  if (status.isDir()) {
    // null parent denotes the filesystem root.
    final Path dir =
        (parent == null) ? new Path("/") : status.getFullPath(parent);
    final HdfsFileStatus[] entries = dfs.getClient()
        .listPaths(dir.toString(), HdfsFileStatus.EMPTY_NAME, true)
        .getPartialListing();
    for (HdfsFileStatus entry : entries) {
      verifyRecursively(dir, entry);
    }
  } else if (!status.isSymlink()) { // regular file
    verifyFile(parent, status, null);
  }
}
 
Example 7
Source File: TestStorageMover.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Finds {@code file} in its parent directory's listing and delegates to
 * the three-argument {@code verifyFile}; the test fails if no entry with
 * the same local name exists.
 */
void verifyFile(final Path file, final Byte expectedPolicyId)
    throws Exception {
  final Path parent = file.getParent();
  final HdfsFileStatus[] entries = dfs.getClient()
      .listPaths(parent.toString(), HdfsFileStatus.EMPTY_NAME, true)
      .getPartialListing();
  for (HdfsFileStatus entry : entries) {
    if (entry.getLocalName().equals(file.getName())) {
      verifyFile(parent, entry, expectedPolicyId);
      return;
    }
  }
  Assert.fail("File " + file + " not found.");
}