Java Code Examples for org.apache.hadoop.hdfs.protocol.DirectoryListing

The following examples show how to use org.apache.hadoop.hdfs.protocol.DirectoryListing. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: hadoop   Source File: RpcProgramNfs3.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Used by readdir and readdirplus to get dirents. It retries the listing if
 * the startAfter can't be found anymore.
 */
private DirectoryListing listPaths(DFSClient dfsClient, String dirFileIdPath,
    byte[] startAfter) throws IOException {
  try {
    return dfsClient.listPaths(dirFileIdPath, startAfter);
  } catch (RemoteException e) {
    IOException unwrapped = e.unwrapRemoteException();
    if (!(unwrapped instanceof DirectoryListingStartAfterNotFoundException)) {
      throw unwrapped;
    }
    // The startAfter cookie was deleted between readdir calls; restart the
    // listing from the first entry instead of failing the whole readdir.
    LOG.info("Cookie couldn't be found: "
        + new String(startAfter, Charset.forName("UTF-8"))
        + ", do listing from beginning");
    return dfsClient.listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
  }
}
 
Example 2
@Override
public GetListingResponseProto getListing(RpcController controller,
    GetListingRequestProto req) throws ServiceException {
  try {
    final DirectoryListing listing = server.getListing(
        req.getSrc(), req.getStartAfter().toByteArray(),
        req.getNeedLocation());
    if (listing == null) {
      // Null from the server means no such directory; reply with the
      // canonical empty response proto.
      return VOID_GETLISTING_RESPONSE;
    }
    return GetListingResponseProto.newBuilder()
        .setDirList(PBHelper.convert(listing))
        .build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
Example 3
Source Project: hadoop   Source File: ClientNamenodeProtocolTranslatorPB.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  final GetListingRequestProto request = GetListingRequestProto.newBuilder()
      .setSrc(src)
      .setStartAfter(ByteString.copyFrom(startAfter))
      .setNeedLocation(needLocation)
      .build();
  try {
    final GetListingResponseProto response = rpcProxy.getListing(null, request);
    // An absent dirList field means the directory does not exist.
    return response.hasDirList()
        ? PBHelper.convert(response.getDirList()) : null;
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Example 4
Source Project: hadoop   Source File: TestDFSUpgradeFromImage.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Recursively triggers lease recovery on every file under {@code path}.
 */
static void recoverAllLeases(DFSClient dfs, 
    Path path) throws IOException {
  final String pathStr = path.toString();
  HdfsFileStatus status = dfs.getFileInfo(pathStr);
  if (!status.isDir()) {
    // Plain file: recover its lease and stop descending.
    dfs.recoverLease(pathStr);
    return;
  }
  // Page through the directory, resuming each call after the last name seen.
  byte[] cookie = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing page;
  do {
    page = dfs.listPaths(pathStr, cookie);
    for (HdfsFileStatus entry : page.getPartialListing()) {
      recoverAllLeases(dfs, entry.getFullPath(path));
    }
    cookie = page.getLastName();
  } while (page.hasMore());
}
 
Example 5
Source Project: big-c   Source File: RpcProgramNfs3.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Used by readdir and readdirplus to get dirents. It retries the listing if
 * the startAfter can't be found anymore.
 */
private DirectoryListing listPaths(DFSClient dfsClient, String dirFileIdPath,
    byte[] startAfter) throws IOException {
  try {
    return dfsClient.listPaths(dirFileIdPath, startAfter);
  } catch (RemoteException e) {
    IOException unwrapped = e.unwrapRemoteException();
    if (!(unwrapped instanceof DirectoryListingStartAfterNotFoundException)) {
      throw unwrapped;
    }
    // The startAfter cookie was deleted between readdir calls; restart the
    // listing from the first entry instead of failing the whole readdir.
    LOG.info("Cookie couldn't be found: "
        + new String(startAfter, Charset.forName("UTF-8"))
        + ", do listing from beginning");
    return dfsClient.listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
  }
}
 
Example 6
@Override
public GetListingResponseProto getListing(RpcController controller,
    GetListingRequestProto req) throws ServiceException {
  try {
    final DirectoryListing listing = server.getListing(
        req.getSrc(), req.getStartAfter().toByteArray(),
        req.getNeedLocation());
    if (listing == null) {
      // Null from the server means no such directory; reply with the
      // canonical empty response proto.
      return VOID_GETLISTING_RESPONSE;
    }
    return GetListingResponseProto.newBuilder()
        .setDirList(PBHelper.convert(listing))
        .build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
 
Example 7
Source Project: big-c   Source File: ClientNamenodeProtocolTranslatorPB.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  final GetListingRequestProto request = GetListingRequestProto.newBuilder()
      .setSrc(src)
      .setStartAfter(ByteString.copyFrom(startAfter))
      .setNeedLocation(needLocation)
      .build();
  try {
    final GetListingResponseProto response = rpcProxy.getListing(null, request);
    // An absent dirList field means the directory does not exist.
    return response.hasDirList()
        ? PBHelper.convert(response.getDirList()) : null;
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
 
Example 8
Source Project: big-c   Source File: TestDFSUpgradeFromImage.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Recursively triggers lease recovery on every file under {@code path}.
 */
static void recoverAllLeases(DFSClient dfs, 
    Path path) throws IOException {
  final String pathStr = path.toString();
  HdfsFileStatus status = dfs.getFileInfo(pathStr);
  if (!status.isDir()) {
    // Plain file: recover its lease and stop descending.
    dfs.recoverLease(pathStr);
    return;
  }
  // Page through the directory, resuming each call after the last name seen.
  byte[] cookie = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing page;
  do {
    page = dfs.listPaths(pathStr, cookie);
    for (HdfsFileStatus entry : page.getPartialListing()) {
      recoverAllLeases(dfs, entry.getFullPath(path));
    }
    cookie = page.getLastName();
  } while (page.hasMore());
}
 
Example 9
Source Project: terrapin   Source File: TerrapinUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Retrieve list of files under @hdfsDir for @hdfsClient.
 *
 * Pages through the namenode's partial listings, resuming each call after
 * the last name returned, until hasMore() reports no further entries.
 */
public static List<HdfsFileStatus> getHdfsFileList(DFSClient hdfsClient,
                                                   String hdfsDir)
    throws IOException {
  List<HdfsFileStatus> fileList = Lists.newArrayList();
  // Build a list of files.
  DirectoryListing listing = null;
  // Continuation cookie for pagination; EMPTY_NAME starts at the first entry.
  // Keep the cookie as raw bytes: the old String round-trip used the
  // platform default charset and could corrupt non-ASCII names, breaking
  // pagination for such directories.
  byte[] continuation = HdfsFileStatus.EMPTY_NAME;
  while (true) {
    listing = hdfsClient.listPaths(hdfsDir, continuation);
    for (HdfsFileStatus fileStatus : listing.getPartialListing()) {
      fileList.add(fileStatus);
    }
    // Go through the listing and paginate.
    if (!listing.hasMore()) {
      break;
    } else {
      continuation = listing.getLastName();
    }
  }
  return fileList;
}
 
Example 10
Source Project: hadoop   Source File: DFSClient.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Get a partial listing of the indicated directory
 *
 * Recommend to use HdfsFileStatus.EMPTY_NAME as startAfter
 * if the application wants to fetch a listing starting from
 * the first entry in the directory
 *
 * @see ClientProtocol#getListing(String, byte[], boolean)
 */
public DirectoryListing listPaths(String src,  byte[] startAfter,
    boolean needLocation) throws IOException {
  checkOpen();
  // Trace the RPC; the scope is closed in finally even on exception.
  final TraceScope scope = getPathTraceScope("listPaths", src);
  try {
    return namenode.getListing(src, startAfter, needLocation);
  } catch (RemoteException e) {
    // Surface the well-known remote exception types to the caller.
    throw e.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        UnresolvedPathException.class);
  } finally {
    scope.close();
  }
}
 
Example 11
Source Project: hadoop   Source File: FSDirStatAndListingOp.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Resolve {@code srcArg} and return a partial listing of it, translating a
 * reserved-inode-path startAfter into a plain file name and enforcing
 * permission checks before delegating to getListing().
 */
static DirectoryListing getListingInt(FSDirectory fsd, final String srcArg,
    byte[] startAfter, boolean needLocation) throws IOException {
  FSPermissionChecker pc = fsd.getPermissionChecker();
  byte[][] pathComponents = FSDirectory
      .getPathComponentsForReservedPath(srcArg);
  final String startAfterString = new String(startAfter, Charsets.UTF_8);
  final String src = fsd.resolvePath(pc, srcArg, pathComponents);
  final INodesInPath iip = fsd.getINodesInPath(src, true);

  // Get file name when startAfter is an INodePath
  if (FSDirectory.isReservedName(startAfterString)) {
    byte[][] startAfterComponents = FSDirectory
        .getPathComponentsForReservedPath(startAfterString);
    try {
      String tmp = FSDirectory.resolvePath(src, startAfterComponents, fsd);
      byte[][] regularPath = INode.getPathComponents(tmp);
      // Only the final path component is kept: the listing resumes after a
      // name within the directory, not after a full path.
      startAfter = regularPath[regularPath.length - 1];
    } catch (IOException e) {
      // Possibly the inode is deleted
      throw new DirectoryListingStartAfterNotFoundException(
          "Can't find startAfter " + startAfterString);
    }
  }

  boolean isSuperUser = true;
  if (fsd.isPermissionEnabled()) {
    // Listing a directory needs READ_EXECUTE on it; for a file (or a path
    // whose last inode is missing) only traversal of ancestors is checked.
    if (iip.getLastINode() != null && iip.getLastINode().isDirectory()) {
      fsd.checkPathAccess(pc, iip, FsAction.READ_EXECUTE);
    } else {
      fsd.checkTraverse(pc, iip);
    }
    isSuperUser = pc.isSuperUser();
  }
  return getListing(fsd, iip, src, startAfter, needLocation, isSuperUser);
}
 
Example 12
Source Project: hadoop   Source File: FSDirStatAndListingOp.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Get a listing of all the snapshots of a snapshottable directory
 *
 * @param fsd the directory tree; caller must already hold the read lock
 * @param src a path ending in the ".snapshot" reserved component
 * @param startAfter resume the listing after this snapshot name
 * @throws SnapshotException if the directory is not snapshottable
 */
private static DirectoryListing getSnapshotsListing(
    FSDirectory fsd, String src, byte[] startAfter)
    throws IOException {
  Preconditions.checkState(fsd.hasReadLock());
  Preconditions.checkArgument(
      src.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR),
      "%s does not end with %s", src, HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR);

  // Strip the trailing ".snapshot" component to get the directory itself.
  final String dirPath = FSDirectory.normalizePath(src.substring(0,
      src.length() - HdfsConstants.DOT_SNAPSHOT_DIR.length()));

  final INode node = fsd.getINode(dirPath);
  final INodeDirectory dirNode = INodeDirectory.valueOf(node, dirPath);
  final DirectorySnapshottableFeature sf = dirNode.getDirectorySnapshottableFeature();
  if (sf == null) {
    throw new SnapshotException(
        "Directory is not a snapshottable directory: " + dirPath);
  }
  final ReadOnlyList<Snapshot> snapshots = sf.getSnapshotList();
  // Binary-search for startAfter: on a hit, resume after that snapshot
  // (index + 1); on a miss, resume at the insertion point (-result - 1).
  int skipSize = ReadOnlyList.Util.binarySearch(snapshots, startAfter);
  skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
  // Cap the page at the configured ls limit so huge snapshot lists paginate.
  int numOfListing = Math.min(snapshots.size() - skipSize, fsd.getLsLimit());
  final HdfsFileStatus listing[] = new HdfsFileStatus[numOfListing];
  for (int i = 0; i < numOfListing; i++) {
    Snapshot.Root sRoot = snapshots.get(i + skipSize).getRoot();
    listing[i] = createFileStatus(fsd, src, sRoot.getLocalNameBytes(), sRoot,
        BlockStoragePolicySuite.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
        false, INodesInPath.fromINode(sRoot));
  }
  // The remaining-entries count lets the client fetch the next page.
  return new DirectoryListing(
      listing, snapshots.size() - skipSize - numOfListing);
}
 
Example 13
Source Project: hadoop   Source File: NamenodeWebHdfsMethods.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Fetch a listing of {@code p}, converting the namenode's null result
 * (path absent) into a FileNotFoundException for WebHDFS callers.
 */
private static DirectoryListing getDirectoryListing(final NamenodeProtocols np,
    final String p, byte[] startAfter) throws IOException {
  DirectoryListing dirList = np.getListing(p, startAfter, false);
  if (dirList != null) {
    return dirList;
  }
  throw new FileNotFoundException("File " + p + " does not exist.");
}
 
Example 14
Source Project: hadoop   Source File: NameNodeRpcServer.java    License: Apache License 2.0 5 votes vote down vote up
@Override // ClientProtocol
public DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) throws IOException {
  checkNNStartup();
  final DirectoryListing listing =
      namesystem.getListing(src, startAfter, needLocation);
  // Metrics are only bumped when the path exists (non-null listing).
  if (listing != null) {
    metrics.incrGetListingOps();
    metrics.incrFilesInGetListingOps(listing.getPartialListing().length);
  }
  return listing;
}
 
Example 15
Source Project: hadoop   Source File: PBHelper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Converts a DirectoryListingProto into a DirectoryListing; null maps to null.
 */
public static DirectoryListing convert(DirectoryListingProto dl) {
  if (dl == null) {
    return null;
  }
  List<HdfsFileStatusProto> entries = dl.getPartialListingList();
  // An empty proto list becomes a zero-length status array, never null.
  HdfsFileStatus[] partial = entries.isEmpty()
      ? new HdfsLocatedFileStatus[0]
      : PBHelper.convert(
          entries.toArray(new HdfsFileStatusProto[entries.size()]));
  return new DirectoryListing(partial, dl.getRemainingEntries());
}
 
Example 16
Source Project: hadoop   Source File: PBHelper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Converts a DirectoryListing into its protobuf form; null maps to null.
 */
public static DirectoryListingProto convert(DirectoryListing d) {
  if (d == null) {
    return null;
  }
  DirectoryListingProto.Builder builder = DirectoryListingProto.newBuilder();
  builder.addAllPartialListing(
      Arrays.asList(PBHelper.convert(d.getPartialListing())));
  builder.setRemainingEntries(d.getRemainingEntries());
  return builder.build();
}
 
Example 17
Source Project: hadoop   Source File: TestStorageMover.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Walks the tree under {@code status}, verifying every regular file;
 * symlinks are skipped.
 */
private void verifyRecursively(final Path parent,
    final HdfsFileStatus status) throws Exception {
  if (!status.isDir()) {
    if (!status.isSymlink()) { // is file
      verifyFile(parent, status, null);
    }
    return;
  }
  final Path fullPath =
      parent == null ? new Path("/") : status.getFullPath(parent);
  final DirectoryListing children = dfs.getClient().listPaths(
      fullPath.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus child : children.getPartialListing()) {
    verifyRecursively(fullPath, child);
  }
}
 
Example 18
Source Project: hadoop   Source File: TestStorageMover.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Locates {@code file} in its parent's listing and verifies it against the
 * expected storage policy; fails the test if the entry is missing.
 */
void verifyFile(final Path file, final Byte expectedPolicyId)
    throws Exception {
  final Path parent = file.getParent();
  final DirectoryListing siblings = dfs.getClient().listPaths(
      parent.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus sibling : siblings.getPartialListing()) {
    if (sibling.getLocalName().equals(file.getName())) {
      verifyFile(parent, sibling, expectedPolicyId);
      return;
    }
  }
  Assert.fail("File " + file + " not found.");
}
 
Example 19
Source Project: big-c   Source File: DFSClient.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Get a partial listing of the indicated directory
 *
 * Recommend to use HdfsFileStatus.EMPTY_NAME as startAfter
 * if the application wants to fetch a listing starting from
 * the first entry in the directory
 *
 * @see ClientProtocol#getListing(String, byte[], boolean)
 */
public DirectoryListing listPaths(String src,  byte[] startAfter,
    boolean needLocation) throws IOException {
  checkOpen();
  // Trace the RPC; the scope is closed in finally even on exception.
  final TraceScope scope = getPathTraceScope("listPaths", src);
  try {
    return namenode.getListing(src, startAfter, needLocation);
  } catch (RemoteException e) {
    // Surface the well-known remote exception types to the caller.
    throw e.unwrapRemoteException(AccessControlException.class,
        FileNotFoundException.class,
        UnresolvedPathException.class);
  } finally {
    scope.close();
  }
}
 
Example 20
Source Project: big-c   Source File: FSDirStatAndListingOp.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Resolve {@code srcArg} and return a partial listing of it, translating a
 * reserved-inode-path startAfter into a plain file name and enforcing
 * permission checks before delegating to getListing().
 */
static DirectoryListing getListingInt(FSDirectory fsd, final String srcArg,
    byte[] startAfter, boolean needLocation) throws IOException {
  FSPermissionChecker pc = fsd.getPermissionChecker();
  byte[][] pathComponents = FSDirectory
      .getPathComponentsForReservedPath(srcArg);
  final String startAfterString = new String(startAfter, Charsets.UTF_8);
  final String src = fsd.resolvePath(pc, srcArg, pathComponents);
  final INodesInPath iip = fsd.getINodesInPath(src, true);

  // Get file name when startAfter is an INodePath
  if (FSDirectory.isReservedName(startAfterString)) {
    byte[][] startAfterComponents = FSDirectory
        .getPathComponentsForReservedPath(startAfterString);
    try {
      String tmp = FSDirectory.resolvePath(src, startAfterComponents, fsd);
      byte[][] regularPath = INode.getPathComponents(tmp);
      // Only the final path component is kept: the listing resumes after a
      // name within the directory, not after a full path.
      startAfter = regularPath[regularPath.length - 1];
    } catch (IOException e) {
      // Possibly the inode is deleted
      throw new DirectoryListingStartAfterNotFoundException(
          "Can't find startAfter " + startAfterString);
    }
  }

  boolean isSuperUser = true;
  if (fsd.isPermissionEnabled()) {
    // Listing a directory needs READ_EXECUTE on it; for a file (or a path
    // whose last inode is missing) only traversal of ancestors is checked.
    if (iip.getLastINode() != null && iip.getLastINode().isDirectory()) {
      fsd.checkPathAccess(pc, iip, FsAction.READ_EXECUTE);
    } else {
      fsd.checkTraverse(pc, iip);
    }
    isSuperUser = pc.isSuperUser();
  }
  return getListing(fsd, iip, src, startAfter, needLocation, isSuperUser);
}
 
Example 21
Source Project: big-c   Source File: FSDirStatAndListingOp.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Get a listing of all the snapshots of a snapshottable directory
 *
 * @param fsd the directory tree; caller must already hold the read lock
 * @param src a path ending in the ".snapshot" reserved component
 * @param startAfter resume the listing after this snapshot name
 * @throws SnapshotException if the directory is not snapshottable
 */
private static DirectoryListing getSnapshotsListing(
    FSDirectory fsd, String src, byte[] startAfter)
    throws IOException {
  Preconditions.checkState(fsd.hasReadLock());
  Preconditions.checkArgument(
      src.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR),
      "%s does not end with %s", src, HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR);

  // Strip the trailing ".snapshot" component to get the directory itself.
  final String dirPath = FSDirectory.normalizePath(src.substring(0,
      src.length() - HdfsConstants.DOT_SNAPSHOT_DIR.length()));

  final INode node = fsd.getINode(dirPath);
  final INodeDirectory dirNode = INodeDirectory.valueOf(node, dirPath);
  final DirectorySnapshottableFeature sf = dirNode.getDirectorySnapshottableFeature();
  if (sf == null) {
    throw new SnapshotException(
        "Directory is not a snapshottable directory: " + dirPath);
  }
  final ReadOnlyList<Snapshot> snapshots = sf.getSnapshotList();
  // Binary-search for startAfter: on a hit, resume after that snapshot
  // (index + 1); on a miss, resume at the insertion point (-result - 1).
  int skipSize = ReadOnlyList.Util.binarySearch(snapshots, startAfter);
  skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
  // Cap the page at the configured ls limit so huge snapshot lists paginate.
  int numOfListing = Math.min(snapshots.size() - skipSize, fsd.getLsLimit());
  final HdfsFileStatus listing[] = new HdfsFileStatus[numOfListing];
  for (int i = 0; i < numOfListing; i++) {
    Snapshot.Root sRoot = snapshots.get(i + skipSize).getRoot();
    listing[i] = createFileStatus(fsd, src, sRoot.getLocalNameBytes(), sRoot,
        BlockStoragePolicySuite.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
        false, INodesInPath.fromINode(sRoot));
  }
  // The remaining-entries count lets the client fetch the next page.
  return new DirectoryListing(
      listing, snapshots.size() - skipSize - numOfListing);
}
 
Example 22
Source Project: big-c   Source File: NamenodeWebHdfsMethods.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Fetch a listing of {@code p}, converting the namenode's null result
 * (path absent) into a FileNotFoundException for WebHDFS callers.
 */
private static DirectoryListing getDirectoryListing(final NamenodeProtocols np,
    final String p, byte[] startAfter) throws IOException {
  DirectoryListing dirList = np.getListing(p, startAfter, false);
  if (dirList != null) {
    return dirList;
  }
  throw new FileNotFoundException("File " + p + " does not exist.");
}
 
Example 23
Source Project: big-c   Source File: NameNodeRpcServer.java    License: Apache License 2.0 5 votes vote down vote up
@Override // ClientProtocol
public DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) throws IOException {
  checkNNStartup();
  final DirectoryListing listing =
      namesystem.getListing(src, startAfter, needLocation);
  // Metrics are only bumped when the path exists (non-null listing).
  if (listing != null) {
    metrics.incrGetListingOps();
    metrics.incrFilesInGetListingOps(listing.getPartialListing().length);
  }
  return listing;
}
 
Example 24
Source Project: big-c   Source File: PBHelper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Converts a DirectoryListingProto into a DirectoryListing; null maps to null.
 */
public static DirectoryListing convert(DirectoryListingProto dl) {
  if (dl == null) {
    return null;
  }
  List<HdfsFileStatusProto> entries = dl.getPartialListingList();
  // An empty proto list becomes a zero-length status array, never null.
  HdfsFileStatus[] partial = entries.isEmpty()
      ? new HdfsLocatedFileStatus[0]
      : PBHelper.convert(
          entries.toArray(new HdfsFileStatusProto[entries.size()]));
  return new DirectoryListing(partial, dl.getRemainingEntries());
}
 
Example 25
Source Project: big-c   Source File: PBHelper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Converts a DirectoryListing into its protobuf form; null maps to null.
 */
public static DirectoryListingProto convert(DirectoryListing d) {
  if (d == null) {
    return null;
  }
  DirectoryListingProto.Builder builder = DirectoryListingProto.newBuilder();
  builder.addAllPartialListing(
      Arrays.asList(PBHelper.convert(d.getPartialListing())));
  builder.setRemainingEntries(d.getRemainingEntries());
  return builder.build();
}
 
Example 26
Source Project: big-c   Source File: TestStorageMover.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Walks the tree under {@code status}, verifying every regular file;
 * symlinks are skipped.
 */
private void verifyRecursively(final Path parent,
    final HdfsFileStatus status) throws Exception {
  if (!status.isDir()) {
    if (!status.isSymlink()) { // is file
      verifyFile(parent, status, null);
    }
    return;
  }
  final Path fullPath =
      parent == null ? new Path("/") : status.getFullPath(parent);
  final DirectoryListing children = dfs.getClient().listPaths(
      fullPath.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus child : children.getPartialListing()) {
    verifyRecursively(fullPath, child);
  }
}
 
Example 27
Source Project: big-c   Source File: TestStorageMover.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Locates {@code file} in its parent's listing and verifies it against the
 * expected storage policy; fails the test if the entry is missing.
 */
void verifyFile(final Path file, final Byte expectedPolicyId)
    throws Exception {
  final Path parent = file.getParent();
  final DirectoryListing siblings = dfs.getClient().listPaths(
      parent.toString(), HdfsFileStatus.EMPTY_NAME, true);
  for (HdfsFileStatus sibling : siblings.getPartialListing()) {
    if (sibling.getLocalName().equals(file.getName())) {
      verifyFile(parent, sibling, expectedPolicyId);
      return;
    }
  }
  Assert.fail("File " + file + " not found.");
}
 
Example 28
Source Project: terrapin   Source File: HdfsManagerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Stubs the mock DFS client so listing the base data dir returns the given
 * file sets plus a leftover distcp temp directory.
 */
private void setupBaseDirListing(List<String> fileSets) throws IOException {
  // One extra slot up front for the in-flight distcp temp directory.
  HdfsFileStatus[] statuses = new HdfsFileStatus[fileSets.size() + 1];
  statuses[0] = buildHdfsStatus(Constants.HDFS_DATA_DIR + "/_distcp_XcndjkA", true, null);
  int index = 1;
  for (String fileSet : fileSets) {
    statuses[index++] = buildHdfsStatus(Constants.HDFS_DATA_DIR + "/" + fileSet, true, null);
  }
  when(mockDfsClient.listPaths(eq(Constants.HDFS_DATA_DIR), any(byte[].class)))
      .thenReturn(new DirectoryListing(statuses, 0));
}
 
Example 29
Source Project: RDFS   Source File: DistributedAvatarFileSystem.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Fetch one page of the listing of {@code src}, resuming after
 * {@code startAfter}.
 *
 * NOTE(review): the call is routed through ImmutableFSCaller.callFS();
 * presumably this adds avatar-namenode failover/retry handling — confirm
 * against the ImmutableFSCaller implementation.
 */
public DirectoryListing getPartialListing(final String src,
    final byte[] startAfter) throws IOException {
  return (new ImmutableFSCaller<DirectoryListing>() {
    DirectoryListing call() throws IOException {
      return namenode.getPartialListing(src, startAfter);
    }
  }).callFS();
}
 
Example 30
Source Project: hadoop   Source File: DFSClient.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Get a partial listing of the indicated directory
 * No block locations need to be fetched
 *
 * Convenience overload that delegates to the three-argument
 * listPaths(String, byte[], boolean) with needLocation=false.
 */
public DirectoryListing listPaths(String src,  byte[] startAfter)
  throws IOException {
  return listPaths(src, startAfter, false);
}