Java Code Examples for org.apache.hadoop.hdfs.protocol.HdfsFileStatus#isSymlink()

The following examples show how to use org.apache.hadoop.hdfs.protocol.HdfsFileStatus#isSymlink(). Each example is a snippet from an open-source project; the source file and license are noted above each one.
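
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of how an HdfsFileStatus might be obtained and inspected with isSymlink(); the NameNode URI and the path are placeholders.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class IsSymlinkSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder NameNode URI; point this at a real cluster.
    DFSClient client = new DFSClient(new URI("hdfs://localhost:8020"), conf);
    try {
      HdfsFileStatus status = client.getFileInfo("/tmp/some-link");
      if (status == null) {
        System.out.println("path does not exist");
      } else if (status.isSymlink()) {
        // getSymlink() returns the link target stored in the NameNode.
        System.out.println("symlink -> " + status.getSymlink());
      } else if (status.isDir()) {
        System.out.println("directory with " + status.getChildrenNum() + " children");
      } else {
        System.out.println("regular file, length=" + status.getLen());
      }
    } finally {
      client.close();
    }
  }
}
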
Example 1
Source File: Nfs3Utils.java    From hadoop with Apache License 2.0
public static Nfs3FileAttributes getNfs3FileAttrFromFileStatus(
    HdfsFileStatus fs, IdMappingServiceProvider iug) {
  /**
   * Some 32-bit Linux clients have a problem with 64-bit fileIds: it seems the
   * 32-bit client takes only the lower 32 bits of the fileId and treats it as
   * a signed int. When the 32nd bit is 1, the client considers it invalid.
   */
  NfsFileType fileType = fs.isDir() ? NfsFileType.NFSDIR : NfsFileType.NFSREG;
  fileType = fs.isSymlink() ? NfsFileType.NFSLNK : fileType;
  int nlink = (fileType == NfsFileType.NFSDIR) ? fs.getChildrenNum() + 2 : 1;
  long size = (fileType == NfsFileType.NFSDIR) ? getDirSize(fs
      .getChildrenNum()) : fs.getLen();
  return new Nfs3FileAttributes(fileType, nlink,
      fs.getPermission().toShort(), iug.getUidAllowingUnknown(fs.getOwner()),
      iug.getGidAllowingUnknown(fs.getGroup()), size, 0 /* fsid */,
      fs.getFileId(), fs.getModificationTime(), fs.getAccessTime(),
      new Nfs3FileAttributes.Specdata3());
}
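
A hedged usage sketch for the helper above, assuming the usual imports, Hadoop's ShellBasedIdMapping as the IdMappingServiceProvider, and an already-open DFSClient; the path is a placeholder.

static Nfs3FileAttributes lookupAttrs(DFSClient client, String path)
    throws IOException {
  // Minimal sketch, not from the Hadoop sources; 'path' is assumed to exist.
  IdMappingServiceProvider iug = new ShellBasedIdMapping(new Configuration());
  HdfsFileStatus status = client.getFileInfo(path);
  // Returns NFSLNK for symlinks, NFSDIR for directories, NFSREG otherwise.
  return Nfs3Utils.getNfs3FileAttrFromFileStatus(status, iug);
}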
 
Example 2
Source File: WebHdfsFileSystem.java    From hadoop with Apache License 2.0
private FileStatus makeQualified(HdfsFileStatus f, Path parent) {
  return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
      f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
      f.getPermission(), f.getOwner(), f.getGroup(),
      f.isSymlink() ? new Path(f.getSymlink()) : null,
      f.getFullPath(parent).makeQualified(getUri(), getWorkingDirectory()));
}
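
For context, a short sketch (assuming an existing WebHdfsFileSystem instance passed as fs and a path that is actually a symlink; both are placeholders) showing how the FileStatus built above exposes the link through the generic FileSystem API.

static void printSymlinkTarget(FileSystem fs, Path p) throws IOException {
  // Minimal sketch, not from the Hadoop sources.
  FileStatus st = fs.getFileStatus(p);
  if (st.isSymlink()) {
    // getSymlink() returns the target set in makeQualified(); it throws
    // IOException when the status does not represent a symlink.
    System.out.println(st.getPath() + " -> " + st.getSymlink());
  }
}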
 
Example 3
Source File: PBHelper.java    From hadoop with Apache License 2.0
public static HdfsFileStatusProto convert(HdfsFileStatus fs) {
  if (fs == null)
    return null;
  FileType fType = FileType.IS_FILE;
  if (fs.isDir()) {
    fType = FileType.IS_DIR;
  } else if (fs.isSymlink()) {
    fType = FileType.IS_SYMLINK;
  }

  HdfsFileStatusProto.Builder builder = 
   HdfsFileStatusProto.newBuilder().
    setLength(fs.getLen()).
    setFileType(fType).
    setBlockReplication(fs.getReplication()).
    setBlocksize(fs.getBlockSize()).
    setModificationTime(fs.getModificationTime()).
    setAccessTime(fs.getAccessTime()).
    setPermission(PBHelper.convert(fs.getPermission())).
    setOwner(fs.getOwner()).
    setGroup(fs.getGroup()).
    setFileId(fs.getFileId()).
    setChildrenNum(fs.getChildrenNum()).
    setPath(ByteString.copyFrom(fs.getLocalNameInBytes())).
    setStoragePolicy(fs.getStoragePolicy());
  if (fs.isSymlink()) {
    builder.setSymlink(ByteString.copyFrom(fs.getSymlinkInBytes()));
  }
  if (fs.getFileEncryptionInfo() != null) {
    builder.setFileEncryptionInfo(convert(fs.getFileEncryptionInfo()));
  }
  if (fs instanceof HdfsLocatedFileStatus) {
    final HdfsLocatedFileStatus lfs = (HdfsLocatedFileStatus) fs;
    LocatedBlocks locations = lfs.getBlockLocations();
    if (locations != null) {
      builder.setLocations(PBHelper.convert(locations));
    }
  }
  return builder.build();
}
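
As a rough sketch of consuming the resulting message (not part of PBHelper itself): the symlink target travels in the optional symlink field and is present only when isSymlink() returned true. Here, status is assumed to be an HdfsFileStatus obtained elsewhere, e.g. from DFSClient#getFileInfo.

static void printProtoSymlink(HdfsFileStatus status) {
  // Minimal sketch, not from the Hadoop sources; 'status' is assumed non-null.
  HdfsFileStatusProto proto = PBHelper.convert(status);
  if (proto.getFileType() == HdfsFileStatusProto.FileType.IS_SYMLINK
      && proto.hasSymlink()) {
    // The target was written with ByteString.copyFrom(fs.getSymlinkInBytes()).
    System.out.println("symlink target: " + proto.getSymlink().toStringUtf8());
  }
}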
 
Example 4
Source File: TestJsonUtil.java    From hadoop with Apache License 2.0
static FileStatus toFileStatus(HdfsFileStatus f, String parent) {
  return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
      f.getBlockSize(), f.getModificationTime(), f.getAccessTime(),
      f.getPermission(), f.getOwner(), f.getGroup(),
      f.isSymlink() ? new Path(f.getSymlink()) : null,
      new Path(f.getFullName(parent)));
}
 
Example 5
Source File: TestStorageMover.java    From hadoop with Apache License 2.0
private void verifyRecursively(final Path parent,
    final HdfsFileStatus status) throws Exception {
  if (status.isDir()) {
    Path fullPath = parent == null ?
        new Path("/") : status.getFullPath(parent);
    DirectoryListing children = dfs.getClient().listPaths(
        fullPath.toString(), HdfsFileStatus.EMPTY_NAME, true);
    for (HdfsFileStatus child : children.getPartialListing()) {
      verifyRecursively(fullPath, child);
    }
  } else if (!status.isSymlink()) { // is file
    verifyFile(parent, status, null);
  }
}
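
The test above only inspects the page of results it is given; a hedged sketch of how a caller might use DirectoryListing#hasMore() and getLastName() to page through a large directory while skipping symlinks. As in the test, dfs is assumed to be a DistributedFileSystem; the directory path is a placeholder.

static void processDirectorySkippingSymlinks(DistributedFileSystem dfs,
    String dir) throws IOException {
  // Minimal sketch, not from the Hadoop sources; 'dir' must be an existing directory.
  byte[] startAfter = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing page;
  do {
    page = dfs.getClient().listPaths(dir, startAfter, true);
    for (HdfsFileStatus child : page.getPartialListing()) {
      if (child.isSymlink()) {
        continue;  // ignore symlinks, mirroring the check in verifyRecursively()
      }
      // process 'child' here
    }
    startAfter = page.getLastName();  // resume after the last entry returned
  } while (page.hasMore());
}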
 