Java Code Examples for org.apache.hadoop.fs.permission.FsPermission#getFileDefault()

The following examples show how to use org.apache.hadoop.fs.permission.FsPermission#getFileDefault(). Each example is taken from an open-source project; the header above each snippet names the source file, the project, and its license.
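Before the project examples, here is a minimal, self-contained sketch of the pattern most of them follow: fall back to FsPermission.getFileDefault() (or getDirDefault() for directories) when no explicit permission is supplied. The class and helper names below are invented for illustration; in current Hadoop releases the defaults are 0666 for files and 0777 for directories, before any umask is applied.

import org.apache.hadoop.fs.permission.FsPermission;

public class DefaultPermissionSketch {

  /** Returns the requested permission, or the library default for the path type. */
  static FsPermission resolve(FsPermission requested, boolean isDirectory) {
    if (requested != null) {
      return requested;
    }
    // No explicit permission given: use the library defaults.
    return isDirectory ? FsPermission.getDirDefault() : FsPermission.getFileDefault();
  }

  public static void main(String[] args) {
    System.out.println(resolve(null, false)); // rw-rw-rw-
    System.out.println(resolve(null, true));  // rwxrwxrwx
  }
}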
Example 1
Source File: CrailHadoopFileSystem.java    From incubator-crail with Apache License 2.0
@Override
public FileStatus[] listStatus(Path path) throws FileNotFoundException, IOException {
	try {
		CrailNode node = dfs.lookup(path.toUri().getRawPath()).get();
		Iterator<String> iter = node.asContainer().listEntries();
		ArrayList<FileStatus> statusList = new ArrayList<FileStatus>();
		while(iter.hasNext()){
			String filepath = iter.next();
			CrailNode directFile = dfs.lookup(filepath).get();
			if (directFile != null){
				FsPermission permission = FsPermission.getFileDefault();
				if (directFile.getType().isDirectory()) {
					permission = FsPermission.getDirDefault();
				}
				FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, new Path(filepath).makeQualified(this.getUri(), this.workingDir));	
				statusList.add(status);
			}
		}
		FileStatus[] list = new FileStatus[statusList.size()];
		statusList.toArray(list);
		return list;
	} catch(Exception e){
		throw new FileNotFoundException(path.toUri().getRawPath());
	}
}
 
Example 2
Source File: CrailHadoopFileSystem.java    From incubator-crail with Apache License 2.0
@Override
public FileStatus getFileStatus(Path path) throws IOException {
	statistics.incrementReadOps(1);
	CrailNode directFile = null;
	try {
		directFile = dfs.lookup(path.toUri().getRawPath()).get();
	} catch (Exception e) {
		throw new IOException(e);
	}
	if (directFile == null) {
		throw new FileNotFoundException("File does not exist: " + path);
	}
	FsPermission permission = FsPermission.getFileDefault();
	if (directFile.getType().isDirectory()) {
		permission = FsPermission.getDirDefault();
	}
	FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, path.makeQualified(this.getUri(), this.workingDir));
	return status;
}
 
Example 3
Source File: HdfsFileStatus.java    From hadoop with Apache License 2.0
/**
 * Constructor
 * @param length the number of bytes the file has
 * @param isdir if the path is a directory
 * @param block_replication the replication factor
 * @param blocksize the block size
 * @param modification_time modification time
 * @param access_time access time
 * @param permission permission
 * @param owner the owner of the path
 * @param group the group of the path
 * @param path the local name, in Java UTF-8 encoding, the same as the in-memory form
 * @param fileId the file id
 * @param feInfo the file's encryption info
 */
public HdfsFileStatus(long length, boolean isdir, int block_replication,
    long blocksize, long modification_time, long access_time,
    FsPermission permission, String owner, String group, byte[] symlink,
    byte[] path, long fileId, int childrenNum, FileEncryptionInfo feInfo,
    byte storagePolicy) {
  this.length = length;
  this.isdir = isdir;
  this.block_replication = (short)block_replication;
  this.blocksize = blocksize;
  this.modification_time = modification_time;
  this.access_time = access_time;
  this.permission = (permission == null) ? 
      ((isdir || symlink!=null) ? 
          FsPermission.getDefault() : 
          FsPermission.getFileDefault()) :
      permission;
  this.owner = (owner == null) ? "" : owner;
  this.group = (group == null) ? "" : group;
  this.symlink = symlink;
  this.path = path;
  this.fileId = fileId;
  this.childrenNum = childrenNum;
  this.feInfo = feInfo;
  this.storagePolicy = storagePolicy;
}
 
Example 4
Source File: DFSClient.java    From hadoop with Apache License 2.0
/**
 * Same as {@link #create(String, FsPermission, EnumSet, boolean, short, long,
 * Progressable, int, ChecksumOpt)} with the addition of favoredNodes that is
 * a hint to where the namenode should place the file blocks.
 * The favored nodes hint is not persisted in HDFS, so it may be honored at
 * creation time only; HDFS may later move blocks away from the favored nodes
 * during balancing or replication. A value of null means no favored nodes
 * for this create.
 */
public DFSOutputStream create(String src, 
                           FsPermission permission,
                           EnumSet<CreateFlag> flag, 
                           boolean createParent,
                           short replication,
                           long blockSize,
                           Progressable progress,
                           int buffersize,
                           ChecksumOpt checksumOpt,
                           InetSocketAddress[] favoredNodes) throws IOException {
  checkOpen();
  if (permission == null) {
    permission = FsPermission.getFileDefault();
  }
  FsPermission masked = permission.applyUMask(dfsClientConf.uMask);
  if(LOG.isDebugEnabled()) {
    LOG.debug(src + ": masked=" + masked);
  }
  final DFSOutputStream result = DFSOutputStream.newStreamForCreate(this,
      src, masked, flag, createParent, replication, blockSize, progress,
      buffersize, dfsClientConf.createChecksum(checksumOpt),
      getFavoredNodesStr(favoredNodes));
  beginFileLease(result.getFileId(), result);
  return result;
}
 
Example 5
Source File: HdfsFileStatus.java    From big-c with Apache License 2.0
/**
 * Constructor
 * @param length the number of bytes the file has
 * @param isdir if the path is a directory
 * @param block_replication the replication factor
 * @param blocksize the block size
 * @param modification_time modification time
 * @param access_time access time
 * @param permission permission
 * @param owner the owner of the path
 * @param group the group of the path
 * @param path the local name, in Java UTF-8 encoding, the same as the in-memory form
 * @param fileId the file id
 * @param feInfo the file's encryption info
 */
public HdfsFileStatus(long length, boolean isdir, int block_replication,
    long blocksize, long modification_time, long access_time,
    FsPermission permission, String owner, String group, byte[] symlink,
    byte[] path, long fileId, int childrenNum, FileEncryptionInfo feInfo,
    byte storagePolicy) {
  this.length = length;
  this.isdir = isdir;
  this.block_replication = (short)block_replication;
  this.blocksize = blocksize;
  this.modification_time = modification_time;
  this.access_time = access_time;
  this.permission = (permission == null) ? 
      ((isdir || symlink!=null) ? 
          FsPermission.getDefault() : 
          FsPermission.getFileDefault()) :
      permission;
  this.owner = (owner == null) ? "" : owner;
  this.group = (group == null) ? "" : group;
  this.symlink = symlink;
  this.path = path;
  this.fileId = fileId;
  this.childrenNum = childrenNum;
  this.feInfo = feInfo;
  this.storagePolicy = storagePolicy;
}
 
Example 6
Source File: DFSClient.java    From big-c with Apache License 2.0
/**
 * Same as {@link #create(String, FsPermission, EnumSet, boolean, short, long,
 * Progressable, int, ChecksumOpt)} with the addition of favoredNodes that is
 * a hint to where the namenode should place the file blocks.
 * The favored nodes hint is not persisted in HDFS, so it may be honored at
 * creation time only; HDFS may later move blocks away from the favored nodes
 * during balancing or replication. A value of null means no favored nodes
 * for this create.
 */
public DFSOutputStream create(String src, 
                           FsPermission permission,
                           EnumSet<CreateFlag> flag, 
                           boolean createParent,
                           short replication,
                           long blockSize,
                           Progressable progress,
                           int buffersize,
                           ChecksumOpt checksumOpt,
                           InetSocketAddress[] favoredNodes) throws IOException {
  checkOpen();
  if (permission == null) {
    permission = FsPermission.getFileDefault();
  }
  FsPermission masked = permission.applyUMask(dfsClientConf.uMask);
  if(LOG.isDebugEnabled()) {
    LOG.debug(src + ": masked=" + masked);
  }
  final DFSOutputStream result = DFSOutputStream.newStreamForCreate(this,
      src, masked, flag, createParent, replication, blockSize, progress,
      buffersize, dfsClientConf.createChecksum(checksumOpt),
      getFavoredNodesStr(favoredNodes));
  beginFileLease(result.getFileId(), result);
  return result;
}
 
Example 7
Source File: CrailHDFS.java    From crail with Apache License 2.0
@Override
public FileStatus getFileStatus(Path path) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
	CrailNode directFile = null;
	try {
		directFile = dfs.lookup(path.toUri().getRawPath()).get();
	} catch(Exception e){
		throw new IOException(e);
	}
	if (directFile == null){
		throw new FileNotFoundException("filename " + path);
	}
	
	FsPermission permission = FsPermission.getFileDefault();
	if (directFile.getType().isDirectory()) {
		permission = FsPermission.getDirDefault();
	}		
	FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, path.makeQualified(this.getUri(), this.workingDir));
	return status;
}
 
Example 8
Source File: CrailHDFS.java    From crail with Apache License 2.0
@Override
public FileStatus[] listStatus(Path path) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
	try {
		CrailNode node = dfs.lookup(path.toUri().getRawPath()).get();
		Iterator<String> iter = node.asContainer().listEntries();
		ArrayList<FileStatus> statusList = new ArrayList<FileStatus>();
		while(iter.hasNext()){
			String filepath = iter.next();
			CrailNode directFile = dfs.lookup(filepath).get();
			if (directFile != null){
				FsPermission permission = FsPermission.getFileDefault();
				if (directFile.getType().isDirectory()) {
					permission = FsPermission.getDirDefault();
				}
				FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, new Path(filepath).makeQualified(this.getUri(), workingDir));	
				statusList.add(status);
			}
		}
		FileStatus[] list = new FileStatus[statusList.size()];
		statusList.toArray(list);
		return list;
	} catch(Exception e){
		throw new FileNotFoundException(path.toUri().getRawPath());
	}
}
 
Example 9
Source File: CrailHadoopFileSystem.java    From crail with Apache License 2.0
@Override
public FileStatus[] listStatus(Path path) throws FileNotFoundException, IOException {
	try {
		CrailNode node = dfs.lookup(path.toUri().getRawPath()).get();
		Iterator<String> iter = node.asContainer().listEntries();
		ArrayList<FileStatus> statusList = new ArrayList<FileStatus>();
		while(iter.hasNext()){
			String filepath = iter.next();
			CrailNode directFile = dfs.lookup(filepath).get();
			if (directFile != null){
				FsPermission permission = FsPermission.getFileDefault();
				if (directFile.getType().isDirectory()) {
					permission = FsPermission.getDirDefault();
				}
				FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, new Path(filepath).makeQualified(this.getUri(), this.workingDir));	
				statusList.add(status);
			}
		}
		FileStatus[] list = new FileStatus[statusList.size()];
		statusList.toArray(list);
		return list;
	} catch(Exception e){
		throw new FileNotFoundException(path.toUri().getRawPath());
	}
}
 
Example 10
Source File: CrailHadoopFileSystem.java    From crail with Apache License 2.0
@Override
public FileStatus getFileStatus(Path path) throws IOException {
	CrailNode directFile = null;
	try {
		directFile = dfs.lookup(path.toUri().getRawPath()).get();
	} catch (Exception e) {
		throw new IOException(e);
	}
	if (directFile == null) {
		throw new FileNotFoundException("File does not exist: " + path);
	}
	FsPermission permission = FsPermission.getFileDefault();
	if (directFile.getType().isDirectory()) {
		permission = FsPermission.getDirDefault();
	}
	FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(), CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(), directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER, path.makeQualified(this.getUri(), this.workingDir));
	return status;
}
 
Example 11
Source File: FileSystemStorage.java    From zeppelin with Apache License 2.0
public void writeFile(final String content, final Path file, boolean writeTempFileFirst, Set<PosixFilePermission> permissions)
    throws IOException {
  FsPermission fsPermission;
  if (permissions == null || permissions.isEmpty()) {
    fsPermission = FsPermission.getFileDefault();
  } else {
    // FsPermission expects a 10-character string because of the leading
    // directory indicator, i.e. "drwx------". The JDK toString method returns
    // a 9-character string, so prepend a leading character.
    fsPermission = FsPermission.valueOf("-" + PosixFilePermissions.toString(permissions));
  }
  callHdfsOperation(new HdfsOperation<Void>() {
    @Override
    public Void call() throws IOException {
      InputStream in = new ByteArrayInputStream(content.getBytes(
          zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_ENCODING)));
      Path tmpFile = new Path(file.toString() + ".tmp");
      IOUtils.copyBytes(in, fs.create(tmpFile), hadoopConf);
      fs.setPermission(tmpFile, fsPermission);
      fs.delete(file, true);
      fs.rename(tmpFile, file);
      return null;
    }
  });
}
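The conversion at the heart of the example above can be exercised on its own. The following sketch is a standalone illustration (not part of Zeppelin) of why a leading character is prepended before calling FsPermission.valueOf():

import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

import org.apache.hadoop.fs.permission.FsPermission;

public class PosixToFsPermissionSketch {
  public static void main(String[] args) {
    Set<PosixFilePermission> posix = PosixFilePermissions.fromString("rw-r-----");
    // PosixFilePermissions.toString() yields 9 characters ("rw-r-----"),
    // while FsPermission.valueOf() expects the 10-character ls-style form,
    // so a file-type placeholder is prepended.
    FsPermission converted = FsPermission.valueOf("-" + PosixFilePermissions.toString(posix));
    System.out.println(converted); // rw-r-----
  }
}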
 
Example 12
Source File: RawLocalFileSystem.java    From lucene-solr with Apache License 2.0
private LocalFSFileOutputStream(Path f, boolean append,
                                FsPermission permission) throws IOException {
  File file = pathToFile(f);
  if (!append && permission == null) {
    permission = FsPermission.getFileDefault();
  }
  if (permission == null) {
    this.fos = new FileOutputStream(file, append);
  } else {
    permission = permission.applyUMask(FsPermission.getUMask(getConf()));
    if (Shell.WINDOWS && NativeIO.isAvailable()) {
      this.fos = NativeIO.Windows.createFileOutputStreamWithMode(file,
          append, permission.toShort());
    } else {
      this.fos = new FileOutputStream(file, append);
      boolean success = false;
      try {
        setPermission(f, permission);
        success = true;
      } finally {
        if (!success) {
          IOUtils.cleanup(LOG, this.fos);
        }
      }
    }
  }
}
 
Example 13
Source File: CommonFSUtils.java    From hbase with Apache License 2.0
/**
 * Get the file permissions specified in the configuration, if they are
 * enabled.
 *
 * @param fs filesystem that the file will be created on.
 * @param conf configuration to read for determining if permissions are
 *          enabled and which to use
 * @param permssionConfKey property key in the configuration to use when
 *          finding the permission
 * @return the permission to use when creating a new file on the fs. If
 *         special permissions are not specified in the configuration, then
 *         the default permissions on the fs will be returned.
 */
public static FsPermission getFilePermissions(final FileSystem fs,
    final Configuration conf, final String permssionConfKey) {
  boolean enablePermissions = conf.getBoolean(
      HConstants.ENABLE_DATA_FILE_UMASK, false);

  if (enablePermissions) {
    try {
      FsPermission perm = new FsPermission(FULL_RWX_PERMISSIONS);
      // make sure that we have a mask, if not, go default.
      String mask = conf.get(permssionConfKey);
      if (mask == null) {
        return FsPermission.getFileDefault();
      }
      // apply the umask
      FsPermission umask = new FsPermission(mask);
      return perm.applyUMask(umask);
    } catch (IllegalArgumentException e) {
      LOG.warn(
          "Incorrect umask attempted to be created: "
              + conf.get(permssionConfKey)
              + ", using default file permissions.", e);
      return FsPermission.getFileDefault();
    }
  }
  return FsPermission.getFileDefault();
}
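The umask arithmetic used by getFilePermissions above can be seen in isolation in the short sketch below; the constants and class name are chosen only for illustration. applyUMask() clears every permission bit that is set in the mask.

import org.apache.hadoop.fs.permission.FsPermission;

public class UmaskSketch {
  public static void main(String[] args) {
    FsPermission full = new FsPermission("777");   // rwxrwxrwx
    FsPermission umask = new FsPermission("022");
    // 777 & ~022 = 755
    System.out.println(full.applyUMask(umask));    // rwxr-xr-x
    // the library default for files, before any umask is applied
    System.out.println(FsPermission.getFileDefault()); // rw-rw-rw-
  }
}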
 
Example 14
Source File: FileStatus.java    From hadoop with Apache License 2.0
/**
 * Sets permission.
 * @param permission if permission is null, default value is set
 */
protected void setPermission(FsPermission permission) {
  this.permission = (permission == null) ? 
                    FsPermission.getFileDefault() : permission;
}
 
Example 15
Source File: FileStatus.java    From big-c with Apache License 2.0
/**
 * Sets permission.
 * @param permission if permission is null, default value is set
 */
protected void setPermission(FsPermission permission) {
  this.permission = (permission == null) ? 
                    FsPermission.getFileDefault() : permission;
}
 
Example 16
Source File: MockFileSystem.java    From pravega with Apache License 2.0
FileData(Path path) {
    this.path = path;
    this.permission = FsPermission.getFileDefault();
}