org.apache.hadoop.fs.FileEncryptionInfo Java Examples

The following examples show how to use org.apache.hadoop.fs.FileEncryptionInfo. Each example is taken from an open source project; the source file, originating project, and license are noted above the code.
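
As a quick orientation before the project examples, here is a minimal sketch (not taken from any of the projects below) that constructs a FileEncryptionInfo directly and reads its fields back. The constructor signature and accessors mirror those used in the PBHelper and TestEncryptionZones examples on this page; the class name, key name, and key-version string are placeholders, and in practice the NameNode (or Ozone Manager) supplies the populated object.

import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.FileEncryptionInfo;

public class FileEncryptionInfoSketch {
  public static void main(String[] args) {
    CipherSuite suite = CipherSuite.AES_CTR_NOPADDING;
    CryptoProtocolVersion version = CryptoProtocolVersion.ENCRYPTION_ZONES;
    // Placeholder EDEK and IV, sized to the suite's block size as in the
    // TestEncryptionZones examples below; real values come from the server.
    byte[] edek = new byte[suite.getAlgorithmBlockSize()];
    byte[] iv = new byte[suite.getAlgorithmBlockSize()];

    FileEncryptionInfo feInfo = new FileEncryptionInfo(
        suite, version, edek, iv, "myKey", "myKey@0");

    // The accessors used throughout the examples on this page.
    System.out.println(feInfo.getCipherSuite());
    System.out.println(feInfo.getCryptoProtocolVersion());
    System.out.println(feInfo.getKeyName());
    System.out.println(feInfo.getEzKeyVersionName());
    System.out.println(feInfo.getEncryptedDataEncryptionKey().length);
    System.out.println(feInfo.getIV().length);
  }
}

These are the same fields that the PBHelper conversion examples below serialize to and from protobuf.
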
Example #1
Source File: HDFSUtil.java    From spliceengine with GNU Affero General Public License v3.0
private static KeyProvider.KeyVersion decryptEncryptedDataEncryptionKey(DistributedFileSystem dfs, FileEncryptionInfo feInfo) throws IOException {
    KeyProvider provider = dfs.dfs.getKeyProvider();
    if (provider == null) {
        throw new IOException("No KeyProvider is configured, cannot access" +
                " an encrypted file");
    }
    KeyProviderCryptoExtension.EncryptedKeyVersion ekv = KeyProviderCryptoExtension.EncryptedKeyVersion.createForDecryption(
            feInfo.getKeyName(), feInfo.getEzKeyVersionName(), feInfo.getIV(),
            feInfo.getEncryptedDataEncryptionKey());
    try {
        KeyProviderCryptoExtension cryptoProvider = KeyProviderCryptoExtension
                .createKeyProviderCryptoExtension(provider);
        return cryptoProvider.decryptEncryptedKey(ekv);
    } catch (GeneralSecurityException e) {
        throw new IOException(e);
    }
}
 
Example #2
Source File: OzoneKMSUtil.java    From hadoop-ozone with Apache License 2.0
public static CryptoCodec getCryptoCodec(ConfigurationSource conf,
    FileEncryptionInfo feInfo) throws IOException {
  CipherSuite suite = feInfo.getCipherSuite();
  if (suite.equals(CipherSuite.UNKNOWN)) {
    throw new IOException("NameNode specified unknown CipherSuite with ID " +
            suite.getUnknownValue() + ", cannot instantiate CryptoCodec.");
  } else {
    Configuration hadoopConfig =
        LegacyHadoopConfigurationSource.asHadoopConfiguration(conf);
    CryptoCodec codec = CryptoCodec.getInstance(hadoopConfig, suite);
    if (codec == null) {
      throw new OMException("No configuration found for the cipher suite " +
              suite.getConfigSuffix() + " prefixed with " +
              "hadoop.security.crypto.codec.classes. Please see the" +
              " example configuration hadoop.security.crypto.codec.classes." +
              "EXAMPLE CIPHER SUITE at core-default.xml for details.",
              OMException.ResultCodes.UNKNOWN_CIPHER_SUITE);
    } else {
      return codec;
    }
  }
}
 
Example #3
Source File: KeyManagerImpl.java    From hadoop-ozone with Apache License 2.0
private OmKeyInfo prepareKeyInfo(
    OmKeyArgs keyArgs, String dbKeyName, long size,
    List<OmKeyLocationInfo> locations, FileEncryptionInfo encInfo)
    throws IOException {
  OmKeyInfo keyInfo = null;
  if (keyArgs.getIsMultipartKey()) {
    keyInfo = prepareMultipartKeyInfo(keyArgs, size, locations, encInfo);
  } else if (metadataManager.getKeyTable().isExist(dbKeyName)) {
    keyInfo = metadataManager.getKeyTable().get(dbKeyName);
    // the key already exists, the new blocks will be added as a new version
    // when locations.size = 0, the new version will have identical blocks
    // as its previous version
    keyInfo.addNewVersion(locations, true);
    keyInfo.setDataSize(size + keyInfo.getDataSize());
  }
  if (keyInfo != null) {
    keyInfo.setMetadata(keyArgs.getMetadata());
  }
  return keyInfo;
}
 
Example #4
Source File: KeyManagerImpl.java    From hadoop-ozone with Apache License 2.0
private OmKeyInfo createDirectoryKey(String volumeName, String bucketName,
    String keyName, List<OzoneAcl> acls) throws IOException {
  // verify bucket exists
  OmBucketInfo bucketInfo = getBucketInfo(volumeName, bucketName);

  String dir = OzoneFSUtils.addTrailingSlashIfNeeded(keyName);
  FileEncryptionInfo encInfo = getFileEncryptionInfo(bucketInfo);
  return new OmKeyInfo.Builder()
      .setVolumeName(volumeName)
      .setBucketName(bucketName)
      .setKeyName(dir)
      .setOmKeyLocationInfos(Collections.singletonList(
          new OmKeyLocationInfoGroup(0, new ArrayList<>())))
      .setCreationTime(Time.now())
      .setModificationTime(Time.now())
      .setDataSize(0)
      .setReplicationType(ReplicationType.RATIS)
      .setReplicationFactor(ReplicationFactor.ONE)
      .setFileEncryptionInfo(encInfo)
      .setAcls(acls)
      .build();
}
 
Example #5
Source File: HdfsFileStatus.java    From hadoop with Apache License 2.0
/**
 * Constructor.
 * @param length the number of bytes the file has
 * @param isdir if the path is a directory
 * @param block_replication the replication factor
 * @param blocksize the block size
 * @param modification_time modification time
 * @param access_time access time
 * @param permission permission
 * @param owner the owner of the path
 * @param group the group of the path
 * @param symlink the symlink target, or null if the path is not a symlink
 * @param path the local name in Java UTF-8 encoding, the same as the in-memory form
 * @param fileId the file id
 * @param childrenNum the number of children of the path
 * @param feInfo the file's encryption info
 * @param storagePolicy the ID of the storage policy for the path
 */
public HdfsFileStatus(long length, boolean isdir, int block_replication,
    long blocksize, long modification_time, long access_time,
    FsPermission permission, String owner, String group, byte[] symlink,
    byte[] path, long fileId, int childrenNum, FileEncryptionInfo feInfo,
    byte storagePolicy) {
  this.length = length;
  this.isdir = isdir;
  this.block_replication = (short)block_replication;
  this.blocksize = blocksize;
  this.modification_time = modification_time;
  this.access_time = access_time;
  this.permission = (permission == null) ? 
      ((isdir || symlink!=null) ? 
          FsPermission.getDefault() : 
          FsPermission.getFileDefault()) :
      permission;
  this.owner = (owner == null) ? "" : owner;
  this.group = (group == null) ? "" : group;
  this.symlink = symlink;
  this.path = path;
  this.fileId = fileId;
  this.childrenNum = childrenNum;
  this.feInfo = feInfo;
  this.storagePolicy = storagePolicy;
}
 
Example #6
Source File: DFSClient.java    From hadoop with Apache License 2.0
/**
 * Decrypts an EDEK by consulting the KeyProvider.
 */
private KeyVersion decryptEncryptedDataEncryptionKey(FileEncryptionInfo
    feInfo) throws IOException {
  TraceScope scope = Trace.startSpan("decryptEDEK", traceSampler);
  try {
    KeyProvider provider = getKeyProvider();
    if (provider == null) {
      throw new IOException("No KeyProvider is configured, cannot access" +
          " an encrypted file");
    }
    EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption(
        feInfo.getKeyName(), feInfo.getEzKeyVersionName(), feInfo.getIV(),
        feInfo.getEncryptedDataEncryptionKey());
    try {
      KeyProviderCryptoExtension cryptoProvider = KeyProviderCryptoExtension
          .createKeyProviderCryptoExtension(provider);
      return cryptoProvider.decryptEncryptedKey(ekv);
    } catch (GeneralSecurityException e) {
      throw new IOException(e);
    }
  } finally {
    scope.close();
  }
}
 
Example #7
Source File: DFSClient.java    From hadoop with Apache License 2.0
/**
 * Obtain a CryptoCodec based on the CipherSuite set in a FileEncryptionInfo
 * and the available CryptoCodecs configured in the Configuration.
 *
 * @param conf   Configuration
 * @param feInfo FileEncryptionInfo
 * @return CryptoCodec
 * @throws IOException if no suitable CryptoCodec for the CipherSuite is
 *                     available.
 */
private static CryptoCodec getCryptoCodec(Configuration conf,
    FileEncryptionInfo feInfo) throws IOException {
  final CipherSuite suite = feInfo.getCipherSuite();
  if (suite.equals(CipherSuite.UNKNOWN)) {
    throw new IOException("NameNode specified unknown CipherSuite with ID "
        + suite.getUnknownValue() + ", cannot instantiate CryptoCodec.");
  }
  final CryptoCodec codec = CryptoCodec.getInstance(conf, suite);
  if (codec == null) {
    throw new UnknownCipherSuiteException(
        "No configuration found for the cipher suite "
        + suite.getConfigSuffix() + " prefixed with "
        + HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX
        + ". Please see the example configuration "
        + "hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE "
        + "at core-default.xml for details.");
  }
  return codec;
}
 
Example #8
Source File: DFSClient.java    From hadoop with Apache License 2.0
/**
 * Wraps the stream in a CryptoInputStream if the underlying file is
 * encrypted.
 */
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis)
    throws IOException {
  final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    final KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoInputStream cryptoIn =
        new CryptoInputStream(dfsis, codec, decrypted.getMaterial(),
            feInfo.getIV());
    return new HdfsDataInputStream(cryptoIn);
  } else {
    // No FileEncryptionInfo so no encryption.
    return new HdfsDataInputStream(dfsis);
  }
}
 
Example #9
Source File: DFSClient.java    From hadoop with Apache License 2.0
/**
 * Wraps the stream in a CryptoOutputStream if the underlying file is
 * encrypted.
 */
public HdfsDataOutputStream createWrappedOutputStream(DFSOutputStream dfsos,
    FileSystem.Statistics statistics, long startPos) throws IOException {
  final FileEncryptionInfo feInfo = dfsos.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoOutputStream cryptoOut =
        new CryptoOutputStream(dfsos, codec,
            decrypted.getMaterial(), feInfo.getIV(), startPos);
    return new HdfsDataOutputStream(cryptoOut, statistics, startPos);
  } else {
    // No FileEncryptionInfo present so no encryption.
    return new HdfsDataOutputStream(dfsos, statistics, startPos);
  }
}
 
Example #10
Source File: OzoneKMSUtil.java    From hadoop-ozone with Apache License 2.0
public static KeyProvider.KeyVersion decryptEncryptedDataEncryptionKey(
    FileEncryptionInfo feInfo, KeyProvider keyProvider) throws IOException {
  if (keyProvider == null) {
    throw new IOException("No KeyProvider is configured, " +
        "cannot access an encrypted file");
  } else {
    EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption(
        feInfo.getKeyName(), feInfo.getEzKeyVersionName(), feInfo.getIV(),
        feInfo.getEncryptedDataEncryptionKey());

    try {
      KeyProviderCryptoExtension cryptoProvider = KeyProviderCryptoExtension
          .createKeyProviderCryptoExtension(keyProvider);
      return cryptoProvider.decryptEncryptedKey(ekv);
    } catch (GeneralSecurityException gse) {
      throw new IOException(gse);
    }
  }
}
 
Example #11
Source File: TestEncryptionZones.java    From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
private static void mockCreate(ClientProtocol mcp,
    CipherSuite suite, CryptoProtocolVersion version) throws Exception {
  Mockito.doReturn(
      new HdfsFileStatus(0, false, 1, 1024, 0, 0, new FsPermission(
          (short) 777), "owner", "group", new byte[0], new byte[0],
          1010, 0, new FileEncryptionInfo(suite,
          version, new byte[suite.getAlgorithmBlockSize()],
          new byte[suite.getAlgorithmBlockSize()],
          "fakeKey", "fakeVersion"),
          (byte) 0))
      .when(mcp)
      .create(anyString(), (FsPermission) anyObject(), anyString(),
          (EnumSetWritable<CreateFlag>) anyObject(), anyBoolean(),
          anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject());
}
 
Example #12
Source File: DFSClient.java    From big-c with Apache License 2.0
/**
 * Decrypts an EDEK by consulting the KeyProvider.
 */
private KeyVersion decryptEncryptedDataEncryptionKey(FileEncryptionInfo
    feInfo) throws IOException {
  TraceScope scope = Trace.startSpan("decryptEDEK", traceSampler);
  try {
    KeyProvider provider = getKeyProvider();
    if (provider == null) {
      throw new IOException("No KeyProvider is configured, cannot access" +
          " an encrypted file");
    }
    EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption(
        feInfo.getKeyName(), feInfo.getEzKeyVersionName(), feInfo.getIV(),
        feInfo.getEncryptedDataEncryptionKey());
    try {
      KeyProviderCryptoExtension cryptoProvider = KeyProviderCryptoExtension
          .createKeyProviderCryptoExtension(provider);
      return cryptoProvider.decryptEncryptedKey(ekv);
    } catch (GeneralSecurityException e) {
      throw new IOException(e);
    }
  } finally {
    scope.close();
  }
}
 
Example #13
Source File: DFSClient.java    From big-c with Apache License 2.0
/**
 * Obtain a CryptoCodec based on the CipherSuite set in a FileEncryptionInfo
 * and the available CryptoCodecs configured in the Configuration.
 *
 * @param conf   Configuration
 * @param feInfo FileEncryptionInfo
 * @return CryptoCodec
 * @throws IOException if no suitable CryptoCodec for the CipherSuite is
 *                     available.
 */
private static CryptoCodec getCryptoCodec(Configuration conf,
    FileEncryptionInfo feInfo) throws IOException {
  final CipherSuite suite = feInfo.getCipherSuite();
  if (suite.equals(CipherSuite.UNKNOWN)) {
    throw new IOException("NameNode specified unknown CipherSuite with ID "
        + suite.getUnknownValue() + ", cannot instantiate CryptoCodec.");
  }
  final CryptoCodec codec = CryptoCodec.getInstance(conf, suite);
  if (codec == null) {
    throw new UnknownCipherSuiteException(
        "No configuration found for the cipher suite "
        + suite.getConfigSuffix() + " prefixed with "
        + HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX
        + ". Please see the example configuration "
        + "hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE "
        + "at core-default.xml for details.");
  }
  return codec;
}
 
Example #14
Source File: DFSClient.java    From big-c with Apache License 2.0
/**
 * Wraps the stream in a CryptoInputStream if the underlying file is
 * encrypted.
 */
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis)
    throws IOException {
  final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    final KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoInputStream cryptoIn =
        new CryptoInputStream(dfsis, codec, decrypted.getMaterial(),
            feInfo.getIV());
    return new HdfsDataInputStream(cryptoIn);
  } else {
    // No FileEncryptionInfo so no encryption.
    return new HdfsDataInputStream(dfsis);
  }
}
 
Example #15
Source File: DFSClient.java    From big-c with Apache License 2.0
/**
 * Wraps the stream in a CryptoOutputStream if the underlying file is
 * encrypted.
 */
public HdfsDataOutputStream createWrappedOutputStream(DFSOutputStream dfsos,
    FileSystem.Statistics statistics, long startPos) throws IOException {
  final FileEncryptionInfo feInfo = dfsos.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoOutputStream cryptoOut =
        new CryptoOutputStream(dfsos, codec,
            decrypted.getMaterial(), feInfo.getIV(), startPos);
    return new HdfsDataOutputStream(cryptoOut, statistics, startPos);
  } else {
    // No FileEncryptionInfo present so no encryption.
    return new HdfsDataOutputStream(dfsos, statistics, startPos);
  }
}
 
Example #16
Source File: FSDirectory.java    From big-c with Apache License 2.0
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);

  writeLock();
  try {
    FSDirXAttrOp.unprotectedSetXAttrs(this, src, xAttrs,
                                      EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
 
Example #17
Source File: TestEncryptionZones.java    From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
private static void mockCreate(ClientProtocol mcp,
    CipherSuite suite, CryptoProtocolVersion version) throws Exception {
  Mockito.doReturn(
      new HdfsFileStatus(0, false, 1, 1024, 0, 0, new FsPermission(
          (short) 777), "owner", "group", new byte[0], new byte[0],
          1010, 0, new FileEncryptionInfo(suite,
          version, new byte[suite.getAlgorithmBlockSize()],
          new byte[suite.getAlgorithmBlockSize()],
          "fakeKey", "fakeVersion"),
          (byte) 0))
      .when(mcp)
      .create(anyString(), (FsPermission) anyObject(), anyString(),
          (EnumSetWritable<CreateFlag>) anyObject(), anyBoolean(),
          anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject());
}
 
Example #18
Source File: ProxiedDFSClient.java    From spliceengine with GNU Affero General Public License v3.0
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis)
        throws IOException {
    final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
    if (feInfo != null) {
        // File is encrypted, wrap the stream in a crypto stream.
        // Currently only one version, so no special logic based on the version #
        getCryptoProtocolVersion(feInfo);
        final CryptoCodec codec = getCryptoCodec(getConfiguration(), feInfo);
        final KeyProvider.KeyVersion decrypted = decryptEncryptedDataEncryptionKey(dfsis, feInfo);
        final CryptoInputStream cryptoIn =
                new CryptoInputStream(dfsis, codec, decrypted.getMaterial(),
                        feInfo.getIV());
        return new HdfsDataInputStream(cryptoIn);
    } else {
        // No FileEncryptionInfo so no encryption.
        return new HdfsDataInputStream(dfsis);
    }
}
 
Example #19
Source File: ProxiedDFSClient.java    From spliceengine with GNU Affero General Public License v3.0
/**
 * Obtain a CryptoCodec based on the CipherSuite set in a FileEncryptionInfo
 * and the available CryptoCodecs configured in the Configuration.
 *
 * @param conf   Configuration
 * @param feInfo FileEncryptionInfo
 * @return CryptoCodec
 * @throws IOException if no suitable CryptoCodec for the CipherSuite is
 *                     available.
 */
private static CryptoCodec getCryptoCodec(Configuration conf,
                                          FileEncryptionInfo feInfo) throws IOException {
    final CipherSuite suite = feInfo.getCipherSuite();
    if (suite.equals(CipherSuite.UNKNOWN)) {
        throw new IOException("NameNode specified unknown CipherSuite with ID "
                + suite.getUnknownValue() + ", cannot instantiate CryptoCodec.");
    }
    final CryptoCodec codec = CryptoCodec.getInstance(conf, suite);
    if (codec == null) {
        throw new UnknownCipherSuiteException(
                "No configuration found for the cipher suite "
                        + suite.getConfigSuffix() + " prefixed with "
                        + HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX
                        + ". Please see the example configuration "
                        + "hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE "
                        + "at core-default.xml for details.");
    }
    return codec;
}
 
Example #20
Source File: HdfsFileStatus.java    From big-c with Apache License 2.0
/**
 * Constructor.
 * @param length the number of bytes the file has
 * @param isdir if the path is a directory
 * @param block_replication the replication factor
 * @param blocksize the block size
 * @param modification_time modification time
 * @param access_time access time
 * @param permission permission
 * @param owner the owner of the path
 * @param group the group of the path
 * @param symlink the symlink target, or null if the path is not a symlink
 * @param path the local name in Java UTF-8 encoding, the same as the in-memory form
 * @param fileId the file id
 * @param childrenNum the number of children of the path
 * @param feInfo the file's encryption info
 * @param storagePolicy the ID of the storage policy for the path
 */
public HdfsFileStatus(long length, boolean isdir, int block_replication,
    long blocksize, long modification_time, long access_time,
    FsPermission permission, String owner, String group, byte[] symlink,
    byte[] path, long fileId, int childrenNum, FileEncryptionInfo feInfo,
    byte storagePolicy) {
  this.length = length;
  this.isdir = isdir;
  this.block_replication = (short)block_replication;
  this.blocksize = blocksize;
  this.modification_time = modification_time;
  this.access_time = access_time;
  this.permission = (permission == null) ? 
      ((isdir || symlink!=null) ? 
          FsPermission.getDefault() : 
          FsPermission.getFileDefault()) :
      permission;
  this.owner = (owner == null) ? "" : owner;
  this.group = (group == null) ? "" : group;
  this.symlink = symlink;
  this.path = path;
  this.fileId = fileId;
  this.childrenNum = childrenNum;
  this.feInfo = feInfo;
  this.storagePolicy = storagePolicy;
}
 
Example #21
Source File: FSDirectory.java    From hadoop with Apache License 2.0
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);

  writeLock();
  try {
    FSDirXAttrOp.unprotectedSetXAttrs(this, src, xAttrs,
                                      EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
 
Example #22
Source File: PBHelper.java    From hadoop with Apache License 2.0
public static FileEncryptionInfo convert(
    HdfsProtos.FileEncryptionInfoProto proto) {
  if (proto == null) {
    return null;
  }
  CipherSuite suite = convert(proto.getSuite());
  CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
  byte[] key = proto.getKey().toByteArray();
  byte[] iv = proto.getIv().toByteArray();
  String ezKeyVersionName = proto.getEzKeyVersionName();
  String keyName = proto.getKeyName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
 
Example #23
Source File: OMKeyRequest.java    From hadoop-ozone with Apache License 2.0
/**
 * Get the FileEncryptionInfo carried in KeyArgs, if any.
 * @param keyArgs the key arguments to read the encryption info from
 * @return the converted FileEncryptionInfo, or null if KeyArgs has none
 */
protected FileEncryptionInfo getFileEncryptionInfo(KeyArgs keyArgs) {
  FileEncryptionInfo encryptionInfo = null;
  if (keyArgs.hasFileEncryptionInfo()) {
    encryptionInfo = OMPBHelper.convert(keyArgs.getFileEncryptionInfo());
  }
  return encryptionInfo;
}
 
Example #24
Source File: PBHelper.java    From hadoop with Apache License 2.0
public static HdfsProtos.FileEncryptionInfoProto convert(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.FileEncryptionInfoProto.newBuilder()
      .setSuite(convert(info.getCipherSuite()))
      .setCryptoProtocolVersion(convert(info.getCryptoProtocolVersion()))
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .setKeyName(info.getKeyName())
      .build();
}
 
Example #25
Source File: BlockManager.java    From hadoop with Apache License 2.0
/** Create a LocatedBlocks. */
public LocatedBlocks createLocatedBlocks(final BlockInfoContiguous[] blocks,
    final long fileSizeExcludeBlocksUnderConstruction,
    final boolean isFileUnderConstruction, final long offset,
    final long length, final boolean needBlockToken,
    final boolean inSnapshot, FileEncryptionInfo feInfo)
    throws IOException {
  assert namesystem.hasReadLock();
  if (blocks == null) {
    return null;
  } else if (blocks.length == 0) {
    return new LocatedBlocks(0, isFileUnderConstruction,
        Collections.<LocatedBlock>emptyList(), null, false, feInfo);
  } else {
    if (LOG.isDebugEnabled()) {
      LOG.debug("blocks = " + java.util.Arrays.asList(blocks));
    }
    final AccessMode mode = needBlockToken? AccessMode.READ: null;
    final List<LocatedBlock> locatedblocks = createLocatedBlockList(
        blocks, offset, length, Integer.MAX_VALUE, mode);

    final LocatedBlock lastlb;
    final boolean isComplete;
    if (!inSnapshot) {
      final BlockInfoContiguous last = blocks[blocks.length - 1];
      final long lastPos = last.isComplete()?
          fileSizeExcludeBlocksUnderConstruction - last.getNumBytes()
          : fileSizeExcludeBlocksUnderConstruction;
      lastlb = createLocatedBlock(last, lastPos, mode);
      isComplete = last.isComplete();
    } else {
      lastlb = createLocatedBlock(blocks,
          fileSizeExcludeBlocksUnderConstruction, mode);
      isComplete = true;
    }
    return new LocatedBlocks(
        fileSizeExcludeBlocksUnderConstruction, isFileUnderConstruction,
        locatedblocks, lastlb, isComplete, feInfo);
  }
}
 
Example #26
Source File: PBHelper.java    From hadoop with Apache License 2.0
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .build();
}
 
Example #27
Source File: PBHelper.java    From big-c with Apache License 2.0
public static FileEncryptionInfo convert(
    HdfsProtos.FileEncryptionInfoProto proto) {
  if (proto == null) {
    return null;
  }
  CipherSuite suite = convert(proto.getSuite());
  CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
  byte[] key = proto.getKey().toByteArray();
  byte[] iv = proto.getIv().toByteArray();
  String ezKeyVersionName = proto.getEzKeyVersionName();
  String keyName = proto.getKeyName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
 
Example #28
Source File: TestEncryptionZones.java    From hadoop with Apache License 2.0
@Test(timeout = 120000)
public void testReadWrite() throws Exception {
  final HdfsAdmin dfsAdmin =
      new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
  // Create a base file for comparison
  final Path baseFile = new Path("/base");
  final int len = 8192;
  DFSTestUtil.createFile(fs, baseFile, len, (short) 1, 0xFEED);
  // Create the first enc file
  final Path zone = new Path("/zone");
  fs.mkdirs(zone);
  dfsAdmin.createEncryptionZone(zone, TEST_KEY);
  final Path encFile1 = new Path(zone, "myfile");
  DFSTestUtil.createFile(fs, encFile1, len, (short) 1, 0xFEED);
  // Read them back in and compare byte-by-byte
  verifyFilesEqual(fs, baseFile, encFile1, len);
  // Roll the key of the encryption zone
  assertNumZones(1);
  String keyName = dfsAdmin.listEncryptionZones().next().getKeyName();
  cluster.getNamesystem().getProvider().rollNewVersion(keyName);
  // Read them back in and compare byte-by-byte
  verifyFilesEqual(fs, baseFile, encFile1, len);
  // Write a new enc file and validate
  final Path encFile2 = new Path(zone, "myfile2");
  DFSTestUtil.createFile(fs, encFile2, len, (short) 1, 0xFEED);
  // FEInfos should be different
  FileEncryptionInfo feInfo1 = getFileEncryptionInfo(encFile1);
  FileEncryptionInfo feInfo2 = getFileEncryptionInfo(encFile2);
  assertFalse("EDEKs should be different", Arrays
      .equals(feInfo1.getEncryptedDataEncryptionKey(),
          feInfo2.getEncryptedDataEncryptionKey()));
  assertNotEquals("Key was rolled, versions should be different",
      feInfo1.getEzKeyVersionName(), feInfo2.getEzKeyVersionName());
  // Contents still equal
  verifyFilesEqual(fs, encFile1, encFile2, len);
}
 
Example #29
Source File: PBHelper.java    From big-c with Apache License 2.0
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .build();
}
 
Example #30
Source File: PBHelper.java    From hadoop with Apache License 2.0
public static FileEncryptionInfo convert(
    HdfsProtos.PerFileEncryptionInfoProto fileProto,
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (fileProto == null || suite == null || version == null ||
      keyName == null) {
    return null;
  }
  byte[] key = fileProto.getKey().toByteArray();
  byte[] iv = fileProto.getIv().toByteArray();
  String ezKeyVersionName = fileProto.getEzKeyVersionName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}