org.apache.hadoop.hdfs.protocol.proto.HdfsProtos Java Examples
The following examples show how to use
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.
Each example is taken from an open-source project; the source file and license are noted above each snippet.
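All of these messages are generated protocol buffer classes, so they share the standard builder / serialize / parse lifecycle. Below is a minimal sketch of that round trip; the BlockProto field names (blockId, genStamp, numBytes) are assumed from the getters used in the examples that follow, so treat it as illustrative rather than authoritative.

import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;

public class HdfsProtosRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build a BlockProto with the generated builder. The setters are
    // assumed to mirror the getters used below (e.g. getNumBytes in
    // Example #11).
    HdfsProtos.BlockProto block = HdfsProtos.BlockProto.newBuilder()
        .setBlockId(1073741825L)
        .setGenStamp(1001L)
        .setNumBytes(134217728L)
        .build();

    // Serialize and parse back -- the same toByteArray()/parseFrom()
    // pair the XAttr-based examples use to persist protos.
    byte[] wire = block.toByteArray();
    HdfsProtos.BlockProto copy = HdfsProtos.BlockProto.parseFrom(wire);
    System.out.println(copy.getNumBytes()); // 134217728
  }
}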
Example #1
Source File: FSDirectory.java From big-c with Apache License 2.0
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);
  writeLock();
  try {
    FSDirXAttrOp.unprotectedSetXAttrs(this, src, xAttrs,
        EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
Example #2
Source File: ProxiedFilesystem.java From spliceengine with GNU Affero General Public License v3.0
@Override
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
    long len) throws IOException {
  try {
    try (PreparedStatement statement = connection.prepareStatement(
        "call SYSCS_UTIL.SYSCS_hdfs_OPERATION(?, ?)")) {
      statement.setString(1, file.getPath().toUri().getPath());
      statement.setString(2, "tokens");
      try (ResultSet rs = statement.executeQuery()) {
        List<HdfsProtos.LocatedBlockProto> protos = new ArrayList<>();
        while (rs.next()) {
          Blob blob = rs.getBlob(1);
          byte[] bytes = blob.getBytes(1, (int) blob.length());
          HdfsProtos.LocatedBlockProto lbp =
              HdfsProtos.LocatedBlockProto.parseFrom(bytes);
          protos.add(lbp);
        }
        // TODO return DFSUtil.locatedBlocks2Locations(PBHelper.convertLocatedBlock(protos));
        return null;
      }
    }
  } catch (SQLException e) {
    throw new IOException(e);
  }
}
Example #3
Source File: ProxiedDFSClient.java From spliceengine with GNU Affero General Public License v3.0
@Override
public LocatedBlocks getLocatedBlocks(String src, long start, long length)
    throws IOException {
  try {
    try (PreparedStatement statement = connection.prepareStatement(
        "call SYSCS_UTIL.SYSCS_HDFS_OPERATION(?, ?)")) {
      statement.setString(1, src);
      statement.setString(2, "blocks");
      try (ResultSet rs = statement.executeQuery()) {
        if (!rs.next()) {
          throw new IOException("No results for getFileStatus");
        }
        Blob blob = rs.getBlob(1);
        byte[] bytes = blob.getBytes(1, (int) blob.length());
        HdfsProtos.LocatedBlocksProto lbp =
            HdfsProtos.LocatedBlocksProto.parseFrom(bytes);
        // TODO return PBHelper.convert(lbp);
        return null;
      }
    }
  } catch (SQLException e) {
    throw new IOException(e);
  }
}
Example #4
Source File: PBHelper.java From big-c with Apache License 2.0
public static ContentSummaryProto convert(ContentSummary cs) {
  if (cs == null) return null;
  ContentSummaryProto.Builder builder = ContentSummaryProto.newBuilder();
  builder.setLength(cs.getLength()).
      setFileCount(cs.getFileCount()).
      setDirectoryCount(cs.getDirectoryCount()).
      setQuota(cs.getQuota()).
      setSpaceConsumed(cs.getSpaceConsumed()).
      setSpaceQuota(cs.getSpaceQuota());
  if (cs.isTypeQuotaSet() || cs.isTypeConsumedAvailable()) {
    HdfsProtos.StorageTypeQuotaInfosProto.Builder isb =
        HdfsProtos.StorageTypeQuotaInfosProto.newBuilder();
    for (StorageType t : StorageType.getTypesSupportingQuota()) {
      HdfsProtos.StorageTypeQuotaInfoProto info =
          HdfsProtos.StorageTypeQuotaInfoProto.newBuilder().
              setType(convertStorageType(t)).
              setConsumed(cs.getTypeConsumed(t)).
              setQuota(cs.getTypeQuota(t)).
              build();
      isb.addTypeQuotaInfo(info);
    }
    builder.setTypeQuotaInfos(isb);
  }
  return builder.build();
}
Example #5
Source File: PBHelper.java From big-c with Apache License 2.0
public static ContentSummary convert(ContentSummaryProto cs) {
  if (cs == null) return null;
  ContentSummary.Builder builder = new ContentSummary.Builder();
  builder.length(cs.getLength()).
      fileCount(cs.getFileCount()).
      directoryCount(cs.getDirectoryCount()).
      quota(cs.getQuota()).
      spaceConsumed(cs.getSpaceConsumed()).
      spaceQuota(cs.getSpaceQuota());
  if (cs.hasTypeQuotaInfos()) {
    for (HdfsProtos.StorageTypeQuotaInfoProto info :
        cs.getTypeQuotaInfos().getTypeQuotaInfoList()) {
      StorageType type = PBHelper.convertStorageType(info.getType());
      builder.typeConsumed(type, info.getConsumed());
      builder.typeQuota(type, info.getQuota());
    }
  }
  return builder.build();
}
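Examples #4 and #5 are inverses of each other, so a round trip through the proto form should preserve the summary. A sketch of that check, reusing only the ContentSummary.Builder setters that Example #5 itself calls; the -1 quota values (conventionally "no quota" in HDFS) are an assumption here:

// Sketch: round-trip a ContentSummary through ContentSummaryProto.
ContentSummary cs = new ContentSummary.Builder()
    .length(1024).fileCount(3).directoryCount(1)
    .quota(-1).spaceConsumed(3072).spaceQuota(-1)
    .build();
ContentSummaryProto proto = PBHelper.convert(cs);
ContentSummary back = PBHelper.convert(proto);
assert back.getLength() == 1024 && back.getFileCount() == 3;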
Example #6
Source File: PBHelper.java From hadoop with Apache License 2.0
public static ContentSummaryProto convert(ContentSummary cs) {
  if (cs == null) return null;
  ContentSummaryProto.Builder builder = ContentSummaryProto.newBuilder();
  builder.setLength(cs.getLength()).
      setFileCount(cs.getFileCount()).
      setDirectoryCount(cs.getDirectoryCount()).
      setQuota(cs.getQuota()).
      setSpaceConsumed(cs.getSpaceConsumed()).
      setSpaceQuota(cs.getSpaceQuota());
  if (cs.isTypeQuotaSet() || cs.isTypeConsumedAvailable()) {
    HdfsProtos.StorageTypeQuotaInfosProto.Builder isb =
        HdfsProtos.StorageTypeQuotaInfosProto.newBuilder();
    for (StorageType t : StorageType.getTypesSupportingQuota()) {
      HdfsProtos.StorageTypeQuotaInfoProto info =
          HdfsProtos.StorageTypeQuotaInfoProto.newBuilder().
              setType(convertStorageType(t)).
              setConsumed(cs.getTypeConsumed(t)).
              setQuota(cs.getTypeQuota(t)).
              build();
      isb.addTypeQuotaInfo(info);
    }
    builder.setTypeQuotaInfos(isb);
  }
  return builder.build();
}
Example #7
Source File: FSDirectory.java From hadoop with Apache License 2.0
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);
  writeLock();
  try {
    FSDirXAttrOp.unprotectedSetXAttrs(this, src, xAttrs,
        EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
Example #8
Source File: PBHelper.java From hadoop with Apache License 2.0
public static ContentSummary convert(ContentSummaryProto cs) {
  if (cs == null) return null;
  ContentSummary.Builder builder = new ContentSummary.Builder();
  builder.length(cs.getLength()).
      fileCount(cs.getFileCount()).
      directoryCount(cs.getDirectoryCount()).
      quota(cs.getQuota()).
      spaceConsumed(cs.getSpaceConsumed()).
      spaceQuota(cs.getSpaceQuota());
  if (cs.hasTypeQuotaInfos()) {
    for (HdfsProtos.StorageTypeQuotaInfoProto info :
        cs.getTypeQuotaInfos().getTypeQuotaInfoList()) {
      StorageType type = PBHelper.convertStorageType(info.getType());
      builder.typeConsumed(type, info.getConsumed());
      builder.typeQuota(type, info.getQuota());
    }
  }
  return builder.build();
}
Example #9
Source File: PBHelper.java From big-c with Apache License 2.0
/**
 * Copy the entries of {@code dnInfos} from {@code startIdx} onward into a
 * new list of protos.
 */
public static List<? extends HdfsProtos.DatanodeInfoProto> convert(
    DatanodeInfo[] dnInfos, int startIdx) {
  if (dnInfos == null)
    return null;
  ArrayList<HdfsProtos.DatanodeInfoProto> protos = Lists
      .newArrayListWithCapacity(dnInfos.length);
  for (int i = startIdx; i < dnInfos.length; i++) {
    protos.add(convert(dnInfos[i]));
  }
  return protos;
}
Example #10
Source File: TestPBHelper.java From big-c with Apache License 2.0
@Test
public void testChecksumTypeProto() {
  assertEquals(DataChecksum.Type.NULL,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL));
  assertEquals(DataChecksum.Type.CRC32,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32));
  assertEquals(DataChecksum.Type.CRC32C,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C));
  assertEquals(PBHelper.convert(DataChecksum.Type.NULL),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL);
  assertEquals(PBHelper.convert(DataChecksum.Type.CRC32),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32);
  assertEquals(PBHelper.convert(DataChecksum.Type.CRC32C),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C);
}
Example #11
Source File: FSImageLoader.java From big-c with Apache License 2.0
static long getFileSize(FsImageProto.INodeSection.INodeFile f) {
  long size = 0;
  for (HdfsProtos.BlockProto p : f.getBlocksList()) {
    size += p.getNumBytes();
  }
  return size;
}
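Since getFileSize just sums numBytes over the block list, it can be exercised with a hand-built message. A hypothetical test sketch follows; it would have to live in the same package as FSImageLoader because getFileSize is package-private, and addBlocks(...) is the standard generated adder for the repeated "blocks" field:

// Hypothetical JUnit test in FSImageLoader's package.
@Test
public void testGetFileSizeSumsBlockLengths() {
  FsImageProto.INodeSection.INodeFile file =
      FsImageProto.INodeSection.INodeFile.newBuilder()
          .addBlocks(HdfsProtos.BlockProto.newBuilder()
              .setBlockId(1).setGenStamp(1000).setNumBytes(128))
          .addBlocks(HdfsProtos.BlockProto.newBuilder()
              .setBlockId(2).setGenStamp(1000).setNumBytes(64))
          .build();
  // Expect the sum of the two numBytes values.
  assertEquals(128 + 64, FSImageLoader.getFileSize(file));
}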
Example #12
Source File: PBHelper.java From big-c with Apache License 2.0
public static FileEncryptionInfo convert(
    HdfsProtos.PerFileEncryptionInfoProto fileProto,
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (fileProto == null || suite == null || version == null ||
      keyName == null) {
    return null;
  }
  byte[] key = fileProto.getKey().toByteArray();
  byte[] iv = fileProto.getIv().toByteArray();
  String ezKeyVersionName = fileProto.getEzKeyVersionName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
Example #13
Source File: PBHelper.java From big-c with Apache License 2.0
public static FileEncryptionInfo convert(
    HdfsProtos.FileEncryptionInfoProto proto) {
  if (proto == null) {
    return null;
  }
  CipherSuite suite = convert(proto.getSuite());
  CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
  byte[] key = proto.getKey().toByteArray();
  byte[] iv = proto.getIv().toByteArray();
  String ezKeyVersionName = proto.getEzKeyVersionName();
  String keyName = proto.getKeyName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
Example #14
Source File: PBHelper.java From big-c with Apache License 2.0
public static HdfsProtos.ZoneEncryptionInfoProto convert(
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (suite == null || version == null || keyName == null) {
    return null;
  }
  return HdfsProtos.ZoneEncryptionInfoProto.newBuilder()
      .setSuite(convert(suite))
      .setCryptoProtocolVersion(convert(version))
      .setKeyName(keyName)
      .build();
}
Example #15
Source File: PBHelper.java From big-c with Apache License 2.0
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .build();
}
Example #16
Source File: PBHelper.java From big-c with Apache License 2.0
public static HdfsProtos.FileEncryptionInfoProto convert(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.FileEncryptionInfoProto.newBuilder()
      .setSuite(convert(info.getCipherSuite()))
      .setCryptoProtocolVersion(convert(info.getCryptoProtocolVersion()))
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .setKeyName(info.getKeyName())
      .build();
}
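Examples #13 and #16 convert in opposite directions, so they can be checked against each other. A sketch under stated assumptions: AES_CTR_NOPADDING and ENCRYPTION_ZONES are the usual suite and protocol version in Hadoop, the 16-byte IV is chosen to match the AES block size, and the key name and EZ key version name are made up for illustration:

// Sketch: FileEncryptionInfo -> proto -> FileEncryptionInfo.
byte[] key = new byte[16]; // dummy encrypted data-encryption key
byte[] iv = new byte[16];  // AES block-sized IV (assumption)
FileEncryptionInfo info = new FileEncryptionInfo(
    CipherSuite.AES_CTR_NOPADDING, CryptoProtocolVersion.ENCRYPTION_ZONES,
    key, iv, "testKey", "testKey@0");
HdfsProtos.FileEncryptionInfoProto proto = PBHelper.convert(info);
FileEncryptionInfo back = PBHelper.convert(proto);
assert back.getKeyName().equals("testKey");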
Example #17
Source File: PBHelper.java From hadoop with Apache License 2.0
/**
 * Copy the entries of {@code dnInfos} from {@code startIdx} onward into a
 * new list of protos.
 */
public static List<? extends HdfsProtos.DatanodeInfoProto> convert(
    DatanodeInfo[] dnInfos, int startIdx) {
  if (dnInfos == null)
    return null;
  ArrayList<HdfsProtos.DatanodeInfoProto> protos = Lists
      .newArrayListWithCapacity(dnInfos.length);
  for (int i = startIdx; i < dnInfos.length; i++) {
    protos.add(convert(dnInfos[i]));
  }
  return protos;
}
Example #18
Source File: FSDirectory.java From hadoop with Apache License 2.0
/**
 * This method is always called with writeLock of FSDirectory held.
 */
public final void addToInodeMap(INode inode) {
  if (inode instanceof INodeWithAdditionalFields) {
    inodeMap.put(inode);
    if (!inode.isSymlink()) {
      final XAttrFeature xaf = inode.getXAttrFeature();
      if (xaf != null) {
        final List<XAttr> xattrs = xaf.getXAttrs();
        for (XAttr xattr : xattrs) {
          final String xaName = XAttrHelper.getPrefixName(xattr);
          if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
            try {
              final HdfsProtos.ZoneEncryptionInfoProto ezProto =
                  HdfsProtos.ZoneEncryptionInfoProto.parseFrom(
                      xattr.getValue());
              ezManager.unprotectedAddEncryptionZone(inode.getId(),
                  PBHelper.convert(ezProto.getSuite()),
                  PBHelper.convert(ezProto.getCryptoProtocolVersion()),
                  ezProto.getKeyName());
            } catch (InvalidProtocolBufferException e) {
              NameNode.LOG.warn("Error parsing protocol buffer of " +
                  "EZ XAttr " + xattr.getName());
            }
          }
        }
      }
    }
  }
}
Example #19
Source File: FSDirectory.java From big-c with Apache License 2.0
/**
 * This method is always called with writeLock of FSDirectory held.
 */
public final void addToInodeMap(INode inode) {
  if (inode instanceof INodeWithAdditionalFields) {
    inodeMap.put(inode);
    if (!inode.isSymlink()) {
      final XAttrFeature xaf = inode.getXAttrFeature();
      if (xaf != null) {
        final List<XAttr> xattrs = xaf.getXAttrs();
        for (XAttr xattr : xattrs) {
          final String xaName = XAttrHelper.getPrefixName(xattr);
          if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
            try {
              final HdfsProtos.ZoneEncryptionInfoProto ezProto =
                  HdfsProtos.ZoneEncryptionInfoProto.parseFrom(
                      xattr.getValue());
              ezManager.unprotectedAddEncryptionZone(inode.getId(),
                  PBHelper.convert(ezProto.getSuite()),
                  PBHelper.convert(ezProto.getCryptoProtocolVersion()),
                  ezProto.getKeyName());
            } catch (InvalidProtocolBufferException e) {
              NameNode.LOG.warn("Error parsing protocol buffer of " +
                  "EZ XAttr " + xattr.getName());
            }
          }
        }
      }
    }
  }
}
Example #20
Source File: TestPBHelper.java From hadoop with Apache License 2.0
@Test
public void testChecksumTypeProto() {
  assertEquals(DataChecksum.Type.NULL,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL));
  assertEquals(DataChecksum.Type.CRC32,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32));
  assertEquals(DataChecksum.Type.CRC32C,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C));
  assertEquals(PBHelper.convert(DataChecksum.Type.NULL),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL);
  assertEquals(PBHelper.convert(DataChecksum.Type.CRC32),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32);
  assertEquals(PBHelper.convert(DataChecksum.Type.CRC32C),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C);
}
Example #21
Source File: PBHelper.java From hadoop with Apache License 2.0
public static HdfsProtos.FileEncryptionInfoProto convert(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.FileEncryptionInfoProto.newBuilder()
      .setSuite(convert(info.getCipherSuite()))
      .setCryptoProtocolVersion(convert(info.getCryptoProtocolVersion()))
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .setKeyName(info.getKeyName())
      .build();
}
Example #22
Source File: PBHelper.java From hadoop with Apache License 2.0
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .build();
}
Example #23
Source File: FSImageLoader.java From hadoop with Apache License 2.0
static long getFileSize(FsImageProto.INodeSection.INodeFile f) {
  long size = 0;
  for (HdfsProtos.BlockProto p : f.getBlocksList()) {
    size += p.getNumBytes();
  }
  return size;
}
Example #24
Source File: PBHelper.java From hadoop with Apache License 2.0
public static FileEncryptionInfo convert(
    HdfsProtos.PerFileEncryptionInfoProto fileProto,
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (fileProto == null || suite == null || version == null ||
      keyName == null) {
    return null;
  }
  byte[] key = fileProto.getKey().toByteArray();
  byte[] iv = fileProto.getIv().toByteArray();
  String ezKeyVersionName = fileProto.getEzKeyVersionName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
Example #25
Source File: PBHelper.java From hadoop with Apache License 2.0
public static FileEncryptionInfo convert(
    HdfsProtos.FileEncryptionInfoProto proto) {
  if (proto == null) {
    return null;
  }
  CipherSuite suite = convert(proto.getSuite());
  CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
  byte[] key = proto.getKey().toByteArray();
  byte[] iv = proto.getIv().toByteArray();
  String ezKeyVersionName = proto.getEzKeyVersionName();
  String keyName = proto.getKeyName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
Example #26
Source File: PBHelper.java From hadoop with Apache License 2.0
public static HdfsProtos.ZoneEncryptionInfoProto convert(
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (suite == null || version == null || keyName == null) {
    return null;
  }
  return HdfsProtos.ZoneEncryptionInfoProto.newBuilder()
      .setSuite(convert(suite))
      .setCryptoProtocolVersion(convert(version))
      .setKeyName(keyName)
      .build();
}
Example #27
Source File: PBHelper.java From hadoop with Apache License 2.0
public static DataChecksum.Type convert(HdfsProtos.ChecksumTypeProto type) {
  return DataChecksum.Type.valueOf(type.getNumber());
}
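This numeric shortcut only works because the ChecksumTypeProto enum numbers were aligned with the DataChecksum.Type ids. A small hedged sanity check, assuming the usual assignment (0 = NULL, 1 = CRC32, 2 = CRC32C) on both sides:

// Sanity check: proto enum number and DataChecksum id must line up,
// otherwise convert() would silently return the wrong checksum type.
assert HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C.getNumber()
    == DataChecksum.Type.CRC32C.id;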
Example #28
Source File: FSDirectory.java From hadoop with Apache License 2.0
/**
 * This function combines the per-file encryption info (obtained
 * from the inode's XAttrs), and the encryption info from its zone, and
 * returns a consolidated FileEncryptionInfo instance. Null is returned
 * for non-encrypted files.
 *
 * @param inode inode of the file
 * @param snapshotId ID of the snapshot that
 *                   we want to get encryption info from
 * @param iip inodes in the path containing the file, passed in to
 *            avoid obtaining the list of inodes again; if iip is
 *            null then the list of inodes will be obtained again
 * @return consolidated file encryption info; null for non-encrypted files
 */
FileEncryptionInfo getFileEncryptionInfo(INode inode, int snapshotId,
    INodesInPath iip) throws IOException {
  if (!inode.isFile()) {
    return null;
  }
  readLock();
  try {
    EncryptionZone encryptionZone = getEZForPath(iip);
    if (encryptionZone == null) {
      // not an encrypted file
      return null;
    } else if (encryptionZone.getPath() == null ||
        encryptionZone.getPath().isEmpty()) {
      if (NameNode.LOG.isDebugEnabled()) {
        NameNode.LOG.debug("Encryption zone " +
            encryptionZone.getPath() + " does not have a valid path.");
      }
    }
    final CryptoProtocolVersion version = encryptionZone.getVersion();
    final CipherSuite suite = encryptionZone.getSuite();
    final String keyName = encryptionZone.getKeyName();
    XAttr fileXAttr = FSDirXAttrOp.unprotectedGetXAttrByName(inode,
        snapshotId, CRYPTO_XATTR_FILE_ENCRYPTION_INFO);
    if (fileXAttr == null) {
      NameNode.LOG.warn("Could not find encryption XAttr for file " +
          iip.getPath() + " in encryption zone " +
          encryptionZone.getPath());
      return null;
    }
    try {
      HdfsProtos.PerFileEncryptionInfoProto fileProto =
          HdfsProtos.PerFileEncryptionInfoProto.parseFrom(
              fileXAttr.getValue());
      return PBHelper.convert(fileProto, suite, version, keyName);
    } catch (InvalidProtocolBufferException e) {
      throw new IOException("Could not parse file encryption info for " +
          "inode " + inode, e);
    }
  } finally {
    readUnlock();
  }
}
Example #29
Source File: PBHelper.java From hadoop with Apache License 2.0
public static HdfsProtos.ChecksumTypeProto convert(DataChecksum.Type type) {
  return HdfsProtos.ChecksumTypeProto.valueOf(type.id);
}
Example #30
Source File: PBHelper.java From big-c with Apache License 2.0
public static HdfsProtos.ChecksumTypeProto convert(DataChecksum.Type type) {
  return HdfsProtos.ChecksumTypeProto.valueOf(type.id);
}