Java Code Examples for org.apache.hadoop.util.DataChecksum#Type

The following examples show how to use org.apache.hadoop.util.DataChecksum#Type. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: DataTransferProtoUtil.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Rebuilds a {@link DataChecksum} from its protobuf wire representation.
 *
 * @param proto the serialized checksum descriptor; may be null
 * @return a matching DataChecksum, or null when {@code proto} is null
 */
public static DataChecksum fromProto(ChecksumProto proto) {
  if (proto == null) {
    return null;
  }
  final DataChecksum.Type checksumType = PBHelper.convert(proto.getType());
  return DataChecksum.newDataChecksum(checksumType, proto.getBytesPerChecksum());
}
 
Example 2
Source File: DFSClient.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the checksum type from configuration, falling back to the
 * build-time default when the configured name is not a valid enum value.
 *
 * @param conf configuration to read the checksum type from
 * @return the configured {@link DataChecksum.Type}, or the default on error
 */
private DataChecksum.Type getChecksumType(Configuration conf) {
  final String configured = conf.get(
      DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY,
      DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
  try {
    return DataChecksum.Type.valueOf(configured);
  } catch (IllegalArgumentException ignoredBadName) {
    // An unrecognized name is not fatal: warn and use the default instead.
    LOG.warn("Bad checksum type: " + configured + ". Using default "
        + DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
    return DataChecksum.Type.valueOf(DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
  }
}
 
Example 3
Source File: DataTransferProtoUtil.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a protobuf checksum descriptor back into a {@link DataChecksum}.
 *
 * @param proto wire-format checksum description; may be null
 * @return the reconstructed DataChecksum, or null for null input
 */
public static DataChecksum fromProto(ChecksumProto proto) {
  if (proto == null) {
    return null;
  }
  final int bpc = proto.getBytesPerChecksum();
  return DataChecksum.newDataChecksum(PBHelper.convert(proto.getType()), bpc);
}
 
Example 4
Source File: MD5MD5CRC32FileChecksum.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Derives the CRC polynomial from a composite checksum algorithm name
 * (e.g. "MD5-of-…-CRC32C") by inspecting its suffix.
 *
 * @param algorithm the algorithm name to parse
 * @return {@link DataChecksum.Type#CRC32} or {@link DataChecksum.Type#CRC32C}
 * @throws IOException if the name ends in neither known CRC type
 */
public static DataChecksum.Type getCrcTypeFromAlgorithmName(String algorithm)
    throws IOException {
  // Checking CRC32C first is equivalent to checking CRC32 first: a name
  // ending in "CRC32C" can never also end in the plain "CRC32" suffix.
  if (algorithm.endsWith(DataChecksum.Type.CRC32C.name())) {
    return DataChecksum.Type.CRC32C;
  }
  if (algorithm.endsWith(DataChecksum.Type.CRC32.name())) {
    return DataChecksum.Type.CRC32;
  }
  throw new IOException("Unknown checksum type in " + algorithm);
}
 
Example 5
Source File: Options.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Combines a default checksum option with user-supplied overrides into a
 * single effective {@link ChecksumOpt}.
 *
 * <p>The checksum type comes from {@code userOpt} unless it is null or set
 * to {@link DataChecksum.Type#DEFAULT}, in which case the default option's
 * type wins. bytesPerChecksum is resolved in order of preference: the
 * explicit {@code userBytesPerChecksum} argument, then the value carried
 * inside {@code userOpt}, then the default option's value. The standalone
 * bytesPerChecksum argument exists for backward compatibility.
 *
 * @param defaultOpt Default checksum option
 * @param userOpt User-specified checksum option. Ignored if null.
 * @param userBytesPerChecksum User-specified bytesPerChecksum.
 *                Ignored unless positive.
 * @return the combined checksum option
 */
public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt, 
    ChecksumOpt userOpt, int userBytesPerChecksum) {
  // Pick the checksum type: a concrete user choice beats the default.
  final boolean userChoseType = userOpt != null
      && userOpt.getChecksumType() != DataChecksum.Type.DEFAULT;
  final DataChecksum.Type type = userChoseType
      ? userOpt.getChecksumType() : defaultOpt.getChecksumType();

  // Highest preference: the explicit bytesPerChecksum argument.
  if (userBytesPerChecksum > 0) {
    return new ChecksumOpt(type, userBytesPerChecksum);
  }

  // Next preference: a positive bytesPerChecksum inside userOpt.
  if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
    if (userChoseType) {
      // userOpt already holds both the chosen type and size; reuse it.
      return userOpt;
    }
    return new ChecksumOpt(type, userOpt.getBytesPerChecksum());
  }

  // Fall back to the defaults for the size.
  if (userChoseType) {
    return new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
  }
  return defaultOpt;
}
 
Example 6
Source File: FsServerDefaults.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a bundle of server-side default values that are handed to
 * clients (block size, checksum settings, replication, etc.).
 *
 * @param blockSize default block size in bytes
 * @param bytesPerChecksum number of data bytes covered by each checksum
 * @param writePacketSize size of the packets used in the write path
 * @param replication default replication factor
 * @param fileBufferSize default file buffer size
 * @param encryptDataTransfer whether data transfer encryption is enabled
 * @param trashInterval trash interval setting (units defined by the
 *        server configuration — not visible here)
 * @param checksumType default checksum algorithm
 */
public FsServerDefaults(long blockSize, int bytesPerChecksum,
    int writePacketSize, short replication, int fileBufferSize,
    boolean encryptDataTransfer, long trashInterval,
    DataChecksum.Type checksumType) {
  this.blockSize = blockSize;
  this.bytesPerChecksum = bytesPerChecksum;
  this.writePacketSize = writePacketSize;
  this.replication = replication;
  this.fileBufferSize = fileBufferSize;
  this.encryptDataTransfer = encryptDataTransfer;
  this.trashInterval = trashInterval;
  this.checksumType = checksumType;
}
 
Example 7
Source File: DFSClient.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Reads the configured checksum type and bytes-per-checksum and bundles
 * them into a single {@link ChecksumOpt}.
 *
 * @param conf configuration to read from
 * @return the checksum option derived from configuration
 */
private ChecksumOpt getChecksumOptFromConf(Configuration conf) {
  final int bytesPerChecksum = conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY,
      DFS_BYTES_PER_CHECKSUM_DEFAULT);
  return new ChecksumOpt(getChecksumType(conf), bytesPerChecksum);
}
 
Example 8
Source File: JsonUtil.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Converts a JSON map back into a {@link MD5MD5CRC32FileChecksum},
 * recreating what DFSClient would have returned.
 *
 * @param json the parsed JSON map; may be null
 * @return the reconstructed checksum, or null when {@code json} is null
 * @throws IOException if the algorithm name is unknown, or the decoded
 *         fields disagree with the declared algorithm/length
 */
public static MD5MD5CRC32FileChecksum toMD5MD5CRC32FileChecksum(
    final Map<?, ?> json) throws IOException {
  if (json == null) {
    return null;
  }

  final Map<?, ?> inner =
      (Map<?, ?>) json.get(FileChecksum.class.getSimpleName());
  final String algorithm = (String) inner.get("algorithm");
  final int length = ((Number) inner.get("length")).intValue();
  final byte[] raw = StringUtils.hexStringToByte((String) inner.get("bytes"));

  // Choose the concrete checksum class from the CRC polynomial encoded
  // in the algorithm name.
  final DataChecksum.Type crcType =
      MD5MD5CRC32FileChecksum.getCrcTypeFromAlgorithmName(algorithm);
  final MD5MD5CRC32FileChecksum checksum;
  if (crcType == DataChecksum.Type.CRC32) {
    checksum = new MD5MD5CRC32GzipFileChecksum();
  } else if (crcType == DataChecksum.Type.CRC32C) {
    checksum = new MD5MD5CRC32CastagnoliFileChecksum();
  } else {
    throw new IOException("Unknown algorithm: " + algorithm);
  }
  checksum.readFields(new DataInputStream(new ByteArrayInputStream(raw)));

  // Sanity check: the deserialized object must agree with the JSON fields.
  if (!checksum.getAlgorithmName().equals(algorithm)) {
    throw new IOException("Algorithm not matched. Expected " + algorithm
        + ", Received " + checksum.getAlgorithmName());
  }
  if (length != checksum.getLength()) {
    throw new IOException("Length not matched: length=" + length
        + ", checksum.getLength()=" + checksum.getLength());
  }

  return checksum;
}
 
Example 9
Source File: JsonUtil.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Rebuilds a {@link MD5MD5CRC32FileChecksum} from its JSON representation,
 * mirroring the object DFSClient would have produced.
 *
 * @param json parsed JSON map; may be null
 * @return the checksum, or null for null input
 * @throws IOException on an unknown algorithm or a field mismatch
 */
public static MD5MD5CRC32FileChecksum toMD5MD5CRC32FileChecksum(
    final Map<?, ?> json) throws IOException {
  if (json == null) {
    return null;
  }

  final Map<?, ?> fields =
      (Map<?, ?>) json.get(FileChecksum.class.getSimpleName());
  final String algorithm = (String) fields.get("algorithm");
  final int length = ((Number) fields.get("length")).intValue();
  final byte[] payload =
      StringUtils.hexStringToByte((String) fields.get("bytes"));

  // The CRC polynomial encoded in the algorithm name selects the
  // concrete checksum subclass.
  final MD5MD5CRC32FileChecksum checksum;
  switch (MD5MD5CRC32FileChecksum.getCrcTypeFromAlgorithmName(algorithm)) {
    case CRC32:
      checksum = new MD5MD5CRC32GzipFileChecksum();
      break;
    case CRC32C:
      checksum = new MD5MD5CRC32CastagnoliFileChecksum();
      break;
    default:
      throw new IOException("Unknown algorithm: " + algorithm);
  }

  final DataInputStream in =
      new DataInputStream(new ByteArrayInputStream(payload));
  checksum.readFields(in);

  // Verify the decoded checksum matches what the JSON declared.
  if (!checksum.getAlgorithmName().equals(algorithm)) {
    throw new IOException("Algorithm not matched. Expected " + algorithm
        + ", Received " + checksum.getAlgorithmName());
  }
  if (length != checksum.getLength()) {
    throw new IOException("Length not matched: length=" + length
        + ", checksum.getLength()=" + checksum.getLength());
  }

  return checksum;
}
 
Example 10
Source File: Options.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/** @return the checksum type held by this option. */
public DataChecksum.Type getChecksumType() {
  return this.checksumType;
}
 
Example 11
Source File: Options.java    From big-c with Apache License 2.0 4 votes vote down vote up
/** @return the checksum type held by this option. */
public DataChecksum.Type getChecksumType() {
  return this.checksumType;
}
 
Example 12
Source File: MD5MD5CRC32FileChecksum.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the checksum object represented in the XML attributes
 * (bytesPerCRC, crcPerBlock, md5 and — on newer versions — crcType).
 *
 * @param attrs attributes to read
 * @return the reconstructed checksum, or null when any mandatory
 *         attribute (bytesPerCRC, crcPerBlock, md5) is absent
 * @throws SAXException if an attribute value is malformed
 */
public static MD5MD5CRC32FileChecksum valueOf(Attributes attrs
    ) throws SAXException {
  final String bytesPerCRC = attrs.getValue("bytesPerCRC");
  final String crcPerBlock = attrs.getValue("crcPerBlock");
  final String md5 = attrs.getValue("md5");
  String crcType = attrs.getValue("crcType");
  DataChecksum.Type finalCrcType;
  if (bytesPerCRC == null || crcPerBlock == null || md5 == null) {
    return null;
  }

  try {
    // Old versions don't emit crcType; default those to CRC32.
    // BUG FIX: the original used reference equality (crcType == ""),
    // which is false for any non-interned empty string, so a blank
    // attribute fell through to valueOf("") and threw. Use isEmpty().
    if (crcType == null || crcType.isEmpty()) {
      finalCrcType = DataChecksum.Type.CRC32;
    } else {
      finalCrcType = DataChecksum.Type.valueOf(crcType);
    }

    switch (finalCrcType) {
      case CRC32:
        return new MD5MD5CRC32GzipFileChecksum(
            Integer.parseInt(bytesPerCRC),
            Integer.parseInt(crcPerBlock),
            new MD5Hash(md5));
      case CRC32C:
        return new MD5MD5CRC32CastagnoliFileChecksum(
            Integer.parseInt(bytesPerCRC),
            Integer.parseInt(crcPerBlock),
            new MD5Hash(md5));
      default:
        // Unreachable: finalCrcType is always CRC32/CRC32C here, since
        // valueOf would have thrown for any other name.
        return null;
    }
  } catch (Exception e) {
    throw new SAXException("Invalid attributes: bytesPerCRC=" + bytesPerCRC
        + ", crcPerBlock=" + crcPerBlock + ", crcType=" + crcType
        + ", md5=" + md5, e);
  }
}
 
Example 13
Source File: FsServerDefaults.java    From big-c with Apache License 2.0 4 votes vote down vote up
/** @return the server's default checksum type. */
public DataChecksum.Type getChecksumType() {
  return this.checksumType;
}
 
Example 14
Source File: FsServerDefaults.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/** @return the server's default checksum type. */
public DataChecksum.Type getChecksumType() {
  return this.checksumType;
}
 
Example 15
Source File: MD5MD5CRC32CastagnoliFileChecksum.java    From hadoop with Apache License 2.0 4 votes vote down vote up
@Override
public DataChecksum.Type getCrcType() {
  // This variant uses the Castagnoli polynomial (CRC32C). NOTE(review):
  // the original comment ("understood by all releases") was copy-pasted
  // from the CRC32 sibling and did not apply here.
  return DataChecksum.Type.CRC32C;
}
 
Example 16
Source File: MD5MD5CRC32GzipFileChecksum.java    From hadoop with Apache License 2.0 4 votes vote down vote up
@Override
public DataChecksum.Type getCrcType() {
  // Plain CRC32 (gzip polynomial) — the default understood by all releases.
  return DataChecksum.Type.CRC32;
}
 
Example 17
Source File: MD5MD5CRC32FileChecksum.java    From big-c with Apache License 2.0 4 votes vote down vote up
/** Return the object represented in the attributes. */
public static MD5MD5CRC32FileChecksum valueOf(Attributes attrs
    ) throws SAXException {
  final String bytesPerCRC = attrs.getValue("bytesPerCRC");
  final String crcPerBlock = attrs.getValue("crcPerBlock");
  final String md5 = attrs.getValue("md5");
  String crcType = attrs.getValue("crcType");
  DataChecksum.Type finalCrcType;
  if (bytesPerCRC == null || crcPerBlock == null || md5 == null) {
    return null;
  }

  try {
    // old versions don't support crcType.
    if (crcType == null || crcType == "") {
      finalCrcType = DataChecksum.Type.CRC32;
    } else {
      finalCrcType = DataChecksum.Type.valueOf(crcType);
    }

    switch (finalCrcType) {
      case CRC32:
        return new MD5MD5CRC32GzipFileChecksum(
            Integer.parseInt(bytesPerCRC),
            Integer.parseInt(crcPerBlock),
            new MD5Hash(md5));
      case CRC32C:
        return new MD5MD5CRC32CastagnoliFileChecksum(
            Integer.parseInt(bytesPerCRC),
            Integer.parseInt(crcPerBlock),
            new MD5Hash(md5));
      default:
        // we should never get here since finalCrcType will
        // hold a valid type or we should have got an exception.
        return null;
    }
  } catch (Exception e) {
    throw new SAXException("Invalid attributes: bytesPerCRC=" + bytesPerCRC
        + ", crcPerBlock=" + crcPerBlock + ", crcType=" + crcType
        + ", md5=" + md5, e);
  }
}
 
Example 18
Source File: PBHelper.java    From big-c with Apache License 2.0 4 votes vote down vote up
/**
 * Maps a protobuf checksum-type enum onto the equivalent
 * {@link DataChecksum.Type} via the shared numeric wire id.
 *
 * @param type the protobuf checksum type
 * @return the corresponding DataChecksum.Type
 */
public static DataChecksum.Type convert(HdfsProtos.ChecksumTypeProto type) {
  final int wireId = type.getNumber();
  return DataChecksum.Type.valueOf(wireId);
}
 
Example 19
Source File: DFSClient.java    From hadoop with Apache License 2.0 4 votes vote down vote up
/**
 * Builds a {@link ChecksumOpt} from the configured checksum type and
 * bytes-per-checksum values.
 *
 * @param conf configuration to read from
 * @return the checksum option derived from configuration
 */
private ChecksumOpt getChecksumOptFromConf(Configuration conf) {
  final int bytesPerChecksum = conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY,
      DFS_BYTES_PER_CHECKSUM_DEFAULT);
  return new ChecksumOpt(getChecksumType(conf), bytesPerChecksum);
}
 
Example 20
Source File: Options.java    From big-c with Apache License 2.0 2 votes vote down vote up
/**
 * Normal ctor
 * @param type checksum type
 * @param size bytes per checksum
 */
public ChecksumOpt(DataChecksum.Type type, int size) {
  checksumType = type;
  bytesPerChecksum = size;
}