Java Code Examples for org.apache.hadoop.util.DataChecksum#writeValue()

The following examples show how to use org.apache.hadoop.util.DataChecksum#writeValue(). Each example is taken from an open-source project; the source file and originating project are noted above the code.
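
Before looking at the project examples, here is a minimal, self-contained sketch of the call itself. It is not taken from any of the projects below; the class name, checksum type and chunk size are arbitrary choices for illustration. writeValue(buf, offset, reset) copies the checksum accumulated so far into the buffer at the given offset and, when reset is true, clears the internal state so the next chunk can be checksummed.

import org.apache.hadoop.util.DataChecksum;

public class WriteValueSketch {
  public static void main(String[] args) {
    // CRC32C checksum covering 512-byte chunks (both choices are arbitrary here).
    DataChecksum checksum =
        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);

    // Feed one full chunk of data into the checksum.
    byte[] data = new byte[512];
    checksum.update(data, 0, data.length);

    // writeValue() stores the current checksum bytes into the buffer at the
    // given offset; passing true also resets the checksum for the next chunk.
    byte[] crc = new byte[checksum.getChecksumSize()];
    int written = checksum.writeValue(crc, 0, true);

    System.out.println("bytes per checksum     = " + checksum.getBytesPerChecksum());
    System.out.println("checksum bytes written = " + written);
  }
}
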
Example 1
Source File: DataXceiver.java    From hadoop with Apache License 2.0
private MD5Hash calcPartialBlockChecksum(ExtendedBlock block,
    long requestLength, DataChecksum checksum, DataInputStream checksumIn)
    throws IOException {
  final int bytesPerCRC = checksum.getBytesPerChecksum();
  final int csize = checksum.getChecksumSize();
  final byte[] buffer = new byte[4*1024];
  MessageDigest digester = MD5Hash.getDigester();

  // Digest the stored CRCs of all complete chunks covered by requestLength.
  long remaining = requestLength / bytesPerCRC * csize;
  for (int toDigest = 0; remaining > 0; remaining -= toDigest) {
    toDigest = checksumIn.read(buffer, 0,
        (int) Math.min(remaining, buffer.length));
    if (toDigest < 0) {
      break;
    }
    digester.update(buffer, 0, toDigest);
  }
  
  int partialLength = (int) (requestLength % bytesPerCRC);
  if (partialLength > 0) {
    byte[] buf = new byte[partialLength];
    final InputStream blockIn = datanode.data.getBlockInputStream(block,
        requestLength - partialLength);
    try {
      // Get the CRC of the partialLength.
      IOUtils.readFully(blockIn, buf, 0, partialLength);
    } finally {
      IOUtils.closeStream(blockIn);
    }
    checksum.update(buf, 0, partialLength);
    byte[] partialCrc = new byte[csize];
    checksum.writeValue(partialCrc, 0, true);
    digester.update(partialCrc);
  }
  return new MD5Hash(digester.digest());
}
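
In this example, writeValue(partialCrc, 0, true) serializes the CRC of the trailing partial chunk into partialCrc so it can be fed into the MD5-of-CRCs digest alongside the per-chunk checksums read from checksumIn; passing true resets the DataChecksum afterwards.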
 
Example 2
Source File: FsDatasetImpl.java    From hadoop with Apache License 2.0
static private void truncateBlock(File blockFile, File metaFile,
    long oldlen, long newlen) throws IOException {
  LOG.info("truncateBlock: blockFile=" + blockFile
      + ", metaFile=" + metaFile
      + ", oldlen=" + oldlen
      + ", newlen=" + newlen);

  if (newlen == oldlen) {
    return;
  }
  if (newlen > oldlen) {
    throw new IOException("Cannot truncate block to from oldlen (=" + oldlen
        + ") to newlen (=" + newlen + ")");
  }

  DataChecksum dcs = BlockMetadataHeader.readHeader(metaFile).getChecksum(); 
  int checksumsize = dcs.getChecksumSize();
  int bpc = dcs.getBytesPerChecksum();
  long n = (newlen - 1)/bpc + 1;
  long newmetalen = BlockMetadataHeader.getHeaderSize() + n*checksumsize;
  long lastchunkoffset = (n - 1)*bpc;
  int lastchunksize = (int)(newlen - lastchunkoffset); 
  byte[] b = new byte[Math.max(lastchunksize, checksumsize)]; 

  RandomAccessFile blockRAF = new RandomAccessFile(blockFile, "rw");
  try {
    //truncate blockFile 
    blockRAF.setLength(newlen);
 
    //read last chunk
    blockRAF.seek(lastchunkoffset);
    blockRAF.readFully(b, 0, lastchunksize);
  } finally {
    blockRAF.close();
  }

  //compute checksum
  dcs.update(b, 0, lastchunksize);
  dcs.writeValue(b, 0, false);

  //update metaFile 
  RandomAccessFile metaRAF = new RandomAccessFile(metaFile, "rw");
  try {
    metaRAF.setLength(newmetalen);
    metaRAF.seek(newmetalen - checksumsize);
    metaRAF.write(b, 0, checksumsize);
  } finally {
    metaRAF.close();
  }
}
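
Here writeValue(b, 0, false) writes the recomputed checksum of the new last chunk into b without resetting the checksum state; those checksumsize bytes are then written over the tail of the truncated metadata file.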
 
Example 3
Source File: FSDataset.java    From hadoop-gpu with Apache License 2.0
static private void truncateBlock(File blockFile, File metaFile,
    long oldlen, long newlen) throws IOException {
  if (newlen == oldlen) {
    return;
  }
  if (newlen > oldlen) {
    throw new IOException("Cannout truncate block to from oldlen (=" + oldlen
        + ") to newlen (=" + newlen + ")");
  }

  DataChecksum dcs = BlockMetadataHeader.readHeader(metaFile).getChecksum(); 
  int checksumsize = dcs.getChecksumSize();
  int bpc = dcs.getBytesPerChecksum();
  long n = (newlen - 1)/bpc + 1;
  long newmetalen = BlockMetadataHeader.getHeaderSize() + n*checksumsize;
  long lastchunkoffset = (n - 1)*bpc;
  int lastchunksize = (int)(newlen - lastchunkoffset); 
  byte[] b = new byte[Math.max(lastchunksize, checksumsize)]; 

  RandomAccessFile blockRAF = new RandomAccessFile(blockFile, "rw");
  try {
    //truncate blockFile 
    blockRAF.setLength(newlen);
 
    //read last chunk
    blockRAF.seek(lastchunkoffset);
    blockRAF.readFully(b, 0, lastchunksize);
  } finally {
    blockRAF.close();
  }

  //compute checksum
  dcs.update(b, 0, lastchunksize);
  dcs.writeValue(b, 0, false);

  //update metaFile 
  RandomAccessFile metaRAF = new RandomAccessFile(metaFile, "rw");
  try {
    metaRAF.setLength(newmetalen);
    metaRAF.seek(newmetalen - checksumsize);
    metaRAF.write(b, 0, checksumsize);
  } finally {
    metaRAF.close();
  }
}