org.apache.hadoop.io.MD5Hash Java Examples

The following examples show how to use org.apache.hadoop.io.MD5Hash. They are drawn from several Apache Hadoop and Hadoop-derived projects; each example lists its source file, the project it comes from, and that project's license. Most come from the HDFS code paths that checksum fsimage files and blocks.
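
Before the project examples, here is a minimal standalone sketch of the core MD5Hash calls exercised below (digest, toString, and the hex-string and byte-array constructors). The class name Md5HashDemo and the input strings are illustrative only.

import org.apache.hadoop.io.MD5Hash;

public class Md5HashDemo {
  public static void main(String[] args) {
    // Compute an MD5 digest of a string; toString() renders it as 32 hex characters.
    MD5Hash hash = MD5Hash.digest("hello, hadoop");
    System.out.println("digest: " + hash);

    // Round-trip through the hex representation.
    MD5Hash parsed = new MD5Hash(hash.toString());
    System.out.println("round-trip equal: " + hash.equals(parsed));

    // An all-zero 16-byte digest, the same trick the fsimage-corruption tests below use.
    MD5Hash zeros = new MD5Hash(new byte[16]);
    System.out.println("zeros: " + zeros);
  }
}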
Example #1
Source File: MD5MD5CRC32FileChecksum.java    From RDFS with Apache License 2.0
/** Return the object represented in the attributes. */
public static MD5MD5CRC32FileChecksum valueOf(Attributes attrs
    ) throws SAXException {
  final String bytesPerCRC = attrs.getValue("bytesPerCRC");
  final String crcPerBlock = attrs.getValue("crcPerBlock");
  final String md5 = attrs.getValue("md5");
  if (bytesPerCRC == null || crcPerBlock == null || md5 == null) {
    return null;
  }

  try {
    return new MD5MD5CRC32FileChecksum(Integer.valueOf(bytesPerCRC),
        Integer.valueOf(crcPerBlock), new MD5Hash(md5));
  } catch(Exception e) {
    throw new SAXException("Invalid attributes: bytesPerCRC=" + bytesPerCRC
        + ", crcPerBlock=" + crcPerBlock + ", md5=" + md5, e);
  }
}
 
Example #2
Source File: TestStartup.java    From big-c with Apache License 2.0
/**
 * Corrupts the MD5 sum of the fsimage.
 * 
 * @param corruptAll
 *          whether to corrupt one or all of the MD5 sums in the configured
 *          namedirs
 * @throws IOException
 */
private void corruptFSImageMD5(boolean corruptAll) throws IOException {
  List<URI> nameDirs = (List<URI>)FSNamesystem.getNamespaceDirs(config);
  // Corrupt the md5 files in all the namedirs
  for (URI uri: nameDirs) {
    // Directory layout looks like:
    // test/data/dfs/nameN/current/{fsimage,edits,...}
    File nameDir = new File(uri.getPath());
    File dfsDir = nameDir.getParentFile();
    assertEquals(dfsDir.getName(), "dfs"); // make sure we got right dir
    // Set the md5 file to all zeros
    File imageFile = new File(nameDir,
        Storage.STORAGE_DIR_CURRENT + "/"
        + NNStorage.getImageFileName(0));
    MD5FileUtils.saveMD5File(imageFile, new MD5Hash(new byte[16]));
    // Only need to corrupt one if !corruptAll
    if (!corruptAll) {
      break;
    }
  }
}
 
Example #3
Source File: FSImage.java    From big-c with Apache License 2.0
/**
 * Load in the filesystem image from file. It's a big list of
 * filenames and blocks.
 */
private void loadFSImage(File curFile, MD5Hash expectedMd5,
    FSNamesystem target, MetaRecoveryContext recovery,
    boolean requireSameLayoutVersion) throws IOException {
  // BlockPoolId is required when the FsImageLoader loads the rolling upgrade
  // information. Make sure the ID is properly set.
  target.setBlockPoolId(this.getBlockPoolID());

  FSImageFormat.LoaderDelegator loader = FSImageFormat.newLoader(conf, target);
  loader.load(curFile, requireSameLayoutVersion);

  // Check that the image digest we loaded matches up with what
  // we expected
  MD5Hash readImageMd5 = loader.getLoadedImageMd5();
  if (expectedMd5 != null &&
      !expectedMd5.equals(readImageMd5)) {
    throw new IOException("Image file " + curFile +
        " is corrupt with MD5 checksum of " + readImageMd5 +
        " but expecting " + expectedMd5);
  }

  long txId = loader.getLoadedImageTxId();
  LOG.info("Loaded image for txid " + txId + " from " + curFile);
  lastAppliedTxId = txId;
  storage.setMostRecentCheckpointInfo(txId, curFile.lastModified());
}
 
Example #4
Source File: TransferFsImage.java    From big-c with Apache License 2.0
static MD5Hash handleUploadImageRequest(HttpServletRequest request,
    long imageTxId, Storage dstStorage, InputStream stream,
    long advertisedSize, DataTransferThrottler throttler) throws IOException {

  String fileName = NNStorage.getCheckpointImageFileName(imageTxId);

  List<File> dstFiles = dstStorage.getFiles(NameNodeDirType.IMAGE, fileName);
  if (dstFiles.isEmpty()) {
    throw new IOException("No targets in destination storage!");
  }

  MD5Hash advertisedDigest = parseMD5Header(request);
  MD5Hash hash = receiveFile(fileName, dstFiles, dstStorage, true,
      advertisedSize, advertisedDigest, fileName, stream, throttler);
  LOG.info("Downloaded file " + dstFiles.get(0).getName() + " size "
      + dstFiles.get(0).length() + " bytes.");
  return hash;
}
 
Example #5
Source File: TransferFsImage.java    From big-c with Apache License 2.0
public static MD5Hash downloadImageToStorage(URL fsName, long imageTxId,
    Storage dstStorage, boolean needDigest) throws IOException {
  String fileid = ImageServlet.getParamStringForImage(null,
      imageTxId, dstStorage);
  String fileName = NNStorage.getCheckpointImageFileName(imageTxId);
  
  List<File> dstFiles = dstStorage.getFiles(
      NameNodeDirType.IMAGE, fileName);
  if (dstFiles.isEmpty()) {
    throw new IOException("No targets in destination storage!");
  }
  
  MD5Hash hash = getFileClient(fsName, fileid, dstFiles, dstStorage, needDigest);
  LOG.info("Downloaded file " + dstFiles.get(0).getName() + " size " +
      dstFiles.get(0).length() + " bytes.");
  return hash;
}
 
Example #6
Source File: TestStartup.java    From hadoop with Apache License 2.0
/**
 * Corrupts the MD5 sum of the fsimage.
 * 
 * @param corruptAll
 *          whether to corrupt one or all of the MD5 sums in the configured
 *          namedirs
 * @throws IOException
 */
private void corruptFSImageMD5(boolean corruptAll) throws IOException {
  List<URI> nameDirs = (List<URI>)FSNamesystem.getNamespaceDirs(config);
  // Corrupt the md5 files in all the namedirs
  for (URI uri: nameDirs) {
    // Directory layout looks like:
    // test/data/dfs/nameN/current/{fsimage,edits,...}
    File nameDir = new File(uri.getPath());
    File dfsDir = nameDir.getParentFile();
    assertEquals(dfsDir.getName(), "dfs"); // make sure we got right dir
    // Set the md5 file to all zeros
    File imageFile = new File(nameDir,
        Storage.STORAGE_DIR_CURRENT + "/"
        + NNStorage.getImageFileName(0));
    MD5FileUtils.saveMD5File(imageFile, new MD5Hash(new byte[16]));
    // Only need to corrupt one if !corruptAll
    if (!corruptAll) {
      break;
    }
  }
}
 
Example #7
Source File: MD5FileUtils.java    From hadoop with Apache License 2.0
/**
 * Read the md5 checksum stored alongside the given data file.
 * @param dataFile the file containing data
 * @return the checksum stored in dataFile.md5
 */
public static MD5Hash readStoredMd5ForFile(File dataFile) throws IOException {
  final File md5File = getDigestFileForFile(dataFile);
  if (!md5File.exists()) {
    return null;
  }

  final Matcher matcher = readStoredMd5(md5File);
  String storedHash = matcher.group(1);
  File referencedFile = new File(matcher.group(2));

  // Sanity check: Make sure that the file referenced in the .md5 file at
  // least has the same name as the file we expect
  if (!referencedFile.getName().equals(dataFile.getName())) {
    throw new IOException(
        "MD5 file at " + md5File + " references file named " +
        referencedFile.getName() + " but we expected it to reference " +
        dataFile);
  }
  return new MD5Hash(storedHash);
}
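
The save/read/verify helpers in MD5FileUtils pair naturally with the method above. A minimal round trip, assuming a throwaway temp file (the file name, its contents, and the org.apache.hadoop.hdfs.util import path are illustrative assumptions):

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

import org.apache.hadoop.hdfs.util.MD5FileUtils;
import org.apache.hadoop.io.MD5Hash;

public class Md5FileRoundTrip {
  public static void main(String[] args) throws Exception {
    // A throwaway data file standing in for an fsimage or block file.
    File dataFile = File.createTempFile("md5-demo", ".dat");
    Files.write(dataFile.toPath(), "some bytes".getBytes(StandardCharsets.UTF_8));

    // Compute the digest and persist it alongside the file as dataFile.md5.
    MD5Hash digest = MD5FileUtils.computeMd5ForFile(dataFile);
    MD5FileUtils.saveMD5File(dataFile, digest);

    // Read it back; verifySavedMD5 throws IOException on a mismatch.
    MD5Hash stored = MD5FileUtils.readStoredMd5ForFile(dataFile);
    MD5FileUtils.verifySavedMD5(dataFile, digest);
    System.out.println("stored digest: " + stored);
  }
}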
 
Example #8
Source File: FSImage.java    From hadoop with Apache License 2.0
/**
 * Load in the filesystem image from file. It's a big list of
 * filenames and blocks.
 */
private void loadFSImage(File curFile, MD5Hash expectedMd5,
    FSNamesystem target, MetaRecoveryContext recovery,
    boolean requireSameLayoutVersion) throws IOException {
  // BlockPoolId is required when the FsImageLoader loads the rolling upgrade
  // information. Make sure the ID is properly set.
  target.setBlockPoolId(this.getBlockPoolID());

  FSImageFormat.LoaderDelegator loader = FSImageFormat.newLoader(conf, target);
  loader.load(curFile, requireSameLayoutVersion);

  // Check that the image digest we loaded matches up with what
  // we expected
  MD5Hash readImageMd5 = loader.getLoadedImageMd5();
  if (expectedMd5 != null &&
      !expectedMd5.equals(readImageMd5)) {
    throw new IOException("Image file " + curFile +
        " is corrupt with MD5 checksum of " + readImageMd5 +
        " but expecting " + expectedMd5);
  }

  long txId = loader.getLoadedImageTxId();
  LOG.info("Loaded image for txid " + txId + " from " + curFile);
  lastAppliedTxId = txId;
  storage.setMostRecentCheckpointInfo(txId, curFile.lastModified());
}
 
Example #9
Source File: MD5MD5CRC32FileChecksum.java    From hadoop-gpu with Apache License 2.0
/** Return the object represented in the attributes. */
public static MD5MD5CRC32FileChecksum valueOf(Attributes attrs
    ) throws SAXException {
  final String bytesPerCRC = attrs.getValue("bytesPerCRC");
  final String crcPerBlock = attrs.getValue("crcPerBlock");
  final String md5 = attrs.getValue("md5");
  if (bytesPerCRC == null || crcPerBlock == null || md5 == null) {
    return null;
  }

  try {
    return new MD5MD5CRC32FileChecksum(Integer.valueOf(bytesPerCRC),
        Integer.valueOf(crcPerBlock), new MD5Hash(md5));
  } catch(Exception e) {
    throw new SAXException("Invalid attributes: bytesPerCRC=" + bytesPerCRC
        + ", crcPerBlock=" + crcPerBlock + ", md5=" + md5, e);
  }
}
 
Example #10
Source File: TransferFsImage.java    From hadoop with Apache License 2.0
static MD5Hash handleUploadImageRequest(HttpServletRequest request,
    long imageTxId, Storage dstStorage, InputStream stream,
    long advertisedSize, DataTransferThrottler throttler) throws IOException {

  String fileName = NNStorage.getCheckpointImageFileName(imageTxId);

  List<File> dstFiles = dstStorage.getFiles(NameNodeDirType.IMAGE, fileName);
  if (dstFiles.isEmpty()) {
    throw new IOException("No targets in destination storage!");
  }

  MD5Hash advertisedDigest = parseMD5Header(request);
  MD5Hash hash = receiveFile(fileName, dstFiles, dstStorage, true,
      advertisedSize, advertisedDigest, fileName, stream, throttler);
  LOG.info("Downloaded file " + dstFiles.get(0).getName() + " size "
      + dstFiles.get(0).length() + " bytes.");
  return hash;
}
 
Example #11
Source File: TransferFsImage.java    From hadoop with Apache License 2.0
public static MD5Hash downloadImageToStorage(URL fsName, long imageTxId,
    Storage dstStorage, boolean needDigest) throws IOException {
  String fileid = ImageServlet.getParamStringForImage(null,
      imageTxId, dstStorage);
  String fileName = NNStorage.getCheckpointImageFileName(imageTxId);
  
  List<File> dstFiles = dstStorage.getFiles(
      NameNodeDirType.IMAGE, fileName);
  if (dstFiles.isEmpty()) {
    throw new IOException("No targets in destination storage!");
  }
  
  MD5Hash hash = getFileClient(fsName, fileid, dstFiles, dstStorage, needDigest);
  LOG.info("Downloaded file " + dstFiles.get(0).getName() + " size " +
      dstFiles.get(0).length() + " bytes.");
  return hash;
}
 
Example #12
Source File: FSImage.java    From RDFS with Apache License 2.0
/**
 * Write last checkpoint time and version file into the storage directory.
 * 
 * The version file should always be written last.
 * Missing or corrupted version file indicates that 
 * the checkpoint is not valid.
 * 
 * @param sd storage directory
 * @throws IOException
 */
protected void setFields(Properties props, 
                         StorageDirectory sd 
                         ) throws IOException {
  super.setFields(props, sd);
  boolean uState = getDistributedUpgradeState();
  int uVersion = getDistributedUpgradeVersion();
  if(uState && uVersion != getLayoutVersion()) {
    props.setProperty("distributedUpgradeState", Boolean.toString(uState));
    props.setProperty("distributedUpgradeVersion", Integer.toString(uVersion)); 
  }
  if (this.newImageDigest) {
    this.setImageDigest(MD5Hash.digest(
        new FileInputStream(getImageFile(sd, NameNodeFile.IMAGE))));
  }
  props.setProperty(MESSAGE_DIGEST_PROPERTY, 
      this.getImageDigest().toString());
  writeCheckpointTime(sd);
}
 
Example #13
Source File: ExecutionSummarizer.java    From hadoop with Apache License 2.0
protected static String getTraceSignature(String input) throws IOException {
  Path inputPath = new Path(input);
  FileSystem fs = inputPath.getFileSystem(new Configuration());
  FileStatus status = fs.getFileStatus(inputPath);
  Path qPath = fs.makeQualified(status.getPath());
  String traceID = status.getModificationTime() + qPath.toString()
                   + status.getOwner() + status.getLen();
  return MD5Hash.digest(traceID).toString();
}
 
Example #14
Source File: ImageServlet.java    From hadoop with Apache License 2.0
/**
 * Set headers for content length, and, if available, md5.
 * @throws IOException 
 */
public static void setVerificationHeadersForGet(HttpServletResponse response,
    File file) throws IOException {
  response.setHeader(TransferFsImage.CONTENT_LENGTH,
      String.valueOf(file.length()));
  MD5Hash hash = MD5FileUtils.readStoredMd5ForFile(file);
  if (hash != null) {
    response.setHeader(TransferFsImage.MD5_HEADER, hash.toString());
  }
}
 
Example #15
Source File: TestFetchImage.java    From big-c with Apache License 2.0
/**
 * Run `hdfs dfsadmin -fetchImage ...' and verify that the downloaded image is
 * correct.
 */
private static void runFetchImage(DFSAdmin dfsAdmin, MiniDFSCluster cluster)
    throws Exception {
  int retVal = dfsAdmin.run(new String[]{"-fetchImage",
      FETCHED_IMAGE_FILE.getPath() });
  
  assertEquals(0, retVal);
  
  File highestImageOnNn = getHighestFsImageOnCluster(cluster);
  MD5Hash expected = MD5FileUtils.computeMd5ForFile(highestImageOnNn);
  MD5Hash actual = MD5FileUtils.computeMd5ForFile(
      new File(FETCHED_IMAGE_FILE, highestImageOnNn.getName()));
  
  assertEquals(expected, actual);
}
 
Example #16
Source File: TestMD5FileUtils.java    From hadoop with Apache License 2.0
/**
 * Test when .md5 file exists but incorrect checksum
 */
@Test
public void testVerifyMD5FileBadDigest() throws Exception {
  MD5FileUtils.saveMD5File(TEST_FILE, MD5Hash.digest(new byte[0]));
  try {
    MD5FileUtils.verifySavedMD5(TEST_FILE, TEST_MD5);
    fail("Did not throw");
  } catch (IOException ioe) {
    // Expected
  }
}
 
Example #17
Source File: TestMD5FileUtils.java    From big-c with Apache License 2.0
/**
 * Test when .md5 file exists but incorrect checksum
 */
@Test
public void testVerifyMD5FileBadDigest() throws Exception {
  MD5FileUtils.saveMD5File(TEST_FILE, MD5Hash.digest(new byte[0]));
  try {
    MD5FileUtils.verifySavedMD5(TEST_FILE, TEST_MD5);
    fail("Did not throw");
  } catch (IOException ioe) {
    // Expected
  }
}
 
Example #18
Source File: TestFetchImage.java    From hadoop with Apache License 2.0
/**
 * Run `hdfs dfsadmin -fetchImage ...' and verify that the downloaded image is
 * correct.
 */
private static void runFetchImage(DFSAdmin dfsAdmin, MiniDFSCluster cluster)
    throws Exception {
  int retVal = dfsAdmin.run(new String[]{"-fetchImage",
      FETCHED_IMAGE_FILE.getPath() });
  
  assertEquals(0, retVal);
  
  File highestImageOnNn = getHighestFsImageOnCluster(cluster);
  MD5Hash expected = MD5FileUtils.computeMd5ForFile(highestImageOnNn);
  MD5Hash actual = MD5FileUtils.computeMd5ForFile(
      new File(FETCHED_IMAGE_FILE, highestImageOnNn.getName()));
  
  assertEquals(expected, actual);
}
 
Example #19
Source File: ImageServlet.java    From hadoop with Apache License 2.0
/**
 * Set headers for image length and if available, md5.
 * 
 * @throws IOException
 */
static void setVerificationHeadersForPut(HttpURLConnection connection,
    File file) throws IOException {
  connection.setRequestProperty(TransferFsImage.CONTENT_LENGTH,
      String.valueOf(file.length()));
  MD5Hash hash = MD5FileUtils.readStoredMd5ForFile(file);
  if (hash != null) {
    connection
        .setRequestProperty(TransferFsImage.MD5_HEADER, hash.toString());
  }
}
 
Example #20
Source File: DataXceiver.java    From hadoop with Apache License 2.0
private MD5Hash calcPartialBlockChecksum(ExtendedBlock block,
    long requestLength, DataChecksum checksum, DataInputStream checksumIn)
    throws IOException {
  final int bytesPerCRC = checksum.getBytesPerChecksum();
  final int csize = checksum.getChecksumSize();
  final byte[] buffer = new byte[4*1024];
  MessageDigest digester = MD5Hash.getDigester();

  long remaining = requestLength / bytesPerCRC * csize;
  for (int toDigest = 0; remaining > 0; remaining -= toDigest) {
    toDigest = checksumIn.read(buffer, 0,
        (int) Math.min(remaining, buffer.length));
    if (toDigest < 0) {
      break;
    }
    digester.update(buffer, 0, toDigest);
  }
  
  int partialLength = (int) (requestLength % bytesPerCRC);
  if (partialLength > 0) {
    byte[] buf = new byte[partialLength];
    final InputStream blockIn = datanode.data.getBlockInputStream(block,
        requestLength - partialLength);
    try {
      // Get the CRC of the partialLength.
      IOUtils.readFully(blockIn, buf, 0, partialLength);
    } finally {
      IOUtils.closeStream(blockIn);
    }
    checksum.update(buf, 0, partialLength);
    byte[] partialCrc = new byte[csize];
    checksum.writeValue(partialCrc, 0, true);
    digester.update(partialCrc);
  }
  return new MD5Hash(digester.digest());
}
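
The pattern above — pulling a MessageDigest from MD5Hash.getDigester(), feeding it data in chunks, then wrapping digester.digest() in an MD5Hash — is not specific to the DataNode. A minimal sketch with in-memory chunks standing in for a checksum stream (the class and chunk names are illustrative):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

import org.apache.hadoop.io.MD5Hash;

public class IncrementalMd5Sketch {
  public static void main(String[] args) {
    // MD5Hash.getDigester() returns a reset, per-thread MD5 MessageDigest.
    MessageDigest digester = MD5Hash.getDigester();

    // Feed data incrementally, as it would arrive off a stream.
    for (String chunk : new String[] {"part-one", "part-two", "part-three"}) {
      digester.update(chunk.getBytes(StandardCharsets.UTF_8));
    }

    // Wrap the finished 16-byte digest.
    MD5Hash hash = new MD5Hash(digester.digest());
    System.out.println("incremental digest: " + hash);
  }
}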
 
Example #21
Source File: FSImage.java    From RDFS with Apache License 2.0
/**
 * Moves fsimage.ckpt to fsimage and edits.new to edits.
 * Reopens the new edits file.
 * 
 * @param newImageSignature the signature of the new image
 */
void rollFSImage(CheckpointSignature newImageSignature) throws IOException {
  MD5Hash newImageDigest = newImageSignature.getImageDigest();
  if (!newImageDigest.equals(checkpointImageDigest)) {
    throw new IOException(
        "Checkpoint image is corrupt: expecting an MD5 checksum of" +
        newImageDigest + " but is " + checkpointImageDigest);
  }
  rollFSImage(newImageSignature.getImageDigest());
}
 
Example #22
Source File: MD5FileUtils.java    From big-c with Apache License 2.0
/**
 * Verify that the previously saved md5 for the given file matches
 * expectedMd5.
 * @throws IOException 
 */
public static void verifySavedMD5(File dataFile, MD5Hash expectedMD5)
    throws IOException {
  MD5Hash storedHash = readStoredMd5ForFile(dataFile);
  // Check the hash itself
  if (!expectedMD5.equals(storedHash)) {
    throw new IOException(
        "File " + dataFile + " did not match stored MD5 checksum " +
        " (stored: " + storedHash + ", computed: " + expectedMD5);
  }
}
 
Example #23
Source File: DataXceiver.java    From big-c with Apache License 2.0
private MD5Hash calcPartialBlockChecksum(ExtendedBlock block,
    long requestLength, DataChecksum checksum, DataInputStream checksumIn)
    throws IOException {
  final int bytesPerCRC = checksum.getBytesPerChecksum();
  final int csize = checksum.getChecksumSize();
  final byte[] buffer = new byte[4*1024];
  MessageDigest digester = MD5Hash.getDigester();

  long remaining = requestLength / bytesPerCRC * csize;
  for (int toDigest = 0; remaining > 0; remaining -= toDigest) {
    toDigest = checksumIn.read(buffer, 0,
        (int) Math.min(remaining, buffer.length));
    if (toDigest < 0) {
      break;
    }
    digester.update(buffer, 0, toDigest);
  }
  
  int partialLength = (int) (requestLength % bytesPerCRC);
  if (partialLength > 0) {
    byte[] buf = new byte[partialLength];
    final InputStream blockIn = datanode.data.getBlockInputStream(block,
        requestLength - partialLength);
    try {
      // Get the CRC of the partialLength.
      IOUtils.readFully(blockIn, buf, 0, partialLength);
    } finally {
      IOUtils.closeStream(blockIn);
    }
    checksum.update(buf, 0, partialLength);
    byte[] partialCrc = new byte[csize];
    checksum.writeValue(partialCrc, 0, true);
    digester.update(partialCrc);
  }
  return new MD5Hash(digester.digest());
}
 
Example #24
Source File: TransferFsImage.java    From big-c with Apache License 2.0
/**
 * Client-side method to fetch a file from a server.
 * Copies the response from the URL to a list of local files.
 * @param dstStorage if an error occurs writing to one of the files,
 *                   this storage object will be notified. 
 * @return a digest of the received file if getChecksum is true
 */
static MD5Hash getFileClient(URL infoServer,
    String queryString, List<File> localPaths,
    Storage dstStorage, boolean getChecksum) throws IOException {
  URL url = new URL(infoServer, ImageServlet.PATH_SPEC + "?" + queryString);
  LOG.info("Opening connection to " + url);
  return doGetUrl(url, localPaths, dstStorage, getChecksum);
}
 
Example #25
Source File: CheckpointSignature.java    From RDFS with Apache License 2.0
public void readFields(DataInput in) throws IOException {
  layoutVersion = in.readInt();
  namespaceID = in.readInt();
  cTime = in.readLong();
  editsTime = in.readLong();
  checkpointTime = in.readLong();
  imageDigest = new MD5Hash();
  imageDigest.readFields(in);
  checkpointState = CheckpointStates.deserialize(in.readInt());
}
 
Example #26
Source File: FSImage.java    From big-c with Apache License 2.0
/**
 * Load the image namespace from the given image file, verifying
 * it against the MD5 sum stored in its associated .md5 file.
 */
private void loadFSImage(File imageFile, FSNamesystem target,
    MetaRecoveryContext recovery, boolean requireSameLayoutVersion)
    throws IOException {
  MD5Hash expectedMD5 = MD5FileUtils.readStoredMd5ForFile(imageFile);
  if (expectedMD5 == null) {
    throw new IOException("No MD5 file found corresponding to image file "
        + imageFile);
  }
  loadFSImage(imageFile, expectedMD5, target, recovery,
      requireSameLayoutVersion);
}
 
Example #27
Source File: FSImage.java    From big-c with Apache License 2.0
/**
 * This is called by the 2NN after having downloaded an image, and by
 * the NN after having received a new image from the 2NN. It
 * renames the image from fsimage_N.ckpt to fsimage_N and also
 * saves the related .md5 file into place.
 */
public synchronized void saveDigestAndRenameCheckpointImage(NameNodeFile nnf,
    long txid, MD5Hash digest) throws IOException {
  // Write and rename MD5 file
  List<StorageDirectory> badSds = Lists.newArrayList();
  
  for (StorageDirectory sd : storage.dirIterable(NameNodeDirType.IMAGE)) {
    File imageFile = NNStorage.getImageFile(sd, nnf, txid);
    try {
      MD5FileUtils.saveMD5File(imageFile, digest);
    } catch (IOException ioe) {
      badSds.add(sd);
    }
  }
  storage.reportErrorsOnDirectories(badSds);
  
  CheckpointFaultInjector.getInstance().afterMD5Rename();
  
  // Rename image from tmp file
  renameCheckpoint(txid, NameNodeFile.IMAGE_NEW, nnf, false);
  // So long as this is the newest image available,
  // advertise it as such to other checkpointers
  // from now on
  if (txid > storage.getMostRecentCheckpointTxId()) {
    storage.setMostRecentCheckpointInfo(txid, Time.now());
  }
}
 
Example #28
Source File: ImageServlet.java    From big-c with Apache License 2.0
/**
 * Set headers for content length, and, if available, md5.
 * @throws IOException 
 */
public static void setVerificationHeadersForGet(HttpServletResponse response,
    File file) throws IOException {
  response.setHeader(TransferFsImage.CONTENT_LENGTH,
      String.valueOf(file.length()));
  MD5Hash hash = MD5FileUtils.readStoredMd5ForFile(file);
  if (hash != null) {
    response.setHeader(TransferFsImage.MD5_HEADER, hash.toString());
  }
}
 
Example #29
Source File: ImageServlet.java    From big-c with Apache License 2.0
/**
 * Set headers for image length and if available, md5.
 * 
 * @throws IOException
 */
static void setVerificationHeadersForPut(HttpURLConnection connection,
    File file) throws IOException {
  connection.setRequestProperty(TransferFsImage.CONTENT_LENGTH,
      String.valueOf(file.length()));
  MD5Hash hash = MD5FileUtils.readStoredMd5ForFile(file);
  if (hash != null) {
    connection
        .setRequestProperty(TransferFsImage.MD5_HEADER, hash.toString());
  }
}
 
Example #30
Source File: FSImage.java    From hadoop with Apache License 2.0
void loadFSImageFile(FSNamesystem target, MetaRecoveryContext recovery,
    FSImageFile imageFile, StartupOption startupOption) throws IOException {
  LOG.debug("Planning to load image :\n" + imageFile);
  StorageDirectory sdForProperties = imageFile.sd;
  storage.readProperties(sdForProperties, startupOption);

  if (NameNodeLayoutVersion.supports(
      LayoutVersion.Feature.TXID_BASED_LAYOUT, getLayoutVersion())) {
    // For txid-based layout, we should have a .md5 file
    // next to the image file
    boolean isRollingRollback = RollingUpgradeStartupOption.ROLLBACK
        .matches(startupOption);
    loadFSImage(imageFile.getFile(), target, recovery, isRollingRollback);
  } else if (NameNodeLayoutVersion.supports(
      LayoutVersion.Feature.FSIMAGE_CHECKSUM, getLayoutVersion())) {
    // In 0.22, we have the checksum stored in the VERSION file.
    String md5 = storage.getDeprecatedProperty(
        NNStorage.DEPRECATED_MESSAGE_DIGEST_PROPERTY);
    if (md5 == null) {
      throw new InconsistentFSStateException(sdForProperties.getRoot(),
          "Message digest property " +
          NNStorage.DEPRECATED_MESSAGE_DIGEST_PROPERTY +
          " not set for storage directory " + sdForProperties.getRoot());
    }
    loadFSImage(imageFile.getFile(), new MD5Hash(md5), target, recovery,
        false);
  } else {
    // We don't have any record of the md5sum
    loadFSImage(imageFile.getFile(), null, target, recovery, false);
  }
}