Example 6 with MD5Hash

Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.

Class Checkpointer, method doCheckpoint.

/**
   * Create a new checkpoint
   */
void doCheckpoint() throws IOException {
    BackupImage bnImage = getFSImage();
    NNStorage bnStorage = bnImage.getStorage();
    long startTime = monotonicNow();
    bnImage.freezeNamespaceAtNextRoll();
    NamenodeCommand cmd = getRemoteNamenodeProxy().startCheckpoint(backupNode.getRegistration());
    CheckpointCommand cpCmd = null;
    switch(cmd.getAction()) {
        case NamenodeProtocol.ACT_SHUTDOWN:
            shutdown();
            throw new IOException("Name-node " + backupNode.nnRpcAddress + " requested shutdown.");
        case NamenodeProtocol.ACT_CHECKPOINT:
            cpCmd = (CheckpointCommand) cmd;
            break;
        default:
            throw new IOException("Unsupported NamenodeCommand: " + cmd.getAction());
    }
    bnImage.waitUntilNamespaceFrozen();
    CheckpointSignature sig = cpCmd.getSignature();
    // Make sure we're talking to the same NN!
    sig.validateStorageInfo(bnImage);
    long lastApplied = bnImage.getLastAppliedTxId();
    LOG.debug("Doing checkpoint. Last applied: " + lastApplied);
    RemoteEditLogManifest manifest = getRemoteNamenodeProxy().getEditLogManifest(bnImage.getLastAppliedTxId() + 1);
    boolean needReloadImage = false;
    if (!manifest.getLogs().isEmpty()) {
        RemoteEditLog firstRemoteLog = manifest.getLogs().get(0);
        // We don't have enough logs to roll forward using only logs; need
        // to download and load the image.
        if (firstRemoteLog.getStartTxId() > lastApplied + 1) {
            LOG.info("Unable to roll forward using only logs. Downloading " + "image with txid " + sig.mostRecentCheckpointTxId);
            MD5Hash downloadedHash = TransferFsImage.downloadImageToStorage(backupNode.nnHttpAddress, sig.mostRecentCheckpointTxId, bnStorage, true, false);
            bnImage.saveDigestAndRenameCheckpointImage(NameNodeFile.IMAGE, sig.mostRecentCheckpointTxId, downloadedHash);
            lastApplied = sig.mostRecentCheckpointTxId;
            needReloadImage = true;
        }
        if (firstRemoteLog.getStartTxId() > lastApplied + 1) {
            throw new IOException("No logs to roll forward from " + lastApplied);
        }
        // get edits files
        for (RemoteEditLog log : manifest.getLogs()) {
            TransferFsImage.downloadEditsToStorage(backupNode.nnHttpAddress, log, bnStorage);
        }
        if (needReloadImage) {
            LOG.info("Loading image with txid " + sig.mostRecentCheckpointTxId);
            File file = bnStorage.findImageFile(NameNodeFile.IMAGE, sig.mostRecentCheckpointTxId);
            bnImage.reloadFromImageFile(file, backupNode.getNamesystem());
        }
        rollForwardByApplyingLogs(manifest, bnImage, backupNode.getNamesystem());
    }
    long txid = bnImage.getLastAppliedTxId();
    backupNode.namesystem.writeLock();
    try {
        backupNode.namesystem.setImageLoaded();
        if (backupNode.namesystem.getBlocksTotal() > 0) {
            long completeBlocksTotal = backupNode.namesystem.getCompleteBlocksTotal();
            backupNode.namesystem.getBlockManager().setBlockTotal(completeBlocksTotal);
        }
        bnImage.saveFSImageInAllDirs(backupNode.getNamesystem(), txid);
        if (!backupNode.namenode.isRollingUpgrade()) {
            bnImage.updateStorageVersion();
        }
    } finally {
        backupNode.namesystem.writeUnlock("doCheckpoint");
    }
    if (cpCmd.needToReturnImage()) {
        TransferFsImage.uploadImageFromStorage(backupNode.nnHttpAddress, conf, bnStorage, NameNodeFile.IMAGE, txid);
    }
    getRemoteNamenodeProxy().endCheckpoint(backupNode.getRegistration(), sig);
    if (backupNode.getRole() == NamenodeRole.BACKUP) {
        bnImage.convergeJournalSpool();
    }
    // keep registration up to date
    backupNode.setRegistration();
    long imageSize = bnImage.getStorage().getFsImageName(txid).length();
    LOG.info("Checkpoint completed in " + (monotonicNow() - startTime) / 1000 + " seconds." + " New Image Size: " + imageSize);
}
Also used: CheckpointCommand (org.apache.hadoop.hdfs.server.protocol.CheckpointCommand), RemoteEditLogManifest (org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest), MD5Hash (org.apache.hadoop.io.MD5Hash), IOException (java.io.IOException), RemoteEditLog (org.apache.hadoop.hdfs.server.protocol.RemoteEditLog), File (java.io.File), NameNodeFile (org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile), NamenodeCommand (org.apache.hadoop.hdfs.server.protocol.NamenodeCommand)
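The download above is only trusted once its digest checks out; saveDigestAndRenameCheckpointImage persists the expected MD5 alongside the image. Below is a minimal sketch of that verify-by-recomputation pattern, using the computeMd5ForFile helper shown in Example 9; the class and method names here are hypothetical, not part of the Hadoop source.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.hdfs.util.MD5FileUtils;
import org.apache.hadoop.io.MD5Hash;

class ImageVerifier {
    // Hypothetical helper: recompute the saved image file's MD5 and compare
    // it with the digest returned by the transfer.
    static void verifyDownloadedImage(File imageFile, MD5Hash expected) throws IOException {
        MD5Hash actual = MD5FileUtils.computeMd5ForFile(imageFile);
        if (!actual.equals(expected)) {
            throw new IOException("Image file " + imageFile + " is corrupt: expected MD5 "
                + expected + " but computed " + actual);
        }
    }
}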

Example 7 with MD5Hash

Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.

Class BootstrapStandby, method downloadImage.

private int downloadImage(NNStorage storage, NamenodeProtocol proxy, RemoteNameNodeInfo proxyInfo) throws IOException {
    // Load the newly formatted image, using all of the directories
    // (including shared edits)
    final long imageTxId = proxy.getMostRecentCheckpointTxId();
    final long curTxId = proxy.getTransactionID();
    FSImage image = new FSImage(conf);
    try {
        image.getStorage().setStorageInfo(storage);
        image.initEditLog(StartupOption.REGULAR);
        assert image.getEditLog().isOpenForRead() : "Expected edit log to be open for read";
        // Ensure that we have enough edits already in the shared directory to
        // start up from the last checkpoint on the active.
        if (!skipSharedEditsCheck && !checkLogsAvailableForRead(image, imageTxId, curTxId)) {
            return ERR_CODE_LOGS_UNAVAILABLE;
        }
        // Download that checkpoint into our storage directories.
        MD5Hash hash = TransferFsImage.downloadImageToStorage(proxyInfo.getHttpAddress(), imageTxId, storage, true, true);
        image.saveDigestAndRenameCheckpointImage(NameNodeFile.IMAGE, imageTxId, hash);
        // Write seen_txid to the formatted image directories.
        storage.writeTransactionIdFileToStorage(imageTxId, NameNodeDirType.IMAGE);
    } catch (IOException ioe) {
        throw ioe;
    } finally {
        image.close();
    }
    return 0;
}
Also used: FSImage (org.apache.hadoop.hdfs.server.namenode.FSImage), MD5Hash (org.apache.hadoop.io.MD5Hash), IOException (java.io.IOException)
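Both checkpoint examples pass the returned MD5Hash around as a plain value. For reference, MD5Hash is a small value type wrapping a 16-byte digest, with structural equality and a hex round-trip. A standalone sketch follows; the hex literal is a made-up value:

import org.apache.hadoop.io.MD5Hash;

class MD5HashDemo {
    public static void main(String[] args) {
        // Build from a hex string (made-up value) and from raw digest bytes.
        MD5Hash fromHex = new MD5Hash("0123456789abcdef0123456789abcdef");
        MD5Hash fromBytes = new MD5Hash(fromHex.getDigest());
        System.out.println(fromHex.equals(fromBytes)); // true: equality is structural
        System.out.println(fromHex);                   // prints the 32-char hex digest
    }
}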

Example 8 with MD5Hash

Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.

Class OfflineImageReconstructor, method run.

/**
   * Run the OfflineImageReconstructor.
   *
   * @param inputPath         The input path to use.
   * @param outputPath        The output path to use.
   *
   * @throws Exception        On error.
   */
public static void run(String inputPath, String outputPath) throws Exception {
    MessageDigest digester = MD5Hash.getDigester();
    FileOutputStream fout = null;
    File foutHash = new File(outputPath + ".md5");
    // delete any .md5 file that exists
    Files.deleteIfExists(foutHash.toPath());
    CountingOutputStream out = null;
    FileInputStream fis = null;
    InputStreamReader reader = null;
    try {
        Files.deleteIfExists(Paths.get(outputPath));
        fout = new FileOutputStream(outputPath);
        fis = new FileInputStream(inputPath);
        reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
        out = new CountingOutputStream(new DigestOutputStream(new BufferedOutputStream(fout), digester));
        OfflineImageReconstructor oir = new OfflineImageReconstructor(out, reader);
        oir.processXml();
    } finally {
        IOUtils.cleanup(LOG, reader, fis, out, fout);
    }
    // Write the md5 file
    MD5FileUtils.saveMD5File(new File(outputPath), new MD5Hash(digester.digest()));
}
Also used: CountingOutputStream (com.google.common.io.CountingOutputStream), InputStreamReader (java.io.InputStreamReader), DigestOutputStream (java.security.DigestOutputStream), FileOutputStream (java.io.FileOutputStream), MD5Hash (org.apache.hadoop.io.MD5Hash), MessageDigest (java.security.MessageDigest), File (java.io.File), BufferedOutputStream (java.io.BufferedOutputStream), FileInputStream (java.io.FileInputStream)
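The write-side pattern here, hashing the bytes as they are written and then persisting the digest, reduces to a few lines. A minimal sketch using plain JDK streams; the class and helper names are hypothetical:

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;

import org.apache.hadoop.io.MD5Hash;

class DigestWriter {
    // Hypothetical helper: write data to dst while hashing it, then wrap the
    // finished digest in an MD5Hash (the same shape as run() above).
    static MD5Hash writeWithDigest(File dst, byte[] data) throws IOException {
        MessageDigest digester = MD5Hash.getDigester();
        try (OutputStream out = new DigestOutputStream(
                new BufferedOutputStream(new FileOutputStream(dst)), digester)) {
            out.write(data);
        }
        return new MD5Hash(digester.digest());
    }
}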

Example 9 with MD5Hash

Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.

Class MD5FileUtils, method computeMd5ForFile.

/**
   * Read dataFile and compute its MD5 checksum.
   */
public static MD5Hash computeMd5ForFile(File dataFile) throws IOException {
    InputStream in = new FileInputStream(dataFile);
    try {
        MessageDigest digester = MD5Hash.getDigester();
        DigestInputStream dis = new DigestInputStream(in, digester);
        IOUtils.copyBytes(dis, new IOUtils.NullOutputStream(), 128 * 1024);
        return new MD5Hash(digester.digest());
    } finally {
        IOUtils.closeStream(in);
    }
}
Also used: IOUtils (org.apache.hadoop.io.IOUtils), DigestInputStream (java.security.DigestInputStream), FileInputStream (java.io.FileInputStream), InputStream (java.io.InputStream), MD5Hash (org.apache.hadoop.io.MD5Hash), MessageDigest (java.security.MessageDigest)
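A round-trip usage sketch for the MD5FileUtils helpers seen in these examples: compute a file's MD5, save it to the .md5 side file, then verify it back. The path is a placeholder, and this assumes the verifySavedMD5 helper from the same utility class:

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.hdfs.util.MD5FileUtils;
import org.apache.hadoop.io.MD5Hash;

class Md5RoundTrip {
    public static void main(String[] args) throws IOException {
        File dataFile = new File("/tmp/fsimage_demo"); // placeholder path
        MD5Hash computed = MD5FileUtils.computeMd5ForFile(dataFile);
        // saveMD5File writes an md5sum-style line to fsimage_demo.md5
        MD5FileUtils.saveMD5File(dataFile, computed);
        // verifySavedMD5 rereads the side file and throws IOException on mismatch
        MD5FileUtils.verifySavedMD5(dataFile, computed);
    }
}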

Example 10 with MD5Hash

Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.

Class TestStartup, method corruptFSImageMD5.

/**
   * Corrupts the MD5 sum of the fsimage.
   * 
   * @param corruptAll
   *          whether to corrupt one or all of the MD5 sums in the configured
   *          namedirs
   * @throws IOException
   */
private void corruptFSImageMD5(boolean corruptAll) throws IOException {
    List<URI> nameDirs = (List<URI>) FSNamesystem.getNamespaceDirs(config);
    // Corrupt the md5 files in all the namedirs
    for (URI uri : nameDirs) {
        // Directory layout looks like:
        // test/data/dfs/nameN/current/{fsimage,edits,...}
        File nameDir = new File(uri.getPath());
        File dfsDir = nameDir.getParentFile();
        // make sure we got the right dir
        assertEquals("dfs", dfsDir.getName());
        // Set the md5 file to all zeros
        File imageFile = new File(nameDir, Storage.STORAGE_DIR_CURRENT + "/" + NNStorage.getImageFileName(0));
        MD5FileUtils.saveMD5File(imageFile, new MD5Hash(new byte[16]));
        // Only need to corrupt one if !corruptAll
        if (!corruptAll) {
            break;
        }
    }
}
Also used: List (java.util.List), MD5Hash (org.apache.hadoop.io.MD5Hash), URI (java.net.URI), Util.fileAsURI (org.apache.hadoop.hdfs.server.common.Util.fileAsURI), NameNodeFile (org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile), File (java.io.File)
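To make the test's intent concrete: once the side file holds an all-zero digest, verifying the stored MD5 against the image's actual contents must fail. A hypothetical follow-up check, under the same verifySavedMD5 assumption as the sketch after Example 9:

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.hdfs.util.MD5FileUtils;
import org.apache.hadoop.io.MD5Hash;

class CorruptionCheck {
    // Hypothetical check: after corruptFSImageMD5 zeroes the .md5 side file,
    // the stored digest no longer matches the image, so verification throws.
    static boolean isImageCorrupt(File imageFile) throws IOException {
        MD5Hash actual = MD5FileUtils.computeMd5ForFile(imageFile);
        try {
            MD5FileUtils.verifySavedMD5(imageFile, actual);
            return false; // stored digest matches the file contents
        } catch (IOException mismatch) {
            return true;  // stored digest disagrees (or side file unreadable)
        }
    }
}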

Aggregations

MD5Hash (org.apache.hadoop.io.MD5Hash): 21
IOException (java.io.IOException): 11
File (java.io.File): 8
MessageDigest (java.security.MessageDigest): 5
NameNodeFile (org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile): 5
DigestInputStream (java.security.DigestInputStream): 3
FileInputStream (java.io.FileInputStream): 2
FileOutputStream (java.io.FileOutputStream): 2
InputStream (java.io.InputStream): 2
CountingOutputStream (com.google.common.io.CountingOutputStream): 1
BufferedOutputStream (java.io.BufferedOutputStream): 1
InputStreamReader (java.io.InputStreamReader): 1
HttpURLConnection (java.net.HttpURLConnection): 1
URI (java.net.URI): 1
DigestOutputStream (java.security.DigestOutputStream): 1
ArrayList (java.util.ArrayList): 1
HashMap (java.util.HashMap): 1
List (java.util.List): 1
Matcher (java.util.regex.Matcher): 1
ServletContext (javax.servlet.ServletContext): 1