Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.
The class FSImage, method loadFSImage.
/**
* Load in the filesystem image from file. It's a big list of
* filenames and blocks.
*/
private void loadFSImage(File curFile, MD5Hash expectedMd5, FSNamesystem target,
    MetaRecoveryContext recovery, boolean requireSameLayoutVersion)
    throws IOException {
  // BlockPoolId is required when the FsImageLoader loads the rolling upgrade
  // information. Make sure the ID is properly set.
  target.setBlockPoolId(this.getBlockPoolID());
  FSImageFormat.LoaderDelegator loader = FSImageFormat.newLoader(conf, target);
  loader.load(curFile, requireSameLayoutVersion);
  // Check that the image digest we loaded matches up with what
  // we expected
  MD5Hash readImageMd5 = loader.getLoadedImageMd5();
  if (expectedMd5 != null && !expectedMd5.equals(readImageMd5)) {
    throw new IOException("Image file " + curFile
        + " is corrupt with MD5 checksum of " + readImageMd5
        + " but expecting " + expectedMd5);
  }
  long txId = loader.getLoadedImageTxId();
  LOG.info("Loaded image for txid " + txId + " from " + curFile);
  lastAppliedTxId = txId;
  storage.setMostRecentCheckpointInfo(txId, curFile.lastModified());
}
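The corruption check above boils down to an MD5Hash equality test against the digest recorded by the loader. As a minimal standalone sketch of the same pattern (verifyImageDigest is a hypothetical helper; MD5Hash.digest(InputStream) is the stock utility for digesting a stream):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.io.MD5Hash;

// Hypothetical helper mirroring the corruption check in loadFSImage:
// recompute the file's digest and compare it to the expected value.
static void verifyImageDigest(File imageFile, MD5Hash expectedMd5)
    throws IOException {
  MD5Hash actual;
  try (FileInputStream in = new FileInputStream(imageFile)) {
    actual = MD5Hash.digest(in); // reads the stream to EOF
  }
  if (expectedMd5 != null && !expectedMd5.equals(actual)) {
    throw new IOException("Image file " + imageFile
        + " is corrupt with MD5 checksum of " + actual
        + " but expecting " + expectedMd5);
  }
}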
Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.
The class TransferFsImage, method downloadImageToStorage.
public static MD5Hash downloadImageToStorage(URL fsName, long imageTxId,
    Storage dstStorage, boolean needDigest, boolean isBootstrapStandby)
    throws IOException {
  String fileid = ImageServlet.getParamStringForImage(null, imageTxId,
      dstStorage, isBootstrapStandby);
  String fileName = NNStorage.getCheckpointImageFileName(imageTxId);
  List<File> dstFiles = dstStorage.getFiles(NameNodeDirType.IMAGE, fileName);
  if (dstFiles.isEmpty()) {
    throw new IOException("No targets in destination storage!");
  }
  MD5Hash hash = getFileClient(fsName, fileid, dstFiles, dstStorage, needDigest);
  LOG.info("Downloaded file " + dstFiles.get(0).getName() + " size "
      + dstFiles.get(0).length() + " bytes.");
  return hash;
}
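A hedged usage sketch for the method above: download the checkpoint image with needDigest set to true, then persist the returned digest next to the image with MD5FileUtils.saveMD5File so later loads can verify it. The URL, transaction id, and dstStorage setup are illustrative assumptions, not taken from the Hadoop sources.

// Illustrative values; a real caller gets these from configuration and
// checkpoint signatures.
URL fsName = new URL("http://nn.example.com:50070"); // hypothetical NN address
long imageTxId = 12345L;                             // hypothetical txid
MD5Hash hash = TransferFsImage.downloadImageToStorage(
    fsName, imageTxId, dstStorage, true /* needDigest */, false);
File downloaded = dstStorage.getFiles(NameNodeDirType.IMAGE,
    NNStorage.getCheckpointImageFileName(imageTxId)).get(0);
MD5FileUtils.saveMD5File(downloaded, hash); // writes the .md5 sidecar file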
Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.
The class TransferFsImage, method handleUploadImageRequest.
static MD5Hash handleUploadImageRequest(HttpServletRequest request,
    long imageTxId, Storage dstStorage, InputStream stream,
    long advertisedSize, DataTransferThrottler throttler) throws IOException {
  String fileName = NNStorage.getCheckpointImageFileName(imageTxId);
  List<File> dstFiles = dstStorage.getFiles(NameNodeDirType.IMAGE, fileName);
  if (dstFiles.isEmpty()) {
    throw new IOException("No targets in destination storage!");
  }
  MD5Hash advertisedDigest = parseMD5Header(request);
  MD5Hash hash = Util.receiveFile(fileName, dstFiles, dstStorage, true,
      advertisedSize, advertisedDigest, fileName, stream, throttler);
  LOG.info("Downloaded file " + dstFiles.get(0).getName() + " size "
      + dstFiles.get(0).length() + " bytes.");
  return hash;
}
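parseMD5Header is not shown above. A plausible minimal implementation, assuming the sender advertises the digest in TransferFsImage's X-Image-MD5 request header (treat the header name as an assumption if your version differs):

// Sketch: pull the advertised digest out of the upload request, or return
// null if the sender did not include one.
private static MD5Hash parseMD5Header(HttpServletRequest request) {
  String header = request.getHeader("X-Image-MD5");
  return (header != null) ? new MD5Hash(header) : null;
}

Util.receiveFile recomputes the digest while writing, so a mismatch against advertisedDigest surfaces as an IOException rather than a silently corrupt image.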
Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.
The class MD5FileUtils, method readStoredMd5ForFile.
/**
* Read the md5 checksum stored alongside the given data file.
* @param dataFile the file containing data
* @return the checksum stored in dataFile.md5
*/
public static MD5Hash readStoredMd5ForFile(File dataFile) throws IOException {
  final File md5File = getDigestFileForFile(dataFile);
  if (!md5File.exists()) {
    return null;
  }
  final Matcher matcher = readStoredMd5(md5File);
  String storedHash = matcher.group(1);
  File referencedFile = new File(matcher.group(2));
  // Sanity check: make sure the file referenced in the .md5 file at
  // least has the same name as the file we expect
  if (!referencedFile.getName().equals(dataFile.getName())) {
    throw new IOException("MD5 file at " + md5File + " references file named "
        + referencedFile.getName() + " but we expected it to reference "
        + dataFile);
  }
  return new MD5Hash(storedHash);
}
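The .md5 sidecar follows the md5sum(1) line format: 32 hex digits, a separator, then the referenced file name (for example, d41d8cd98f00b204e9800998ecf8427e *fsimage_0000000000000012345). A hedged sketch of a matcher equivalent to what readStoredMd5 presumably applies; the exact pattern inside MD5FileUtils may differ:

import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Assumed md5sum-style line: "<32 hex chars> <space or *><filename>"
static final Pattern MD5_LINE = Pattern.compile("([0-9a-f]{32}) [ *](.+)");

static Matcher matchMd5Line(String line) throws IOException {
  Matcher m = MD5_LINE.matcher(line);
  if (!m.matches()) {
    throw new IOException("Invalid MD5 file line: " + line);
  }
  return m; // group(1) = hex digest, group(2) = referenced file name
}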
Use of org.apache.hadoop.io.MD5Hash in project hadoop by apache.
The class TestFetchImage, method runFetchImage.
/**
* Run `hdfs dfsadmin -fetchImage ...' and verify that the downloaded image is
* correct.
*/
private static void runFetchImage(DFSAdmin dfsAdmin, MiniDFSCluster cluster)
    throws Exception {
  int retVal = dfsAdmin.run(new String[] { "-fetchImage",
      FETCHED_IMAGE_FILE.getPath() });
  assertEquals(0, retVal);
  File highestImageOnNn = getHighestFsImageOnCluster(cluster);
  MD5Hash expected = MD5FileUtils.computeMd5ForFile(highestImageOnNn);
  MD5Hash actual = MD5FileUtils.computeMd5ForFile(
      new File(FETCHED_IMAGE_FILE, highestImageOnNn.getName()));
  assertEquals(expected, actual);
}
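A hedged driver for the test above, assuming a default single-NameNode MiniDFSCluster; the configuration is illustrative only:

// Spin up a mini cluster, run the fetch-image check, then tear it down.
Configuration conf = new Configuration();
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
try {
  cluster.waitActive();
  runFetchImage(new DFSAdmin(conf), cluster);
} finally {
  cluster.shutdown();
}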