Use of java.security.MessageDigest in project hadoop by apache: class StripedBlockChecksumReconstructor, method reconstruct().
public void reconstruct() throws IOException {
  MessageDigest digester = MD5Hash.getDigester();
  long maxTargetLength = getMaxTargetLength();
  while (requestedLen > 0 && getPositionInBlock() < maxTargetLength) {
    long remaining = maxTargetLength - getPositionInBlock();
    final int toReconstructLen =
        (int) Math.min(getStripedReader().getBufferSize(), remaining);
    // step1: read from minimum source DNs required for reconstruction.
    // The returned success list is the source DNs we do real read from
    getStripedReader().readMinimumSources(toReconstructLen);
    // step2: decode to reconstruct targets
    reconstructTargets(toReconstructLen);
    // step3: calculate checksum
    checksumDataLen += checksumWithTargetOutput(targetBuffer.array(),
        toReconstructLen, digester);
    updatePositionInBlock(toReconstructLen);
    requestedLen -= toReconstructLen;
    clearBuffers();
  }
  byte[] digest = digester.digest();
  md5 = new MD5Hash(digest);
  md5.write(checksumWriter);
}
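The Hadoop-specific pieces above (getStripedReader(), reconstructTargets(), checksumWithTargetOutput(), MD5Hash) are not shown here, but the underlying MessageDigest usage is plain incremental hashing: each reconstructed chunk updates the same digester, and the digest is finalized once after the loop. A minimal JDK-only sketch of that pattern, with the chunked input simulated by an in-memory array (all names are illustrative, not Hadoop's):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class IncrementalMd5Sketch {
  public static void main(String[] args) throws Exception {
    MessageDigest digester = MessageDigest.getInstance("MD5");
    // Simulated chunks; in reconstruct() each chunk is a reconstructed stripe.
    byte[][] chunks = {
        "first-chunk".getBytes(StandardCharsets.UTF_8),
        "second-chunk".getBytes(StandardCharsets.UTF_8)
    };
    for (byte[] chunk : chunks) {
      // Feed every chunk into the same digester rather than hashing each one separately.
      digester.update(chunk);
    }
    // Finalize once, mirroring digester.digest() after the while loop above.
    byte[] digest = digester.digest();
    System.out.println("MD5 is " + digest.length + " bytes");
  }
}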
Use of java.security.MessageDigest in project hadoop by apache: class OfflineImageReconstructor, method run().
/**
 * Run the OfflineImageReconstructor.
 *
 * @param inputPath The input path to use.
 * @param outputPath The output path to use.
 *
 * @throws Exception On error.
 */
public static void run(String inputPath, String outputPath)
    throws Exception {
  MessageDigest digester = MD5Hash.getDigester();
  FileOutputStream fout = null;
  File foutHash = new File(outputPath + ".md5");
  // delete any .md5 file that exists
  Files.deleteIfExists(foutHash.toPath());
  CountingOutputStream out = null;
  FileInputStream fis = null;
  InputStreamReader reader = null;
  try {
    Files.deleteIfExists(Paths.get(outputPath));
    fout = new FileOutputStream(outputPath);
    fis = new FileInputStream(inputPath);
    reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
    out = new CountingOutputStream(
        new DigestOutputStream(new BufferedOutputStream(fout), digester));
    OfflineImageReconstructor oir = new OfflineImageReconstructor(out, reader);
    oir.processXml();
  } finally {
    IOUtils.cleanup(LOG, reader, fis, out, fout);
  }
  // Write the md5 file
  MD5FileUtils.saveMD5File(new File(outputPath),
      new MD5Hash(digester.digest()));
}
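The key MessageDigest idiom in run() is wrapping the output in a DigestOutputStream, so the MD5 of the rebuilt image is computed while it is written rather than in a second pass over the file. A minimal JDK-only sketch of that idiom against a hypothetical local output file (the Hadoop helpers CountingOutputStream, IOUtils.cleanup and MD5FileUtils.saveMD5File are left out):

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;

public class DigestOutputStreamSketch {
  public static void main(String[] args) throws Exception {
    MessageDigest digester = MessageDigest.getInstance("MD5");
    // Hypothetical output file standing in for the reconstructed fsimage.
    try (OutputStream out = new DigestOutputStream(
        new BufferedOutputStream(new FileOutputStream("image.out")), digester)) {
      // Every byte written through the stream also updates the digester.
      out.write("reconstructed image bytes".getBytes(StandardCharsets.UTF_8));
    }
    // After the stream is closed, the digester holds the MD5 of everything written.
    byte[] md5 = digester.digest();
    System.out.println("MD5 is " + md5.length + " bytes");
  }
}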
Use of java.security.MessageDigest in project hadoop by apache: class MD5FileUtils, method computeMd5ForFile().
/**
 * Read dataFile and compute its MD5 checksum.
 */
public static MD5Hash computeMd5ForFile(File dataFile) throws IOException {
  InputStream in = new FileInputStream(dataFile);
  try {
    MessageDigest digester = MD5Hash.getDigester();
    DigestInputStream dis = new DigestInputStream(in, digester);
    IOUtils.copyBytes(dis, new IOUtils.NullOutputStream(), 128 * 1024);
    return new MD5Hash(digester.digest());
  } finally {
    IOUtils.closeStream(in);
  }
}
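computeMd5ForFile() relies on DigestInputStream: the file is streamed through it into a null sink, so only the digest is retained. A JDK-only sketch of the same read-and-discard pattern, with a manual drain loop in place of Hadoop's IOUtils.copyBytes and NullOutputStream (the method name and buffer size here are illustrative):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class FileMd5Sketch {
  static byte[] md5Of(String path) throws IOException, NoSuchAlgorithmException {
    MessageDigest digester = MessageDigest.getInstance("MD5");
    try (InputStream in = new DigestInputStream(new FileInputStream(path), digester)) {
      byte[] buf = new byte[128 * 1024];
      // Drain the file; the data itself is discarded, the DigestInputStream
      // has already fed every byte to the digester.
      while (in.read(buf) != -1) {
        // no-op
      }
    }
    return digester.digest();
  }
}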
Use of java.security.MessageDigest in project hadoop by apache: class VersionInfoMojo, method computeMD5().
/**
 * Given a list of files, computes and returns an MD5 checksum of the full
 * contents of all files.
 *
 * @param files List<File> containing every file to input into the MD5 checksum
 * @return byte[] calculated MD5 checksum
 * @throws IOException if there is an I/O error while reading a file
 * @throws NoSuchAlgorithmException if the MD5 algorithm is not supported
 */
private byte[] computeMD5(List<File> files)
    throws IOException, NoSuchAlgorithmException {
  MessageDigest md5 = MessageDigest.getInstance("MD5");
  for (File file : files) {
    getLog().debug("Computing MD5 for: " + file);
    md5.update(readFile(file));
  }
  return md5.digest();
}
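computeMD5() hashes the concatenated contents of all files by calling update() once per file, so the order of the list matters. A JDK-only sketch of the same pattern, using Files.readAllBytes in place of the plugin's readFile() helper (like the original, it loads each file fully into memory):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;

public class MultiFileMd5Sketch {
  static byte[] md5Of(List<Path> files) throws IOException, NoSuchAlgorithmException {
    MessageDigest md5 = MessageDigest.getInstance("MD5");
    for (Path file : files) {
      // Order-sensitive: update() appends each file's bytes to the running hash.
      md5.update(Files.readAllBytes(file));
    }
    return md5.digest();
  }
}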
Use of java.security.MessageDigest in project hadoop by apache: class TestAliyunOSSFileSystemStore, method writeRenameReadCompare().
protected void writeRenameReadCompare(Path path, long len)
    throws IOException, NoSuchAlgorithmException {
  // If len > fs.oss.multipart.upload.threshold,
  // we'll use a multipart upload copy
  MessageDigest digest = MessageDigest.getInstance("MD5");
  OutputStream out = new BufferedOutputStream(
      new DigestOutputStream(fs.create(path, false), digest));
  for (long i = 0; i < len; i++) {
    out.write('Q');
  }
  out.flush();
  out.close();
  assertTrue("Exists", fs.exists(path));
  Path copyPath = path.suffix(".copy");
  fs.rename(path, copyPath);
  assertTrue("Copy exists", fs.exists(copyPath));
  // Download file from Aliyun OSS and compare the digest against the original
  MessageDigest digest2 = MessageDigest.getInstance("MD5");
  InputStream in = new BufferedInputStream(
      new DigestInputStream(fs.open(copyPath), digest2));
  long copyLen = 0;
  while (in.read() != -1) {
    copyLen++;
  }
  in.close();
  assertEquals("Copy length matches original", len, copyLen);
  assertArrayEquals("Digests match", digest.digest(), digest2.digest());
}
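The test pairs a DigestOutputStream on the write path with a DigestInputStream on the read path and compares the two digests to verify the round trip. A JDK-only sketch of that round-trip check against a hypothetical local file, since the Hadoop FileSystem fs and the Aliyun OSS setup are not shown here:

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.DigestInputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.util.Arrays;

public class RoundTripDigestSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical local file standing in for the OSS path.
    String path = "roundtrip.dat";
    MessageDigest writeDigest = MessageDigest.getInstance("MD5");
    try (OutputStream out = new BufferedOutputStream(
        new DigestOutputStream(new FileOutputStream(path), writeDigest))) {
      for (int i = 0; i < 1024; i++) {
        out.write('Q'); // same single-byte payload the test writes
      }
    }
    MessageDigest readDigest = MessageDigest.getInstance("MD5");
    long copyLen = 0;
    try (InputStream in = new BufferedInputStream(
        new DigestInputStream(new FileInputStream(path), readDigest))) {
      while (in.read() != -1) {
        copyLen++;
      }
    }
    // Both the byte count and the digest must match for the round trip to hold.
    System.out.println("length=" + copyLen + " digestsMatch="
        + Arrays.equals(writeDigest.digest(), readDigest.digest()));
  }
}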