Use of java.security.DigestOutputStream in project jdk8u_jdk by JetBrains.
The class TestDigestIOStream, method testMDShare.
/**
 * Test the DigestInputStream and DigestOutputStream digest function when
 * both streams share the same message digest object.
 *
 * @param algo
 *            Message Digest algorithm
 * @param dataLength
 *            plain text data length
 * @exception Exception
 *            if an unexpected exception is thrown
 */
public boolean testMDShare(String algo, int dataLength) throws Exception {
    MessageDigest mdCommon = MessageDigest.getInstance(algo);
    // Generate the DigestInputStream/DigestOutputStream objects
    try (ByteArrayInputStream bais = new ByteArrayInputStream(data);
            DigestInputStream dis = new DigestInputStream(bais, mdCommon);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DigestOutputStream dos = new DigestOutputStream(baos, mdCommon)) {
        // Perform the update using all available/possible update methods
        int k = 0;
        byte[] buffer = new byte[10];
        // use both read() and read(byte[], int, int)
        while (k < data.length) {
            int len = dis.read(buffer, 0, buffer.length);
            if (len != -1) {
                k += len;
                if (k < data.length) {
                    dos.write(data[k]);
                    k++;
                    dis.skip(1);
                }
            }
        }
        // Get the output and the "correct" digest values
        // ('md' is a MessageDigest field of the test class for the same algorithm)
        byte[] output = mdCommon.digest();
        byte[] standard = md.digest(data);
        // Compare generated digest values
        return MessageDigest.isEqual(output, standard);
    } catch (Exception ex) {
        out.println("TestMDShare failed at:" + algo + "/" + dataLength + " with unexpected exception");
        throw ex;
    }
}
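Both streams above are constructed around the same MessageDigest, so every byte read through dis and every byte written through dos updates one accumulating digest. The following self-contained sketch (our own illustration, not part of the JDK test) shows the sharing pattern in miniature:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.security.DigestInputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;

public class SharedDigestSketch {

    public static void main(String[] args) throws Exception {
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
        MessageDigest shared = MessageDigest.getInstance("SHA-256");
        try (DigestInputStream dis = new DigestInputStream(new ByteArrayInputStream(data), shared);
                DigestOutputStream dos = new DigestOutputStream(new ByteArrayOutputStream(), shared)) {
            byte[] buf = new byte[2];
            dis.read(buf, 0, 2);     // the shared digest sees data[0..1]
            dos.write(data, 2, 3);   // the shared digest sees data[2..4]
        }
        // All five bytes passed through the one digest, in order, so the
        // result matches a direct digest over the whole array.
        byte[] viaStreams = shared.digest();
        byte[] direct = MessageDigest.getInstance("SHA-256").digest(data);
        System.out.println(MessageDigest.isEqual(viaStreams, direct));   // prints true
    }
}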
Use of java.security.DigestOutputStream in project jdk8u_jdk by JetBrains.
The class RemoteClass, method computeInterfaceHash.
/**
* Compute the "interface hash" of the stub/skeleton pair for this
* remote implementation class. This is the 64-bit value used to
* enforce compatibility between a stub and a skeleton using the
* JDK 1.1 version of the stub/skeleton protocol.
*
* It is calculated using the first 64 bits of a SHA digest. The
* digest is from a stream consisting of the following data:
* (int) stub version number, always 1
* for each remote method, in order of operation number:
* (UTF) method name
* (UTF) method type signature
* for each declared exception, in alphabetical name order:
* (UTF) name of exception class
*
*/
private long computeInterfaceHash() {
    long hash = 0;
    ByteArrayOutputStream sink = new ByteArrayOutputStream(512);
    try {
        MessageDigest md = MessageDigest.getInstance("SHA");
        DataOutputStream out = new DataOutputStream(new DigestOutputStream(sink, md));
        out.writeInt(INTERFACE_HASH_STUB_VERSION);
        for (int i = 0; i < remoteMethods.length; i++) {
            MemberDefinition m = remoteMethods[i].getMemberDefinition();
            Identifier name = m.getName();
            Type type = m.getType();
            out.writeUTF(name.toString());
            // type signatures already use mangled class names
            out.writeUTF(type.getTypeSignature());
            ClassDeclaration[] exceptions = m.getExceptions(env);
            sortClassDeclarations(exceptions);
            for (int j = 0; j < exceptions.length; j++) {
                out.writeUTF(Names.mangleClass(exceptions[j].getName()).toString());
            }
        }
        out.flush();
        // use only the first 64 bits of the digest for the hash
        byte[] hashArray = md.digest();
        for (int i = 0; i < Math.min(8, hashArray.length); i++) {
            hash += ((long) (hashArray[i] & 0xFF)) << (i * 8);
        }
    } catch (IOException e) {
        throw new Error("unexpected exception computing interface hash: " + e);
    } catch (NoSuchAlgorithmException e) {
        throw new Error("unexpected exception computing interface hash: " + e);
    }
    return hash;
}
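In the JDK the algorithm name "SHA" resolves to SHA-1, so the digest is 20 bytes and the final loop keeps only the first eight, packing them into a long in little-endian byte order. The following sketch shows equivalent arithmetic for just that truncation step (the class and method names are ours, assuming a digest of at least eight bytes):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class InterfaceHashTruncation {

    // Same result as the loop in computeInterfaceHash: interpret the
    // first eight digest bytes as a little-endian long.
    static long first64Bits(byte[] digest) {
        return ByteBuffer.wrap(digest, 0, 8)
                .order(ByteOrder.LITTLE_ENDIAN)
                .getLong();
    }
}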
Use of java.security.DigestOutputStream in project jackrabbit-oak by apache.
The class AbstractSharedCachingDataStore, method addRecord.
@Override
public DataRecord addRecord(InputStream inputStream, BlobOptions blobOptions) throws DataStoreException {
    Stopwatch watch = Stopwatch.createStarted();
    try {
        TransientFileFactory fileFactory = TransientFileFactory.getInstance();
        File tmpFile = fileFactory.createTransientFile("upload", null, tmp);
        // Copy the stream to the temporary file and calculate the
        // stream length and the message digest of the stream
        MessageDigest digest = MessageDigest.getInstance(DIGEST);
        OutputStream output = new DigestOutputStream(new FileOutputStream(tmpFile), digest);
        long length = 0;
        try {
            length = IOUtils.copyLarge(inputStream, output);
        } finally {
            output.close();
        }
        DataIdentifier identifier = new DataIdentifier(encodeHexString(digest.digest()));
        LOG.debug("SHA-256 of [{}], length = [{}] took [{}] ms", identifier, length, watch.elapsed(TimeUnit.MILLISECONDS));
        // stage asynchronously in the upload cache if possible,
        // otherwise add to backend
        if (blobOptions.getUpload() == SYNCHRONOUS || !cache.stage(identifier.toString(), tmpFile)) {
            backend.write(identifier, tmpFile);
            LOG.info("Added blob [{}] to backend", identifier);
            // offer to download cache
            cache.getDownloadCache().put(identifier.toString(), tmpFile);
        }
        return getRecordIfStored(identifier);
    } catch (Exception e) {
        LOG.error("Error in adding record");
        throw new DataStoreException("Error in adding record", e);
    }
}
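At its core, addRecord uses a standard DigestOutputStream idiom: hash the content while copying it to disk, then use the hex digest as the record's identity. A dependency-free sketch of that idiom follows (the method and helper names are ours, not Jackrabbit Oak API):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ContentAddressedCopy {

    // Copy 'in' to 'target' while computing SHA-256, then return the
    // hex digest that serves as the content-addressed identifier.
    static String copyAndIdentify(InputStream in, File target)
            throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        try (DigestOutputStream out = new DigestOutputStream(new FileOutputStream(target), digest)) {
            in.transferTo(out);   // Java 9+; use a manual buffer loop on Java 8
        }
        return toHex(digest.digest());
    }

    static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }
}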
Use of java.security.DigestOutputStream in project jackrabbit-oak by apache.
The class S3DataStoreStatsTest, method getIdForInputStream.
private String getIdForInputStream(final InputStream in) throws Exception {
    MessageDigest digest = MessageDigest.getInstance("SHA-256");
    OutputStream output = new DigestOutputStream(new NullOutputStream(), digest);
    try {
        IOUtils.copyLarge(in, output);
    } finally {
        IOUtils.closeQuietly(output);
        IOUtils.closeQuietly(in);
    }
    return encodeHexString(digest.digest());
}
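Here the DigestOutputStream wraps Commons IO's NullOutputStream, so the copy exists purely to feed the digest. On Java 11 and later the same thing can be done with the JDK alone; a sketch under that assumption (the method name is ours):

import java.io.InputStream;
import java.io.OutputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;

public class DigestOnlyCopy {

    // Consume 'in' through a DigestOutputStream backed by the JDK's
    // built-in null sink (Java 11+) and return the hex SHA-256 digest.
    static String sha256Hex(InputStream in) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        try (in; DigestOutputStream out = new DigestOutputStream(OutputStream.nullOutputStream(), md)) {
            in.transferTo(out);
        }
        StringBuilder sb = new StringBuilder();
        for (byte b : md.digest()) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }
}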
Use of java.security.DigestOutputStream in project jackrabbit-oak by apache.
The class AzureDataStoreTest, method getIdForInputStream.
private static String getIdForInputStream(final InputStream in) throws NoSuchAlgorithmException, IOException {
    MessageDigest digest = MessageDigest.getInstance("SHA-1");
    OutputStream output = new DigestOutputStream(new NullOutputStream(), digest);
    try {
        IOUtils.copyLarge(in, output);
    } finally {
        IOUtils.closeQuietly(output);
        IOUtils.closeQuietly(in);
    }
    return encodeHexString(digest.digest());
}
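When the bytes are not needed anywhere, wrapping the input side with DigestInputStream avoids the null sink entirely: reading drives the digest and the buffer is simply discarded. A minimal sketch of that alternative (the helper name is ours):

import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;

public class InputSideDigest {

    // Equivalent result to the null-sink pattern above: the digest is
    // updated as a side effect of reading; the bytes themselves are unused.
    static byte[] sha1Of(InputStream in) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-1");
        try (DigestInputStream dis = new DigestInputStream(in, md)) {
            byte[] buf = new byte[8192];
            while (dis.read(buf) != -1) {
                // keep reading until EOF; each read updates md
            }
        }
        return md.digest();
    }
}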