Use of com.google.common.hash.Hasher in project gitiles (GerritCodeReview): class Renderer, method computeTemplateHash.
/**
 * Computes the SHA-1 digest of the named Soy template's source bytes.
 *
 * @param soyFile name of the template file, used as a key into {@code templates}.
 * @return SHA-1 hash of the template contents.
 * @throws IllegalStateException if the template is not registered or cannot be read.
 */
HashCode computeTemplateHash(String soyFile) {
  URL url = templates.get(soyFile);
  checkState(url != null, "Missing Soy template %s", soyFile);
  Hasher hasher = Hashing.sha1().newHasher();
  // Stream the template bytes straight into the hasher via a Funnels adapter.
  try (InputStream in = url.openStream();
      OutputStream sink = Funnels.asOutputStream(hasher)) {
    ByteStreams.copy(in, sink);
  } catch (IOException e) {
    throw new IllegalStateException("Missing Soy template " + soyFile, e);
  }
  return hasher.hash();
}
Use of com.google.common.hash.Hasher in project beam (Apache): class PackageUtil, method createPackageAttributes.
/**
 * Compute and cache the attributes of a classpath element that we will need to stage it.
 *
 * <p>Size and hash are computed in a single pass: bytes flow through a counting stream
 * into an MD5 hasher, so the source is read exactly once.
 *
 * @param source the file or directory to be staged.
 * @param stagingPath The base location for staged classpath elements.
 * @param overridePackageName If non-null, use the given value as the package name
 * instead of generating one automatically.
 * @return a {@link PackageAttributes} containing metadata about the object to be staged.
 * @throws RuntimeException wrapping any {@link IOException} raised while reading the source.
 */
static PackageAttributes createPackageAttributes(File source, String stagingPath, @Nullable String overridePackageName) {
boolean directory = source.isDirectory();
// Compute size and hash in one pass over file or directory.
Hasher hasher = Hashing.md5().newHasher();
OutputStream hashStream = Funnels.asOutputStream(hasher);
// Closing the counting stream also closes the underlying hash stream.
try (CountingOutputStream countingOutputStream = new CountingOutputStream(hashStream)) {
if (!directory) {
// Files are staged as-is.
Files.asByteSource(source).copyTo(countingOutputStream);
} else {
// Directories are recursively zipped so they stage as a single artifact.
ZipFiles.zipDirectory(source, countingOutputStream);
}
// Flush before reading the count so buffered bytes are included.
countingOutputStream.flush();
long size = countingOutputStream.getCount();
// URL-safe base64 so the hash can be embedded in a staged file name.
String hash = Base64Variants.MODIFIED_FOR_URL.encode(hasher.hash().asBytes());
// Create the DataflowPackage with staging name and location.
String uniqueName = getUniqueContentName(source, hash);
String resourcePath = FileSystems.matchNewResource(stagingPath, true).resolve(uniqueName, StandardResolveOptions.RESOLVE_FILE).toString();
DataflowPackage target = new DataflowPackage();
target.setName(overridePackageName != null ? overridePackageName : uniqueName);
target.setLocation(resourcePath);
return new PackageAttributes(size, hash, directory, target, source.getPath());
} catch (IOException e) {
throw new RuntimeException("Package setup failure for " + source, e);
}
}
Use of com.google.common.hash.Hasher in project jackrabbit-oak (Apache): class GetBlobResponseEncoder, method encode.
/**
 * Encodes a blob response frame into the buffer:
 * [int frame length][header byte][int blobId length][blobId bytes][long hash][data bytes].
 *
 * @param blobId identifier of the blob, serialized as UTF-8.
 * @param data raw blob payload.
 * @param out target buffer the frame is written into.
 */
private static void encode(String blobId, byte[] data, ByteBuf out) {
  // StandardCharsets.UTF_8 is always present and avoids the runtime charset lookup
  // (and potential UnsupportedCharsetException path) of Charset.forName("UTF-8").
  byte[] blobIdBytes = blobId.getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // Murmur3 checksum of the payload lets the receiver verify integrity.
  Hasher hasher = Hashing.murmur3_32().newHasher();
  long hash = hasher.putBytes(data).hash().padToLong();
  // Frame length = 1 header byte + 4-byte id length + id bytes + 8-byte hash + payload.
  out.writeInt(1 + 4 + blobIdBytes.length + 8 + data.length);
  out.writeByte(Messages.HEADER_BLOB);
  out.writeInt(blobIdBytes.length);
  out.writeBytes(blobIdBytes);
  out.writeLong(hash);
  out.writeBytes(data);
}
Use of com.google.common.hash.Hasher in project jackrabbit-oak (Apache): class GetSegmentResponseEncoder, method encode.
/**
 * Writes a segment response frame into the buffer:
 * [int frame length][header byte][long msb][long lsb][long hash][data bytes].
 *
 * @param segmentId textual UUID of the segment.
 * @param data raw segment payload.
 * @param out target buffer the frame is written into.
 */
private static void encode(String segmentId, byte[] data, ByteBuf out) {
  UUID uuid = UUID.fromString(segmentId);
  // Murmur3 checksum of the payload, folded to a long for the wire format.
  long checksum = Hashing.murmur3_32().newHasher().putBytes(data).hash().padToLong();
  // Frame length: payload plus the fixed header fields (the length field itself excluded).
  out.writeInt(data.length + EXTRA_HEADERS_WO_SIZE);
  out.writeByte(Messages.HEADER_SEGMENT);
  out.writeLong(uuid.getMostSignificantBits());
  out.writeLong(uuid.getLeastSignificantBits());
  out.writeLong(checksum);
  out.writeBytes(data);
}
Aggregations