Use of com.google.common.hash.Hasher in project buck by Facebook.
Class ProjectGenerator, method getHeaderSymlinkTreeHashCode:
private HashCode getHeaderSymlinkTreeHashCode(
    ImmutableSortedMap<Path, Path> contents,
    boolean shouldCreateHeadersSymlinks,
    boolean shouldCreateHeaderMap) {
  Hasher hasher = Hashing.sha1().newHasher();
  // Include the Buck version so the hash changes across Buck releases.
  hasher.putBytes(BuckVersion.getVersion().getBytes(Charsets.UTF_8));
  String symlinkState = shouldCreateHeadersSymlinks ? "symlinks-enabled" : "symlinks-disabled";
  byte[] symlinkStateValue = symlinkState.getBytes(Charsets.UTF_8);
  hasher.putInt(symlinkStateValue.length);
  hasher.putBytes(symlinkStateValue);
  String hmapState = shouldCreateHeaderMap ? "hmap-enabled" : "hmap-disabled";
  byte[] hmapStateValue = hmapState.getBytes(Charsets.UTF_8);
  hasher.putInt(hmapStateValue.length);
  hasher.putBytes(hmapStateValue);
  hasher.putInt(0);
  // Length-prefix each variable-length key and value so adjacent entries cannot collide.
  for (Map.Entry<Path, Path> entry : contents.entrySet()) {
    byte[] key = entry.getKey().toString().getBytes(Charsets.UTF_8);
    byte[] value = entry.getValue().toString().getBytes(Charsets.UTF_8);
    hasher.putInt(key.length);
    hasher.putBytes(key);
    hasher.putInt(value.length);
    hasher.putBytes(value);
  }
  return hasher.hash();
}
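The length prefixing above (putInt of the byte count before putBytes) is what keeps the encoding unambiguous: without it, different key/value splits could feed identical byte streams to the hasher. A minimal, self-contained sketch of the same pattern, assuming nothing from Buck (the class and helper names are illustrative):

import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public class LengthPrefixedHashingSketch {
  // Illustrative helper: write the UTF-8 byte count, then the bytes, so that
  // ("ab", "c") and ("a", "bc") produce different hashes.
  static void putLengthPrefixed(Hasher hasher, String value) {
    byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
    hasher.putInt(bytes.length);
    hasher.putBytes(bytes);
  }

  public static void main(String[] args) {
    Hasher first = Hashing.sha1().newHasher();
    putLengthPrefixed(first, "ab");
    putLengthPrefixed(first, "c");
    Hasher second = Hashing.sha1().newHasher();
    putLengthPrefixed(second, "a");
    putLengthPrefixed(second, "bc");
    // Different hashes even though the concatenated payload bytes are identical.
    System.out.println(first.hash());
    System.out.println(second.hash());
  }
}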
Use of com.google.common.hash.Hasher in project buck by Facebook.
Class TargetsCommand, method hashNodeWithDependencies:
private void hashNodeWithDependencies(
    ImmutableMap<BuildTarget, HashCode> buildTargetHashes,
    Map<BuildTarget, HashCode> hashesWithTests,
    TargetNode<?, ?> node) {
  HashCode nodeHashCode = getHashCodeOrThrow(buildTargetHashes, node.getBuildTarget());
  Hasher hasher = Hashing.sha1().newHasher();
  hasher.putBytes(nodeHashCode.asBytes());
  Iterable<BuildTarget> dependentTargets = node.getDeps();
  LOG.debug("Hashing target %s with dependent nodes %s", node, dependentTargets);
  // Fold each dependency's hash into this node's hash.
  for (BuildTarget targetToHash : dependentTargets) {
    HashCode dependencyHash = getHashCodeOrThrow(hashesWithTests, targetToHash);
    hasher.putBytes(dependencyHash.asBytes());
  }
  // When test change detection is enabled, also fold in the hashes of the node's tests.
  if (isDetectTestChanges()) {
    for (BuildTarget targetToHash : Preconditions.checkNotNull(TargetNodes.getTestTargetsForNode(node))) {
      HashCode testNodeHashCode = getHashCodeOrThrow(buildTargetHashes, targetToHash);
      hasher.putBytes(testNodeHashCode.asBytes());
    }
  }
  hashesWithTests.put(node.getBuildTarget(), hasher.hash());
}
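The method folds the node's own hash together with the hashes of its dependencies (and, optionally, its tests), so a change anywhere below a target propagates into that target's hash. A rough sketch of just the combination step, with a plain list standing in for Buck's target graph (all names here are illustrative):

import com.google.common.collect.ImmutableList;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.util.List;

public class DependencyHashSketch {
  // Illustrative: combine a node's own hash with the already-computed hashes of its deps.
  static HashCode combine(HashCode nodeHash, List<HashCode> dependencyHashes) {
    Hasher hasher = Hashing.sha1().newHasher();
    hasher.putBytes(nodeHash.asBytes());
    for (HashCode dependencyHash : dependencyHashes) {
      hasher.putBytes(dependencyHash.asBytes());
    }
    return hasher.hash();
  }

  public static void main(String[] args) {
    HashCode lib = Hashing.sha1().hashUnencodedChars("//lib:lib");
    HashCode app = Hashing.sha1().hashUnencodedChars("//app:app");
    // Anything that changes lib's hash also changes app's combined hash.
    System.out.println(combine(app, ImmutableList.of(lib)));
  }
}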
Use of com.google.common.hash.Hasher in project buck by Facebook.
Class FilePathHashLoader, method get:
@Override
public HashCode get(Path root) throws IOException {
  // In case the root path is a directory, collect all files contained in it and sort them before
  // hashing to prevent non-deterministic directory traversal order from influencing the hash.
  final ImmutableSortedSet.Builder<Path> files = ImmutableSortedSet.naturalOrder();
  Files.walkFileTree(
      defaultCellRoot.resolve(root),
      ImmutableSet.of(FileVisitOption.FOLLOW_LINKS),
      Integer.MAX_VALUE,
      new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
          files.add(file);
          return FileVisitResult.CONTINUE;
        }
      });
  Hasher hasher = Hashing.sha1().newHasher();
  for (Path file : files.build()) {
    file = defaultCellRoot.resolve(file).toRealPath();
    boolean assumeModified = assumeModifiedFiles.contains(file);
    Path relativePath = MorePaths.relativize(defaultCellRoot, file);
    // For each file, add its path to the hasher, suffixed by whether we assume the file to be
    // modified or not. This way files with different paths always result in different hashes, and
    // files that are assumed to be modified get different hashes than all unmodified files.
    StringHashing.hashStringAndLength(hasher, relativePath.toString());
    hasher.putBoolean(assumeModified);
  }
  return hasher.hash();
}
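What makes this deterministic is the sorted traversal plus pairing each relative path with its "assume modified" flag. StringHashing.hashStringAndLength is Buck's own helper; the sketch below substitutes a plain length-prefixed encoding and uses path strings directly, just to show the shape of the computation (names are illustrative):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;
import java.util.Set;

public class PathSetHashSketch {
  // Illustrative: hash a sorted set of relative paths plus a per-path "assume modified" flag.
  static HashCode hashPaths(ImmutableSortedSet<String> relativePaths, Set<String> assumedModified) {
    Hasher hasher = Hashing.sha1().newHasher();
    for (String path : relativePaths) {
      byte[] bytes = path.getBytes(StandardCharsets.UTF_8);
      // Length-prefix the path so distinct path lists cannot collide.
      hasher.putInt(bytes.length);
      hasher.putBytes(bytes);
      // Distinguish files assumed modified from unmodified files with the same path.
      hasher.putBoolean(assumedModified.contains(path));
    }
    return hasher.hash();
  }

  public static void main(String[] args) {
    ImmutableSortedSet<String> paths = ImmutableSortedSet.of("src/A.java", "src/B.java");
    System.out.println(hashPaths(paths, ImmutableSet.of("src/B.java")));
  }
}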
Use of com.google.common.hash.Hasher in project buck by Facebook.
Class HttpArtifactCacheBinaryProtocol, method readMetadataAndPayload:
public static MetadataAndPayloadReadResultInternal readMetadataAndPayload(
    DataInputStream input, OutputStream payloadSink) throws IOException {
  // Read the size of the metadata, and use that to build an input stream to read and
  // process the rest of it.
  int metadataSize = input.readInt();
  if (metadataSize > MAX_METADATA_HEADER_SIZE) {
    throw new IOException(String.format("Metadata header size of %d is too big.", metadataSize));
  }
  MetadataAndPayloadReadResultInternal.Builder result =
      MetadataAndPayloadReadResultInternal.builder();
  // Create a hasher to be used to generate a hash of the metadata and input. We'll use
  // this to compare against the embedded checksum.
  Hasher hasher = HASH_FUNCTION.newHasher();
  byte[] rawMetadata = new byte[metadataSize];
  ByteStreams.readFully(input, rawMetadata);
  try (InputStream rawMetadataIn = new ByteArrayInputStream(rawMetadata)) {
    // The first part of the metadata needs to be included in the hash.
    try (DataInputStream metadataIn =
        new DataInputStream(new HasherInputStream(hasher, rawMetadataIn))) {
      // Read in the rule keys that stored this artifact, and add them to the hash we're
      // building up.
      int size = metadataIn.readInt();
      for (int i = 0; i < size; i++) {
        result.addRuleKeys(new RuleKey(metadataIn.readUTF()));
      }
      // Read in the actual metadata map, and add it to the hash.
      size = metadataIn.readInt();
      for (int i = 0; i < size; i++) {
        String key = metadataIn.readUTF();
        int valSize = metadataIn.readInt();
        byte[] val = new byte[valSize];
        ByteStreams.readFully(metadataIn, val);
        result.putMetadata(key, new String(val, Charsets.UTF_8));
      }
    }
    // Next, read in the embedded expected checksum, which should be the last bytes in
    // the metadata header.
    byte[] hashCodeBytes = new byte[HASH_FUNCTION.bits() / Byte.SIZE];
    ByteStreams.readFully(rawMetadataIn, hashCodeBytes);
    result.setExpectedHashCode(HashCode.fromBytes(hashCodeBytes));
  }
  // The remaining data is the payload, which we write to the created file, and also include
  // in our verification checksum.
  Hasher artifactOnlyHasher = HASH_FUNCTION.newHasher();
  try (InputStream payload =
      new HasherInputStream(artifactOnlyHasher, new HasherInputStream(hasher, input))) {
    result.setResponseSizeBytes(ByteStreams.copy(payload, payloadSink));
    result.setArtifactOnlyHashCode(artifactOnlyHasher.hash());
  }
  result.setActualHashCode(hasher.hash());
  return result.build();
}
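HasherInputStream is a Buck utility, not part of Guava; the snippet relies only on the behavior that every byte read from the wrapped stream is also fed to the supplied Hasher, which is what lets the checksum be computed as a side effect of parsing the metadata and copying the payload. A minimal stand-in with that assumed behavior might look like:

import com.google.common.hash.Hasher;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

// Minimal stand-in for Buck's HasherInputStream: every byte read is also fed to the Hasher,
// so a checksum falls out of normal stream consumption.
public class HasherFilterInputStream extends FilterInputStream {
  private final Hasher hasher;

  public HasherFilterInputStream(Hasher hasher, InputStream in) {
    super(in);
    this.hasher = hasher;
  }

  @Override
  public int read() throws IOException {
    int b = in.read();
    if (b != -1) {
      hasher.putByte((byte) b);
    }
    return b;
  }

  @Override
  public int read(byte[] buf, int off, int len) throws IOException {
    int n = in.read(buf, off, len);
    if (n > 0) {
      hasher.putBytes(buf, off, n);
    }
    return n;
  }
}

Nesting two such streams around the payload, as readMetadataAndPayload does, feeds the same bytes into both the artifact-only checksum and the overall checksum in a single pass.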
Use of com.google.common.hash.Hasher in project buck by Facebook.
Class LcUuidContentsScrubber, method scrubFile:
@Override
public void scrubFile(FileChannel file) throws IOException, ScrubException {
  if (!Machos.isMacho(file)) {
    return;
  }
  long size = file.size();
  MappedByteBuffer map = file.map(FileChannel.MapMode.READ_WRITE, 0, size);
  // Zero out the LC_UUID load command so the hash does not depend on the existing UUID.
  try {
    Machos.setUuid(map, ZERO_UUID);
  } catch (Machos.MachoException e) {
    throw new ScrubException(e.getMessage());
  }
  map.rewind();
  // Hash the entire (UUID-zeroed) file contents.
  Hasher hasher = Hashing.sha1().newHasher();
  while (map.hasRemaining()) {
    hasher.putByte(map.get());
  }
  map.rewind();
  // Write the first 16 bytes of the content hash back as the UUID, making the output deterministic.
  try {
    Machos.setUuid(map, Arrays.copyOf(hasher.hash().asBytes(), 16));
  } catch (Machos.MachoException e) {
    throw new ScrubException(e.getMessage());
  }
}
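The scrubber makes Mach-O output byte-for-byte reproducible: zero the LC_UUID, hash the whole file, then write the first 16 bytes of that hash back as the UUID. Separated from the Mach-O plumbing (Machos is Buck-internal), the derivation reduces to a sketch like the following, with illustrative names:

import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class ContentDerivedUuidSketch {
  // Illustrative: derive a 16-byte UUID from a SHA-1 of the (UUID-zeroed) file contents,
  // so identical inputs always yield the identical UUID.
  static byte[] uuidFor(byte[] contentsWithZeroedUuid) {
    byte[] sha1 = Hashing.sha1().hashBytes(contentsWithZeroedUuid).asBytes();
    return Arrays.copyOf(sha1, 16);
  }

  public static void main(String[] args) {
    byte[] contents = "mach-o bytes with the UUID already zeroed".getBytes(StandardCharsets.UTF_8);
    System.out.println(Arrays.toString(uuidFor(contents)));
  }
}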