Usage of com.google.common.hash.Hasher in the Facebook Buck project.
Example: the get method of the FilePathHashLoader class.
/**
 * Computes a SHA-1 hash for the given path. If the path is a directory, every file under it
 * (following symlinks) contributes to the hash. Only cell-relative file paths and their
 * assumed-modified status are hashed — not file contents.
 *
 * @param root path to hash, resolved against the default cell root
 * @return hash of the sorted set of contained file paths and their modified flags
 * @throws IOException if the tree walk or real-path resolution fails
 */
@Override
public HashCode get(Path root) throws IOException {
  // Collect all files under the root into a sorted set first, so that a non-deterministic
  // directory traversal order cannot influence the resulting hash.
  final ImmutableSortedSet.Builder<Path> sortedFiles = ImmutableSortedSet.naturalOrder();
  Files.walkFileTree(
      defaultCellRoot.resolve(root),
      ImmutableSet.of(FileVisitOption.FOLLOW_LINKS),
      Integer.MAX_VALUE,
      new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
          sortedFiles.add(file);
          return FileVisitResult.CONTINUE;
        }
      });
  Hasher pathHasher = Hashing.sha1().newHasher();
  for (Path candidate : sortedFiles.build()) {
    Path realPath = defaultCellRoot.resolve(candidate).toRealPath();
    // Hash each file's cell-relative path followed by its assumed-modified flag. Distinct
    // paths therefore always hash differently, and a file assumed to be modified hashes
    // differently from the same file unmodified.
    Path relativePath = MorePaths.relativize(defaultCellRoot, realPath);
    StringHashing.hashStringAndLength(pathHasher, relativePath.toString());
    pathHasher.putBoolean(assumeModifiedFiles.contains(realPath));
  }
  return pathHasher.hash();
}
Usage of com.google.common.hash.Hasher in the Facebook Buck project.
Example: the readMetadataAndPayload method of the HttpArtifactCacheBinaryProtocol class.
/**
 * Reads a cache artifact's metadata header from {@code input} and then streams the remaining
 * payload bytes into {@code payloadSink}.
 *
 * <p>While reading, a running hash is built over the rule keys, the metadata map, and the
 * payload; the result carries both this actual hash and the expected hash embedded in the
 * header so the caller can verify integrity. A separate payload-only hash is also recorded.
 *
 * @param input stream positioned at the start of the metadata header
 * @param payloadSink destination for the payload bytes
 * @return the parsed rule keys, metadata map, sizes, and the expected/actual/payload hashes
 * @throws IOException on read/write failure or if the declared metadata size is too large
 */
public static MetadataAndPayloadReadResultInternal readMetadataAndPayload(DataInputStream input, OutputStream payloadSink) throws IOException {
// Read the size of the metadata, and use that to build an input stream to read and
// process the rest of it.
int metadataSize = input.readInt();
// Guard against a corrupt or malicious header causing a huge allocation below.
if (metadataSize > MAX_METADATA_HEADER_SIZE) {
throw new IOException(String.format("Metadata header size of %d is too big.", metadataSize));
}
MetadataAndPayloadReadResultInternal.Builder result = MetadataAndPayloadReadResultInternal.builder();
// Create a hasher to be used to generate a hash of the metadata and input. We'll use
// this to compare against the embedded checksum.
Hasher hasher = HASH_FUNCTION.newHasher();
byte[] rawMetadata = new byte[metadataSize];
ByteStreams.readFully(input, rawMetadata);
try (InputStream rawMetadataIn = new ByteArrayInputStream(rawMetadata)) {
// The first part of the metadata needs to be included in the hash, so read it through
// a HasherInputStream that feeds every byte into the hasher as a side effect.
try (DataInputStream metadataIn = new DataInputStream(new HasherInputStream(hasher, rawMetadataIn))) {
// Read in the rule keys that stored this artifact, and add them to the hash we're
// building up.
int size = metadataIn.readInt();
for (int i = 0; i < size; i++) {
result.addRuleKeys(new RuleKey(metadataIn.readUTF()));
}
// Read in the actual metadata map (length-prefixed UTF-8 values), and add it to the hash.
size = metadataIn.readInt();
for (int i = 0; i < size; i++) {
String key = metadataIn.readUTF();
int valSize = metadataIn.readInt();
byte[] val = new byte[valSize];
ByteStreams.readFully(metadataIn, val);
result.putMetadata(key, new String(val, Charsets.UTF_8));
}
}
// Next, read in the embedded expected checksum, which occupies the trailing bytes of
// the metadata header. It is read from the raw (un-hashed) stream so the checksum
// itself is not folded into the hash it is meant to verify.
byte[] hashCodeBytes = new byte[HASH_FUNCTION.bits() / Byte.SIZE];
ByteStreams.readFully(rawMetadataIn, hashCodeBytes);
result.setExpectedHashCode(HashCode.fromBytes(hashCodeBytes));
}
// The remaining data is the payload, which we write to the created file, and also include
// in our verification checksum. The nested HasherInputStreams update both the combined
// hash and the payload-only hash as the bytes are copied.
Hasher artifactOnlyHasher = HASH_FUNCTION.newHasher();
try (InputStream payload = new HasherInputStream(artifactOnlyHasher, new HasherInputStream(hasher, input))) {
result.setResponseSizeBytes(ByteStreams.copy(payload, payloadSink));
result.setArtifactOnlyHashCode(artifactOnlyHasher.hash());
}
result.setActualHashCode(hasher.hash());
return result.build();
}
Usage of com.google.common.hash.Hasher in the Facebook Buck project.
Example: the scrubFile method of the LcUuidContentsScrubber class.
/**
 * Rewrites a Mach-O file's LC_UUID load command with a deterministic value: the UUID is first
 * zeroed, the whole file is hashed with SHA-1, and the first 16 bytes of that hash are written
 * back as the new UUID. Non-Mach-O files are left untouched.
 *
 * @param file writable channel over the candidate Mach-O file
 * @throws IOException if mapping or reading the file fails
 * @throws ScrubException if the Mach-O structure cannot be parsed or updated
 */
@Override
public void scrubFile(FileChannel file) throws IOException, ScrubException {
  if (!Machos.isMacho(file)) {
    return;
  }
  MappedByteBuffer contents = file.map(FileChannel.MapMode.READ_WRITE, 0, file.size());
  // Zero the existing UUID first so the content hash below does not depend on it.
  try {
    Machos.setUuid(contents, ZERO_UUID);
  } catch (Machos.MachoException e) {
    throw new ScrubException(e.getMessage());
  }
  contents.rewind();
  // Hash the entire (UUID-zeroed) binary.
  Hasher contentHasher = Hashing.sha1().newHasher();
  while (contents.hasRemaining()) {
    contentHasher.putByte(contents.get());
  }
  contents.rewind();
  // A UUID is 16 bytes; truncate the 20-byte SHA-1 digest accordingly.
  try {
    Machos.setUuid(contents, Arrays.copyOf(contentHasher.hash().asBytes(), 16));
  } catch (Machos.MachoException e) {
    throw new ScrubException(e.getMessage());
  }
}
Usage of com.google.common.hash.Hasher in the Facebook Buck project.
Example: the hashCommand method of the PreprocessorDelegate class.
/**
 * Produces a stable Murmur3-128 hash string for a preprocessor command line. The executable
 * (first argument) is skipped, remaining flags are sanitized, and any flag prefixed with the
 * working directory has that prefix replaced by a placeholder so the hash is independent of
 * where the project is checked out.
 *
 * @param flags full command line, executable first
 * @return hex string of the computed hash
 */
public String hashCommand(ImmutableList<String> flags) {
  Hasher flagHasher = Hashing.murmur3_128().newHasher();
  String workingDirPrefix = workingDir.toString();
  // Skips the executable argument (the first one) as that is not sanitized.
  for (String flag : sanitizer.sanitizeFlags(Iterables.skip(flags, 1))) {
    // TODO(#10251354): find a better way of dealing with getting a project dir normalized hash
    String normalized =
        flag.startsWith(workingDirPrefix)
            ? "<WORKINGDIR>" + flag.substring(workingDirPrefix.length())
            : flag;
    flagHasher.putString(normalized, Charsets.UTF_8);
    // A false boolean acts as a separator between flags.
    flagHasher.putBoolean(false);
  }
  return flagHasher.hash().toString();
}
Usage of com.google.common.hash.Hasher in the Facebook Buck project.
Example: the createTargetNode method of the DefaultParserTargetNodeFactory class.
/**
 * Builds a {@link TargetNode} from the raw attribute map the parser produced for a build
 * target: validates flavors, cross-checks the target against the raw data, marshals the
 * constructor arg, and hashes the raw node (salted with the Buck version) into the node.
 *
 * @param cell cell that owns the build file
 * @param buildFile build file the raw node came from
 * @param target fully-qualified (possibly flavored) build target
 * @param rawNode raw attribute map parsed from the build file
 * @param perfEventScope factory for perf-event scopes around expensive phases
 * @return the constructed target node
 * @throws UnexpectedFlavorException if the target has flavors the description rejects
 * @throws HumanReadableException on user-facing errors (unsupported flavors, bad params, IO)
 * @throws IllegalStateException if the target disagrees with the raw data (internal bug)
 */
@Override
public TargetNode<?, ?> createTargetNode(Cell cell, Path buildFile, BuildTarget target, Map<String, Object> rawNode, Function<PerfEventId, SimplePerfEvent.Scope> perfEventScope) {
BuildRuleType buildRuleType = parseBuildRuleTypeFromRawRule(cell, rawNode);
// Because of the way that the parser works, we know this can never return null.
Description<?> description = cell.getDescription(buildRuleType);
UnflavoredBuildTarget unflavoredBuildTarget = target.withoutCell().getUnflavoredBuildTarget();
// Flavored targets are only legal if the description opts in via the Flavored interface
// and accepts this particular flavor set.
if (target.isFlavored()) {
if (description instanceof Flavored) {
if (!((Flavored) description).hasFlavors(ImmutableSet.copyOf(target.getFlavors()))) {
throw UnexpectedFlavorException.createWithSuggestions(cell, target);
}
} else {
LOG.warn("Target %s (type %s) must implement the Flavored interface " + "before we can check if it supports flavors: %s", unflavoredBuildTarget, buildRuleType, target.getFlavors());
throw new HumanReadableException("Target %s (type %s) does not currently support flavors (tried %s)", unflavoredBuildTarget, buildRuleType, target.getFlavors());
}
}
// Sanity check: the target we were asked to build must match the target named by the raw
// data itself; a mismatch indicates a parser-pipeline bug, not a user error.
UnflavoredBuildTarget unflavoredBuildTargetFromRawData = RawNodeParsePipeline.parseBuildTargetFromRawRule(cell.getRoot(), rawNode, buildFile);
if (!unflavoredBuildTarget.equals(unflavoredBuildTargetFromRawData)) {
throw new IllegalStateException(String.format("Inconsistent internal state, target from data: %s, expected: %s, raw data: %s", unflavoredBuildTargetFromRawData, unflavoredBuildTarget, Joiner.on(',').withKeyValueSeparator("->").join(rawNode)));
}
Cell targetCell = cell.getCell(target);
Object constructorArg = description.createUnpopulatedConstructorArg();
try {
ImmutableSet.Builder<BuildTarget> declaredDeps = ImmutableSet.builder();
ImmutableSet.Builder<VisibilityPattern> visibilityPatterns = ImmutableSet.builder();
// Populate the constructor arg (and collect deps/visibility) from the raw attributes.
try (SimplePerfEvent.Scope scope = perfEventScope.apply(PerfEventId.of("MarshalledConstructorArg"))) {
marshaller.populate(targetCell.getCellPathResolver(), targetCell.getFilesystem(), target, constructorArg, declaredDeps, visibilityPatterns, rawNode);
}
try (SimplePerfEvent.Scope scope = perfEventScope.apply(PerfEventId.of("CreatedTargetNode"))) {
// Hash the Buck version plus the raw node so the node hash changes whenever either
// the build-file contents or the Buck binary changes.
Hasher hasher = Hashing.sha1().newHasher();
hasher.putString(BuckVersion.getVersion(), UTF_8);
JsonObjectHashing.hashJsonObject(hasher, rawNode);
TargetNode<?, ?> node = targetNodeFactory.createFromObject(hasher.hash(), description, constructorArg, targetCell.getFilesystem(), target, declaredDeps.build(), visibilityPatterns.build(), targetCell.getCellPathResolver());
if (buildFileTrees.isPresent() && cell.isEnforcingBuckPackageBoundaries(target.getBasePath())) {
enforceBuckPackageBoundaries(target, buildFileTrees.get().getUnchecked(targetCell), node.getInputs());
}
nodeListener.onCreate(buildFile, node);
return node;
}
} catch (NoSuchBuildTargetException e) {
throw new HumanReadableException(e);
} catch (ParamInfoException e) {
throw new HumanReadableException(e, "%s: %s", target, e.getMessage());
} catch (IOException e) {
// NOTE(review): this passes e.getMessage() as the format string and e as a format arg —
// presumably resolving to a (String, Object...) overload. If the message ever contains
// '%', String.format-style expansion could fail; verify against HumanReadableException's
// constructors and consider the (Throwable, String, Object...) overload instead.
throw new HumanReadableException(e.getMessage(), e);
}
}
Aggregations