Use of com.google.common.hash.Hasher in project gitiles by GerritCodeReview.
From the class Renderer, method computeTemplateHash:
HashCode computeTemplateHash(String soyFile) {
  URL u = templates.get(soyFile);
  checkState(u != null, "Missing Soy template %s", soyFile);
  Hasher h = Hashing.murmur3_128().newHasher();
  try (InputStream is = u.openStream();
      OutputStream os = Funnels.asOutputStream(h)) {
    ByteStreams.copy(is, os);
  } catch (IOException e) {
    throw new IllegalStateException("Missing Soy template " + soyFile, e);
  }
  return h.hash();
}
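The same streaming pattern, piping an InputStream into a Hasher through Funnels.asOutputStream, works outside Gitiles as well. Below is a minimal, self-contained sketch that hashes a local file instead of a bundled Soy template; the file name "example.soy" and the class name are illustrative, not part of the Gitiles code.

import com.google.common.hash.Funnels;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteStreams;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

public class StreamingHashExample {
  public static void main(String[] args) throws IOException {
    Hasher h = Hashing.murmur3_128().newHasher();
    // Copy the file through the hasher without loading it fully into memory.
    try (InputStream is = Files.newInputStream(Paths.get("example.soy"));
        OutputStream os = Funnels.asOutputStream(h)) {
      ByteStreams.copy(is, os);
    }
    HashCode hash = h.hash();
    System.out.println(hash);
  }
}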
Use of com.google.common.hash.Hasher in project dsl-devkit by dsldevkit.
From the class DefaultInferredElementFragmentProvider, method computeHash:
/**
* Computes a hash code for the given {@link #getEClass(EObject) EClass} and {@link #getQualifiedName(EObject) qualified name}.
*
* @param eClass
* EClass to base hash on, must not be {@code null}
* @param name
* qualified name of inferred model element, can be {@code null}
* @return hash code, never {@code null}
*/
protected HashCode computeHash(final EClass eClass, final QualifiedName name) {
  byte[] eClassUriBytes = eClassToUriBytesMap.get(eClass);
  if (eClassUriBytes == null) {
    eClassUriBytes = EcoreUtil.getURI(eClass).toString().getBytes(Charsets.UTF_8);
    eClassToUriBytesMap.put(eClass, eClassUriBytes);
  }
  Hasher hasher = hashFunction.newHasher(HASHER_CAPACITY);
  hasher.putBytes(eClassUriBytes);
  if (name != null) {
    hasher.putChar('/');
    for (int j = 0; j < name.getSegmentCount(); j++) {
      hasher.putUnencodedChars(name.getSegment(j)).putChar('.');
    }
  }
  return hasher.hash();
}
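For illustration only, the same hashing scheme can be sketched without the EMF types: a plain String stands in for the EClass URI and a List<String> for the QualifiedName segments. The hash function (murmur3_128) and the capacity hint of 64 bytes are assumptions here; the original uses its own hashFunction field and HASHER_CAPACITY constant.

import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

import java.nio.charset.StandardCharsets;
import java.util.List;

public class FragmentHashSketch {
  static HashCode computeHash(String eClassUri, List<String> nameSegments) {
    Hasher hasher = Hashing.murmur3_128().newHasher(64);
    // Hash the (normally cached) URI bytes of the EClass.
    hasher.putBytes(eClassUri.getBytes(StandardCharsets.UTF_8));
    if (nameSegments != null) {
      // Separate the URI from the name, then append each segment with a trailing '.'.
      hasher.putChar('/');
      for (String segment : nameSegments) {
        hasher.putUnencodedChars(segment).putChar('.');
      }
    }
    return hasher.hash();
  }

  public static void main(String[] args) {
    System.out.println(computeHash("http://www.example.org/mydsl#//Element", List.of("my", "qualified", "name")));
  }
}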
Use of com.google.common.hash.Hasher in project dsl-devkit by dsldevkit.
From the class AbstractStreamingFingerprintComputer, method computeFingerprint:
/**
* Computes a fingerprint for a collection of {@link EObject}.
* <p>
* The collection of EObjects to fingerprint is specified by an {@link Iterable}. When determining how to compute the fingerprint, the first item of the
* Iterable supplies the context for all EObjects in the collection. The collection is fingerprinted as {@link FingerprintOrder#UNORDERED} and each
* EObject in the collection is fingerprinted as {@link FingerprintIndirection#INDIRECT}.
* </p>
*
* @see #fingerprintIndirection(EObject, EObject, EReference, Hasher)
* @see #fingerprintIterable(Iterable, EObject, FingerprintOrder, FingerprintIndirection, Hasher)
* @param objects
* an {@link Iterable} that specifies the collection of {@link EObject}s to be fingerprinted, may be {@code null}
* @return the fingerprint for the collection of {@code objects} or {@code null} if {@code objects} is {@code null} or empty.
*/
protected String computeFingerprint(final Iterable<? extends EObject> objects) {
  if (objects == null || Iterables.isEmpty(objects)) {
    return null;
  }
  Hasher hasher = HASH_FUNCTION.newHasher();
  fingerprintIterable(objects, Iterables.get(objects, 0), FingerprintOrder.UNORDERED, FingerprintIndirection.INDIRECT, hasher);
  return hasher.hash().toString();
}
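The fingerprintIterable call is defined elsewhere in AbstractStreamingFingerprintComputer, so its UNORDERED and INDIRECT handling is not shown here. As a rough illustration of what an order-independent fingerprint can look like in general, the sketch below hashes each element separately and combines the per-element hashes with a commutative XOR; this is an assumption-laden stand-in using Strings, not the dsl-devkit implementation.

import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

import java.nio.charset.StandardCharsets;
import java.util.List;

public class UnorderedFingerprintSketch {
  private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();

  static String computeFingerprint(Iterable<String> objects) {
    if (objects == null || !objects.iterator().hasNext()) {
      return null; // mirror the null/empty guard of the original method
    }
    byte[] combined = null;
    for (String o : objects) {
      byte[] h = HASH_FUNCTION.hashString(o, StandardCharsets.UTF_8).asBytes();
      if (combined == null) {
        combined = h.clone();
      } else {
        for (int i = 0; i < combined.length; i++) {
          combined[i] ^= h[i]; // XOR is commutative, so element order does not matter
        }
      }
    }
    return HashCode.fromBytes(combined).toString();
  }

  public static void main(String[] args) {
    // Both calls print the same fingerprint despite different element order.
    System.out.println(computeFingerprint(List.of("a", "b", "c")));
    System.out.println(computeFingerprint(List.of("c", "a", "b")));
  }
}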
Use of com.google.common.hash.Hasher in project dsl-devkit by dsldevkit.
From the class AbstractStreamingFingerprintComputer, method computeFingerprint:
/**
* {@inheritDoc}
*/
@Override
public String computeFingerprint(final EObject object) {
  if (object == null) {
    return null;
  }
  Hasher hasher = HASH_FUNCTION.newHasher();
  fingerprint(object, hasher);
  HashCode export = hasher.hash();
  if (export.equals(NO_EXPORT)) {
    return null;
  }
  return export.toString();
}
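The NO_EXPORT comparison above is a sentinel check: if hashing the object contributed nothing, the resulting HashCode equals a reserved value and the method reports null instead of a meaningless fingerprint. The sketch below imitates that pattern under the assumption that the sentinel is the hash of empty input and that a String stands in for the EObject; neither detail is taken from the dsl-devkit sources.

import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

import java.nio.charset.StandardCharsets;

public class SentinelFingerprintSketch {
  private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();
  // Assumed sentinel: the hash of an untouched hasher, i.e. "nothing was exported".
  private static final HashCode NO_EXPORT = HASH_FUNCTION.newHasher().hash();

  static String computeFingerprint(String object) {
    if (object == null) {
      return null;
    }
    Hasher hasher = HASH_FUNCTION.newHasher();
    if (!object.isEmpty()) {
      hasher.putString(object, StandardCharsets.UTF_8); // stand-in for fingerprint(object, hasher)
    }
    HashCode export = hasher.hash();
    // The hasher was never fed any data: report "no fingerprint" as null.
    return export.equals(NO_EXPORT) ? null : export.toString();
  }

  public static void main(String[] args) {
    System.out.println(computeFingerprint("some exported element")); // prints a fingerprint
    System.out.println(computeFingerprint(""));                      // prints null
  }
}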
Use of com.google.common.hash.Hasher in project OpenTripPlanner by opentripplanner.
From the class StopPattern, method semanticHash:
/**
* In most cases we want to use identity equality for StopPatterns. There is a single StopPattern instance for each
* semantic StopPattern, and we don't want to calculate complicated hashes or equality values during normal
* execution. However, in some cases we want a way to consistently identify trips across versions of a GTFS feed, when the
* feed publisher cannot ensure stable trip IDs. Therefore we define some additional hash functions.
*/
public HashCode semanticHash(HashFunction hashFunction) {
  Hasher hasher = hashFunction.newHasher();
  for (int s = 0; s < size; s++) {
    Stop stop = stops[s];
    // Truncate the lat and lon to 6 decimal places in case they move slightly between feed versions
    hasher.putLong((long) (stop.getLat() * 1000000));
    hasher.putLong((long) (stop.getLon() * 1000000));
  }
  // Hash the pick-up and drop-off types per hop; these values are defined per hop
  // and have changed between OTP versions.
  for (int hop = 0; hop < size - 1; hop++) {
    hasher.putInt(pickups[hop]);
    hasher.putInt(dropoffs[hop + 1]);
  }
  return hasher.hash();
}
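The quantization trick, multiplying by 1,000,000 and truncating to a long, is what makes the hash tolerant of tiny coordinate drift between feed versions. Below is a self-contained sketch of just that part, with plain latitude/longitude pairs standing in for OTP Stop objects; the coordinates are made up for the example.

import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

public class SemanticHashSketch {
  static HashCode semanticHash(double[][] stopCoordinates) {
    Hasher hasher = Hashing.murmur3_128().newHasher();
    for (double[] latLon : stopCoordinates) {
      // Truncate to 6 decimal places so sub-micro-degree drift does not change the hash.
      hasher.putLong((long) (latLon[0] * 1000000));
      hasher.putLong((long) (latLon[1] * 1000000));
    }
    return hasher.hash();
  }

  public static void main(String[] args) {
    double[][] stops = {{47.500000, 8.250000}, {47.750000, 8.125000}};
    double[][] drifted = {{47.5000004, 8.2500003}, {47.7500002, 8.1250004}};
    // Both calls print the same hash: the drift is below the 1e-6 truncation step.
    System.out.println(semanticHash(stops));
    System.out.println(semanticHash(drifted));
  }
}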