Use of com.google.common.hash.Hasher in project flink by apache.
The class StreamGraphHasherV1, method generateNodeHash.
/**
 * Generates a hash for the node and returns whether the operation was
 * successful.
 *
 * @param node The node to generate the hash for
 * @param hashFunction The hash function to use
 * @param hashes The current state of generated hashes
 * @return <code>true</code> if the node hash has been generated,
 * <code>false</code> otherwise. If the operation is not successful, the
 * hash needs to be generated at a later point when all input is available.
 * @throws UnsupportedOperationException If the node has a user-specified
 * hash and is an intermediate node of a chain
 */
private boolean generateNodeHash(StreamNode node, HashFunction hashFunction, Map<Integer, byte[]> hashes, boolean isChainingEnabled) {
    // Check for a user-specified ID
    String userSpecifiedHash = node.getTransformationUID();
    if (userSpecifiedHash == null) {
        // Check that all input nodes have their hashes computed.
        for (StreamEdge inEdge : node.getInEdges()) {
            // If the input node has not been visited yet, the current node
            // will be visited again at a later point when all input
            // nodes have been visited and their hashes set.
            if (!hashes.containsKey(inEdge.getSourceId())) {
                return false;
            }
        }
        Hasher hasher = hashFunction.newHasher();
        byte[] hash = generateDeterministicHash(node, hasher, hashes, isChainingEnabled);
        if (hashes.put(node.getId(), hash) != null) {
            // Sanity check
            throw new IllegalStateException("Unexpected state. Tried to add node hash "
                    + "twice. This is probably a bug in the JobGraph generator.");
        }
        return true;
    } else {
        // Check that this node is not an intermediate node of a chain. This
        // is currently not supported, because the runtime takes the
        // snapshots by the operator ID of the first vertex in a chain. It's
        // OK if the node has chained outputs.
        for (StreamEdge inEdge : node.getInEdges()) {
            if (isChainable(inEdge, isChainingEnabled)) {
                throw new UnsupportedOperationException("Cannot assign user-specified hash "
                        + "to intermediate node in chain. This will be supported in future "
                        + "versions of Flink. As a work around start new chain at task "
                        + node.getOperatorName() + ".");
            }
        }
        Hasher hasher = hashFunction.newHasher();
        byte[] hash = generateUserSpecifiedHash(node, hasher);
        for (byte[] previousHash : hashes.values()) {
            if (Arrays.equals(previousHash, hash)) {
                throw new IllegalArgumentException("Hash collision on user-specified ID. "
                        + "Most likely cause is a non-unique ID. Please check that all IDs "
                        + "specified via `uid(String)` are unique.");
            }
        }
        if (hashes.put(node.getId(), hash) != null) {
            // Sanity check
            throw new IllegalStateException("Unexpected state. Tried to add node hash "
                    + "twice. This is probably a bug in the JobGraph generator.");
        }
        return true;
    }
}
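The method above follows Guava's standard incremental-hashing pattern, which every snippet on this page shares: a HashFunction produces a single-use Hasher, the caller feeds it primitives and byte arrays through the put* methods, and hash() finalizes the computation. A minimal, self-contained sketch of that pattern (the values fed in are illustrative, not Flink's actual inputs):

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

public class HasherPattern {
    public static void main(String[] args) {
        HashFunction hashFunction = Hashing.murmur3_128();

        // A Hasher accumulates input incrementally and is single-use.
        Hasher hasher = hashFunction.newHasher();
        hasher.putInt(1);                    // e.g. a node id (illustrative)
        hasher.putBytes(new byte[] {42, 7}); // e.g. an upstream hash (illustrative)

        // hash() finalizes the Hasher; no further put* calls are allowed.
        byte[] hash = hasher.hash().asBytes();
        System.out.println("hash length: " + hash.length + " bytes"); // 16 for murmur3_128
    }
}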
Use of com.google.common.hash.Hasher in project flink by apache.
The class StreamGraphHasherV2, method generateNodeHash.
The body of generateNodeHash in StreamGraphHasherV2 is identical, line for line, to the StreamGraphHasherV1 listing above.
Use of com.google.common.hash.Hasher in project SpongeCommon by SpongePowered.
The class SpongeResourcePackFactory, method fromUri.
@Override
public ResourcePack fromUri(URI uri) throws FileNotFoundException {
    checkNotNull(uri, "uri");
    try {
        Hasher hasher = Hashing.sha1().newHasher();
        // Stream the pack contents through the hasher in 256-byte chunks.
        try (InputStream in = openStream(uri)) {
            byte[] buf = new byte[256];
            while (true) {
                int read = in.read(buf);
                if (read <= 0) {
                    break;
                }
                hasher.putBytes(buf, 0, read);
            }
        }
        return SpongeResourcePack.create(uri, hasher.hash().toString());
    } catch (IOException e) {
        // Wrap any I/O failure as FileNotFoundException, preserving the cause.
        FileNotFoundException ex = new FileNotFoundException(e.toString());
        ex.initCause(e);
        throw ex;
    }
}
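Guava can also drive the read loop itself: Funnels.asOutputStream adapts a Hasher (a PrimitiveSink) into an OutputStream, so ByteStreams.copy replaces the manual buffer loop. A minimal sketch of that alternative, hashing the contents of a URL; the URI here is illustrative, not from SpongeCommon:

import java.io.InputStream;
import java.net.URI;

import com.google.common.hash.Funnels;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteStreams;

public class StreamHashSketch {
    public static void main(String[] args) throws Exception {
        URI uri = URI.create("https://example.com/pack.zip"); // illustrative
        Hasher hasher = Hashing.sha1().newHasher();
        try (InputStream in = uri.toURL().openStream()) {
            // Every byte copied into the adapted stream feeds the hasher.
            ByteStreams.copy(in, Funnels.asOutputStream(hasher));
        }
        String hex = hasher.hash().toString(); // lower-case hex, as used above
        System.out.println(hex);
    }
}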
Use of com.google.common.hash.Hasher in project bookkeeper by apache.
The class Value, method hashCode.
@Override
public int hashCode() {
    HashFunction hf = Hashing.murmur3_32();
    Hasher hc = hf.newHasher();
    // Only the field names contribute to the hash; values are ignored.
    for (String key : fields.keySet()) {
        hc.putString(key, Charset.defaultCharset());
    }
    return hc.hash().asInt();
}
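Two caveats in this implementation are worth noting: Charset.defaultCharset() is platform-dependent, and the iteration order of fields.keySet() depends on the map implementation, so the resulting value is only stable within a single JVM, which is all hashCode() requires. If the hash ever needed to be reproducible across machines, both would have to be pinned. A sketch of such a variant, assuming a fields map like the original's (StableKeyHash is a hypothetical helper, not from bookkeeper):

import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.TreeSet;

import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

final class StableKeyHash {
    // Cross-machine-stable variant of the hashCode above: fixed charset and
    // fixed key order. (Newer Guava also offers murmur3_32_fixed(), which
    // corrects a putString bug in murmur3_32() for some non-ASCII input.)
    static int of(Map<String, ?> fields) {
        Hasher hasher = Hashing.murmur3_32().newHasher();
        for (String key : new TreeSet<>(fields.keySet())) {
            hasher.putString(key, StandardCharsets.UTF_8);
        }
        return hasher.hash().asInt();
    }
}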
Use of com.google.common.hash.Hasher in project jackrabbit-oak by apache.
The class ChunkedBlobStream, method decorateRawBuffer.
private ByteBuf decorateRawBuffer(ByteBufAllocator allocator, ByteBuf buffer) {
    byte[] data = new byte[buffer.readableBytes()];
    buffer.readBytes(data);
    buffer.release();

    byte mask = createMask(data.length);
    // Hash the mask, the total blob length and the chunk payload so the
    // receiver can verify the frame.
    Hasher hasher = Hashing.murmur3_32().newHasher();
    long hash = hasher.putByte(mask).putLong(length).putBytes(data).hash().padToLong();

    byte[] blobIdBytes = blobId.getBytes();
    ByteBuf out = allocator.buffer();
    // Frame length: header byte + mask byte + length long + blob-id length
    // int + blob-id bytes + hash long + payload.
    out.writeInt(1 + 1 + 8 + 4 + blobIdBytes.length + 8 + data.length);
    out.writeByte(Messages.HEADER_BLOB);
    out.writeByte(mask);
    out.writeLong(length);
    out.writeInt(blobIdBytes.length);
    out.writeBytes(blobIdBytes);
    out.writeLong(hash);
    out.writeBytes(data);
    return out;
}
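On the receiving side, such a frame can be checked by reading the fields back in the same order and recomputing the hash. A hypothetical verifier sketch (BlobFrameVerifier is illustrative, not part of jackrabbit-oak's actual decoder):

import com.google.common.hash.Hashing;
import io.netty.buffer.ByteBuf;

final class BlobFrameVerifier {
    // Verifies a frame laid out as in decorateRawBuffer above. Assumes the
    // length prefix and the HEADER_BLOB byte have already been consumed.
    static boolean verify(ByteBuf in) {
        byte mask = in.readByte();
        long blobLength = in.readLong();

        byte[] blobIdBytes = new byte[in.readInt()];
        in.readBytes(blobIdBytes);

        long expectedHash = in.readLong();

        byte[] data = new byte[in.readableBytes()];
        in.readBytes(data);

        // Recompute the hash over the same fields, in the same order, as the
        // sender: mask, total blob length, then the chunk payload.
        long actualHash = Hashing.murmur3_32().newHasher()
                .putByte(mask)
                .putLong(blobLength)
                .putBytes(data)
                .hash()
                .padToLong();

        return actualHash == expectedHash;
    }
}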