Usage of com.palantir.util.crypto.Sha256Hash in the atlasdb project by Palantir: class ValueStreamStore, method putHashIndexTask.
/**
 * Populates the hash-to-stream-id auxiliary index for a batch of stream metadata rows.
 * Only streams whose metadata is in the {@code STORED} state may be indexed.
 *
 * @param t transaction in which the index rows are written
 * @param rowsToMetadata metadata rows (keyed by stream id) whose hashes should be indexed
 * @throws IllegalArgumentException if any metadata entry is not in the STORED state
 */
private void putHashIndexTask(Transaction t, Map<ValueStreamMetadataTable.ValueStreamMetadataRow, StreamMetadata> rowsToMetadata) {
    Multimap<ValueStreamHashAidxTable.ValueStreamHashAidxRow, ValueStreamHashAidxTable.ValueStreamHashAidxColumnValue> indexMap = HashMultimap.create();
    for (Entry<ValueStreamMetadataTable.ValueStreamMetadataRow, StreamMetadata> e : rowsToMetadata.entrySet()) {
        ValueStreamMetadataTable.ValueStreamMetadataRow row = e.getKey();
        StreamMetadata metadata = e.getValue();
        Preconditions.checkArgument(metadata.getStatus() == Status.STORED, "Should only index successfully stored streams.");
        Sha256Hash hash = Sha256Hash.EMPTY;
        // Content-based emptiness check rather than reference equality against
        // ByteString.EMPTY: a zero-length ByteString is not guaranteed to be the
        // EMPTY singleton, so a `!=` comparison could mis-classify an empty hash
        // and wrap it in a Sha256Hash of the wrong length.
        if (!metadata.getHash().isEmpty()) {
            hash = new Sha256Hash(metadata.getHash().toByteArray());
        }
        ValueStreamHashAidxTable.ValueStreamHashAidxRow hashRow = ValueStreamHashAidxTable.ValueStreamHashAidxRow.of(hash);
        ValueStreamHashAidxTable.ValueStreamHashAidxColumn column = ValueStreamHashAidxTable.ValueStreamHashAidxColumn.of(row.getId());
        // Column value payload is unused for this index; 0L is a placeholder.
        ValueStreamHashAidxTable.ValueStreamHashAidxColumnValue columnValue = ValueStreamHashAidxTable.ValueStreamHashAidxColumnValue.of(column, 0L);
        indexMap.put(hashRow, columnValue);
    }
    ValueStreamHashAidxTable hiTable = tables.getValueStreamHashAidxTable(t);
    hiTable.put(indexMap);
}
Usage of com.palantir.util.crypto.Sha256Hash in the atlasdb project by Palantir: class AbstractPersistentStreamStore, method storeStreams.
/**
 * Stores a batch of streams in a single transaction and returns the SHA-256 hash of each.
 * Empty metadata is written first as a marker, then the stream blocks are stored and the
 * final metadata (including the hash) is written.
 *
 * @param tx transaction used for all writes
 * @param streams stream contents keyed by stream id
 * @return the SHA-256 hash of each stored stream, keyed by stream id
 */
@Override
public Map<Long, Sha256Hash> storeStreams(final Transaction tx, final Map<Long, InputStream> streams) {
    if (streams.isEmpty()) {
        return ImmutableMap.of();
    }
    Map<Long, StreamMetadata> idsToEmptyMetadata = Maps.transformValues(streams, Functions.constant(getEmptyMetadata()));
    putMetadataAndHashIndexTask(tx, idsToEmptyMetadata);
    // Materialize eagerly: Maps.transformEntries returns a *lazy* view, so without the
    // copyOf, storeBlocksAndGetFinalMetadata would be re-invoked on every access of the
    // map — here once while putMetadataAndHashIndexTask iterates it and again below when
    // computing the hashes — storing the stream blocks multiple times.
    Map<Long, StreamMetadata> idsToMetadata = ImmutableMap.copyOf(
            Maps.transformEntries(streams, (id, stream) -> storeBlocksAndGetFinalMetadata(tx, id, stream)));
    putMetadataAndHashIndexTask(tx, idsToMetadata);
    // Also materialized so callers iterating the result repeatedly do not rebuild the
    // Sha256Hash objects on each access.
    return ImmutableMap.copyOf(
            Maps.transformValues(idsToMetadata, metadata -> new Sha256Hash(metadata.getHash().toByteArray())));
}
Usage of com.palantir.util.crypto.Sha256Hash in the atlasdb project by Palantir: class DbKvs, method getColumnCountsUnordered.
/**
 * Queries the column count for each requested row within the given column range.
 * The returned map is keyed by the SHA-256 hash of the row name; iteration order
 * is unspecified.
 *
 * @param tableRef table to query
 * @param rowList raw row names to count columns for
 * @param columnRangeSelection column range restricting which columns are counted
 * @param timestamp read timestamp
 * @return column counts keyed by row-name hash, in no particular order
 */
private Map<Sha256Hash, Integer> getColumnCountsUnordered(TableReference tableRef, List<byte[]> rowList, ColumnRangeSelection columnRangeSelection, long timestamp) {
    return runRead(tableRef, dbReadTable -> {
        // Presized to the request size: at most one entry per requested row.
        Map<Sha256Hash, Integer> countsByRowHash = new HashMap<>(rowList.size());
        try (ClosableIterator<AgnosticLightResultRow> results =
                dbReadTable.getRowsColumnRangeCounts(rowList, timestamp, columnRangeSelection)) {
            results.forEachRemaining(resultRow -> countsByRowHash.put(
                    Sha256Hash.computeHash(resultRow.getBytes(ROW)),
                    resultRow.getInteger("column_count")));
        }
        return countsByRowHash;
    });
}
Usage of com.palantir.util.crypto.Sha256Hash in the atlasdb project by Palantir: class DbKvs, method getColumnCounts.
/**
 * Returns the column count for each requested row, batching the underlying queries
 * and presenting the results in the same order as {@code rowList}.
 *
 * @param tableRef table to query
 * @param rowList raw row names, defining both the rows to count and the result order
 * @param columnRangeSelection column range restricting which columns are counted
 * @param timestamp read timestamp
 * @return column counts keyed by row-name hash, iterating in {@code rowList} order;
 *         rows with no results map to zero
 */
private Map<Sha256Hash, Integer> getColumnCounts(TableReference tableRef, List<byte[]> rowList, ColumnRangeSelection columnRangeSelection, long timestamp) {
    Map<Sha256Hash, Integer> unordered = batchingQueryRunner.runTask(
            rowList,
            BatchingStrategies.forList(),
            AccumulatorStrategies.forMap(),
            batch -> getColumnCountsUnordered(tableRef, batch, columnRangeSelection, timestamp));
    // Re-key into a LinkedHashMap so iteration order mirrors the provided row list.
    Map<Sha256Hash, Integer> inRowOrder = new LinkedHashMap<>(unordered.size());
    for (byte[] row : rowList) {
        Sha256Hash rowHash = Sha256Hash.computeHash(row);
        inRowOrder.put(rowHash, unordered.getOrDefault(rowHash, 0));
    }
    return inRowOrder;
}
Usage of com.palantir.util.crypto.Sha256Hash in the atlasdb project by Palantir: class DataStreamStore, method lookupStreamIdsByHash.
/**
 * Looks up stream ids by content hash via the hash auxiliary index, returning only
 * streams whose metadata is in the {@code STORED} state.
 *
 * @param t transaction to read within
 * @param hashes content hashes to resolve
 * @return stream ids keyed by hash; hashes with no stored stream are absent
 */
@Override
public Map<Sha256Hash, Long> lookupStreamIdsByHash(Transaction t, final Set<Sha256Hash> hashes) {
    if (hashes.isEmpty()) {
        return ImmutableMap.of();
    }
    DataStreamHashAidxTable idx = tables.getDataStreamHashAidxTable(t);
    Set<DataStreamHashAidxTable.DataStreamHashAidxRow> rows = getHashIndexRowsForHashes(hashes);
    Multimap<DataStreamHashAidxTable.DataStreamHashAidxRow, DataStreamHashAidxTable.DataStreamHashAidxColumnValue> indexEntries = idx.getRowsMultimap(rows);
    // Invert the index into stream id -> hash, sanity-checking that no stream id
    // is recorded under two different hashes.
    Map<Long, Sha256Hash> hashById = Maps.newHashMap();
    for (Map.Entry<DataStreamHashAidxTable.DataStreamHashAidxRow, DataStreamHashAidxTable.DataStreamHashAidxColumnValue> indexEntry : indexEntries.entries()) {
        Long streamId = indexEntry.getValue().getColumnName().getStreamId();
        Sha256Hash hash = indexEntry.getKey().getHash();
        Sha256Hash previous = hashById.put(streamId, hash);
        if (previous != null) {
            AssertUtils.assertAndLog(log, previous.equals(hash), "(BUG) Stream ID has 2 different hashes: " + streamId);
        }
    }
    // Keep only streams that finished storing successfully.
    Map<Long, StreamMetadata> metadataById = getMetadata(t, hashById.keySet());
    Map<Sha256Hash, Long> storedIdsByHash = Maps.newHashMap();
    for (Map.Entry<Long, StreamMetadata> metadataEntry : metadataById.entrySet()) {
        if (metadataEntry.getValue().getStatus() == Status.STORED) {
            storedIdsByHash.put(hashById.get(metadataEntry.getKey()), metadataEntry.getKey());
        }
    }
    return storedIdsByHash;
}
Aggregations