Use of net.jpountz.xxhash.StreamingXXHash64 in project mapdb by jankotek.
The class Volume, method hash.
/**
 * <p>
 * Calculates XXHash64 from this Volume content.
 * </p>
 * @param off offset to start calculation from
 * @param len length of data to calculate hash
 * @param seed hash seed
 * @return XXHash64 value of the content
 */
public long hash(long off, long len, long seed) {
    final int blen = 128;
    byte[] b = new byte[blen];
    StreamingXXHash64 s = CC.HASH_FACTORY.newStreamingHash64(seed);
    // len now marks the end offset (exclusive)
    len += off;
    // read the unaligned head first, so that off becomes a multiple of blen
    int size = (int) Math.min(len - off, Math.min(blen, DataIO.roundUp(off, blen) - off));
    getData(off, b, 0, size);
    s.update(b, 0, size);
    off += size;
    // read the rest of the data in blen-sized chunks
    while (off < len) {
        size = (int) Math.min(blen, len - off);
        getData(off, b, 0, size);
        s.update(b, 0, size);
        off += size;
    }
    return s.getValue();
}
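For reference, StreamingXXHash64 comes from the lz4-java library and is obtained from an XXHashFactory. Below is a minimal, self-contained sketch of the same chunked-hashing pattern, independent of mapdb; the 128-byte buffer mirrors blen above, while the input bytes and the seed are invented for illustration.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import net.jpountz.xxhash.StreamingXXHash64;
import net.jpountz.xxhash.XXHashFactory;

public class StreamingHashSketch {

    // Hashes an InputStream in fixed-size chunks, mirroring the Volume.hash loop above.
    static long hashStream(InputStream in, long seed) throws IOException {
        XXHashFactory factory = XXHashFactory.fastestInstance();
        StreamingXXHash64 hash = factory.newStreamingHash64(seed);
        byte[] buf = new byte[128];
        int read;
        while ((read = in.read(buf)) != -1) {
            // update(byte[] buff, int off, int len) consumes len bytes starting at off
            hash.update(buf, 0, read);
        }
        return hash.getValue();
    }

    public static void main(String[] args) throws IOException {
        byte[] data = "some volume content".getBytes(StandardCharsets.UTF_8);
        long seed = 42L; // invented seed for the example
        System.out.println(Long.toHexString(hashStream(new ByteArrayInputStream(data), seed)));
    }
}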
Use of net.jpountz.xxhash.StreamingXXHash64 in project herddb by diennea.
The class TableDataChecksum, method createChecksum.
public static TableChecksum createChecksum(DBManager manager, TranslatedQuery query, TableSpaceManager tableSpaceManager, String tableSpace, String tableName) throws DataScannerException {
    AbstractTableManager tablemanager = tableSpaceManager.getTableManager(tableName);
    String nodeID = tableSpaceManager.getDbmanager().getNodeId();
    TranslatedQuery translated = query;
    final Table table = manager.getTableSpaceManager(tableSpace).getTableManager(tableName).getTable();
    // number of records
    long nrecords = 0;
    // the query may not be known yet, for example on the leader node
    if (translated == null) {
        String columns = formatColumns(table);
        /*
         scan = true
         allowCache = false
         returnValues = false
         maxRows = -1
         */
        translated = manager.getPlanner().translate(tableSpace, "SELECT " + columns + " FROM " + tableName + " order by " + formatPrimaryKeys(table), Collections.emptyList(), true, false, false, -1);
    }
    ScanStatement statement = translated.plan.mainStatement.unwrap(ScanStatement.class);
    statement.setAllowExecutionFromFollower(true);
    LOGGER.log(Level.INFO, "creating checksum for table {0}.{1} on node {2}", new Object[] { tableSpace, tableName, nodeID });
    try (DataScanner scan = manager.scan(statement, translated.context, TransactionContext.NO_TRANSACTION)) {
        StreamingXXHash64 hash64 = FACTORY.newStreamingHash64(SEED);
        long _start = System.currentTimeMillis();
        while (scan.hasNext()) {
            nrecords++;
            DataAccessor data = scan.next();
            data.forEach((String key, Object value) -> {
                int type = table.getColumn(key).type;
                byte[] serialize = RecordSerializer.serialize(value, type);
                /*
                 update takes three parameters: update(byte[] buff, int off, int len);
                 buff is the input data, off is the start offset in buff,
                 len is the number of bytes to hash
                 */
                if (serialize != null) {
                    hash64.update(serialize, 0, serialize.length);
                }
            });
        }
        LOGGER.log(Level.INFO, "Number of processed records for table {0}.{1} on node {2} = {3} ", new Object[] { tableSpace, tableName, nodeID, nrecords });
        long _stop = System.currentTimeMillis();
        long nextAutoIncrementValue = tablemanager.getNextPrimaryKeyValue();
        long scanduration = (_stop - _start);
        LOGGER.log(Level.INFO, "Creating checksum for table {0}.{1} on node {2} finished in {3} ms", new Object[] { tableSpace, tableName, nodeID, scanduration });
        SystemInstrumentation.instrumentationPoint("createChecksum", tableSpace, tableName);
        return new TableChecksum(tableSpace, tableName, hash64.getValue(), HASH_TYPE, nrecords, nextAutoIncrementValue, translated.context.query, scanduration);
    } catch (DataScannerException ex) {
        LOGGER.log(Level.SEVERE, "Scan failed", ex);
        throw new DataScannerException(ex);
    }
}
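One property worth noting, and the reason the generated query orders by the primary key: the streaming hash is order-sensitive, and feeding chunks one by one is equivalent to a one-shot XXHash64 over their concatenation. A small sketch of that equivalence, assuming the same lz4-java library; the seed and sample values here are invented, and herddb's actual SEED constant is not shown in this excerpt.

import java.nio.charset.StandardCharsets;
import net.jpountz.xxhash.StreamingXXHash64;
import net.jpountz.xxhash.XXHash64;
import net.jpountz.xxhash.XXHashFactory;

public class ChecksumEquivalenceSketch {
    public static void main(String[] args) {
        XXHashFactory factory = XXHashFactory.fastestInstance();
        long seed = 2342L; // invented seed for the example

        // Two "column values", as if serialized by RecordSerializer.
        byte[] a = "key-1".getBytes(StandardCharsets.UTF_8);
        byte[] b = "value-1".getBytes(StandardCharsets.UTF_8);

        // Streaming: feed each serialized column in scan order.
        StreamingXXHash64 streaming = factory.newStreamingHash64(seed);
        streaming.update(a, 0, a.length);
        streaming.update(b, 0, b.length);

        // One-shot: hash the concatenation of the same bytes.
        byte[] all = new byte[a.length + b.length];
        System.arraycopy(a, 0, all, 0, a.length);
        System.arraycopy(b, 0, all, a.length, b.length);
        XXHash64 oneShot = factory.hash64();
        long h = oneShot.hash(all, 0, all.length, seed);

        // The two values are identical; swapping the update order would change them.
        System.out.println(streaming.getValue() == h); // true
    }
}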