Use of com.hedera.services.state.virtual.ContractValue in project hedera-services by hashgraph.
The class VirtualDataSourceBench, method b_add500Leaves().
@Benchmark
public void b_add500Leaves() throws IOException {
    final long firstLeafPath = dataSource.getFirstLeafPath();
    final long lastLeafPath = dataSource.getLastLeafPath();
    // Adding 500 leaves moves the first leaf path forward by 500 and the last leaf path by 1000.
    final long newFirstLeafPath = firstLeafPath + 500;
    final long newLastLeafPath = lastLeafPath + 1000;
    // Paths [firstLeafPath, newFirstLeafPath) are now internal nodes.
    var internalRecordStream = LongStream.range(firstLeafPath, newFirstLeafPath)
            .mapToObj(path -> new VirtualInternalRecord(path, hash((int) path)));
    // Paths [lastLeafPath, newLastLeafPath] receive leaf records.
    var leafRecordStream = LongStream.range(lastLeafPath, newLastLeafPath + 1)
            .mapToObj(path -> new VirtualLeafRecord<>(
                    path, hash((int) path), new ContractKey(path, path), new ContractValue(path)));
    dataSource.saveRecords(newFirstLeafPath, newLastLeafPath, internalRecordStream, leafRecordStream, Stream.empty());
}
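The path arithmetic above appears to follow the usual binary-tree path addressing of the virtual map: adding one leaf turns one existing leaf position into an internal node and appends two new leaf positions, so adding N leaves shifts the first leaf path by N and the last leaf path by 2 * N. A minimal sketch of that bookkeeping, with a helper name chosen here purely for illustration:

// Illustrative helper (not part of the benchmark): computes the new path bounds
// after `count` leaves are appended to a path-addressed binary tree.
static long[] pathBoundsAfterAdding(final long firstLeafPath, final long lastLeafPath, final long count) {
    final long newFirstLeafPath = firstLeafPath + count;   // `count` former leaf paths become internal nodes
    final long newLastLeafPath = lastLeafPath + 2 * count; // each added leaf extends the leaf range by two paths
    return new long[] {newFirstLeafPath, newLastLeafPath};
}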
Use of com.hedera.services.state.virtual.ContractValue in project hedera-services by hashgraph.
The class AddBench, method add500Leaves().
@Benchmark
public void add500Leaves(DatabaseMergingState databaseState) throws IOException {
    final long firstLeafPath = databaseState.dataSource.getFirstLeafPath();
    final long lastLeafPath = databaseState.dataSource.getLastLeafPath();
    // Each added leaf moves the first leaf path by one and the last leaf path by two.
    final long newFirstLeafPath = firstLeafPath + NUMBER_OF_LEAVES_ADDED_PER_FLUSH;
    final long newLastLeafPath = lastLeafPath + NUMBER_OF_LEAVES_ADDED_PER_FLUSH + NUMBER_OF_LEAVES_ADDED_PER_FLUSH;
    var internalRecordStream = LongStream.range(firstLeafPath, newFirstLeafPath)
            .mapToObj(path -> new VirtualInternalRecord(path, hash((int) path)));
    var leafRecordStream = LongStream.range(lastLeafPath, newLastLeafPath + 1)
            .mapToObj(path -> new VirtualLeafRecord<>(
                    path, hash((int) path), new ContractKey(path, path), new ContractValue(path)));
    databaseState.dataSource.saveRecords(newFirstLeafPath, newLastLeafPath, internalRecordStream, leafRecordStream, Stream.empty());
}
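Both methods above are ordinary JMH benchmarks, so they can be launched programmatically as well as from a generated benchmark jar. A minimal sketch using the standard JMH runner API; the include pattern and fork count are assumptions made for this example, not taken from the project:

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class BenchRunner {
    public static void main(String[] args) throws RunnerException {
        // Run only the leaf-adding benchmarks; the regex below is illustrative.
        Options options = new OptionsBuilder()
                .include("AddBench")
                .forks(1)
                .build();
        new Runner(options).run();
    }
}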
Use of com.hedera.services.state.virtual.ContractValue in project hedera-services by hashgraph.
The class DatabaseState, method setupDatabase().
@Setup(Level.Trial)
public void setupDatabase() throws IOException {
    System.out.println("dataSourcePath = " + dataSourcePath + " mergingEnabled=" + mergingEnabled);
    if (Files.exists(dataSourcePath)) {
        System.err.println("!!!!!!!!!!!!! Deleting old db.");
        deleteDirectoryAndContents(dataSourcePath);
    }
    if (Files.exists(dataSourceSnapshotPath)) {
        System.err.println("!!!!!!!!!!!!! Deleting old db snapshot.");
        deleteDirectoryAndContents(dataSourceSnapshotPath);
    }
    // create data source
    VirtualLeafRecordSerializer<ContractKey, ContractValue> virtualLeafRecordSerializer =
            new VirtualLeafRecordSerializer<>(
                    (short) 1, DigestType.SHA_384,
                    (short) 1, DataFileCommon.VARIABLE_DATA_SIZE, new ContractKeySupplier(),
                    (short) 1, ContractValue.SERIALIZED_SIZE, new ContractValueSupplier(),
                    true);
    JasperDbBuilder<ContractKey, ContractValue> dbBuilder = new JasperDbBuilder<>();
    dbBuilder.virtualLeafRecordSerializer(virtualLeafRecordSerializer)
            .virtualInternalRecordSerializer(new VirtualInternalRecordSerializer())
            .keySerializer(new ContractKeySerializer())
            .storageDir(dataSourcePath)
            .maxNumOfKeys(500_000_000)
            .preferDiskBasedIndexes(false)
            .internalHashesRamToDiskThreshold(0)
            .mergingEnabled(mergingEnabled);
    dataSource = dbBuilder.build("jdb", "4dbState");
    // populate with initial data
    System.out.printf("Creating initial data set of %,d leaves\n", initialDataSize);
    progressPercentage = 0;
    final long firstLeafPath = initialDataSize;
    final long lastLeafPath = firstLeafPath + initialDataSize;
    var internalRecordStream = LongStream.range(0, firstLeafPath)
            .mapToObj(path -> new VirtualInternalRecord(path, hash((int) path)))
            .peek(internalRecord -> printProgress(internalRecord.getPath(), lastLeafPath));
    var leafRecordStream = LongStream.range(firstLeafPath, lastLeafPath + 1)
            .mapToObj(path -> new VirtualLeafRecord<>(
                    path, hash((int) path), new ContractKey(path, path), new ContractValue(path)))
            .peek(leaf -> printProgress(leaf.getPath(), lastLeafPath));
    dataSource.saveRecords(firstLeafPath, lastLeafPath, internalRecordStream, leafRecordStream, Stream.empty());
    System.out.printf("Done creating initial data set of %,d leaves\n", initialDataSize);
}
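setupDatabase relies on two helpers that are not shown in this excerpt: hash(int), which builds a SHA-384 hash for a path, and printProgress(long, long), which drives the progressPercentage field. The hashing helper is project-specific, but a plausible sketch of the progress printer, assuming it simply reports each additional percent of completed paths, could look like this (the real implementation in the benchmark may differ):

// Illustrative sketch of the progress helper used above, not the project's code.
private void printProgress(final long position, final long total) {
    final int percent = (int) ((position * 100) / total);
    if (percent > progressPercentage) {
        progressPercentage = percent;
        System.out.printf("  %d%% complete%n", percent);
    }
}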
Use of com.hedera.services.state.virtual.ContractValue in project hedera-services by hashgraph.
The class CliffClickMapBench, method multiThreadedReadPut10kEach().
@Benchmark
public void multiThreadedReadPut10kEach(Blackhole blackHole) {
    // Five parallel jobs, each performing 2000 read + put pairs against the shared map.
    IntStream.range(0, 5).parallel().forEach(jobID -> {
        ThreadLocalRandom random = ThreadLocalRandom.current();
        for (int i = 0; i < 2000; i++) {
            blackHole.consume(map.get(random.nextInt(numEntities)));
            map.put(random.nextInt(numEntities), new ContractValue(random.nextLong()));
        }
    });
}
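The snippet assumes the benchmark state already holds a populated map and an entity count. A minimal sketch of such a state, assuming the "Cliff Click" NonBlockingHashMapLong from the high-scale-lib is the map under test; the field names, initial size, and whether the fields live on the benchmark class itself or on a separate state object are assumptions made for this sketch:

import com.hedera.services.state.virtual.ContractValue;
import org.cliffc.high_scale_lib.NonBlockingHashMapLong;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;

@State(Scope.Benchmark)
public class CliffClickMapBenchState {
    // Size and field names are assumptions, not taken from the project.
    final int numEntities = 1_000_000;
    final NonBlockingHashMapLong<ContractValue> map = new NonBlockingHashMapLong<>();
    long nextIndex;

    @Setup(Level.Trial)
    public void populate() {
        // Pre-fill the map so the reads in the benchmark hit existing entries.
        for (long i = 0; i < numEntities; i++) {
            map.put(i, new ContractValue(i));
        }
        nextIndex = numEntities;
    }
}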
Use of com.hedera.services.state.virtual.ContractValue in project hedera-services by hashgraph.
The class CliffClickMapBench, method add().
@Benchmark
public void add() throws Exception {
    map.put(nextIndex, new ContractValue(nextIndex));
    nextIndex++;
}
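Because nextIndex is plain mutable state on the benchmark, this method is only safe when JMH runs it on a single thread, which is the default unless @Threads is configured. If the benchmark were run multi-threaded, one option would be to move the counter into a thread-scoped state object; a minimal sketch, with names chosen here for illustration (keys would still overlap across threads, which is ignored for brevity):

// Illustrative alternative for a multi-threaded run: each benchmark thread gets
// its own counter, so the increment never races.
@State(Scope.Thread)
public static class ThreadIndex {
    long nextIndex = 0;
}

@Benchmark
public void add(ThreadIndex index) {
    map.put(index.nextIndex, new ContractValue(index.nextIndex));
    index.nextIndex++;
}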