Use of com.hedera.services.state.virtual.ContractKey in project hedera-services by hashgraph: class AddBench, method add500Leaves.
@Benchmark
public void add500Leaves(DatabaseMergingState databaseState) throws IOException {
    final long firstLeafPath = databaseState.dataSource.getFirstLeafPath();
    final long lastLeafPath = databaseState.dataSource.getLastLeafPath();
    final long newFirstLeafPath = firstLeafPath + NUMBER_OF_LEAVES_ADDED_PER_FLUSH;
    final long newLastLeafPath =
            lastLeafPath + NUMBER_OF_LEAVES_ADDED_PER_FLUSH + NUMBER_OF_LEAVES_ADDED_PER_FLUSH;
    var internalRecordStream = LongStream.range(firstLeafPath, newFirstLeafPath)
            .mapToObj(path -> new VirtualInternalRecord(path, hash((int) path)));
    var leafRecordStream = LongStream.range(lastLeafPath, newLastLeafPath + 1)
            .mapToObj(path -> new VirtualLeafRecord<>(
                    path, hash((int) path), new ContractKey(path, path), new ContractValue(path)));
    databaseState.dataSource.saveRecords(
            newFirstLeafPath, newLastLeafPath, internalRecordStream, leafRecordStream, Stream.empty());
}
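The path arithmetic above follows from the virtual map's complete binary tree layout: adding N leaves creates N new internal parents, so the first leaf path advances by N while the last leaf path advances by 2N. A minimal standalone sketch of that invariant (the starting paths and the 500-leaf constant are illustrative assumptions, not values taken from the benchmark state):

// Standalone sketch of the leaf-path arithmetic used by add500Leaves.
// The starting paths below are made-up example values.
public final class LeafPathArithmeticSketch {
    private static final long LEAVES_ADDED = 500;

    public static void main(String[] args) {
        final long firstLeafPath = 1_000; // example: tree currently holds 1,001 leaves
        final long lastLeafPath = 2_000;

        // Same arithmetic as the benchmark: first leaf path advances by N, last by 2N.
        final long newFirstLeafPath = firstLeafPath + LEAVES_ADDED;
        final long newLastLeafPath = lastLeafPath + LEAVES_ADDED + LEAVES_ADDED;

        final long leavesBefore = lastLeafPath - firstLeafPath + 1;
        final long leavesAfter = newLastLeafPath - newFirstLeafPath + 1;
        System.out.println(leavesBefore + " leaves -> " + leavesAfter + " leaves"); // 1001 -> 1501
    }
}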
Use of com.hedera.services.state.virtual.ContractKey in project hedera-services by hashgraph: class DatabaseState, method setupDatabase.
@Setup(Level.Trial)
public void setupDatabase() throws IOException {
    System.out.println("dataSourcePath = " + dataSourcePath + " mergingEnabled=" + mergingEnabled);
    if (Files.exists(dataSourcePath)) {
        System.err.println("!!!!!!!!!!!!! Deleting old db.");
        deleteDirectoryAndContents(dataSourcePath);
    }
    if (Files.exists(dataSourceSnapshotPath)) {
        System.err.println("!!!!!!!!!!!!! Deleting old db snapshot.");
        deleteDirectoryAndContents(dataSourceSnapshotPath);
    }
    // create data source
    VirtualLeafRecordSerializer<ContractKey, ContractValue> virtualLeafRecordSerializer =
            new VirtualLeafRecordSerializer<>(
                    (short) 1, DigestType.SHA_384,
                    (short) 1, DataFileCommon.VARIABLE_DATA_SIZE, new ContractKeySupplier(),
                    (short) 1, ContractValue.SERIALIZED_SIZE, new ContractValueSupplier(),
                    true);
    JasperDbBuilder<ContractKey, ContractValue> dbBuilder = new JasperDbBuilder<>();
    dbBuilder.virtualLeafRecordSerializer(virtualLeafRecordSerializer)
            .virtualInternalRecordSerializer(new VirtualInternalRecordSerializer())
            .keySerializer(new ContractKeySerializer())
            .storageDir(dataSourcePath)
            .maxNumOfKeys(500_000_000)
            .preferDiskBasedIndexes(false)
            .internalHashesRamToDiskThreshold(0)
            .mergingEnabled(mergingEnabled);
    dataSource = dbBuilder.build("jdb", "4dbState");
    // populate with initial data
    System.out.printf("Creating initial data set of %,d leaves\n", initialDataSize);
    progressPercentage = 0;
    final long firstLeafPath = initialDataSize;
    final long lastLeafPath = firstLeafPath + initialDataSize;
    var internalRecordStream = LongStream.range(0, firstLeafPath)
            .mapToObj(path -> new VirtualInternalRecord(path, hash((int) path)))
            .peek(internalRecord -> printProgress(internalRecord.getPath(), lastLeafPath));
    var leafRecordStream = LongStream.range(firstLeafPath, lastLeafPath + 1)
            .mapToObj(path -> new VirtualLeafRecord<>(
                    path, hash((int) path), new ContractKey(path, path), new ContractValue(path)))
            .peek(leaf -> printProgress(leaf.getPath(), lastLeafPath));
    dataSource.saveRecords(firstLeafPath, lastLeafPath, internalRecordStream, leafRecordStream, Stream.empty());
    System.out.printf("Done creating initial data set of %,d leaves\n", initialDataSize);
}
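Once setupDatabase has run, the same saveRecords call can also rewrite an individual leaf without changing the tree shape. The following is a minimal sketch, not part of the original DatabaseState class, assuming the dataSource field and the hash helper used above are in scope; the refreshed root record and the new value are illustrative:

// Sketch only (assumes the surrounding DatabaseState fields and helpers are in scope):
// overwrite one existing leaf in place, keeping first/last leaf paths unchanged.
final long firstLeafPath = dataSource.getFirstLeafPath();
final long lastLeafPath = dataSource.getLastLeafPath();
final long targetPath = firstLeafPath; // any path in [firstLeafPath, lastLeafPath]
dataSource.saveRecords(
        firstLeafPath,
        lastLeafPath,
        Stream.of(new VirtualInternalRecord(0, hash(0))), // refreshed root record (illustrative)
        Stream.of(new VirtualLeafRecord<>(
                targetPath,
                hash((int) targetPath),
                new ContractKey(targetPath, targetPath),
                new ContractValue(targetPath + 1))), // new value for the same key
        Stream.empty());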
Use of com.hedera.services.state.virtual.ContractKey in project hedera-services by hashgraph: class StaticEntityAccess, method getStorage.
@Override
public UInt256 getStorage(AccountID id, UInt256 key) {
    final var contractKey = new ContractKey(id.getAccountNum(), key.toArray());
    ContractValue value = storage.get(contractKey);
    return value == null ? UInt256.ZERO : UInt256.fromBytes(Bytes32.wrap(value.getValue()));
}
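A hypothetical call site for this accessor, with made-up contract and slot numbers; the entityAccess variable is assumed to be a configured StaticEntityAccess instance:

// Hypothetical usage; the account number and slot are illustrative only.
final AccountID contractId = AccountID.newBuilder().setAccountNum(1001L).build();
final UInt256 slot = UInt256.valueOf(7L);
final UInt256 word = entityAccess.getStorage(contractId, slot);
// A slot that was never written comes back as UInt256.ZERO rather than null.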
Use of com.hedera.services.state.virtual.ContractKey in project hedera-services by hashgraph: class StateViewTest, method setup.
@BeforeEach
@SuppressWarnings("unchecked")
void setup() throws Throwable {
    // file metadata fixtures
    metadata = new HFileMeta(false, TxnHandlingScenario.MISC_FILE_WACL_KT.asJKey(), expiry, fileMemo);
    immutableMetadata = new HFileMeta(false, StateView.EMPTY_WACL, expiry);
    expectedImmutable = FileGetInfoResponse.FileInfo.newBuilder()
            .setLedgerId(ledgerId)
            .setDeleted(false)
            .setExpirationTime(Timestamp.newBuilder().setSeconds(expiry))
            .setFileID(target)
            .setSize(data.length)
            .build();
    expected = expectedImmutable.toBuilder()
            .setKeys(TxnHandlingScenario.MISC_FILE_WACL_KT.asKey().getKeyList())
            .setMemo(fileMemo)
            .build();
    // account and contract fixtures
    tokenAccount = MerkleAccountFactory.newAccount().isSmartContract(false).tokens(tokenId).get();
    tokenAccount.setNftsOwned(10);
    tokenAccount.setMaxAutomaticAssociations(123);
    tokenAccount.setAlias(TxnHandlingScenario.TOKEN_ADMIN_KT.asKey().getEd25519());
    contract = MerkleAccountFactory.newAccount()
            .alias(create2Address)
            .memo("Stay cold...")
            .numKvPairs(wellKnownNumKvPairs)
            .isSmartContract(true)
            .accountKeys(COMPLEX_KEY_ACCOUNT_KT)
            .proxy(asAccount("0.0.3"))
            .senderThreshold(1_234L)
            .receiverThreshold(4_321L)
            .receiverSigRequired(true)
            .balance(555L)
            .autoRenewPeriod(1_000_000L)
            .deleted(true)
            .expirationTime(9_999_999L)
            .get();
    contracts = (MerkleMap<EntityNum, MerkleAccount>) mock(MerkleMap.class);
    topics = (MerkleMap<EntityNum, MerkleTopic>) mock(MerkleMap.class);
    tokenRels = new MerkleMap<>();
    tokenRels.put(
            EntityNumPair.fromLongs(tokenAccountId.getAccountNum(), tokenId.getTokenNum()),
            new MerkleTokenRelStatus(123L, false, true, true));
    // token fixture
    tokenStore = mock(TokenStore.class);
    token = new MerkleToken(
            Long.MAX_VALUE, 100, 1, "UnfrozenToken", "UnfrozenTokenName", true, true, new EntityId(0, 0, 3));
    token.setMemo(tokenMemo);
    token.setAdminKey(TxnHandlingScenario.TOKEN_ADMIN_KT.asJKey());
    token.setFreezeKey(TxnHandlingScenario.TOKEN_FREEZE_KT.asJKey());
    token.setKycKey(TxnHandlingScenario.TOKEN_KYC_KT.asJKey());
    token.setSupplyKey(COMPLEX_KEY_ACCOUNT_KT.asJKey());
    token.setWipeKey(MISC_ACCOUNT_KT.asJKey());
    token.setFeeScheduleKey(MISC_ACCOUNT_KT.asJKey());
    token.setPauseKey(TxnHandlingScenario.TOKEN_PAUSE_KT.asJKey());
    token.setAutoRenewAccount(EntityId.fromGrpcAccountId(autoRenew));
    token.setExpiry(expiry);
    token.setAutoRenewPeriod(autoRenewPeriod);
    token.setDeleted(true);
    token.setPaused(true);
    token.setTokenType(TokenType.FUNGIBLE_COMMON);
    token.setSupplyType(TokenSupplyType.FINITE);
    token.setFeeScheduleFrom(grpcCustomFees);
    // schedule fixture
    scheduleStore = mock(ScheduleStore.class);
    final var scheduleMemo = "For what but eye and ear";
    parentScheduleCreate = scheduleCreateTxnWith(
            SCHEDULE_ADMIN_KT.asKey(), scheduleMemo, payerAccountId, creatorAccountID,
            MiscUtils.asTimestamp(now.toJava()));
    schedule = MerkleSchedule.from(parentScheduleCreate.toByteArray(), expiry);
    schedule.witnessValidSignature("01234567890123456789012345678901".getBytes());
    schedule.witnessValidSignature("_123456789_123456789_123456789_1".getBytes());
    schedule.witnessValidSignature("_o23456789_o23456789_o23456789_o".getBytes());
    // remaining collaborators (mocked)
    contents = mock(Map.class);
    attrs = mock(Map.class);
    bytecode = mock(Map.class);
    specialFiles = mock(MerkleSpecialFiles.class);
    mockTokenRelsFn = (BiFunction<StateView, EntityNum, List<TokenRelationship>>) mock(BiFunction.class);
    StateView.tokenRelsFn = mockTokenRelsFn;
    final var uniqueTokens = new MerkleMap<EntityNumPair, MerkleUniqueToken>();
    uniqueTokens.put(targetNftKey, targetNft);
    uniqueTokens.put(treasuryNftKey, treasuryNft);
    storage = (VirtualMap<VirtualBlobKey, VirtualBlobValue>) mock(VirtualMap.class);
    contractStorage = (VirtualMap<ContractKey, ContractValue>) mock(VirtualMap.class);
    // assemble the StateView under test
    children = new MutableStateChildren();
    children.setUniqueTokens(uniqueTokens);
    children.setAccounts(contracts);
    children.setTokenAssociations(tokenRels);
    children.setSpecialFiles(specialFiles);
    networkInfo = mock(NetworkInfo.class);
    subject = new StateView(tokenStore, scheduleStore, children, networkInfo);
    subject.fileAttrs = attrs;
    subject.fileContents = contents;
    subject.contractBytecode = bytecode;
}
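Tests that go on to read contract storage through the view can stub the mocked contractStorage map with a concrete ContractKey/ContractValue pair. A minimal sketch, assuming BDD-style Mockito stubbing (org.mockito.BDDMockito.given) and JUnit's assertEquals; the contract number, slot, and value are made up and this is not part of the original setup method:

// Illustrative stub only; the contract number, slot, and value are made up.
final var storageKey = new ContractKey(1001L, 1L);
final var storageValue = new ContractValue(42L);
given(contractStorage.get(storageKey)).willReturn(storageValue);
// The view (or a direct assertion) then sees the stubbed word for that slot:
assertEquals(storageValue, contractStorage.get(storageKey));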