Use of com.hedera.services.bdd.spec.infrastructure.OpProvider in project hedera-services by hashgraph.
From the class FileExpansionLoadProvider, method fileExpansionsFactory:
private Function<HapiApiSpec, OpProvider> fileExpansionsFactory() {
    final SplittableRandom r = new SplittableRandom();
    final Set<String> usableTargets = ConcurrentHashMap.newKeySet();
    final LongFunction<String> targetNameFn = i -> "expandingFile" + i;
    final AtomicInteger nextTargetNum = new AtomicInteger(numActiveTargets.get());
    final var key = "multi";
    final var waclShape = KeyShape.listOf(SIMPLE, threshOf(1, 3), listOf(2));
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            final List<HapiSpecOperation> ops = new ArrayList<>();
            ops.add(newKeyNamed(key).shape(waclShape));
            for (int i = 0, n = numActiveTargets.get(); i < n; i++) {
                ops.add(fileCreate(targetNameFn.apply(i))
                        .key(key)
                        .noLogging()
                        .contents(DATA_CHUNK)
                        .payingWith(GENESIS));
            }
            return ops;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            HapiSpecOperation op;
            if (usableTargets.size() < numActiveTargets.get()) {
                final var name = targetNameFn.apply(nextTargetNum.getAndIncrement());
                op = fileCreate(name)
                        .noLogging()
                        .key(key)
                        .contents(DATA_CHUNK)
                        .payingWith(GENESIS)
                        .deferStatusResolution()
                        .exposingNumTo(num -> {
                            usableTargets.add(name);
                        });
            } else {
                final var skips = r.nextInt(usableTargets.size());
                final var iter = usableTargets.iterator();
                try {
                    for (int i = 0; i < skips; i++) {
                        iter.next();
                    }
                    final var target = iter.next();
                    op = fileAppend(target)
                            .noLogging()
                            .deferStatusResolution()
                            .payingWith(GENESIS)
                            .content(DATA_CHUNK)
                            .hasKnownStatusFrom(MAX_FILE_SIZE_EXCEEDED, SUCCESS)
                            .alertingPost(code -> {
                                if (code == MAX_FILE_SIZE_EXCEEDED) {
                                    log.info("File {} reached max size, no longer in rotation", target);
                                    usableTargets.remove(target);
                                }
                            });
                } catch (Exception ignore) {
                    op = noOp();
                }
            }
            return Optional.of(op);
        }
    };
}
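Both overrides above trace the outline of the OpProvider contract: a one-time batch of initializers, then a stream of operations pulled one at a time. A minimal sketch of that interface, reconstructed only from the methods these factories override (the default body and comments are assumptions, not the project's actual declaration):

public interface OpProvider {
    // One-time setup operations, run before the load phase starts.
    default List<HapiSpecOperation> suggestedInitializers() {
        return Collections.emptyList();
    }

    // The next operation to submit, or Optional.empty() once the provider is exhausted.
    Optional<HapiSpecOperation> get();
}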
Use of com.hedera.services.bdd.spec.infrastructure.OpProvider in project hedera-services by hashgraph.
From the class TokenRelStatusChanges, method statusChangesFactory:
private Function<HapiApiSpec, OpProvider> statusChangesFactory() {
    var nowAssociating = new AtomicBoolean(Boolean.FALSE);
    var relatableTokens = new AtomicInteger();
    var relatableAccounts = new AtomicInteger();
    List<String> tokens = new ArrayList<>();
    List<String> accounts = new ArrayList<>();
    var nextToken = new AtomicInteger(-1);
    var nextAccount = new AtomicInteger(-1);
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            var ciProps = spec.setup().ciPropertiesMap();
            relatableTokens.set(ciProps.getInteger("numTokens"));
            relatableAccounts.set(ciProps.getInteger("numAccounts"));
            List<HapiSpecOperation> initializers = new ArrayList<>();
            initializers.add(tokenOpsEnablement().hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS));
            IntStream.range(0, relatableTokens.get()).mapToObj(i -> "token" + i).forEach(tokens::add);
            initializers.add(inParallel(tokens.stream()
                    .map(token -> tokenCreate(token).hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS))
                    .toArray(HapiSpecOperation[]::new)));
            IntStream.range(0, relatableAccounts.get()).mapToObj(i -> "account" + i).forEach(accounts::add);
            initializers.add(inParallel(accounts.stream()
                    .map(account -> cryptoCreate(account).hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS))
                    .toArray(HapiSpecOperation[]::new)));
            return initializers;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            HapiSpecOperation op;
            final int numTokens = relatableTokens.get();
            final int numAccounts = relatableAccounts.get();
            int token = nextToken.get() % numTokens;
            int account = nextAccount.incrementAndGet() % numAccounts;
            if (account == 0) {
                token = nextToken.incrementAndGet() % numTokens;
                if (token == 0) {
                    var current = nowAssociating.get();
                    nowAssociating.compareAndSet(current, !current);
                }
            }
            if (nowAssociating.get()) {
                op = tokenAssociate(accounts.get(account), tokens.get(token))
                        .fee(ONE_HUNDRED_HBARS)
                        .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
                        .hasKnownStatusFrom(OK, SUCCESS, DUPLICATE_TRANSACTION, TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT)
                        .noLogging()
                        .deferStatusResolution();
            } else {
                op = tokenDissociate(accounts.get(account), tokens.get(token))
                        .fee(ONE_HUNDRED_HBARS)
                        .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
                        .hasKnownStatusFrom(OK, SUCCESS, DUPLICATE_TRANSACTION, TOKEN_NOT_ASSOCIATED_TO_ACCOUNT)
                        .noLogging()
                        .deferStatusResolution();
            }
            return Optional.of(op);
        }
    };
}
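The index bookkeeping in get() is the subtle part: the account counter advances on every call, the token counter advances only when the account counter wraps, and the associate/dissociate direction flips only when both wrap. A standalone sketch of the same schedule with plain ints (a hypothetical demo class, no HAPI operations):

public class RelStatusScheduleDemo {
    public static void main(String[] args) {
        int numTokens = 2;
        int numAccounts = 3;
        int nextToken = -1;
        int nextAccount = -1;
        boolean nowAssociating = false;
        for (int call = 0; call < 2 * numTokens * numAccounts; call++) {
            // Same arithmetic as the provider above, with the atomics replaced by ints.
            int token = nextToken % numTokens;
            int account = ++nextAccount % numAccounts;
            if (account == 0) {
                token = ++nextToken % numTokens;
                if (token == 0) {
                    nowAssociating = !nowAssociating;
                }
            }
            System.out.printf("%s account%d <-> token%d%n",
                    nowAssociating ? "associate" : "dissociate", account, token);
        }
    }
}

With numTokens = 2 and numAccounts = 3 this prints six associate pairs followed by six dissociate pairs, which is exactly the rotation the provider submits.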
Use of com.hedera.services.bdd.spec.infrastructure.OpProvider in project hedera-services by hashgraph.
From the class TokenTransferBasicLoadTest, method tokenCreatesFactory:
private Function<HapiApiSpec, OpProvider> tokenCreatesFactory(PerfTestLoadSettings settings) {
    int numTotalTokens = settings.getTotalTokens();
    int totalClients = settings.getTotalClients();
    int numActiveTokens = (totalClients >= 1) ? numTotalTokens / totalClients : numTotalTokens;
    AtomicInteger remaining = new AtomicInteger(numActiveTokens - 1);
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            return Collections.emptyList();
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            int next;
            if ((next = remaining.getAndDecrement()) < 0) {
                return Optional.empty();
            }
            var payingTreasury = String.format("0.0.%d", settings.getTestTreasureStartAccount() + next);
            var op = tokenCreate(tokenRegistryName(next))
                    .payingWith(DEFAULT_PAYER)
                    .signedBy(DEFAULT_PAYER)
                    .fee(ONE_HUNDRED_HBARS)
                    .initialSupply(100_000_000_000L)
                    .treasury(payingTreasury)
                    .hasRetryPrecheckFrom(BUSY, PLATFORM_TRANSACTION_NOT_CREATED, DUPLICATE_TRANSACTION, INSUFFICIENT_PAYER_BALANCE)
                    .hasPrecheckFrom(DUPLICATE_TRANSACTION, OK)
                    .hasKnownStatusFrom(SUCCESS, TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT, TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED, FAIL_INVALID)
                    .suppressStats(true)
                    .noLogging();
            return Optional.of(op);
        }
    };
}
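The countdown idiom is what makes this provider finite: remaining starts at numActiveTokens - 1, each call claims one index with getAndDecrement(), and a negative value means every token has been handed out, so the provider returns Optional.empty() and the caller can stop polling. A generic, hypothetical version of the same pattern:

import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.IntFunction;
import java.util.function.Supplier;

// Hypothetical helper (not part of hedera-services): yields exactly n items,
// then signals exhaustion with Optional.empty().
public final class FiniteSupplier<T> implements Supplier<Optional<T>> {
    private final AtomicInteger remaining;
    private final IntFunction<T> itemFn;

    public FiniteSupplier(int n, IntFunction<T> itemFn) {
        // Mirrors `new AtomicInteger(numActiveTokens - 1)` above: indices run n-1 down to 0.
        this.remaining = new AtomicInteger(n - 1);
        this.itemFn = itemFn;
    }

    @Override
    public Optional<T> get() {
        int next = remaining.getAndDecrement();
        return next < 0 ? Optional.empty() : Optional.of(itemFn.apply(next));
    }
}

Because getAndDecrement() is atomic, several submission threads can share one provider without claiming the same index twice, which is why these factories keep their counters in AtomicInteger rather than plain ints.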
Use of com.hedera.services.bdd.spec.infrastructure.OpProvider in project hedera-services by hashgraph.
From the class TokenTransfersLoadProvider, method tokenTransfersFactory:
private Function<HapiApiSpec, OpProvider> tokenTransfersFactory() {
    var firstDir = new AtomicBoolean(Boolean.TRUE);
    var balanceInit = new AtomicLong();
    var tokensPerTxn = new AtomicInteger();
    var sendingAccountsPerToken = new AtomicInteger();
    var receivingAccountsPerToken = new AtomicInteger();
    List<String> treasuries = new ArrayList<>();
    Map<String, List<String>> senders = new HashMap<>();
    Map<String, List<String>> receivers = new HashMap<>();
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            var ciProps = spec.setup().ciPropertiesMap();
            balanceInit.set(ciProps.getLong("balanceInit"));
            tokensPerTxn.set(ciProps.getInteger("tokensPerTxn"));
            sendingAccountsPerToken.set(ciProps.getInteger("sendingAccountsPerToken"));
            receivingAccountsPerToken.set(ciProps.getInteger("receivingAccountsPerToken"));
            var initialSupply = (sendingAccountsPerToken.get() + receivingAccountsPerToken.get()) * balanceInit.get();
            List<HapiSpecOperation> initializers = new ArrayList<>();
            initializers.add(tokenOpsEnablement());
            /* Temporary, can be removed after the public testnet state used in
               restart tests includes a fee schedule with HTS resource prices. */
            if (spec.setup().defaultNode().equals(asAccount("0.0.3"))) {
                initializers.add(uploadDefaultFeeSchedules(GENESIS));
            } else {
                initializers.add(withOpContext((spec, opLog) -> {
                    log.info("\n\n" + bannerWith("Waiting for a fee schedule with token ops!"));
                    boolean hasKnownHtsFeeSchedules = false;
                    SysFileSerde<String> serde = new FeesJsonToGrpcBytes();
                    while (!hasKnownHtsFeeSchedules) {
                        var query = QueryVerbs.getFileContents(FEE_SCHEDULE).fee(10_000_000_000L);
                        try {
                            allRunFor(spec, query);
                            var contents = query.getResponse().getFileGetContents().getFileContents().getContents();
                            var schedules = serde.fromRawFile(contents.toByteArray());
                            hasKnownHtsFeeSchedules = schedules.contains("TokenCreate");
                        } catch (Exception e) {
                            var msg = e.toString();
                            msg = msg.substring(msg.indexOf(":") + 2);
                            log.info("Couldn't check for HTS fee schedules---'{}'", msg);
                        }
                        TimeUnit.SECONDS.sleep(3);
                    }
                    log.info("\n\n" + bannerWith("A fee schedule with token ops now available!"));
                    spec.tryReinitializingFees();
                }));
            }
            for (int i = 0; i < tokensPerTxn.get(); i++) {
                var token = "token" + i;
                var treasury = "treasury" + i;
                initializers.add(cryptoCreate(treasury));
                initializers.add(tokenCreate(token).treasury(treasury).initialSupply(initialSupply));
                treasuries.add(treasury);
                for (int j = 0; j < sendingAccountsPerToken.get(); j++) {
                    var sender = token + "sender" + j;
                    senders.computeIfAbsent(token, ignore -> new ArrayList<>()).add(sender);
                    initializers.add(cryptoCreate(sender));
                    initializers.add(tokenAssociate(sender, token));
                    initializers.add(cryptoTransfer(moving(balanceInit.get(), token).between(treasury, sender)));
                }
                for (int j = 0; j < receivingAccountsPerToken.get(); j++) {
                    var receiver = token + "receiver" + j;
                    receivers.computeIfAbsent(token, ignore -> new ArrayList<>()).add(receiver);
                    initializers.add(cryptoCreate(receiver));
                    initializers.add(tokenAssociate(receiver, token));
                    initializers.add(cryptoTransfer(moving(balanceInit.get(), token).between(treasury, receiver)));
                }
            }
            for (HapiSpecOperation op : initializers) {
                if (op instanceof HapiTxnOp) {
                    ((HapiTxnOp) op).hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS);
                }
            }
            return initializers;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            HapiSpecOperation op;
            var numTokens = tokensPerTxn.get();
            var numSenders = sendingAccountsPerToken.get();
            var numReceivers = receivingAccountsPerToken.get();
            if (firstDir.get()) {
                var xfers = new TokenMovement[numTokens * numSenders];
                for (int i = 0; i < numTokens; i++) {
                    var token = "token" + i;
                    for (int j = 0; j < numSenders; j++) {
                        var receivers = new String[numReceivers];
                        for (int k = 0; k < numReceivers; k++) {
                            receivers[k] = token + "receiver" + k;
                        }
                        xfers[i * numSenders + j] = moving(numReceivers, token)
                                .distributing(token + "sender" + j, receivers);
                    }
                }
                op = cryptoTransfer(xfers)
                        .hasKnownStatusFrom(OK, DUPLICATE_TRANSACTION, SUCCESS, UNKNOWN, INSUFFICIENT_PAYER_BALANCE)
                        .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
                        .hasPrecheckFrom(OK, PLATFORM_NOT_ACTIVE)
                        .noLogging()
                        .deferStatusResolution();
                firstDir.set(Boolean.FALSE);
            } else {
                var xfers = new TokenMovement[numTokens * numReceivers];
                for (int i = 0; i < numTokens; i++) {
                    var token = "token" + i;
                    for (int j = 0; j < numReceivers; j++) {
                        var senders = new String[numSenders];
                        for (int k = 0; k < numSenders; k++) {
                            senders[k] = token + "sender" + k;
                        }
                        xfers[i * numReceivers + j] = moving(numSenders, token)
                                .distributing(token + "receiver" + j, senders);
                    }
                }
                op = cryptoTransfer(xfers)
                        .hasKnownStatusFrom(OK, DUPLICATE_TRANSACTION, SUCCESS, UNKNOWN, INSUFFICIENT_PAYER_BALANCE)
                        .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
                        .hasPrecheckFrom(OK, PLATFORM_NOT_ACTIVE)
                        .noLogging()
                        .deferStatusResolution();
                firstDir.set(Boolean.TRUE);
            }
            return Optional.of(op);
        }
    };
}
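All of these factories are consumed the same way: build the provider from the spec, run its initializers once, then poll get() until it stops yielding operations. A hypothetical driver sketch (not the project's actual load runner), reusing the allRunFor helper already seen in the initializer above:

// Hypothetical consumer of a Function<HapiApiSpec, OpProvider> factory.
static void drive(HapiApiSpec spec, Function<HapiApiSpec, OpProvider> factory, int maxOps) {
    final OpProvider provider = factory.apply(spec);
    for (HapiSpecOperation init : provider.suggestedInitializers()) {
        allRunFor(spec, init);
    }
    for (int i = 0; i < maxOps; i++) {
        final Optional<HapiSpecOperation> next = provider.get();
        if (next.isEmpty()) {
            break;                      // provider exhausted, e.g. all tokens already created
        }
        allRunFor(spec, next.get());    // ops built with deferStatusResolution() return quickly
    }
}

Under such a loop the tokenTransfersFactory provider flips direction on every call, so successive transfers first fan balances out from the senders and then sweep them back from the receivers.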
Use of com.hedera.services.bdd.spec.infrastructure.OpProvider in project hedera-services by hashgraph.
From the class UniqueTokenStateSetup, method nftFactory:
private Function<HapiApiSpec, OpProvider> nftFactory() {
    final AtomicInteger uniqueTokensCreated = new AtomicInteger(0);
    final AtomicInteger nftsMintedForCurrentUniqueToken = new AtomicInteger(0);
    final AtomicBoolean done = new AtomicBoolean(false);
    final AtomicReference<String> currentUniqueToken = new AtomicReference<>(uniqueTokenNameFn.apply(0));
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            final var numTreasuries = numTokens.get() / UNIQ_TOKENS_PER_TREASURY
                    + Math.min(1, numTokens.get() % UNIQ_TOKENS_PER_TREASURY);
            final List<HapiSpecOperation> inits = new ArrayList<>();
            inits.add(inParallel(IntStream.range(0, numTreasuries)
                    .mapToObj(i -> cryptoCreate(treasuryNameFn.apply(i))
                            .payingWith(GENESIS)
                            .balance(0L)
                            .noLogging()
                            .key(GENESIS)
                            .hasRetryPrecheckFrom(DUPLICATE_TRANSACTION)
                            .hasKnownStatusFrom(SUCCESS, UNKNOWN, TRANSACTION_EXPIRED)
                            .deferStatusResolution())
                    .toArray(HapiSpecOperation[]::new)));
            inits.add(sleepFor(5_000L));
            inits.addAll(burstedUniqCreations(UNIQ_TOKENS_BURST_SIZE, numTreasuries, UNIQ_TOKENS_POST_BURST_PAUSE_MS));
            return inits;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            if (done.get()) {
                return Optional.empty();
            }
            final var currentToken = currentUniqueToken.get();
            if (nftsMintedForCurrentUniqueToken.get() < numNftsPerToken.get()) {
                final List<ByteString> allMeta = new ArrayList<>();
                final int noMoreThan = numNftsPerToken.get() - nftsMintedForCurrentUniqueToken.get();
                for (int i = 0, n = Math.min(noMoreThan, numNftsPerMintOp.get()); i < n; i++) {
                    final var nextSerialNo = nftsMintedForCurrentUniqueToken.incrementAndGet();
                    allMeta.add(metadataFor(currentToken, nextSerialNo));
                }
                final var op = mintToken(currentToken, allMeta)
                        .payingWith(GENESIS)
                        .rememberingNothing()
                        .deferStatusResolution()
                        .fee(ONE_HBAR)
                        .hasPrecheckFrom(OK, DUPLICATE_TRANSACTION)
                        .hasKnownStatusFrom(SUCCESS, UNKNOWN, OK, TRANSACTION_EXPIRED, INVALID_TOKEN_ID)
                        .noLogging();
                return Optional.of(op);
            } else {
                nftsMintedForCurrentUniqueToken.set(0);
                final var nextUniqTokenNo = uniqueTokensCreated.incrementAndGet();
                currentUniqueToken.set(uniqueTokenNameFn.apply(nextUniqTokenNo));
                if (nextUniqTokenNo >= numTokens.get()) {
                    System.out.println("Done creating " + nextUniqTokenNo + " unique tokens w/ approximately "
                            + (numNftsPerToken.get() * nextUniqTokenNo) + " NFTs");
                    done.set(true);
                }
                return Optional.empty();
            }
        }
    };
}
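Two small size calculations drive this provider: the initializer shares each treasury across UNIQ_TOKENS_PER_TREASURY tokens (a ceiling division), and get() caps each mint at numNftsPerMintOp serial numbers, so a full token needs a ceiling-division number of mint calls. A quick arithmetic sketch with hypothetical values standing in for the class's numTokens, numNftsPerToken, and numNftsPerMintOp fields:

public class UniqSetupSizingDemo {
    // Hypothetical stand-in for the class constant of the same name.
    static final int UNIQ_TOKENS_PER_TREASURY = 100;

    public static void main(String[] args) {
        int numTokens = 1_000;
        int numNftsPerToken = 250;
        int numNftsPerMintOp = 10;

        // Same ceiling division as in suggestedInitializers() above.
        int numTreasuries = numTokens / UNIQ_TOKENS_PER_TREASURY
                + Math.min(1, numTokens % UNIQ_TOKENS_PER_TREASURY);

        // Each get() call mints at most numNftsPerMintOp serials, so a full token
        // needs ceil(numNftsPerToken / numNftsPerMintOp) mint operations.
        int mintOpsPerToken = (numNftsPerToken + numNftsPerMintOp - 1) / numNftsPerMintOp;

        long totalNfts = (long) numTokens * numNftsPerToken;

        System.out.printf("%d treasuries, %d mint ops per token, %d NFTs in total%n",
                numTreasuries, mintOpsPerToken, totalNfts);
    }
}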