Use of com.hedera.services.bdd.spec.infrastructure.OpProvider in project hedera-services by hashgraph.
The class FibonacciPlusLoadProvider, method contractOpsFactory().
private Function<HapiApiSpec, OpProvider> contractOpsFactory() {
    final String civilian = "civilian";
    final String bytecode = "bytecode";
    final SplittableRandom random = new SplittableRandom(1_234_567L);
    final IntFunction<String> contractNameFn = i -> "contract" + i;
    final int r = powerLawBaseReciprocal.get();
    final DoubleUnaryOperator logBaseReciprocal = x -> Math.log(x) / Math.log(r);
    final int numDiscreteSizes = (int) ceil(logBaseReciprocal.applyAsDouble(numContracts.get() * (r - 1)));

    double scale = powerLawScale.get();
    int numSlots = (int) Math.pow(scale, numDiscreteSizes - 1) * smallestNumSlots.get();
    int numContractsWithThisManySlots = 1;
    int nextContractNum = 0;
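    // The sizing below follows a power law: the loop starts with one contract at the largest
    // size and, at each step, multiplies the contract count by r while dividing the slot count
    // by scale. After d steps the total contract count is the geometric sum
    // 1 + r + ... + r^(d-1) = (r^d - 1) / (r - 1); setting that approximately equal to
    // numContracts and solving for d gives d ~= log_r(numContracts * (r - 1)), which is the
    // numDiscreteSizes computed above.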
    for (int i = 0; i < numDiscreteSizes; i++) {
        log.info("Will use {} contracts with {} slots", numContractsWithThisManySlots, numSlots);
        for (int j = 0; j < numContractsWithThisManySlots; j++) {
            final var thisContractNum = nextContractNum++;
            final var thisContract = contractNameFn.apply(thisContractNum);
            contractSlots.put(thisContract, numSlots);
            if (validateStorage.get()) {
                final var slots = new BigInteger[numSlots];
                Arrays.fill(slots, BigInteger.ZERO);
                contractStorage.put(thisContract, slots);
            }
        }
        numSlots /= scale;
        numContractsWithThisManySlots *= r;
    }
    log.info("Will use {} contracts in total", nextContractNum);
    numContracts.set(nextContractNum);
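
    /* randomCallChoice below picks an already-created contract by advancing the set iterator a
     * random number of steps. Note that the bound random.nextInt(n - 1) is re-drawn on every
     * iteration, so the walk stops as soon as a draw is <= i; the choice is therefore biased
     * toward elements early in the iteration order rather than uniform, which is generally
     * acceptable for load generation. */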
    Supplier<String> randomCallChoice = () -> {
        final var iter = createdSoFar.iterator();
        final var n = createdSoFar.size();
        if (n == 1) {
            return iter.next();
        }
        for (int i = 0; i < random.nextInt(n - 1); i++, iter.next()) {
            /* No-op */
        }
        return iter.next();
    };

    final var that = this;

    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            final List<HapiSpecOperation> inits = new ArrayList<>();
            inits.add(fileCreate(bytecode).path(FIBONACCI_PLUS_PATH).noLogging().payingWith(GENESIS));
            inits.add(cryptoCreate(civilian).balance(100 * ONE_MILLION_HBARS).payingWith(GENESIS));
            return inits;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            final var aCallNum = submittedOps.incrementAndGet();
            if (aCallNum == 1) {
                effStart.set(Instant.now());
            }
            final var choice = (createdSoFar.isEmpty() || random.nextDouble() > MIN_CALL_PROB)
                    ? contractNameFn.apply(random.nextInt(numContracts.get()))
                    : randomCallChoice.get();
            final HapiSpecOperation op;
            if (createdSoFar.contains(choice)) {
                final var n = slotsPerCall.get();
                final int[] targets = new int[n];
                final var m = contractSlots.get(choice);
                for (int i = 0; i < n; i++) {
                    targets[i] = random.nextInt(m);
                }
                final var targetsDesc = Arrays.toString(targets);
                if (verbose.get()) {
                    log.info("Calling {} with targets {} and fibN {}", choice, targetsDesc, fibN.get());
                }
                op = contractCall(choice, ADD_NTH_FIB_ABI, targets, fibN.get())
                        .noLogging()
                        .payingWith(civilian)
                        .gas(GAS_TO_OFFER)
                        .exposingGasTo((code, gas) -> {
                            if (verbose.get()) {
                                log.info("(Tried to) call {} (targets = {}, fibN = {}) with {} gas --> {}",
                                        choice, targetsDesc, fibN.get(), gas, code);
                            }
                            that.observeExposedGas(gas);
                            if (code == SUCCESS && validateStorage.get()) {
                                final var curSlots = contractStorage.get(choice);
                                final var newSlots = Arrays.copyOf(curSlots, m);
                                for (int i = 0; i < n; i++) {
                                    final var j = targets[i];
                                    newSlots[j] = newSlots[j].add(fibNValue.get());
                                }
                                contractStorage.put(choice, newSlots);
                            }
                        })
                        .hasKnownStatusFrom(SUCCESS, CONTRACT_REVERT_EXECUTED, INSUFFICIENT_GAS)
                        .deferStatusResolution();
            } else {
                final var numSlots = contractSlots.get(choice);
                op = contractCreate(choice, FIBONACCI_PLUS_CONSTRUCTOR_ABI, numSlots)
                        .bytecode(bytecode)
                        .payingWith(civilian)
                        .balance(0L)
                        .gas(GAS_TO_OFFER)
                        .exposingGasTo((code, gas) -> {
                            if (code == SUCCESS) {
                                createdSoFar.add(choice);
                            }
                            log.info("(Tried to) create {} ({} slots) with {} gas --> {}",
                                    choice, numSlots, gas, code);
                            that.observeExposedGas(gas);
                        })
                        .noLogging()
                        .hasKnownStatusFrom(SUCCESS, INSUFFICIENT_GAS)
                        .deferStatusResolution();
            }
            return Optional.of(op);
        }
    };
}
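
Every factory on this page follows the same shape: build any shared state up front, then return a spec -> OpProvider lambda whose suggestedInitializers() runs one-time setup and whose get() yields one operation per call. The sketch below distills that skeleton; it is illustrative only: the factory name, the "payer" account, and the single-transfer workload are invented, and it assumes OpProvider exposes exactly the two methods used above.

private Function<HapiApiSpec, OpProvider> minimalOpsFactory() {
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            // One-time setup, run before the load loop starts (illustrative account and balance)
            return List.of(cryptoCreate("payer").balance(ONE_MILLION_HBARS).payingWith(GENESIS));
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            // One operation per call; returning Optional.empty() signals nothing is left to submit
            return Optional.of(cryptoTransfer(tinyBarsFromTo("payer", GENESIS, 1L))
                    .payingWith("payer")
                    .noLogging()
                    .deferStatusResolution());
        }
    };
}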
The class AdHocTokenTransfers, method tokenTransfersFactory().
private Function<HapiApiSpec, OpProvider> tokenTransfersFactory() {
    var firstDir = new AtomicBoolean(Boolean.TRUE);
    var balanceInit = new AtomicLong();
    var tokensPerTxn = new AtomicInteger();
    var sendingAccountsPerToken = new AtomicInteger();
    var receivingAccountsPerToken = new AtomicInteger();
    List<String> treasuries = new ArrayList<>();
    Map<String, List<String>> senders = new HashMap<>();
    Map<String, List<String>> receivers = new HashMap<>();
    String targetAccount = targetNodeAccount();

    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            var ciProps = spec.setup().ciPropertiesMap();
            balanceInit.set(ciProps.getLong("balanceInit"));
            tokensPerTxn.set(ciProps.getInteger("tokensPerTxn"));
            sendingAccountsPerToken.set(ciProps.getInteger("sendingAccountsPerToken"));
            receivingAccountsPerToken.set(ciProps.getInteger("receivingAccountsPerToken"));
            var initialSupply = (sendingAccountsPerToken.get() + receivingAccountsPerToken.get()) * balanceInit.get();
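            // Each treasury is created with enough supply to seed every sender and receiver
            // of its token with balanceInit units via the cryptoTransfers below.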
            List<HapiSpecOperation> initializers = new ArrayList<>();
            for (int i = 0; i < tokensPerTxn.get(); i++) {
                var token = "token" + i;
                var treasury = "treasury" + i;
                initializers.add(cryptoCreate(treasury));
                initializers.add(tokenCreate(token).treasury(treasury).initialSupply(initialSupply));
                treasuries.add(treasury);
                for (int j = 0; j < sendingAccountsPerToken.get(); j++) {
                    var sender = token + "sender" + j;
                    senders.computeIfAbsent(token, ignore -> new ArrayList<>()).add(sender);
                    initializers.add(cryptoCreate(sender)
                            .balance(5_000_000_000_000L)
                            .withRecharging()
                            .rechargeWindow(3));
                    initializers.add(tokenAssociate(sender, token));
                    initializers.add(cryptoTransfer(moving(balanceInit.get(), token).between(treasury, sender)));
                }
                for (int j = 0; j < receivingAccountsPerToken.get(); j++) {
                    var receiver = token + "receiver" + j;
                    receivers.computeIfAbsent(token, ignore -> new ArrayList<>()).add(receiver);
                    initializers.add(cryptoCreate(receiver)
                            .balance(5_000_000_000_000L)
                            .withRecharging()
                            .rechargeWindow(3));
                    initializers.add(tokenAssociate(receiver, token));
                    initializers.add(cryptoTransfer(moving(balanceInit.get(), token).between(treasury, receiver)));
                }
            }
            for (HapiSpecOperation op : initializers) {
                ((HapiTxnOp) op).hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS).setNode(targetAccount);
            }
            return initializers;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            HapiSpecOperation op;
            var numTokens = tokensPerTxn.get();
            var numSenders = sendingAccountsPerToken.get();
            var numReceivers = receivingAccountsPerToken.get();
            String effPayer = null;
            var now = "" + Instant.now();
            if (firstDir.get()) {
                var xfers = new TokenMovement[numTokens * numSenders];
                for (int i = 0; i < numTokens; i++) {
                    var token = "token" + i;
                    for (int j = 0; j < numSenders; j++) {
                        var receivers = new String[numReceivers];
                        for (int k = 0; k < numReceivers; k++) {
                            receivers[k] = token + "receiver" + k;
                        }
                        var source = token + "sender" + j;
                        if (effPayer == null) {
                            effPayer = source;
                        }
                        xfers[i * numSenders + j] = moving(numReceivers, token).distributing(source, receivers);
                    }
                }
                op = cryptoTransfer(xfers)
                        .hasKnownStatusFrom(NOISY_ALLOWED_STATUSES)
                        .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
                        .memo(now)
                        .payingWith(effPayer)
                        .setNode(targetAccount)
                        .noLogging()
                        .deferStatusResolution();
                firstDir.set(Boolean.FALSE);
            } else {
                var xfers = new TokenMovement[numTokens * numReceivers];
                for (int i = 0; i < numTokens; i++) {
                    var token = "token" + i;
                    for (int j = 0; j < numReceivers; j++) {
                        var senders = new String[numSenders];
                        for (int k = 0; k < numSenders; k++) {
                            senders[k] = token + "sender" + k;
                        }
                        var source = token + "receiver" + j;
                        if (effPayer == null) {
                            effPayer = source;
                        }
                        xfers[i * numReceivers + j] = moving(numSenders, token).distributing(source, senders);
                    }
                }
                op = cryptoTransfer(xfers)
                        .hasKnownStatusFrom(NOISY_ALLOWED_STATUSES)
                        .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
                        .memo(now)
                        .payingWith(effPayer)
                        .setNode(targetAccount)
                        .noLogging()
                        .deferStatusResolution();
                firstDir.set(Boolean.TRUE);
            }
            return Optional.of(op);
        }
    };
}
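
The two branches form a self-sustaining round trip: in the first leg every sender of a token distributes one unit to each receiver, and in the second leg the receivers distribute back. Below is a standalone simulation of that bookkeeping in plain Java, with no HAPI dependencies; it assumes that moving(amount, token).distributing(source, parties...) debits amount from the source and splits it evenly across the listed parties, and the account counts and starting balance are invented.

class AlternatingTokenTransferSketch {
    public static void main(String[] args) {
        int numSenders = 2, numReceivers = 3;   // hypothetical sizes
        long balanceInit = 100;                 // hypothetical starting balance
        long[] senders = new long[numSenders];
        long[] receivers = new long[numReceivers];
        java.util.Arrays.fill(senders, balanceInit);
        java.util.Arrays.fill(receivers, balanceInit);
        for (int round = 0; round < 4; round++) {
            if (round % 2 == 0) {
                // First direction: each sender distributes one unit to every receiver
                for (int j = 0; j < numSenders; j++) {
                    senders[j] -= numReceivers;
                    for (int k = 0; k < numReceivers; k++) {
                        receivers[k] += 1;
                    }
                }
            } else {
                // Second direction: each receiver distributes one unit back to every sender
                for (int j = 0; j < numReceivers; j++) {
                    receivers[j] -= numSenders;
                    for (int k = 0; k < numSenders; k++) {
                        senders[k] += 1;
                    }
                }
            }
            System.out.println("After round " + round
                    + ": senders=" + java.util.Arrays.toString(senders)
                    + " receivers=" + java.util.Arrays.toString(receivers));
        }
        // Under this reading, balances oscillate around balanceInit, so the workload never
        // drains any account (HBAR fees aside) and can run indefinitely.
    }
}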
The class TokenTransferBasicLoadTest, method activeTokenAssociatesFactory().
private Function<HapiApiSpec, OpProvider> activeTokenAssociatesFactory(PerfTestLoadSettings settings) {
    int numTotalTokens = settings.getTotalTokens();
    int numActiveTokenAccounts = settings.getTotalTestTokenAccounts();
    int totalClients = settings.getTotalClients();
    int numActiveTokens = (totalClients >= 1) ? numTotalTokens / totalClients : numTotalTokens;
    AtomicLong remainingAssociations = new AtomicLong(numActiveTokens * numActiveTokenAccounts - 1);
    if (log.isDebugEnabled()) {
        log.debug("Total active token accounts {}, total test tokens {}, my portion of tokens {}",
                numActiveTokenAccounts, numTotalTokens, numActiveTokens);
    }
    long startAccountId = settings.getTestTreasureStartAccount();

    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            return Collections.emptyList();
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            long nextAssocId;
            if ((nextAssocId = remainingAssociations.getAndDecrement()) < 0) {
                return Optional.empty();
            }
            int curToken = (int) nextAssocId / numActiveTokenAccounts;
            long curAccount = nextAssocId % numActiveTokenAccounts;
            var accountId = "0.0." + (startAccountId + curAccount);
            var op = tokenAssociate(accountId, tokenRegistryName(curToken))
                    .payingWith(DEFAULT_PAYER)
                    .signedBy(DEFAULT_PAYER)
                    .hasRetryPrecheckFrom(BUSY, PLATFORM_TRANSACTION_NOT_CREATED, DUPLICATE_TRANSACTION, INSUFFICIENT_PAYER_BALANCE)
                    .hasPrecheckFrom(DUPLICATE_TRANSACTION, OK)
                    .hasKnownStatusFrom(SUCCESS, TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT, INVALID_TOKEN_ID, TRANSACTION_EXPIRED, TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED, FAIL_INVALID, OK)
                    .fee(ONE_HUNDRED_HBARS)
                    .noLogging()
                    .suppressStats(true)
                    .deferStatusResolution();
            return Optional.of(op);
        }
    };
}
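
Each call to get() therefore claims one unique (token, account) pair by decoding the countdown id as a row-major index over the token/account grid. A standalone illustration of that decoding follows (plain Java; the sizes and the start account number are hypothetical, chosen only to show the mapping):

class AssociationIndexSketch {
    public static void main(String[] args) {
        int numActiveTokens = 3;          // hypothetical
        int numActiveTokenAccounts = 4;   // hypothetical
        long startAccountId = 1001;       // hypothetical
        // Mirrors the provider above: ids count down from (tokens * accounts - 1) to 0
        for (long id = (long) numActiveTokens * numActiveTokenAccounts - 1; id >= 0; id--) {
            int curToken = (int) (id / numActiveTokenAccounts);
            long curAccount = id % numActiveTokenAccounts;
            System.out.println("id=" + id + " -> token " + curToken
                    + ", account 0.0." + (startAccountId + curAccount));
        }
    }
}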
The class PerpetualTransfers, method transfersFactory().
private Function<HapiApiSpec, OpProvider> transfersFactory() {
    AtomicBoolean fromAtoB = new AtomicBoolean(true);
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            return List.of(cryptoCreate("A"), cryptoCreate("B"));
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            var from = fromAtoB.get() ? "A" : "B";
            var to = fromAtoB.get() ? "B" : "A";
            fromAtoB.set(!fromAtoB.get());
            var op = cryptoTransfer(tinyBarsFromTo(from, to, 1))
                    .noLogging()
                    .deferStatusResolution();
            return Optional.of(op);
        }
    };
}
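
A factory like this is typically handed to the provider-run machinery shown in the next snippet. The sketch below shows how that wiring might look; it is illustrative only: the spec name and the duration and ops-per-second values are invented, and the fluent names runWithProvider, lasting, and maxOpsPerSec are assumed to correspond to the suppliers visible in ProviderRun below rather than quoted from the project.

private HapiApiSpec canTransferBackAndForthForever() {
    // Assumed wiring: runWithProvider(...) is taken to build the ProviderRun in the next
    // snippet, with lasting(...) and maxOpsPerSec(...) feeding its duration and throttle
    // suppliers. Names and values here are illustrative, not quoted.
    return defaultHapiSpec("CanTransferBackAndForthForever")
            .given()
            .when()
            .then(runWithProvider(transfersFactory())
                    .lasting(() -> 5L, () -> TimeUnit.MINUTES)
                    .maxOpsPerSec(() -> 100));
}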
The class ProviderRun, method submitOp().
@Override
protected boolean submitOp(HapiApiSpec spec) {
    int MAX_N = Runtime.getRuntime().availableProcessors();
    int MAX_OPS_PER_SEC = maxOpsPerSecSupplier.getAsInt();
    int MAX_PENDING_OPS = maxPendingOpsSupplier.getAsInt();
    int BACKOFF_SLEEP_SECS = backoffSleepSecsSupplier.getAsInt();
    long duration = durationSupplier.getAsLong();

    OpProvider provider = providerFn.apply(spec);
    allRunFor(spec, provider.suggestedInitializers().toArray(new HapiSpecOperation[0]));
    log.info("Finished initialization for provider run...");

    TimeUnit unit = unitSupplier.get();
    Stopwatch stopwatch = Stopwatch.createStarted();
    final var remainingOpsToSubmit = new AtomicInteger(totalOpsToSubmit.getAsInt());
    final boolean fixedOpSubmission = remainingOpsToSubmit.get() >= 0;

    int submittedSoFar = 0;
    long durationMs = unit.toMillis(duration);
    long logIncrementMs = durationMs / 100;
    long nextLogTargetMs = logIncrementMs;
    long lastDeltaLogged = -1;
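
    // opsThisSecond and submissionBoundaryMs below implement a simple fixed-window throttle:
    // once elapsed time passes the boundary, the window advances by one second and the counter
    // resets, so at most MAX_OPS_PER_SEC operations are submitted per window.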
    final var opsThisSecond = new AtomicInteger(0);
    final var submissionBoundaryMs = new AtomicLong(stopwatch.elapsed(MILLISECONDS) + 1_000);
    while (stopwatch.elapsed(unit) < duration) {
        long elapsedMs = stopwatch.elapsed(MILLISECONDS);
        if (elapsedMs > submissionBoundaryMs.get()) {
            submissionBoundaryMs.getAndAdd(1_000);
            opsThisSecond.set(0);
        }
        int numPending = spec.numPendingOps();
        if (elapsedMs > nextLogTargetMs) {
            nextLogTargetMs += logIncrementMs;
            long delta = duration - stopwatch.elapsed(unit);
            if (delta != lastDeltaLogged) {
                log.info(delta + " " + unit.toString().toLowerCase()
                        + (fixedOpSubmission ? (" or " + remainingOpsToSubmit + " ops ") : "")
                        + " left in test - " + submittedSoFar + " ops submitted so far ("
                        + numPending + " pending).");
                log.info("Precheck txn status counts :: " + spec.precheckStatusCounts());
                log.info("Resolved txn status counts :: " + spec.finalizedStatusCounts());
                log.info("\n------------------------------\n");
                lastDeltaLogged = delta;
            }
        }
        if (fixedOpSubmission && remainingOpsToSubmit.get() <= 0) {
            if (numPending > 0) {
                continue;
            }
            log.info("Finished submission of total {} operations", totalOpsToSubmit.getAsInt());
            break;
        }
        if (numPending < MAX_PENDING_OPS) {
            HapiSpecOperation[] burst = IntStream.range(0, Math.min(
                            MAX_N,
                            fixedOpSubmission
                                    ? Math.min(remainingOpsToSubmit.get(), MAX_OPS_PER_SEC - opsThisSecond.get())
                                    : MAX_OPS_PER_SEC - opsThisSecond.get()))
                    .mapToObj(ignore -> provider.get())
                    .flatMap(Optional::stream)
                    .peek(op -> counts.get(op.type()).getAndIncrement())
                    .toArray(HapiSpecOperation[]::new);
            if (burst.length > 0) {
                allRunFor(spec, inParallel(burst));
                submittedSoFar += burst.length;
                if (fixedOpSubmission) {
                    remainingOpsToSubmit.getAndAdd(-burst.length);
                }
                opsThisSecond.getAndAdd(burst.length);
            }
        } else {
            log.warn("Now " + numPending + " ops pending; backing off for " + BACKOFF_SLEEP_SECS + "s!");
            try {
                Thread.sleep(BACKOFF_SLEEP_SECS * 1_000L);
            } catch (InterruptedException ignore) {
            }
        }
    }
    Map<HederaFunctionality, Integer> finalCounts = counts.entrySet().stream()
            .filter(entry -> entry.getValue().get() > 0)
            .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().get()));
    log.info("Final breakdown of *provided* ops: " + finalCounts);
    log.info("Final breakdown of *resolved* statuses: " + spec.finalizedStatusCounts());
    return false;
}
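
The size of each burst is the minimum of the available parallelism, the remaining per-second budget, and (when a fixed total was configured) the operations still owed. The standalone sketch below distills that arithmetic into a pure function; the names and sample values are local to the sketch, not taken from the project.

class BurstSizeSketch {
    // Same bounds as the IntStream.range(...) upper limit in submitOp above
    static int burstSize(int maxParallelism, int maxOpsPerSec, int opsThisSecond,
            boolean fixedOpSubmission, int remainingOpsToSubmit) {
        int budgetThisSecond = maxOpsPerSec - opsThisSecond;
        int cap = fixedOpSubmission ? Math.min(remainingOpsToSubmit, budgetThisSecond) : budgetThisSecond;
        return Math.min(maxParallelism, cap);
    }

    public static void main(String[] args) {
        // E.g. 8 cores, 100 ops/sec allowed, 95 already sent this second, 500 ops left to submit
        System.out.println(burstSize(8, 100, 95, true, 500));  // -> 5: the per-second budget binds
        System.out.println(burstSize(8, 100, 0, true, 3));     // -> 3: the fixed total binds
        System.out.println(burstSize(8, 100, 0, false, -1));   // -> 8: the parallelism cap binds
    }
}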