Usage of com.hedera.services.bdd.spec.HapiApiSpec in the hedera-services project by Hashgraph: class HapiContractCreate, method opBodyDef.
// Builds the ContractCreateTransactionBody for this operation: resolves the admin key,
// the bytecode file id, and the (optionally hex-encoded) constructor parameters, then
// returns a consumer that installs the finished body on a TransactionBody.Builder.
@Override
protected Consumer<TransactionBody.Builder> opBodyDef(HapiApiSpec spec) throws Throwable {
// A fresh admin key is only needed when one is neither omitted nor replaced by the
// deprecated contract-id key sentinel.
if (!omitAdminKey && !useDeprecatedAdminKey) {
generateAdminKey(spec);
}
// A supplier-provided bytecode file name takes precedence over any pre-set value.
if (bytecodeFileFn.isPresent()) {
bytecodeFile = Optional.of(bytecodeFileFn.get().get());
}
// Fall back to the spec's default contract bytecode; this populates bytecodeFile,
// which makes the unguarded bytecodeFile.get() below safe.
if (!bytecodeFile.isPresent()) {
setBytecodeToDefaultContract(spec);
}
Optional<byte[]> params;
if (explicitHexedParams.isPresent()) {
// Explicit hex-encoded params win over ABI-based encoding.
params = explicitHexedParams.map(Supplier::get).map(CommonUtils::unhex);
} else {
// NOTE(review): when abi is present, args.get() is called unchecked — presumably the
// DSL guarantees args is set whenever abi is; confirm against the builder's contract.
params = abi.isPresent() ? Optional.of(CallTransaction.Function.fromJsonInterface(abi.get()).encodeArguments(args.get())) : Optional.empty();
}
FileID bytecodeFileId = TxnUtils.asFileId(bytecodeFile.get(), spec);
ContractCreateTransactionBody opBody = spec.txns().<ContractCreateTransactionBody, ContractCreateTransactionBody.Builder>body(ContractCreateTransactionBody.class, b -> {
// Deprecated contract-id admin key takes precedence; otherwise set the generated
// key unless the admin key was explicitly omitted.
if (useDeprecatedAdminKey) {
b.setAdminKey(DEPRECATED_CID_ADMIN_KEY);
} else if (!omitAdminKey) {
b.setAdminKey(adminKey);
}
b.setFileID(bytecodeFileId);
// Optional knobs: only set on the builder when explicitly configured.
autoRenewPeriodSecs.ifPresent(p -> b.setAutoRenewPeriod(Duration.newBuilder().setSeconds(p).build()));
balance.ifPresent(b::setInitialBalance);
memo.ifPresent(b::setMemo);
gas.ifPresent(b::setGas);
proxy.ifPresent(p -> b.setProxyAccountID(asId(p, spec)));
params.ifPresent(bytes -> b.setConstructorParameters(ByteString.copyFrom(bytes)));
});
return b -> b.setContractCreateInstance(opBody);
}
Usage of com.hedera.services.bdd.spec.HapiApiSpec in the hedera-services project by Hashgraph: class CryptoTransferLoadTestWithInvalidAccounts, method runCryptoTransfers.
// Runs a transfer load test against two account ids that are expected not to exist,
// so every submitted transfer should resolve to INVALID_ACCOUNT_ID. Load settings
// are pulled from the spec's CI properties map before the bursts begin.
protected HapiApiSpec runCryptoTransfers() {
    final PerfTestLoadSettings settings = new PerfTestLoadSettings();
    // Each burst is a single deferred one-tinybar transfer between the bogus accounts.
    final Supplier<HapiSpecOperation[]> transferBurst = () -> new HapiSpecOperation[] {
            cryptoTransfer(tinyBarsFromTo("0.0.1000000001", "0.0.1000000002", 1L))
                    .noLogging()
                    .signedBy(GENESIS)
                    .suppressStats(true)
                    .fee(100_000_000L)
                    .hasKnownStatusFrom(INVALID_ACCOUNT_ID)
                    .hasRetryPrecheckFrom(BUSY, PLATFORM_TRANSACTION_NOT_CREATED)
                    .deferStatusResolution()
    };
    return defaultHapiSpec("RunCryptoTransfers")
            .given(
                    withOpContext((spec, ignore) -> settings.setFrom(spec.setup().ciPropertiesMap())),
                    logIt(ignore -> settings.toString()))
            .when()
            .then(defaultLoadTest(transferBurst, settings));
}
Usage of com.hedera.services.bdd.spec.HapiApiSpec in the hedera-services project by Hashgraph: class FileExpansionLoadProvider, method fileExpansionsFactory.
// Returns a factory that, given a spec, produces an OpProvider which keeps a rotating
// pool of files under expansion: it creates files up to the active-target count, then
// appends fixed-size chunks to randomly chosen pool members until each hits the max
// file size and is retired from rotation.
private Function<HapiApiSpec, OpProvider> fileExpansionsFactory() {
final SplittableRandom r = new SplittableRandom();
// Concurrent set: targets become usable asynchronously (via exposingNumTo callbacks)
// and are removed when they reach max size.
final Set<String> usableTargets = ConcurrentHashMap.newKeySet();
final LongFunction<String> targetNameFn = i -> "expandingFile" + i;
// Names 0..numActiveTargets-1 are taken by suggestedInitializers(), so replacement
// targets start numbering from numActiveTargets.
final AtomicInteger nextTargetNum = new AtomicInteger(numActiveTargets.get());
final var key = "multi";
final var waclShape = KeyShape.listOf(SIMPLE, threshOf(1, 3), listOf(2));
return spec -> new OpProvider() {
@Override
public List<HapiSpecOperation> suggestedInitializers() {
// Create the WACL key plus the initial pool of target files.
final List<HapiSpecOperation> ops = new ArrayList<>();
ops.add(newKeyNamed(key).shape(waclShape));
for (int i = 0, n = numActiveTargets.get(); i < n; i++) {
ops.add(fileCreate(targetNameFn.apply(i)).key(key).noLogging().contents(DATA_CHUNK).payingWith(GENESIS));
}
return ops;
}
@Override
public Optional<HapiSpecOperation> get() {
HapiSpecOperation op;
if (usableTargets.size() < numActiveTargets.get()) {
// Pool is under-filled (targets retired or still pending): create a replacement.
// The target only becomes usable once creation resolves and exposingNumTo fires.
final var name = targetNameFn.apply(nextTargetNum.getAndIncrement());
op = fileCreate(name).noLogging().key(key).contents(DATA_CHUNK).payingWith(GENESIS).deferStatusResolution().exposingNumTo(num -> {
usableTargets.add(name);
});
} else {
// Pick a pseudo-random member of the pool by skipping a random number of
// iterator positions.
final var skips = r.nextInt(usableTargets.size());
final var iter = usableTargets.iterator();
try {
for (int i = 0; i < skips; i++) {
iter.next();
}
final var target = iter.next();
op = fileAppend(target).noLogging().deferStatusResolution().payingWith(GENESIS).content(DATA_CHUNK).hasKnownStatusFrom(MAX_FILE_SIZE_EXCEEDED, SUCCESS).alertingPost(code -> {
if (code == MAX_FILE_SIZE_EXCEEDED) {
// Retire the full file so future appends pick other targets.
log.info("File {} reached max size, no longer in rotation", target);
usableTargets.remove(target);
}
});
} catch (Exception ignore) {
// Best-effort: concurrent removals can exhaust the iterator mid-skip
// (NoSuchElementException); just emit a no-op this round.
op = noOp();
}
}
return Optional.of(op);
}
};
}
Usage of com.hedera.services.bdd.spec.HapiApiSpec in the hedera-services project by Hashgraph: class FileUpdateLoadTest, method runFileUpdates.
// Runs a file-update load test: after creating a "target" file, each burst submits a
// parallel batch of 4KB-content updates against it and logs the running submission
// count. Load parameters come from the spec's CI properties map.
private HapiApiSpec runFileUpdates() {
    final PerfTestLoadSettings settings = new PerfTestLoadSettings();
    final AtomicInteger submittedSoFar = new AtomicInteger(0);
    final byte[] NEW_CONTENTS = TxnUtils.randomUtf8Bytes(TxnUtils.BYTES_4K);
    final Supplier<HapiSpecOperation[]> fileUpdateBurst = () -> {
        // One burst = burstSize parallel deferred updates, followed by a progress log.
        final var updates = IntStream.range(0, settings.getBurstSize())
                .mapToObj(i -> TxnVerbs.fileUpdate("target")
                        .fee(Integer.MAX_VALUE)
                        .contents(NEW_CONTENTS)
                        .noLogging()
                        .hasPrecheckFrom(OK, BUSY, DUPLICATE_TRANSACTION, PLATFORM_TRANSACTION_NOT_CREATED)
                        .deferStatusResolution())
                .toArray(n -> new HapiSpecOperation[n]);
        return new HapiSpecOperation[] {
                inParallel(updates),
                logIt(ignore -> String.format(
                        "Now a total of %d file updates submitted.",
                        submittedSoFar.addAndGet(settings.getBurstSize())))
        };
    };
    return defaultHapiSpec("RunFileUpdates")
            .given(
                    withOpContext((spec, ignore) -> settings.setFrom(spec.setup().ciPropertiesMap())),
                    logIt(ignore -> settings.toString()))
            .when(fileCreate("target").contents("The initial contents!"))
            .then(runLoadTest(fileUpdateBurst)
                    .tps(settings::getTps)
                    .tolerance(settings::getTolerancePercentage)
                    .allowedSecsBelow(settings::getAllowedSecsBelow)
                    .lasting(settings::getMins, () -> MINUTES));
}
Usage of com.hedera.services.bdd.spec.HapiApiSpec in the hedera-services project by Hashgraph: class MixedFileOpsLoadTest, method runMixedFileOps.
// Runs a mixed file-operation load test: each burst performs a numbered file creation
// plus an update and an append against the shared "targetFile", all tolerant of both
// SUCCESS and UNKNOWN outcomes. Load parameters come from the CI properties map.
protected HapiApiSpec runMixedFileOps() {
    final PerfTestLoadSettings settings = new PerfTestLoadSettings();
    final AtomicInteger submittedSoFar = new AtomicInteger(0);
    final String initialContent = "The initial contents!";
    final String targetFile = "targetFile";
    final Supplier<HapiSpecOperation[]> mixedFileOpsBurst = () -> {
        // A fresh numbered file per burst; the counter doubles as the name suffix.
        final var create = fileCreate(targetFile + submittedSoFar.getAndIncrement())
                .contents(initialContent)
                .hasKnownStatusFrom(SUCCESS, UNKNOWN);
        final var update = fileUpdate(targetFile)
                .fee(ONE_HUNDRED_HBARS)
                .contents(TxnUtils.randomUtf8Bytes(TxnUtils.BYTES_4K))
                .noLogging()
                .payingWith(GENESIS)
                .hasAnyPrecheck()
                .hasKnownStatusFrom(SUCCESS, UNKNOWN)
                .deferStatusResolution();
        final var append = fileAppend(targetFile)
                .content("dummy")
                .hasAnyPrecheck()
                .payingWith(GENESIS)
                .fee(ONE_HUNDRED_HBARS)
                .hasKnownStatusFrom(SUCCESS, UNKNOWN)
                .deferStatusResolution();
        return new HapiSpecOperation[] { create, update, append };
    };
    return defaultHapiSpec("runMixedFileOps")
            .given(
                    withOpContext((spec, ignore) -> settings.setFrom(spec.setup().ciPropertiesMap())),
                    logIt(ignore -> settings.toString()))
            .when(
                    fileCreate(targetFile).contents(initialContent).hasAnyPrecheck().payingWith(GENESIS),
                    getFileInfo(targetFile).logging().payingWith(GENESIS))
            .then(defaultLoadTest(mixedFileOpsBurst, settings));
}
Aggregations