Use of com.hedera.services.bdd.spec.HapiSpecOperation in project hedera-services by hashgraph.
From class RunLoadTest, method testRun:
void testRun(HapiApiSpec spec) {
    double _targetTps = targetTps.getAsDouble();
    long _testDuration = testDuration.getAsLong();
    TimeUnit _ofUnit = ofUnit.get();
    int totalOps = 0;
    float currentTPS = 0;
    Stopwatch duration = createStarted();
    boolean reported = false;
    Stopwatch statDuration = duration;
    // transactions submitted during the current stat window
    int submitOps = 0;
    while (duration.elapsed(_ofUnit) < _testDuration) {
        HapiSpecOperation[] ops = opSource.get();
        allRunFor(spec, ops);
        submitOps += ops.length;
        totalOps += ops.length;
        long elapsedMS = statDuration.elapsed(MILLISECONDS);
        currentTPS = submitOps / (elapsedMS * 0.001f);
        if (statDuration.elapsed(SECONDS) % 10 == 0) {
            // report periodically, once per 10-second boundary
            if (!reported) {
                log.info("Thread {} ops {} current TPS {}", Thread.currentThread().getName(), submitOps, currentTPS);
                reported = true;
                submitOps = 0;
                statDuration = createStarted();
            }
        } else {
            reported = false;
        }
        try {
            if (currentTPS > _targetTps) {
                // back off just long enough to bring this window's rate down to the target TPS
                long pauseMillieSeconds = (long) ((submitOps / (float) _targetTps) * 1000 - elapsedMS);
                Thread.sleep(Math.max(5, pauseMillieSeconds));
            }
        } catch (InterruptedException irrelevant) {
        }
    }
    log.info("Thread {} final ops {} in {} seconds, TPS {} ", Thread.currentThread().getName(), totalOps, duration.elapsed(SECONDS), currentTPS);
    totalOpsAllThread.addAndGet(totalOps);
}
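For context, opSource supplies the batches of operations the loop above drains on each iteration. A minimal sketch of such a supplier, assuming it is a Supplier<HapiSpecOperation[]> and reusing submitMessageTo from the snippets on this page (the topic name "loadTestTopic" and the batch size of 10 are illustrative assumptions, not taken from the project):

// Hypothetical op source for the load loop; assumes a pre-created "loadTestTopic"
// and the same static imports used elsewhere in these examples.
Supplier<HapiSpecOperation[]> opSource = () -> IntStream.range(0, 10)
        .mapToObj(i -> submitMessageTo("loadTestTopic").message("load message #" + i))
        .toArray(HapiSpecOperation[]::new);

Each opSource.get() call would then contribute ten submitted ops to the TPS window tracked by submitOps.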
Use of com.hedera.services.bdd.spec.HapiSpecOperation in project hedera-services by hashgraph.
From class AssortedHcsOps, method runMisc:
private HapiApiSpec runMisc() {
    final int SUBMIT_BURST_SIZE = 10;
    AtomicReference<String> vanillaTopic = new AtomicReference<>();
    AtomicReference<String> updatedTopic = new AtomicReference<>();
    AtomicReference<String> deletedTopic = new AtomicReference<>();
    Function<String, HapiSpecOperation[]> submitBurst = ref -> IntStream.range(0, SUBMIT_BURST_SIZE)
            .mapToObj(i -> submitMessageTo(ref).message(String.format("%s message #%d", ref, i)))
            .toArray(n -> new HapiSpecOperation[n]);
    KeyShape origAdminKey = listOf(SIMPLE, threshOf(2, 3), SIMPLE);
    KeyShape origSubmitKey = listOf(SIMPLE, threshOf(2, 3), listOf(5));
    return customHapiSpec("RunMisc")
            .withProperties(Map.of(
                    "client.feeSchedule.fromDisk", "false",
                    "client.feeSchedule.path", path("feeSchedule.bin"),
                    "client.exchangeRates.fromDisk", "false",
                    "client.exchangeRates.path", path("exchangeRates.bin")))
            .given(
                    newKeyNamed("origAdminKey").shape(origAdminKey),
                    newKeyNamed("origSubmitKey").shape(origSubmitKey),
                    createTopic("vanillaTopic").adminKeyName(GENESIS),
                    createTopic("updatedTopic").adminKeyName("origAdminKey").submitKeyName("origSubmitKey"),
                    createTopic("deletedTopic").adminKeyName(GENESIS),
                    withOpContext((spec, opLog) -> {
                        vanillaTopic.set(asTopicString(spec.registry().getTopicID("vanillaTopic")));
                        updatedTopic.set(asTopicString(spec.registry().getTopicID("updatedTopic")));
                        deletedTopic.set(asTopicString(spec.registry().getTopicID("deletedTopic")));
                    }))
            .when(flattened(
                    submitBurst.apply("vanillaTopic"),
                    submitBurst.apply("updatedTopic"),
                    submitBurst.apply("deletedTopic"),
                    updateTopic("updatedTopic").adminKey(GENESIS).submitKey(GENESIS),
                    deleteTopic("deletedTopic")))
            .then(
                    getTopicInfo("vanillaTopic").hasSeqNo(10L),
                    getTopicInfo("updatedTopic").hasSeqNo(10L).hasAdminKey(GENESIS).hasSubmitKey(GENESIS),
                    getTopicInfo("deletedTopic").hasCostAnswerPrecheck(INVALID_TOPIC_ID),
                    logIt(spec -> String.format(
                            "Vanilla: %s, Updated: %s, Deleted: %s",
                            vanillaTopic.get(), updatedTopic.get(), deletedTopic.get())));
}
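The then() assertions expect hasSeqNo(10L) because each topic received exactly SUBMIT_BURST_SIZE messages during the when() phase. The burst builder could just as well be an ordinary helper method; a hypothetical, parameterized variant (the name burstOf is illustrative, not from the project) might read:

// Hypothetical generalization of the submitBurst function above.
static HapiSpecOperation[] burstOf(String topic, int size) {
    return IntStream.range(0, size)
            .mapToObj(i -> submitMessageTo(topic).message(String.format("%s message #%d", topic, i)))
            .toArray(HapiSpecOperation[]::new);
}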
Use of com.hedera.services.bdd.spec.HapiSpecOperation in project hedera-services by hashgraph.
From class ValidationScenarios, method novelFileIfDesired:
private static HapiSpecOperation[] novelFileIfDesired() {
    if (!params.isNovelContent()) {
        return new HapiSpecOperation[0];
    }
    KeyShape firstComplex = KeyShape.listOf(KeyShape.threshOf(2, 3), KeyShape.threshOf(1, 3));
    KeyShape secondComplex = KeyShape.listOf(3);
    SigControl normalDelete = secondComplex.signedWith(KeyShape.sigs(ON, ON, ON));
    SigControl revocation = secondComplex.signedWith(KeyShape.sigs(ON, OFF, OFF));
    return new HapiSpecOperation[] {
            newKeyNamed("novelFileFirstKey").shape(firstComplex),
            newKeyNamed("novelFileSecondKey").shape(secondComplex),
            fileCreate(NOVEL_FILE_NAME)
                    .payingWith(SCENARIO_PAYER_NAME)
                    .setNodeFrom(ValidationScenarios::nextNode)
                    .key("novelFileFirstKey")
                    .contents("abcdefghijklm"),
            fileAppend(NOVEL_FILE_NAME)
                    .payingWith(SCENARIO_PAYER_NAME)
                    .setNodeFrom(ValidationScenarios::nextNode)
                    .content("nopqrstuvwxyz"),
            getFileContents(NOVEL_FILE_NAME)
                    .payingWith(SCENARIO_PAYER_NAME)
                    .setNodeFrom(ValidationScenarios::nextNode)
                    .hasContents(ignore -> "abcdefghijklmnopqrstuvwxyz".getBytes()),
            fileUpdate(NOVEL_FILE_NAME)
                    .payingWith(SCENARIO_PAYER_NAME)
                    .setNodeFrom(ValidationScenarios::nextNode)
                    .wacl("novelFileSecondKey"),
            fileDelete(NOVEL_FILE_NAME)
                    .payingWith(SCENARIO_PAYER_NAME)
                    .setNodeFrom(ValidationScenarios::nextNode)
                    .sigControl(ControlForKey.forKey(
                            NOVEL_FILE_NAME,
                            params.isRevocationService() ? revocation : normalDelete)),
            withOpContext((spec, opLog) -> novelFileUsed.set(
                    HapiPropertySource.asFileString(spec.registry().getFileId(NOVEL_FILE_NAME))))
    };
}
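Because the method returns a plain HapiSpecOperation[], it is meant to be spliced into a larger scenario. A minimal sketch of such a composition, reusing keyFromPem and logIt from the snippets on this page (the spec name "NovelFileScenario" and the trimmed property map are assumptions for illustration; the project's real scenarios pass more properties):

// Hypothetical composition; only the key-loading op and property map shown elsewhere on this page are reused.
return customHapiSpec("NovelFileScenario")
        .withProperties(Map.of("nodes", nodes()))
        .given(keyFromPem(() -> pemForAccount(targetNetwork().getScenarioPayer()))
                .name(SCENARIO_PAYER_NAME)
                .linkedTo(() -> String.format("0.0.%d", targetNetwork().getScenarioPayer())))
        .when(novelFileIfDesired())
        .then(logIt(spec -> "Novel file used: " + novelFileUsed.get()));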
Use of com.hedera.services.bdd.spec.HapiSpecOperation in project hedera-services by hashgraph.
From class ValidationScenarios, method sysFilesDown:
private static HapiApiSpec sysFilesDown() {
    ensureScenarios();
    if (scenarios.getSysFilesDown() == null) {
        scenarios.setSysFilesDown(new SysFilesDownScenario());
    }
    var sys = scenarios.getSysFilesDown();
    final long[] targets = sys.getNumsToFetch().stream().mapToLong(Integer::longValue).toArray();
    try {
        return customHapiSpec("SysFilesDown")
                .withProperties(Map.of(
                        "nodes", nodes(),
                        "default.payer", primaryPayer(),
                        "default.node", defaultNode(),
                        "fees.useFixedOffer", "true",
                        "fees.fixedOffer", "" + FEE_TO_OFFER,
                        "startupAccounts.literal", payerKeystoreLiteral()))
                .given(keyFromPem(() -> pemForAccount(targetNetwork().getScenarioPayer()))
                        .name(SCENARIO_PAYER_NAME)
                        .linkedTo(() -> String.format("0.0.%d", targetNetwork().getScenarioPayer())))
                .when()
                .then(Arrays.stream(targets)
                        .mapToObj(fileNum -> appropriateQuery(sys, fileNum))
                        .toArray(HapiSpecOperation[]::new));
    } catch (Exception e) {
        log.warn("Unable to initialize system file scenarios, skipping it!", e);
        errorsOccurred.set(true);
        return null;
    }
}
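appropriateQuery is a private helper of ValidationScenarios that is not reproduced on this page. Purely as a hypothetical sketch of the kind of op it could return for one target file number (the real implementation may differ, for instance in how it addresses the file or what it does with the fetched contents):

// Illustrative only; the project's appropriateQuery is not shown here and may behave differently.
private static HapiSpecOperation appropriateQuery(SysFilesDownScenario sys, long fileNum) {
    return getFileContents(String.format("0.0.%d", fileNum))
            .payingWith(SCENARIO_PAYER_NAME)
            .setNodeFrom(ValidationScenarios::nextNode);
}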
Use of com.hedera.services.bdd.spec.HapiSpecOperation in project hedera-services by hashgraph.
From class ValidationScenarios, method sysFilesUp:
private static HapiApiSpec sysFilesUp() {
    ensureScenarios();
    if (scenarios.getSysFilesUp() == null) {
        scenarios.setSysFilesUp(new SysFilesUpScenario());
    }
    var sys = scenarios.getSysFilesUp();
    long[] payers = sys.getUpdates().stream().mapToLong(UpdateAction::getPayer).toArray();
    try {
        return customHapiSpec("SysFilesUp")
                .withProperties(Map.of(
                        "nodes", nodes(),
                        "default.payer", primaryPayer(),
                        "default.node", defaultNode(),
                        "fees.useFixedOffer", "true",
                        "fees.fixedOffer", "" + FEE_TO_OFFER,
                        "startupAccounts.literal", payerKeystoreLiteral()))
                .given(LongStream.of(payers)
                        .mapToObj(payer -> keyFromPem(() -> pemForAccount(payer))
                                .name(String.format("payer%d", payer))
                                .passphrase(passphraseFor(payer))
                                .linkedTo(() -> String.format("0.0.%d", payer)))
                        .toArray(HapiSpecOperation[]::new))
                .when()
                .then(sys.getUpdates().stream()
                        .map(action -> updateLargeFile(
                                String.format("payer%d", action.getPayer()),
                                String.format("0.0.%d", action.getNum()),
                                appropriateContents(action.getNum()),
                                true,
                                OptionalLong.of(10_000_000_000L)))
                        .toArray(HapiSpecOperation[]::new));
    } catch (Exception e) {
        log.warn("Unable to initialize system file update scenario, skipping it!", e);
        errorsOccurred.set(true);
        return null;
    }
}
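Concretely, a single entry in sys.getUpdates() with payer account 2 and system file number 121 (both numbers chosen purely for illustration) expands into one key-loading op for given() and one update op for then():

// Hypothetical expansion of one UpdateAction; account 0.0.2 and file 0.0.121 are assumed values.
HapiSpecOperation loadPayerKey = keyFromPem(() -> pemForAccount(2))
        .name("payer2")
        .passphrase(passphraseFor(2))
        .linkedTo(() -> "0.0.2");
HapiSpecOperation pushNewContents = updateLargeFile("payer2", "0.0.121",
        appropriateContents(121), true, OptionalLong.of(10_000_000_000L));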