use of com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE in project hedera-services by hashgraph.
the class DiverseStateCreation method createDiverseState.
private HapiApiSpec createDiverseState() {
    final KeyShape SMALL_SHAPE = listOf(threshOf(1, 3));
    final KeyShape MEDIUM_SHAPE = listOf(SIMPLE, threshOf(2, 3));
    final KeyShape LARGE_SHAPE = listOf(
            SIMPLE,
            threshOf(1, listOf(SIMPLE, threshOf(1, 2), SIMPLE)),
            threshOf(2, threshOf(1, SIMPLE, listOf(SIMPLE, SIMPLE)), SIMPLE));
    final var smallKey = "smallKey";
    final var mediumKey = "mediumKey";
    final var largeKey = "largeKey";
    final var fuseInitcode = "fuseInitcode";
    final var multiInitcode = "multiInitcode";
    final var fuseContract = "fuseContract";
    final var multiContract = "multiContract";
    return defaultHapiSpec("CreateDiverseState").given(
            newKeyNamed(smallKey).shape(SMALL_SHAPE),
            newKeyNamed(mediumKey).shape(MEDIUM_SHAPE),
            newKeyNamed(largeKey).shape(LARGE_SHAPE)
    ).when(
            /* Create some well-known files */
            fileCreate(SMALL_FILE).contents(SMALL_CONTENTS).key(smallKey).expiry(SMALL_EXPIRY_TIME)
                    .exposingNumTo(num -> entityNums.put(SMALL_FILE, num)),
            fileCreate(MEDIUM_FILE).contents("").key(mediumKey).expiry(MEDIUM_EXPIRY_TIME)
                    .exposingNumTo(num -> entityNums.put(MEDIUM_FILE, num)),
            updateLargeFile(GENESIS, MEDIUM_FILE, ByteString.copyFrom(MEDIUM_CONTENTS), false, OptionalLong.of(ONE_HBAR)),
            fileDelete(MEDIUM_FILE),
            fileCreate(LARGE_FILE).contents("").key(largeKey).expiry(LARGE_EXPIRY_TIME)
                    .exposingNumTo(num -> entityNums.put(LARGE_FILE, num)),
            updateLargeFile(GENESIS, LARGE_FILE, ByteString.copyFrom(LARGE_CONTENTS), false, OptionalLong.of(ONE_HBAR)),
            /* Create some bytecode files */
            fileCreate(fuseInitcode).expiry(FUSE_EXPIRY_TIME).path(ContractResources.FUSE_BYTECODE_PATH)
                    .exposingNumTo(num -> entityNums.put(FUSE_INITCODE, num)),
            fileCreate(multiInitcode).expiry(MULTI_EXPIRY_TIME).path(ContractResources.MULTIPURPOSE_BYTECODE_PATH)
                    .exposingNumTo(num -> entityNums.put(MULTI_INITCODE, num)),
            contractCreate(fuseContract).bytecode(fuseInitcode).exposingNumTo(num -> entityNums.put(FUSE_CONTRACT, num)),
            contractCreate(multiContract).bytecode(multiInitcode).exposingNumTo(num -> entityNums.put(MULTI_CONTRACT, num)),
            contractCall(multiContract, ContractResources.BELIEVE_IN_ABI, EXPECTED_LUCKY_NO)
    ).then(
            systemFileDelete(fuseInitcode).payingWith(GENESIS),
            systemFileDelete(multiInitcode).payingWith(GENESIS),
            getFileInfo(SMALL_FILE).exposingKeyReprTo(repr -> keyReprs.put(SMALL_FILE, repr)),
            getFileInfo(MEDIUM_FILE).exposingKeyReprTo(repr -> keyReprs.put(MEDIUM_FILE, repr)),
            getFileInfo(LARGE_FILE).exposingKeyReprTo(repr -> keyReprs.put(LARGE_FILE, repr)),
            getContractBytecode(FUSE_CONTRACT).exposingBytecodeTo(code -> hexedBytecode.put(FUSE_BYTECODE, CommonUtils.hex(code))),
            withOpContext((spec, opLog) -> {
                final var toSerialize = Map.of(
                        ENTITY_NUM_KEY, entityNums,
                        KEY_REPRS_KEY, keyReprs,
                        HEXED_BYTECODE_KEY, hexedBytecode);
                final var om = new ObjectMapper();
                om.writeValue(Files.newOutputStream(Paths.get(STATE_META_JSON_LOC)), toSerialize);
            }));
}
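For orientation: in the KeyShape DSL, SIMPLE denotes a single cryptographic key, listOf(...) a KeyList, and threshOf(m, ...) an m-of-n ThresholdKey, so the shapes above describe arbitrarily nested key structures. Below is a minimal sketch (not what newKeyNamed generates; it assumes the com.hederahashgraph.api.proto.java protobuf classes used elsewhere in these tests, and zero-filled byte arrays stand in for real Ed25519 public keys) of the Key structure that a shape like MEDIUM_SHAPE = listOf(SIMPLE, threshOf(2, 3)) corresponds to:
import com.google.protobuf.ByteString;
import com.hederahashgraph.api.proto.java.Key;
import com.hederahashgraph.api.proto.java.KeyList;
import com.hederahashgraph.api.proto.java.ThresholdKey;

class MediumShapeSketch {
    // Placeholder for an Ed25519 public key (real keys are random 32-byte values)
    static Key ed25519Placeholder() {
        return Key.newBuilder().setEd25519(ByteString.copyFrom(new byte[32])).build();
    }

    // listOf(SIMPLE, threshOf(2, 3)): a KeyList holding one simple key and one 2-of-3 threshold key
    static Key mediumShapeKey() {
        Key twoOfThree = Key.newBuilder()
                .setThresholdKey(ThresholdKey.newBuilder()
                        .setThreshold(2)
                        .setKeys(KeyList.newBuilder()
                                .addKeys(ed25519Placeholder())
                                .addKeys(ed25519Placeholder())
                                .addKeys(ed25519Placeholder())))
                .build();
        return Key.newBuilder()
                .setKeyList(KeyList.newBuilder()
                        .addKeys(ed25519Placeholder())
                        .addKeys(twoOfThree))
                .build();
    }
}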
use of com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE in project hedera-services by hashgraph.
the class CryptoCreateSuite method createAnAccountInvalidThresholdKey.
// One of the elements in the threshold key is not valid
private HapiApiSpec createAnAccountInvalidThresholdKey() {
    KeyShape emptyListShape = listOf(0);
    KeyShape thresholdShape = threshOf(1, SIMPLE, SIMPLE, emptyListShape);
    long initialBalance = 10_000L;
    // build a threshold key in which one of the keys is invalid
    Key randomKey1 = Key.newBuilder().setEd25519(ByteString.copyFrom(randomUtf8Bytes(32))).build();
    Key randomKey2 = Key.newBuilder().setEd25519(ByteString.copyFrom(randomUtf8Bytes(32))).build();
    Key shortKey = Key.newBuilder().setEd25519(ByteString.copyFrom(new byte[10])).build();
    KeyList invalidKeyList = KeyList.newBuilder()
            .addKeys(randomKey1)
            .addKeys(randomKey2)
            .addKeys(shortKey)
            .build();
    ThresholdKey invalidThresholdKey = ThresholdKey.newBuilder()
            .setThreshold(2)
            .setKeys(invalidKeyList)
            .build();
    Key regKey1 = Key.newBuilder().setThresholdKey(invalidThresholdKey).build();
    Key regKey2 = Key.newBuilder().setKeyList(invalidKeyList).build();
    return defaultHapiSpec("createAnAccountInvalidThresholdKey").given().when().then(
            withOpContext((spec, opLog) -> {
                spec.registry().saveKey("regKey1", regKey1);
                spec.registry().saveKey("regKey2", regKey2);
            }),
            cryptoCreate("badThresholdKeyAccount")
                    .keyShape(thresholdShape)
                    .balance(initialBalance)
                    .logged()
                    .hasPrecheck(BAD_ENCODING),
            cryptoCreate("badThresholdKeyAccount2")
                    .key("regKey1")
                    .balance(initialBalance)
                    .logged()
                    .signedBy(GENESIS)
                    .hasPrecheck(BAD_ENCODING),
            cryptoCreate("badThresholdKeyAccount3")
                    .key("regKey2")
                    .balance(initialBalance)
                    .logged()
                    .signedBy(GENESIS)
                    .hasPrecheck(BAD_ENCODING));
}
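All three creations are expected to fail precheck with BAD_ENCODING because the key material cannot be decoded: Ed25519 public keys are 32 bytes, so the 10-byte shortKey is invalid, and the thresholdShape built around the empty emptyListShape is rejected as well. A minimal sketch of the length rule, illustrative only and not the node's actual validation code:
import com.google.protobuf.ByteString;
import com.hederahashgraph.api.proto.java.Key;

class KeyLengthSketch {
    // Ed25519 public keys are exactly 32 bytes; anything shorter cannot decode as a valid key,
    // which is why the 10-byte "shortKey" above drags the enclosing threshold/list keys into
    // a BAD_ENCODING precheck failure.
    static boolean looksLikeValidEd25519(Key key) {
        return key.getEd25519().size() == 32;
    }

    public static void main(String[] args) {
        Key shortKey = Key.newBuilder().setEd25519(ByteString.copyFrom(new byte[10])).build();
        Key goodKey = Key.newBuilder().setEd25519(ByteString.copyFrom(new byte[32])).build();
        System.out.println(looksLikeValidEd25519(shortKey)); // false
        System.out.println(looksLikeValidEd25519(goodKey));  // true
    }
}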
use of com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE in project hedera-services by hashgraph.
the class CreateTopicPerfSuite method createTopicPerf.
private HapiApiSpec createTopicPerf() {
    final int NUM_TOPICS = 100_000;
    KeyShape submitKeyShape = threshOf(2, SIMPLE, SIMPLE, listOf(2));
    return defaultHapiSpec("createTopicPerf").given().when(
            startThroughputObs("createTopicThroughput").msToSaturateQueues(50L),
            inParallel(
                    // only ask for a record for the last transaction
                    asOpArray(NUM_TOPICS, i -> (i == (NUM_TOPICS - 1))
                            ? createTopic("testTopic" + i).submitKeyShape(submitKeyShape)
                            : createTopic("testTopic" + i).submitKeyShape(submitKeyShape).deferStatusResolution()))
    ).then(
            // wait until the record of the last transaction is ready
            finishThroughputObs("createTopicThroughput")
                    .gatedByQuery(() -> getTopicInfo("testTopic" + (NUM_TOPICS - 1)))
                    .sleepMs(1_000L)
                    .expiryMs(300_000L));
}
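The throughput here comes from deferring status resolution on every topic creation except the last, so the client never waits on 100,000 receipts; only the final createTopic is fully resolved, and the closing throughput observer gates on its topic info. A minimal stand-alone sketch of that fan-out pattern, with plain strings standing in for the spec operations (asOpArray itself belongs to the BDD framework):
import java.util.function.IntFunction;
import java.util.stream.IntStream;

class FanOutSketch {
    // Build n "operations" by index, exactly like asOpArray does for the spec DSL.
    static <T> T[] opArray(int n, IntFunction<T> opFn, IntFunction<T[]> arrayFn) {
        return IntStream.range(0, n).mapToObj(opFn).toArray(arrayFn);
    }

    public static void main(String[] args) {
        String[] ops = opArray(5,
                i -> (i == 4) ? "createTopic(" + i + ")"                             // last op: fully resolved
                              : "createTopic(" + i + ").deferStatusResolution()",    // all others: fire and forget
                String[]::new);
        for (String op : ops) {
            System.out.println(op);
        }
    }
}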
use of com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE in project hedera-services by hashgraph.
the class CreateAndUpdateOps method variousCryptoMutations.
HapiApiSpec variousCryptoMutations() {
    KeyShape smallKey = SIMPLE;
    KeyShape largeKey = listOf(3);
    long shortExpiry = 100_000L;
    long mediumExpiry = 10 * shortExpiry;
    long eternalExpiry = 10 * mediumExpiry;
    AtomicLong consensusNow = new AtomicLong();
    return customHapiSpec("VariousCryptoMutations")
            .withProperties(Map.of("cost.snapshot.mode", costSnapshotMode.toString()))
            .given(
                    newKeyNamed("sk").shape(smallKey),
                    newKeyNamed("lk").shape(largeKey),
                    cryptoCreate("payer").via("payerCreation").fee(feeToOffer).balance(payerBalance),
                    withOpContext((spec, opLog) -> {
                        var lookup = getTxnRecord("payerCreation").nodePayment(paymentToOffer);
                        allRunFor(spec, lookup);
                        var record = lookup.getResponseRecord();
                        consensusNow.set(record.getConsensusTimestamp().getSeconds());
                    }),
                    cryptoCreate("proxy").fee(feeToOffer)
            ).when(
                    cryptoCreate("sksenp").fee(feeToOffer).payingWith("payer").key("sk").autoRenewSecs(shortExpiry),
                    cryptoCreate("sksep").fee(feeToOffer).payingWith("payer").proxy("0.0.2").key("sk").autoRenewSecs(shortExpiry),
                    cryptoCreate("skmenp").fee(feeToOffer).payingWith("payer").key("sk").autoRenewSecs(mediumExpiry),
                    cryptoCreate("skmep").fee(feeToOffer).payingWith("payer").proxy("0.0.2").key("sk").autoRenewSecs(mediumExpiry),
                    cryptoCreate("skeenp").fee(feeToOffer).payingWith("payer").key("sk").autoRenewSecs(eternalExpiry),
                    cryptoCreate("skeep").fee(feeToOffer).payingWith("payer").proxy("0.0.2").key("sk").autoRenewSecs(eternalExpiry)
            ).then(
                    sourcing(() -> cryptoUpdate("sksenp").fee(feeToOffer).payingWith("payer")
                            .newProxy("proxy").key("lk").expiring(consensusNow.get() + mediumExpiry)),
                    sourcing(() -> cryptoUpdate("skmenp").fee(feeToOffer).payingWith("payer")
                            .newProxy("proxy").key("lk").expiring(consensusNow.get() + eternalExpiry)),
                    sourcing(() -> cryptoUpdate("skeenp").fee(feeToOffer).payingWith("payer")
                            .newProxy("proxy").key("lk")),
                    getAccountInfo("sksenp"),
                    getAccountInfo("skmenp"),
                    getAccountInfo("skeenp"));
}
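The three auto-renew windows are simple multiples of each other: shortExpiry is 100,000 seconds, mediumExpiry ten times that, and eternalExpiry ten times again. A quick arithmetic sketch of what those windows mean in days (class name is illustrative):
class ExpiryMathSketch {
    public static void main(String[] args) {
        long shortExpiry = 100_000L;            // seconds
        long mediumExpiry = 10 * shortExpiry;   // 1,000,000 s
        long eternalExpiry = 10 * mediumExpiry; // 10,000,000 s
        System.out.printf("short   ~ %.1f days%n", shortExpiry / 86_400.0);   // ~1.2
        System.out.printf("medium  ~ %.1f days%n", mediumExpiry / 86_400.0);  // ~11.6
        System.out.printf("eternal ~ %.1f days%n", eternalExpiry / 86_400.0); // ~115.7
    }
}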
use of com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE in project hedera-services by hashgraph.
the class FileExpansionLoadProvider method fileExpansionsFactory.
private Function<HapiApiSpec, OpProvider> fileExpansionsFactory() {
    final SplittableRandom r = new SplittableRandom();
    final Set<String> usableTargets = ConcurrentHashMap.newKeySet();
    final LongFunction<String> targetNameFn = i -> "expandingFile" + i;
    final AtomicInteger nextTargetNum = new AtomicInteger(numActiveTargets.get());
    final var key = "multi";
    final var waclShape = KeyShape.listOf(SIMPLE, threshOf(1, 3), listOf(2));
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            final List<HapiSpecOperation> ops = new ArrayList<>();
            ops.add(newKeyNamed(key).shape(waclShape));
            for (int i = 0, n = numActiveTargets.get(); i < n; i++) {
                ops.add(fileCreate(targetNameFn.apply(i))
                        .key(key)
                        .noLogging()
                        .contents(DATA_CHUNK)
                        .payingWith(GENESIS));
            }
            return ops;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            HapiSpecOperation op;
            if (usableTargets.size() < numActiveTargets.get()) {
                final var name = targetNameFn.apply(nextTargetNum.getAndIncrement());
                op = fileCreate(name)
                        .noLogging()
                        .key(key)
                        .contents(DATA_CHUNK)
                        .payingWith(GENESIS)
                        .deferStatusResolution()
                        .exposingNumTo(num -> {
                            usableTargets.add(name);
                        });
            } else {
                final var skips = r.nextInt(usableTargets.size());
                final var iter = usableTargets.iterator();
                try {
                    for (int i = 0; i < skips; i++) {
                        iter.next();
                    }
                    final var target = iter.next();
                    op = fileAppend(target)
                            .noLogging()
                            .deferStatusResolution()
                            .payingWith(GENESIS)
                            .content(DATA_CHUNK)
                            .hasKnownStatusFrom(MAX_FILE_SIZE_EXCEEDED, SUCCESS)
                            .alertingPost(code -> {
                                if (code == MAX_FILE_SIZE_EXCEEDED) {
                                    log.info("File {} reached max size, no longer in rotation", target);
                                    usableTargets.remove(target);
                                }
                            });
                } catch (Exception ignore) {
                    op = noOp();
                }
            }
            return Optional.of(op);
        }
    };
}
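Once enough targets are usable, the provider appends to a random one by advancing a fresh iterator a random number of steps, which avoids copying the concurrent set on every call. A minimal sketch of that selection pattern in isolation (assumes a non-empty set; names are illustrative):
import java.util.Iterator;
import java.util.Set;
import java.util.SplittableRandom;

class RandomMemberSketch {
    // Pick an arbitrary member by skipping a random number of iterator steps (set must be non-empty).
    static <T> T randomMember(Set<T> set, SplittableRandom r) {
        int skips = r.nextInt(set.size());
        Iterator<T> iter = set.iterator();
        for (int i = 0; i < skips; i++) {
            iter.next();
        }
        return iter.next();
    }

    public static void main(String[] args) {
        Set<String> targets = Set.of("expandingFile0", "expandingFile1", "expandingFile2");
        System.out.println(randomMember(targets, new SplittableRandom()));
    }
}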