Usage example of com.hedera.services.bdd.suites.utils.sysfiles.serdes.SysFileSerde in project hedera-services (by hashgraph):
the tokenTransfersFactory method of class TokenTransfersLoadProvider.
/**
 * Builds a factory of {@link OpProvider}s that drive a token-transfer load test.
 *
 * <p>The provider's {@code suggestedInitializers()} creates, per CI-configured token:
 * a treasury, the token itself, and the configured numbers of sending/receiving
 * accounts (each associated with the token and funded with {@code balanceInit} units
 * from the treasury). Its {@code get()} then alternates direction on every call:
 * first each sender distributes one unit to every receiver, then each receiver
 * distributes one unit back to every sender, keeping balances in steady state.
 *
 * @return a function from a {@link HapiApiSpec} to a load-generating {@link OpProvider}
 */
private Function<HapiApiSpec, OpProvider> tokenTransfersFactory() {
    // TRUE -> senders pay receivers on the next get(); FALSE -> receivers pay senders back
    var firstDir = new AtomicBoolean(Boolean.TRUE);
    // All of these are populated lazily from the CI properties in suggestedInitializers()
    var balanceInit = new AtomicLong();
    var tokensPerTxn = new AtomicInteger();
    var sendingAccountsPerToken = new AtomicInteger();
    var receivingAccountsPerToken = new AtomicInteger();
    List<String> treasuries = new ArrayList<>();
    Map<String, List<String>> senders = new HashMap<>();
    Map<String, List<String>> receivers = new HashMap<>();
    return spec -> new OpProvider() {
        @Override
        public List<HapiSpecOperation> suggestedInitializers() {
            var ciProps = spec.setup().ciPropertiesMap();
            balanceInit.set(ciProps.getLong("balanceInit"));
            tokensPerTxn.set(ciProps.getInteger("tokensPerTxn"));
            sendingAccountsPerToken.set(ciProps.getInteger("sendingAccountsPerToken"));
            receivingAccountsPerToken.set(ciProps.getInteger("receivingAccountsPerToken"));
            // Enough supply for every participant account to hold balanceInit units
            var initialSupply =
                    (sendingAccountsPerToken.get() + receivingAccountsPerToken.get()) * balanceInit.get();
            List<HapiSpecOperation> initializers = new ArrayList<>();
            initializers.add(tokenOpsEnablement());
            /* Temporary, can be removed after the public testnet state used in
            restart tests includes a fee schedule with HTS resource prices. */
            if (spec.setup().defaultNode().equals(asAccount("0.0.3"))) {
                initializers.add(uploadDefaultFeeSchedules(GENESIS));
            } else {
                // Poll the FEE_SCHEDULE system file until it prices token operations;
                // note the inner lambda's `spec` intentionally shadows the outer one.
                initializers.add(withOpContext((spec, opLog) -> {
                    log.info("\n\n" + bannerWith("Waiting for a fee schedule with token ops!"));
                    boolean hasKnownHtsFeeSchedules = false;
                    SysFileSerde<String> serde = new FeesJsonToGrpcBytes();
                    while (!hasKnownHtsFeeSchedules) {
                        var query = QueryVerbs.getFileContents(FEE_SCHEDULE).fee(10_000_000_000L);
                        try {
                            allRunFor(spec, query);
                            var contents = query.getResponse()
                                    .getFileGetContents()
                                    .getFileContents()
                                    .getContents();
                            var schedules = serde.fromRawFile(contents.toByteArray());
                            hasKnownHtsFeeSchedules = schedules.contains("TokenCreate");
                        } catch (Exception e) {
                            // Best-effort polling; log a trimmed message and retry
                            var msg = e.toString();
                            msg = msg.substring(msg.indexOf(":") + 2);
                            log.info("Couldn't check for HTS fee schedules---'{}'", msg);
                        }
                        TimeUnit.SECONDS.sleep(3);
                    }
                    log.info("\n\n" + bannerWith("A fee schedule with token ops now available!"));
                    spec.tryReinitializingFees();
                }));
            }
            // Create each token's treasury, the token, and its funded participants
            for (int i = 0; i < tokensPerTxn.get(); i++) {
                var token = "token" + i;
                var treasury = "treasury" + i;
                initializers.add(cryptoCreate(treasury));
                initializers.add(tokenCreate(token).treasury(treasury).initialSupply(initialSupply));
                treasuries.add(treasury);
                for (int j = 0; j < sendingAccountsPerToken.get(); j++) {
                    var sender = token + "sender" + j;
                    senders.computeIfAbsent(token, ignore -> new ArrayList<>()).add(sender);
                    initializers.add(cryptoCreate(sender));
                    initializers.add(tokenAssociate(sender, token));
                    initializers.add(cryptoTransfer(
                            moving(balanceInit.get(), token).between(treasury, sender)));
                }
                for (int j = 0; j < receivingAccountsPerToken.get(); j++) {
                    var receiver = token + "receiver" + j;
                    receivers.computeIfAbsent(token, ignore -> new ArrayList<>()).add(receiver);
                    initializers.add(cryptoCreate(receiver));
                    initializers.add(tokenAssociate(receiver, token));
                    initializers.add(cryptoTransfer(
                            moving(balanceInit.get(), token).between(treasury, receiver)));
                }
            }
            // Setup runs against a possibly-busy network; tolerate noisy prechecks
            for (HapiSpecOperation op : initializers) {
                if (op instanceof HapiTxnOp) {
                    ((HapiTxnOp) op).hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS);
                }
            }
            return initializers;
        }

        @Override
        public Optional<HapiSpecOperation> get() {
            var numTokens = tokensPerTxn.get();
            var numSenders = sendingAccountsPerToken.get();
            var numReceivers = receivingAccountsPerToken.get();
            final HapiSpecOperation op;
            if (firstDir.get()) {
                // Each sender distributes one unit to every receiver
                op = fanOutTransfer(numTokens, numSenders, numReceivers, "sender", "receiver");
                firstDir.set(Boolean.FALSE);
            } else {
                // Reverse direction: each receiver returns one unit to every sender
                op = fanOutTransfer(numTokens, numReceivers, numSenders, "receiver", "sender");
                firstDir.set(Boolean.TRUE);
            }
            return Optional.of(op);
        }

        /**
         * Builds a single cryptoTransfer in which, for every token, each payer
         * account distributes one unit to each of the payee accounts.
         *
         * @param numTokens number of tokens involved in the transfer
         * @param numPayers number of paying accounts per token
         * @param numPayees number of receiving accounts per token
         * @param payerRole account-name role suffix of the payers ("sender" or "receiver")
         * @param payeeRole account-name role suffix of the payees ("receiver" or "sender")
         * @return the fully configured, status-tolerant transfer operation
         */
        private HapiSpecOperation fanOutTransfer(
                int numTokens, int numPayers, int numPayees, String payerRole, String payeeRole) {
            var xfers = new TokenMovement[numTokens * numPayers];
            for (int i = 0; i < numTokens; i++) {
                var token = "token" + i;
                for (int j = 0; j < numPayers; j++) {
                    var payees = new String[numPayees];
                    for (int k = 0; k < numPayees; k++) {
                        payees[k] = token + payeeRole + k;
                    }
                    // Moving numPayees units total, i.e. one unit per payee
                    xfers[i * numPayers + j] =
                            moving(numPayees, token).distributing(token + payerRole + j, payees);
                }
            }
            // Load test: tolerate the statuses a saturated network can legitimately return
            return cryptoTransfer(xfers)
                    .hasKnownStatusFrom(OK, DUPLICATE_TRANSACTION, SUCCESS, UNKNOWN, INSUFFICIENT_PAYER_BALANCE)
                    .hasRetryPrecheckFrom(NOISY_RETRY_PRECHECKS)
                    .hasPrecheckFrom(OK, PLATFORM_NOT_ACTIVE)
                    .noLogging()
                    .deferStatusResolution();
        }
    };
}
Usage example of com.hedera.services.bdd.suites.utils.sysfiles.serdes.SysFileSerde in project hedera-services (by hashgraph):
the appropriateQuery method of class ValidationScenarios.
/**
 * Builds the file-download query appropriate to the given sys-files scenario.
 *
 * <p>In compare mode, the downloaded contents are checked against the expected
 * on-disk copy (after an ordering normalization for the throttle/fee files
 * 0.0.121 and 0.0.122); in snapshot mode, the contents are simply saved locally.
 *
 * @param sys the sys-files-down scenario configuration
 * @param fileNum the number of the target system file
 * @return the configured file-contents query
 * @throws IllegalStateException if the comparison file cannot be read
 * @throws IllegalArgumentException if the scenario's eval mode is unrecognized
 */
private static HapiSpecOperation appropriateQuery(SysFilesDownScenario sys, long fileNum) {
    final String evalMode = sys.getEvalMode();
    final String fileId = String.format("0.0.%d", fileNum);
    final SysFileSerde<String> serde = SYS_FILE_SERDES.get(fileNum);
    final String qualifiedName = params.getTargetNetwork() + "-" + serde.preferredFileName();
    final String expectedLoc = "files/" + qualifiedName;
    // Files 121/122 need a canonical ordering before a byte-level comparison
    final boolean needsOrdering = (fileNum == 121 || fileNum == 122);
    final UnaryOperator<byte[]> preCompare =
            needsOrdering ? ValidationScenarios::asOrdered : UnaryOperator.identity();
    if (SysFilesDownScenario.COMPARE_EVAL_MODE.equals(evalMode)) {
        final String actualLoc = "files/actual-" + qualifiedName;
        final byte[] expected;
        try {
            expected = serde.toRawFile(readString(Paths.get(expectedLoc)));
        } catch (IOException e) {
            throw new IllegalStateException("Cannot read comparison file @ '" + expectedLoc + "'!", e);
        }
        return getFileContents(fileId)
                .payingWith(SCENARIO_PAYER_NAME)
                .saveReadableTo(serde::fromRawFile, actualLoc)
                .hasContents(spec -> expected)
                .afterBytesTransform(preCompare);
    }
    if (SysFilesDownScenario.SNAPSHOT_EVAL_MODE.equals(evalMode)) {
        return getFileContents(fileId)
                .payingWith(SCENARIO_PAYER_NAME)
                .saveReadableTo(serde::fromRawFile, expectedLoc);
    }
    throw new IllegalArgumentException("No such sys files eval mode '" + evalMode + "'!");
}
Aggregations