Use of com.hedera.services.bdd.spec.transactions.consensus.HapiMessageSubmit in the hedera-services project by Hashgraph.
From the class RandomMessageSubmit, method get().
/**
 * Supplies a randomized message-submit operation against a qualifying topic, if one exists.
 *
 * @return {@code Optional.empty()} when no qualifying topic is available; otherwise a
 *         {@link HapiMessageSubmit} carrying the preset message bytes, accepting any of the
 *         configured permissible known statuses and the standard permissible prechecks
 */
@Override
public Optional<HapiSpecOperation> get() {
    final var target = topics.getQualifying();
    if (target.isEmpty()) {
        return Optional.empty();
    }
    // Decode with an explicit charset: the bare String(byte[]) constructor uses the
    // platform default charset (pre-JDK 18), which varies across environments and
    // would make the submitted message content environment-dependent.
    final HapiMessageSubmit op = submitMessageTo(target.get())
            .message(new String(messageBytes, java.nio.charset.StandardCharsets.UTF_8))
            .hasKnownStatusFrom(permissibleOutcomes)
            .hasPrecheckFrom(STANDARD_PERMISSIBLE_PRECHECKS);
    return Optional.of(op);
}
Use of com.hedera.services.bdd.spec.transactions.consensus.HapiMessageSubmit in the hedera-services project by Hashgraph.
From the class UtilVerbs, method chunkAFile().
/**
 * Builds an operation that streams the contents of a file to a consensus topic as a
 * sequence of chunked HCS message submissions.
 *
 * <p>At spec-execution time, CI properties (if present) may override the file path,
 * chunk size, and running-hash validation flag, and may shard the target topic name by
 * appending {@code currentCount % (threads * collisionAvoidanceFactor)} to spread load
 * across topics. Every chunk after the first shares the initial transaction ID via
 * {@code chunkInfo}; the first chunk uses the preset timestamp.
 *
 * @param filePath  path of the file whose bytes are submitted (CI property "file" overrides)
 * @param chunkSize maximum bytes per chunk (CI property "chunkSize" overrides)
 * @param payer     account paying for each submission and each record query
 * @param topic     base name of the target topic (possibly suffixed, see above)
 * @param count     shared counter; incremented once per invocation to derive the topic suffix
 * @return a deferred operation that performs all chunk submissions when run
 */
public static HapiSpecOperation chunkAFile(String filePath, int chunkSize, String payer, String topic, AtomicLong count) {
    return withOpContext((spec, ctxLog) -> {
        List<HapiSpecOperation> opsList = new ArrayList<>();
        // Defaults taken from the arguments; CI properties below may override them.
        // (String is immutable — no need to copy filePath/topic defensively.)
        String overriddenFile = filePath;
        int overriddenChunkSize = chunkSize;
        String overriddenTopic = topic;
        boolean validateRunningHash = false;
        long currentCount = count.getAndIncrement();
        if (currentCount >= 0) {
            var ciProperties = spec.setup().ciPropertiesMap();
            if (null != ciProperties) {
                if (ciProperties.has("file")) {
                    overriddenFile = ciProperties.get("file");
                }
                if (ciProperties.has("chunkSize")) {
                    overriddenChunkSize = ciProperties.getInteger("chunkSize");
                }
                if (ciProperties.has("validateRunningHash")) {
                    validateRunningHash = ciProperties.getBoolean("validateRunningHash");
                }
                int threads = PerfTestLoadSettings.DEFAULT_THREADS;
                if (ciProperties.has("threads")) {
                    threads = ciProperties.getInteger("threads");
                }
                int factor = HCSChunkingRealisticPerfSuite.DEFAULT_COLLISION_AVOIDANCE_FACTOR;
                if (ciProperties.has("collisionAvoidanceFactor")) {
                    factor = ciProperties.getInteger("collisionAvoidanceFactor");
                }
                // Shard the topic name so concurrent clients spread across topics.
                overriddenTopic += currentCount % (threads * factor);
            }
        }
        ByteString msg = ByteString.copyFrom(Files.readAllBytes(Paths.get(overriddenFile)));
        int size = msg.size();
        // Ceiling division: total number of chunks needed to cover the file.
        int totalChunks = (size + overriddenChunkSize - 1) / overriddenChunkSize;
        int position = 0;
        int currentChunk = 0;
        // All chunks share this transaction ID so the receiving side can reassemble them.
        var initialTransactionID = asTransactionID(spec, Optional.of(payer));
        while (position < size) {
            ++currentChunk;
            int newPosition = Math.min(size, position + overriddenChunkSize);
            ByteString subMsg = msg.substring(position, newPosition);
            HapiMessageSubmit subOp = submitMessageTo(overriddenTopic)
                    .message(subMsg)
                    .chunkInfo(totalChunks, currentChunk, initialTransactionID)
                    .payingWith(payer)
                    .hasKnownStatus(SUCCESS)
                    .hasRetryPrecheckFrom(BUSY, DUPLICATE_TRANSACTION, PLATFORM_TRANSACTION_NOT_CREATED, INSUFFICIENT_PAYER_BALANCE)
                    .noLogging()
                    .suppressStats(true);
            if (1 == currentChunk) {
                // Only the first chunk pins the preset timestamp.
                subOp = subOp.usePresetTimestamp();
            }
            if (validateRunningHash) {
                // Submit synchronously via a named txn, then verify the topic's running
                // hash incorporates this chunk's bytes.
                String txnName = "submitMessage-" + overriddenTopic + "-" + currentChunk;
                HapiGetTxnRecord validateOp = getTxnRecord(txnName)
                        .hasCorrectRunningHash(overriddenTopic, subMsg.toByteArray())
                        .payingWith(payer)
                        .noLogging();
                opsList.add(subOp.via(txnName));
                opsList.add(validateOp);
            } else {
                // No validation requested: defer status resolution for throughput.
                opsList.add(subOp.deferStatusResolution());
            }
            position = newPosition;
        }
        CustomSpecAssert.allRunFor(spec, opsList);
    });
}
Aggregations