Use of com.hedera.mirror.importer.parser.record.entity.EntityBatchCleanupEvent in the hedera-mirror-node project by hashgraph.
From the class RedisEntityListenerTest, the method onSlowPublish:
@Test
void onSlowPublish() {
    // given: more messages than the Redis queue can buffer
    int publishCount = redisProperties.getQueueCapacity() + 2;
    Sinks.Many<Object> sink = Sinks.many().multicast().directBestEffort();
    Flux<Integer> publisher = Flux.range(1, publishCount).doOnNext(i -> {
        submitAndSave(topicMessage());
        entityListener.onCleanup(new EntityBatchCleanupEvent(this));
    });

    // when: each pipelined publish is delayed to simulate a slow Redis
    when(redisOperations.executePipelined(any(SessionCallback.class))).then((callback) -> {
        Uninterruptibles.sleepUninterruptibly(Duration.ofMillis(50L));
        sink.tryEmitNext(callback);
        return null;
    });

    // then: every message is eventually published despite the slow consumer
    StepVerifier redisVerifier = sink.asFlux()
            .subscribeOn(Schedulers.parallel())
            .as(StepVerifier::create)
            .expectNextCount(publishCount)
            .thenCancel()
            .verifyLater();

    publisher.publishOn(Schedulers.parallel())
            .as(StepVerifier::create)
            .expectNextCount(publishCount)
            .expectComplete()
            .verify(TIMEOUT);

    redisVerifier.verify(TIMEOUT);
    verify(redisOperations, timeout(TIMEOUT.toMillis() * 5).times(publishCount))
            .executePipelined(any(SessionCallback.class));
}
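The "then" section relies on reactor-test's deferred verification: verifyLater() subscribes the verifier up front, so emissions produced while the second pipeline is still running are not missed, and verify(TIMEOUT) blocks on the recorded expectations afterwards. Below is a minimal, self-contained sketch of that pattern; the class name VerifyLaterSketch and the values used are illustrative and not part of the project.

import java.time.Duration;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
import reactor.core.scheduler.Schedulers;
import reactor.test.StepVerifier;

class VerifyLaterSketch {

    void deferredVerification() {
        Sinks.Many<Integer> sink = Sinks.many().multicast().directBestEffort();

        // Subscribe the verifier first so emissions are not dropped, but do not block yet.
        StepVerifier verifier = sink.asFlux()
                .as(StepVerifier::create)
                .expectNextCount(3)
                .thenCancel()
                .verifyLater();

        // Drive emissions from another pipeline, as the test does with its publisher Flux.
        Flux.range(1, 3)
                .publishOn(Schedulers.parallel())
                .doOnNext(sink::tryEmitNext)
                .blockLast(Duration.ofSeconds(5));

        // Block until the expectations recorded above are satisfied or the timeout elapses.
        verifier.verify(Duration.ofSeconds(5));
    }
}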
Use of com.hedera.mirror.importer.parser.record.entity.EntityBatchCleanupEvent in the hedera-mirror-node project by hashgraph.
From the class SqlEntityListener, the method cleanup:
private void cleanup() {
    try {
        assessedCustomFees.clear();
        contracts.clear();
        contractState.clear();
        contractLogs.clear();
        contractResults.clear();
        contractStateChanges.clear();
        cryptoAllowances.clear();
        cryptoAllowanceState.clear();
        cryptoTransfers.clear();
        customFees.clear();
        entities.clear();
        entityState.clear();
        ethereumTransactions.clear();
        fileData.clear();
        liveHashes.clear();
        nonFeeTransfers.clear();
        stakingRewardTransfers.clear();
        nfts.clear();
        nftAllowances.clear();
        nftAllowanceState.clear();
        nftTransferState.clear();
        nodeStakes.clear();
        schedules.clear();
        topicMessages.clear();
        tokenAccounts.clear();
        tokenAccountState.clear();
        tokenAllowances.clear();
        tokenAllowanceState.clear();
        tokens.clear();
        tokenDissociateTransfers.clear();
        tokenTransfers.clear();
        transactions.clear();
        transactionSignatures.clear();
        eventPublisher.publishEvent(new EntityBatchCleanupEvent(this));
    } catch (BeanCreationNotAllowedException e) {
        // This error can occur during shutdown
    }
}
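After clearing its per-batch collections, SqlEntityListener broadcasts an EntityBatchCleanupEvent through Spring's ApplicationEventPublisher so that other components can react to the end of a batch (the tests above hand the same event to RedisEntityListener.onCleanup directly). Assuming the event follows Spring's standard ApplicationEvent pattern, which is consistent with the new EntityBatchCleanupEvent(this) calls shown here, the event type could be as small as the sketch below; the actual class in the project may carry additional state.

package com.hedera.mirror.importer.parser.record.entity;

import org.springframework.context.ApplicationEvent;

// Minimal sketch of the event type: a marker event whose only payload is its source,
// i.e. the component that finished cleaning up a batch. Assumed, not copied from the project.
public class EntityBatchCleanupEvent extends ApplicationEvent {

    public EntityBatchCleanupEvent(Object source) {
        super(source);
    }
}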
Use of com.hedera.mirror.importer.parser.record.entity.EntityBatchCleanupEvent in the hedera-mirror-node project by hashgraph.
From the class RedisEntityListenerTest, the method onDuplicateTopicMessages:
@Test
void onDuplicateTopicMessages() throws InterruptedException {
    TopicMessage topicMessage1 = topicMessage();
    TopicMessage topicMessage2 = topicMessage();
    TopicMessage topicMessage3 = topicMessage();

    // submitAndSave two messages, verify publish logic called twice
    submitAndSave(topicMessage1);
    submitAndSave(topicMessage2);
    verify(redisOperations, timeout(TIMEOUT.toMillis()).times(2))
            .executePipelined(any(SessionCallback.class));

    // submitAndSave two duplicate messages, verify publish was not attempted
    Mockito.reset(redisOperations);
    submitAndSave(topicMessage1);
    submitAndSave(topicMessage2);
    verify(redisOperations, timeout(TIMEOUT.toMillis()).times(0))
            .executePipelined(any(SessionCallback.class));

    // submitAndSave third new unique message, verify publish called once
    Mockito.reset(redisOperations);
    submitAndSave(topicMessage3);
    entityListener.onCleanup(new EntityBatchCleanupEvent(this));
    verify(redisOperations, timeout(TIMEOUT.toMillis()).times(1))
            .executePipelined(any(SessionCallback.class));
}
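The test submits the same messages twice and expects no second publish, then submits a new message and fires an EntityBatchCleanupEvent by calling onCleanup directly. One way to obtain that behavior is to remember which messages have already been pipelined and to treat the cleanup event as the point where per-batch bookkeeping is reset. The sketch below only illustrates that idea; the names DedupPublishSketch, publishedMessages, and publish are hypothetical, project-type imports are omitted, and the real RedisEntityListener may implement deduplication and cleanup differently.

import java.util.HashSet;
import java.util.Set;

// Hypothetical sketch of the deduplication/cleanup interplay the test exercises;
// not the project's actual RedisEntityListener implementation.
public class DedupPublishSketch {

    private final Set<TopicMessage> publishedMessages = new HashSet<>();

    // Called for every saved topic message; only previously unseen messages are published.
    public void onTopicMessage(TopicMessage topicMessage) {
        if (publishedMessages.add(topicMessage)) {
            publish(topicMessage);
        }
    }

    // Called when an EntityBatchCleanupEvent arrives; resets the per-batch bookkeeping.
    public void onCleanup(EntityBatchCleanupEvent event) {
        publishedMessages.clear();
    }

    private void publish(TopicMessage topicMessage) {
        // Placeholder for the actual Redis pipeline publish, e.g. redisOperations.executePipelined(...).
    }
}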