Usage example of com.rabbitmq.stream.codec.SimpleCodec in the rabbitmq-stream-java-client project by rabbitmq:
the init method of the StreamProducerUnitTest class.
@BeforeEach
@SuppressWarnings("unchecked")
void init() {
  // Open Mockito annotations and create the scheduler used by the producer under test.
  mocks = MockitoAnnotations.openMocks(this);
  executorService = Executors.newScheduledThreadPool(2);

  // Netty channel mock: real allocator, writes complete via the mocked future.
  when(channel.alloc()).thenReturn(ByteBufAllocator.DEFAULT);
  when(channel.writeAndFlush(Mockito.any())).thenReturn(channelFuture);

  // Record every buffer handed out so the test can release them afterwards.
  when(client.allocateNoCheck(any(ByteBufAllocator.class), anyInt()))
      .thenAnswer(
          (Answer<ByteBuf>)
              inv -> {
                ByteBufAllocator alloc = inv.getArgument(0);
                int size = inv.getArgument(1);
                ByteBuf buf = alloc.buffer(size);
                buffers.add(buf);
                return buf;
              });
  when(client.maxFrameSize()).thenReturn(Integer.MAX_VALUE);

  // Route the 4-arg publishInternal through the 5-arg overload with the mocked
  // channel, and let that overload run the real implementation.
  when(client.publishInternal(
          anyByte(), anyList(), any(OutboundEntityWriteCallback.class), any(ToLongFunction.class)))
      .thenAnswer(
          inv ->
              client.publishInternal(
                  channel,
                  inv.getArgument(0),
                  inv.getArgument(1),
                  inv.getArgument(2),
                  inv.getArgument(3)));
  when(client.publishInternal(
          any(Channel.class),
          anyByte(),
          anyList(),
          any(OutboundEntityWriteCallback.class),
          any(ToLongFunction.class)))
      .thenCallRealMethod();

  // Environment mock wiring: scheduler, locator client, clock and the simple codec.
  when(env.scheduledExecutorService()).thenReturn(executorService);
  when(env.locator()).thenReturn(client);
  when(env.locatorOperation(any())).thenCallRealMethod();
  when(env.clock()).thenReturn(clock);
  when(env.codec()).thenReturn(new SimpleCodec());

  // Registering a producer wires it to the mocked client; cleanup is a no-op.
  doAnswer(
          (Answer<Runnable>)
              inv -> {
                StreamProducer producer = inv.getArgument(0);
                producer.setClient(client);
                producer.setPublisherId((byte) 0);
                Runnable noOpCleanup = () -> {};
                return noOpCleanup;
              })
      .when(env)
      .registerProducer(any(StreamProducer.class), nullable(String.class), anyString());
}
Usage example of com.rabbitmq.stream.codec.SimpleCodec in the rabbitmq-stream-java-client project by rabbitmq:
the publishConsumeWithSimpleCodec method of the ClientTest class.
@Test
void publishConsumeWithSimpleCodec() throws Exception {
  int messageCount = 1000;
  Codec codec = new SimpleCodec();

  // Publish messageCount messages whose bodies are their index as a UTF-8 string.
  Client publisher = cf.get(new Client.ClientParameters().codec(codec));
  publisher.declarePublisher(b(1), null, stream);
  // Fix: bound the loop with messageCount instead of a duplicated hard-coded 1000,
  // so the publish count stays consistent with the latch and the final assertions.
  IntStream.range(0, messageCount)
      .forEach(
          i ->
              publisher.publish(
                  b(1),
                  Collections.singletonList(
                      publisher
                          .messageBuilder()
                          .addData(String.valueOf(i).getBytes(UTF8))
                          .build())));

  CountDownLatch consumeLatch = new CountDownLatch(messageCount);
  Set<String> messageBodies = ConcurrentHashMap.newKeySet(messageCount);

  // Consumer side: grant one credit per chunk and collect each decoded body.
  Client consumer =
      cf.get(
          new Client.ClientParameters()
              .codec(codec)
              .chunkListener(
                  (client, subscriptionId, offset, messagesInChunk, dataSize) ->
                      client.credit(subscriptionId, 1))
              .messageListener(
                  (subscriptionId, offset, chunkTimestamp, message) -> {
                    // Fix: decode with the same UTF8 charset used when encoding,
                    // instead of the platform default charset.
                    messageBodies.add(new String(message.getBodyAsBinary(), UTF8));
                    consumeLatch.countDown();
                  }));
  consumer.subscribe(b(1), stream, OffsetSpecification.first(), 10);

  // All messages must arrive within the timeout, and every body must be present.
  assertThat(consumeLatch.await(10, SECONDS)).isTrue();
  IntStream.range(0, messageCount)
      .forEach(i -> assertThat(messageBodies).contains(String.valueOf(i)));
}
Aggregations