Use of io.smallrye.reactive.messaging.kafka.impl.KafkaSink in project smallrye-reactive-messaging by smallrye.
In class KafkaSinkTest, method testSinkUsingInteger:
@SuppressWarnings("unchecked")
@Test
public void testSinkUsingInteger() {
ConsumerTask<String, Integer> consumed = companion.consumeIntegers().fromTopics(topic, 10, Duration.ofSeconds(10));
MapBasedConfig config = getBaseConfig().with("topic", topic).with("value.serializer", IntegerSerializer.class.getName()).with("partition", 0).with("channel-name", "testSinkUsingInteger");
KafkaConnectorOutgoingConfiguration oc = new KafkaConnectorOutgoingConfiguration(config);
sink = new KafkaSink(oc, CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance());
Subscriber<? extends Message<?>> subscriber = sink.getSink().build();
Multi.createFrom().range(0, 10).map(Message::of).subscribe((Subscriber<? super Message<Integer>>) subscriber);
assertThat(consumed.awaitCompletion(Duration.ofMinutes(1)).count()).isEqualTo(10);
}
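The getBaseConfig() helper is not shown in this excerpt. A minimal sketch of what it could provide, assuming the test class holds a KafkaCompanion named companion and defaults the key serializer to strings (both assumptions, not taken from the original):

private MapBasedConfig getBaseConfig() {
    // Hypothetical helper: wires the broker address and a default key serializer,
    // so each test only adds its topic, value serializer and channel name.
    return new MapBasedConfig()
            .with("bootstrap.servers", companion.getBootstrapServers())
            .with("key.serializer", StringSerializer.class.getName());
}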
Use of io.smallrye.reactive.messaging.kafka.impl.KafkaSink in project smallrye-reactive-messaging by smallrye.
In class ReactiveKafkaProducerTest, method createSink:
public KafkaSink createSink() {
    MapBasedConfig config = createProducerConfig()
            .put("channel-name", "test-" + ThreadLocalRandom.current().nextInt())
            .put("topic", topic);
    KafkaSink sink = new KafkaSink(new KafkaConnectorOutgoingConfiguration(config),
            CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance());
    this.sinks.add(sink);
    return sink;
}
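Because createSink() registers every created sink in the sinks list, the test class presumably closes them in a teardown step. A sketch of such a teardown, assuming KafkaSink exposes a closeQuietly() method (an assumption based on other tests in the project, not confirmed by this excerpt):

@AfterEach
void closeSinks() {
    // Assumed cleanup: close every sink created during the test.
    sinks.forEach(KafkaSink::closeQuietly);
}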
Use of io.smallrye.reactive.messaging.kafka.impl.KafkaSink in project smallrye-reactive-messaging by smallrye.
In class SerializerConfigurationTest, method testThatWhenNotSetKeySerializerIsString:
@Test
public void testThatWhenNotSetKeySerializerIsString() {
    MapBasedConfig config = commonConsumerConfiguration();
    sink = new KafkaSink(new KafkaConnectorOutgoingConfiguration(config),
            CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance());

    ConsumerTask<String, String> consumed = companion.consumeStrings()
            .fromTopics(topic, 4, Duration.ofSeconds(10));

    Subscriber<? extends Message<?>> subscriber = sink.getSink().build();
    Multi.createFrom().items(
            Message.of(of("key", "value")),
            Message.of(of(null, "value")),
            Message.of(of("key", null)),
            Message.of(of(null, null)))
            .subscribe((Subscriber<? super Message<?>>) subscriber);

    await().until(() -> consumed.getRecords().size() == 4);
    assertThat(consumed.getRecords().get(0).key()).isEqualTo("key");
    assertThat(consumed.getRecords().get(0).value()).isEqualTo("value");
    assertThat(consumed.getRecords().get(1).key()).isNull();
    assertThat(consumed.getRecords().get(1).value()).isEqualTo("value");
    assertThat(consumed.getRecords().get(2).key()).isEqualTo("key");
    assertThat(consumed.getRecords().get(2).value()).isNull();
    assertThat(consumed.getRecords().get(3).key()).isNull();
    assertThat(consumed.getRecords().get(3).value()).isNull();
}
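commonConsumerConfiguration() is defined elsewhere in SerializerConfigurationTest. A hypothetical sketch of what it might set, assuming it supplies the broker address, topic and channel name and defaults the value serializer to strings (none of this is confirmed by the excerpt):

private MapBasedConfig commonConsumerConfiguration() {
    // Assumed shared configuration; individual tests override the serializers.
    return new MapBasedConfig()
            .with("bootstrap.servers", companion.getBootstrapServers())
            .with("topic", topic)
            .with("channel-name", topic)
            .with("value.serializer", StringSerializer.class.getName());
}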
Use of io.smallrye.reactive.messaging.kafka.impl.KafkaSink in project smallrye-reactive-messaging by smallrye.
In class SerializerConfigurationTest, method testKeySerializationFailure:
@Test
public void testKeySerializationFailure() {
    MapBasedConfig config = commonConsumerConfiguration()
            .with("value.serializer", JsonObjectSerializer.class.getName())
            .with("key.serializer", JsonObjectSerializer.class.getName())
            .with("retries", 0L);
    sink = new KafkaSink(new KafkaConnectorOutgoingConfiguration(config),
            CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance());

    Subscriber<? extends Message<?>> subscriber = sink.getSink().build();

    AtomicBoolean nacked = new AtomicBoolean();
    Multi.createFrom().items(Message.of(of(125.25, new JsonObject().put("k", "v")))
            .withNack(t -> {
                nacked.set(true);
                return CompletableFuture.completedFuture(null);
            }))
            .subscribe((Subscriber<? super Message<?>>) subscriber);

    await().until(nacked::get);
}
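A hedged variation of the same idea (not part of the original excerpt): the nack path should also be taken when the value, rather than the key, cannot be serialized, for example a Double value while JsonObjectSerializer is configured as value.serializer:

AtomicBoolean valueNacked = new AtomicBoolean();
Multi.createFrom().items(Message.of(of(new JsonObject().put("k", "v"), 125.25))
        .withNack(t -> {
            // The Double value cannot be handled by JsonObjectSerializer,
            // so the message is expected to be nacked.
            valueNacked.set(true);
            return CompletableFuture.completedFuture(null);
        }))
        .subscribe((Subscriber<? super Message<?>>) subscriber);
await().until(valueNacked::get);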
Use of io.smallrye.reactive.messaging.kafka.impl.KafkaSink in project smallrye-reactive-messaging by smallrye.
In class KafkaSinkWithCloudEventsTest, method testSendingBinaryCloudEventsWithContentType:
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testSendingBinaryCloudEventsWithContentType() {
KafkaMapBasedConfig config = newCommonConfig();
config.put("topic", topic);
config.put("value.serializer", StringSerializer.class.getName());
config.put("channel-name", topic);
KafkaConnectorOutgoingConfiguration oc = new KafkaConnectorOutgoingConfiguration(config);
sink = new KafkaSink(oc, CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance());
ConsumerTask<String, String> records = companion.consumeStrings().fromTopics(topic);
Message<?> message = Message.of("hello").addMetadata(OutgoingCloudEventMetadata.builder().withSource(URI.create("test://test")).withType("type").withId("some id").withDataContentType("text/plain").build());
Multi.createFrom().<Message<?>>item(message).subscribe().withSubscriber((Subscriber) sink.getSink().build());
await().until(() -> records.getRecords().size() == 1);
ConsumerRecord<String, String> record = records.getRecords().get(0);
assertThat(record.topic()).isEqualTo(topic);
assertThat(record.key()).isNull();
assertThat(record.headers()).contains(new RecordHeader("ce_specversion", "1.0".getBytes()), new RecordHeader("ce_type", "type".getBytes()), // Rules 3.2.1
new RecordHeader("ce_datacontenttype", "text/plain".getBytes()), new RecordHeader("content-type", "text/plain".getBytes()), new RecordHeader("ce_source", "test://test".getBytes()), new RecordHeader("ce_id", "some id".getBytes()));
assertThat(record.value()).isEqualTo("hello");
}
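As a small follow-up, individual binary-mode CloudEvent attributes can also be read back from the consumed record with the Kafka Headers API (an added sketch, not part of the original test; Header is org.apache.kafka.common.header.Header):

// Read a single CloudEvent attribute back from the record headers.
Header specVersion = record.headers().lastHeader("ce_specversion");
assertThat(new String(specVersion.value())).isEqualTo("1.0");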