Usage example of com.bakdata.quick.common.config.KafkaConfig in the project quick by bakdata: class MutationFetcherTest, method shouldIngestDataWithDifferentKeyTypes.
@ParameterizedTest(name = "shouldIngestDataWithDifferentKeyTypes ({0})")
@MethodSource("provideValueArgumentsForKeys")
<K, T> void shouldIngestDataWithDifferentKeyTypes(final TestParameterBuilder<K, String, T> testParameter) throws Exception {
    // Topic under test plus the key-type metadata supplied by the parameter source.
    final String testTopic = testParameter.getTopic();
    final QuickData<T> keyData = testParameter.getInfoType();

    // Topic metadata: parameterized key type, plain string values, mutable writes.
    final QuickTopicData<T, String> topicData =
        new QuickTopicData<>(testTopic, TopicWriteType.MUTABLE, keyData, TestTypeUtils.newStringData());

    final KafkaConfig config = new KafkaConfig(kafkaCluster.getBrokerList(), schemaRegistry.getUrl());
    kafkaCluster.createTopic(TopicConfig.withName(testTopic).useDefaults());

    final TopicTypeService types = topicTypeService(keyData.getType(), QuickTopicType.STRING, null);
    final KafkaIngestService ingestService = new KafkaIngestService(types, config);
    final DataFetcher<String> fetcher =
        new MutationFetcher<>(testTopic, "id", "name", new Lazy<>(() -> topicData), ingestService);

    // Build a GraphQL environment carrying the key/value pair as mutation arguments.
    final KeyValue<K, String> input = testParameter.getKeyValue();
    final DataFetchingEnvironment environment = DataFetchingEnvironmentImpl.newDataFetchingEnvironment()
        .arguments(Map.of("id", input.getKey(), "name", input.getValue()))
        .build();

    // The mutation fetcher should echo the ingested value back to the caller.
    final String returned = fetcher.get(environment);
    assertThat(returned).isEqualTo(input.getValue());

    // Confirm the record actually landed in Kafka with the expected key and value.
    final Optional<KeyValue<String, String>> firstRecord = kafkaCluster
        .read(ReadKeyValues.from(testTopic, String.class)
            .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyData.getSerde().deserializer().getClass())
            .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class))
        .stream()
        .findFirst();
    assertThat(firstRecord)
        .isPresent()
        .get()
        .hasFieldOrPropertyWithValue("key", input.getKey())
        .hasFieldOrPropertyWithValue("value", input.getValue());
}
Usage example of com.bakdata.quick.common.config.KafkaConfig in the project quick by bakdata: class MutationFetcherTest, method shouldIngestDataWithDifferentValueTypes.
@ParameterizedTest(name = "shouldIngestDataWithDifferentValueTypes ({0})")
@MethodSource("provideValueArguments")
<V, T> void shouldIngestDataWithDifferentValueTypes(final TestParameterBuilder<String, V, T> testParameter) throws Exception {
    // Topic under test plus the value-type metadata supplied by the parameter source.
    final String testTopic = testParameter.getTopic();
    final QuickData<T> valueData = testParameter.getInfoType();

    // Topic metadata: plain string keys, parameterized value type, mutable writes.
    final QuickTopicData<String, T> topicData =
        new QuickTopicData<>(testTopic, TopicWriteType.MUTABLE, TestTypeUtils.newStringData(), valueData);

    final KafkaConfig config = new KafkaConfig(kafkaCluster.getBrokerList(), schemaRegistry.getUrl());
    kafkaCluster.createTopic(TopicConfig.withName(testTopic).useDefaults());

    final KafkaIngestService ingestService =
        new KafkaIngestService(topicTypeService(QuickTopicType.STRING, valueData.getType(), null), config);
    final DataFetcher<T> fetcher =
        new MutationFetcher<>(testTopic, "id", "name", new Lazy<>(() -> topicData), ingestService);

    // Build a GraphQL environment carrying the key/value pair as mutation arguments.
    final KeyValue<String, V> input = testParameter.getKeyValue();
    final DataFetchingEnvironment environment = DataFetchingEnvironmentImpl.newDataFetchingEnvironment()
        .arguments(Map.of("id", input.getKey(), "name", input.getValue()))
        .build();

    // The mutation fetcher should echo the ingested value back to the caller.
    final T returned = fetcher.get(environment);
    assertThat(returned).isEqualTo(input.getValue());

    // Confirm the record actually landed in Kafka with the expected key and value.
    // The value deserializer may need the schema registry (e.g. for Avro payloads).
    final Optional<KeyValue<String, T>> firstRecord = kafkaCluster
        .read(ReadKeyValues.from(testTopic, testParameter.getClassType())
            .with("schema.registry.url", schemaRegistry.getUrl())
            .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueData.getSerde().deserializer().getClass()))
        .stream()
        .findFirst();
    assertThat(firstRecord)
        .isPresent()
        .get()
        .hasFieldOrPropertyWithValue("key", input.getKey())
        .hasFieldOrPropertyWithValue("value", input.getValue());
}
Usage example of com.bakdata.quick.common.config.KafkaConfig in the project quick by bakdata: class SubscriptionFetcherTest, method shouldFetchValues.
@ParameterizedTest(name = "shouldFetchValues ({0})")
@MethodSource("provideValueArguments")
<V> void shouldFetchValues(final String topic, final List<KeyValue<String, V>> keyValues, final QuickData<V> valueInfo, final List<V> expected) throws InterruptedException {
    // Immutable topic with string keys and the parameterized value type.
    final QuickTopicData<String, V> topicData =
        new QuickTopicData<>(topic, TopicWriteType.IMMUTABLE, TestTypeUtils.newStringData(), valueInfo);
    // No real schema registry is needed for this test, hence the dummy URL.
    final KafkaConfig config = new KafkaConfig(kafkaCluster.getBrokerList(), "http://no");

    // Seed the topic transactionally with the provided key/value pairs.
    kafkaCluster.createTopic(TopicConfig.withName(topic).useDefaults());
    final SendKeyValuesTransactional<String, V> seedRequest = SendKeyValuesTransactional
        .inTransaction(topic, keyValues)
        .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
        .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueInfo.getSerde().serializer().getClass())
        .build();
    kafkaCluster.send(seedRequest);

    // Subscribe from the earliest offset so the seeded records are replayed.
    final SubscriptionFetcher<String, V> subscriptionFetcher =
        new SubscriptionFetcher<>(config, new Lazy<>(() -> topicData), "test-query", OffsetStrategy.EARLIEST, null);
    final Publisher<V> publisher =
        subscriptionFetcher.get(DataFetchingEnvironmentImpl.newDataFetchingEnvironment().build());

    final TestSubscriber<V> firstSubscriber = TestSubscriber.create();
    // ensures that multiple subscriber work
    final TestSubscriber<V> secondSubscriber = TestSubscriber.create();
    publisher.subscribe(firstSubscriber);
    publisher.subscribe(secondSubscriber);

    // Both subscribers must eventually observe the full expected sequence.
    await().atMost(Duration.ofSeconds(10)).untilAsserted(() -> {
        assertThat(firstSubscriber.values()).containsExactlyElementsOf(expected);
        assertThat(secondSubscriber.values()).containsExactlyElementsOf(expected);
    });
}
Usage example of com.bakdata.quick.common.config.KafkaConfig in the project quick by bakdata: class KafkaTopicServiceTest, method setUp.
@BeforeEach
void setUp() {
    // Fresh service per test, wired against the embedded cluster and schema registry.
    final KafkaConfig clusterConfig = new KafkaConfig(kafkaCluster.getBrokerList(), this.schemaRegistry.getUrl());
    this.topicService = new KafkaTopicService(
        this.topicRegistryClient,
        this.gatewayClient,
        this.graphQLToAvroConverter,
        this.mirrorService,
        this.gatewayService,
        TOPIC_CONFIG,
        clusterConfig);
}
Usage example of com.bakdata.quick.common.config.KafkaConfig in the project quick by bakdata: class TopicRegistryInitializerTest, method shouldNotFailIfTopicExists.
@Test
void shouldNotFailIfTopicExists() {
    // Pre-create the registry topic so startup encounters an already-existing topic.
    final String existingTopic = UUID.randomUUID().toString();
    kafkaCluster.createTopic(TopicConfig.forTopic(existingTopic).useDefaults());
    this.successfulMock();

    final KafkaConfig clusterConfig = new KafkaConfig(kafkaCluster.getBrokerList(), this.schemaRegistry.getUrl());
    final TopicRegistryConfig registryConfig = new TopicRegistryConfig(existingTopic, TEST_NAME, 3, (short) 1);
    final TopicRegistryInitializer initializer =
        new TopicRegistryInitializer(clusterConfig, registryConfig, this.mock);

    // Startup must tolerate the pre-existing topic instead of throwing.
    assertThatCode(() -> initializer.onStartUp(new StartupEvent(this.applicationContext)))
        .doesNotThrowAnyException();
}
Aggregations