Use of com.bakdata.quick.common.type.QuickTopicData in project quick by bakdata.

Class MutationFetcherTest, method shouldIngestDataWithDifferentKeyTypes:
@ParameterizedTest(name = "shouldIngestDataWithDifferentKeyTypes ({0})")
@MethodSource("provideValueArgumentsForKeys")
<K, T> void shouldIngestDataWithDifferentKeyTypes(final TestParameterBuilder<K, String, T> testParameter)
    throws Exception {
    final String topic = testParameter.getTopic();
    final QuickData<T> keyInfo = testParameter.getInfoType();
    final QuickTopicData<T, String> info =
        new QuickTopicData<>(topic, TopicWriteType.MUTABLE, keyInfo, TestTypeUtils.newStringData());
    final KafkaConfig kafkaConfig = new KafkaConfig(kafkaCluster.getBrokerList(), schemaRegistry.getUrl());
    kafkaCluster.createTopic(TopicConfig.withName(topic).useDefaults());

    final TopicTypeService typeService = topicTypeService(keyInfo.getType(), QuickTopicType.STRING, null);
    final KafkaIngestService kafkaIngestService = new KafkaIngestService(typeService, kafkaConfig);
    final DataFetcher<String> mutationFetcher =
        new MutationFetcher<>(topic, "id", "name", new Lazy<>(() -> info), kafkaIngestService);

    final KeyValue<K, String> keyValue = testParameter.getKeyValue();
    final DataFetchingEnvironment env = DataFetchingEnvironmentImpl.newDataFetchingEnvironment()
        .arguments(Map.of("id", keyValue.getKey(), "name", keyValue.getValue()))
        .build();

    final String actual = mutationFetcher.get(env);
    assertThat(actual).isEqualTo(keyValue.getValue());

    // Verify that the record was written to Kafka, deserializing the key with the type under test
    final Optional<KeyValue<String, String>> consumedRecords = kafkaCluster.read(
            ReadKeyValues.from(topic, String.class)
                .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyInfo.getSerde().deserializer().getClass())
                .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class))
        .stream()
        .findFirst();
    assertThat(consumedRecords)
        .isPresent()
        .get()
        .hasFieldOrPropertyWithValue("key", keyValue.getKey())
        .hasFieldOrPropertyWithValue("value", keyValue.getValue());
}
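The provideValueArgumentsForKeys source referenced above is not shown on this page. A minimal sketch of how such a JUnit 5 @MethodSource could be assembled; TestParameterBuilder.forKey, the topic names, and the sample pairs are hypothetical stand-ins, not the project's actual builder API:

// Hypothetical sketch: the real TestParameterBuilder factory in quick may differ.
static Stream<Arguments> provideValueArgumentsForKeys() {
    return Stream.of(
        // one test parameter per supported key type, e.g. integer and string keys
        Arguments.of(TestParameterBuilder.forKey("integer-key-topic",
            TestTypeUtils.newIntegerData(), new KeyValue<>(1, "value"))),
        Arguments.of(TestParameterBuilder.forKey("string-key-topic",
            TestTypeUtils.newStringData(), new KeyValue<>("key", "value"))));
}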
Class MutationFetcherTest, method shouldIngestDataWithDifferentValueTypes:
@ParameterizedTest(name = "shouldIngestDataWithDifferentValueTypes ({0})")
@MethodSource("provideValueArguments")
<V, T> void shouldIngestDataWithDifferentValueTypes(final TestParameterBuilder<String, V, T> testParameter)
    throws Exception {
    final String topic = testParameter.getTopic();
    final QuickData<T> valueInfo = testParameter.getInfoType();
    final QuickTopicData<String, T> info =
        new QuickTopicData<>(topic, TopicWriteType.MUTABLE, TestTypeUtils.newStringData(), valueInfo);
    final KafkaConfig kafkaConfig = new KafkaConfig(kafkaCluster.getBrokerList(), schemaRegistry.getUrl());
    kafkaCluster.createTopic(TopicConfig.withName(topic).useDefaults());

    final KafkaIngestService kafkaIngestService =
        new KafkaIngestService(topicTypeService(QuickTopicType.STRING, valueInfo.getType(), null), kafkaConfig);
    final DataFetcher<T> mutationFetcher =
        new MutationFetcher<>(topic, "id", "name", new Lazy<>(() -> info), kafkaIngestService);

    final KeyValue<String, V> keyValue = testParameter.getKeyValue();
    final DataFetchingEnvironment env = DataFetchingEnvironmentImpl.newDataFetchingEnvironment()
        .arguments(Map.of("id", keyValue.getKey(), "name", keyValue.getValue()))
        .build();

    final T actual = mutationFetcher.get(env);
    assertThat(actual).isEqualTo(keyValue.getValue());

    // Verify that the record was written to Kafka, deserializing the value with the type under test;
    // the Schema Registry URL is passed through for schema-based deserializers
    final Optional<KeyValue<String, T>> consumedRecords = kafkaCluster.read(
            ReadKeyValues.from(topic, testParameter.getClassType())
                .with("schema.registry.url", schemaRegistry.getUrl())
                .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueInfo.getSerde().deserializer().getClass()))
        .stream()
        .findFirst();
    assertThat(consumedRecords)
        .isPresent()
        .get()
        .hasFieldOrPropertyWithValue("key", keyValue.getKey())
        .hasFieldOrPropertyWithValue("value", keyValue.getValue());
}
Class IngestController, method convertIngestData:
/**
 * Processes raw input data for ingesting.
 *
 * <p>
 * Two steps are necessary to ingest the raw payload:
 * <ul>
 * <li> parse it
 * <li> check for existing keys if the topic is immutable
 * </ul>
 *
 * <p>
 * This method handles both steps and then forwards all {@link KeyValuePair}s whose keys do not yet exist to the
 * {@link IngestService}. It also merges the error messages: the returned completable contains errors for
 * already-existing keys as well as possible errors from the ingest service.
 *
 * @param topic the topic to ingest to
 * @param payload the raw payload
 * @param data the topic information for the topic
 * @param <K> the key type of the topic
 * @param <V> the value type of the topic
 * @return merged completable, possibly containing errors from existing keys or the ingest service
 */
private <K, V> Completable convertIngestData(final String topic, final String payload,
    final QuickTopicData<K, V> data) {
    // Step 1: parse the raw payload into typed key-value pairs
    final Single<List<KeyValuePair<K, V>>> list =
        Single.fromCallable(() -> this.parser.parseInputData(payload, data));
    // Step 2: check for existing keys, then ingest the remaining pairs
    return list.flatMap(pairs -> this.filter.prepareIngest(data, pairs))
        .flatMapCompletable(pairs -> {
            final Completable existingError = createErrorsForExistingKeys(topic, pairs);
            final Completable ingest = this.ingestService.sendData(topic, pairs.getDataToIngest());
            // Delay errors so both sources run to completion and their failures are reported together
            return Completable.mergeArrayDelayError(existingError, ingest);
        });
}
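The createErrorsForExistingKeys helper referenced above is not shown on this page. A minimal sketch of what it might look like; the result type name IngestLists and the getExistingData() accessor are assumptions, not the project's actual API:

// Hypothetical sketch: signature and accessor names in quick may differ.
private static <K, V> Completable createErrorsForExistingKeys(final String topic, final IngestLists<K, V> pairs) {
    final List<KeyValuePair<K, V>> existing = pairs.getExistingData(); // assumed accessor
    if (existing.isEmpty()) {
        return Completable.complete();
    }
    final String keys = existing.stream()
        .map(pair -> pair.getKey().toString())
        .collect(java.util.stream.Collectors.joining(", "));
    return Completable.error(new IllegalArgumentException(
        String.format("The following keys already exist for immutable topic %s: %s", topic, keys)));
}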
Class SubscriptionFetcherTest, method shouldFetchValues:
@ParameterizedTest(name = "shouldFetchValues ({0})")
@MethodSource("provideValueArguments")
<V> void shouldFetchValues(final String topic, final List<KeyValue<String, V>> keyValues,
    final QuickData<V> valueInfo, final List<V> expected) throws InterruptedException {
    final QuickTopicData<String, V> info =
        new QuickTopicData<>(topic, TopicWriteType.IMMUTABLE, TestTypeUtils.newStringData(), valueInfo);
    // placeholder Schema Registry URL
    final KafkaConfig kafkaConfig = new KafkaConfig(kafkaCluster.getBrokerList(), "http://no");
    kafkaCluster.createTopic(TopicConfig.withName(topic).useDefaults());

    final SendKeyValuesTransactional<String, V> sendRequest =
        SendKeyValuesTransactional.inTransaction(topic, keyValues)
            .with(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class)
            .with(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueInfo.getSerde().serializer().getClass())
            .build();
    kafkaCluster.send(sendRequest);

    final SubscriptionFetcher<String, V> subscriptionFetcher = new SubscriptionFetcher<>(
        kafkaConfig, new Lazy<>(() -> info), "test-query", OffsetStrategy.EARLIEST, null);
    final Publisher<V> publisher =
        subscriptionFetcher.get(DataFetchingEnvironmentImpl.newDataFetchingEnvironment().build());

    final TestSubscriber<V> testSubscriber = TestSubscriber.create();
    // A second subscriber ensures that multiple subscribers work on the same publisher
    final TestSubscriber<V> test2Subscriber = TestSubscriber.create();
    publisher.subscribe(testSubscriber);
    publisher.subscribe(test2Subscriber);

    await().atMost(Duration.ofSeconds(10)).untilAsserted(() -> {
        assertThat(testSubscriber.values()).containsExactlyElementsOf(expected);
        assertThat(test2Subscriber.values()).containsExactlyElementsOf(expected);
    });
}
Class MirrorTopologyTest, method createTopology:
static Topology createTopology(final Properties properties) {
    final String topic = INPUT_TOPICS.get(0);
    final QuickTopicData<Integer, Integer> data =
        new QuickTopicData<>(topic, TopicWriteType.MUTABLE, newIntegerData(), newIntegerData());
    // configure(configs, isKey): true marks the key serde, false the value serde
    data.getKeyData().getSerde().configure(Maps.fromProperties(properties), true);
    data.getValueData().getSerde().configure(Maps.fromProperties(properties), false);

    final QuickTopologyData<Integer, Integer> topologyInfo = QuickTopologyData.<Integer, Integer>builder()
        .inputTopics(INPUT_TOPICS)
        .topicData(data)
        .build();
    final MirrorTopology<Integer, Integer> mirrorTopology = MirrorTopology.<Integer, Integer>builder()
        .topologyData(topologyInfo)
        .storeName(STORE_NAME)
        .retentionStoreName(RETENTION_STORE_NAME)
        .storeType(StoreType.INMEMORY)
        .build();

    final StreamsBuilder builder = new StreamsBuilder();
    return mirrorTopology.createTopology(builder);
}
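A topology built this way can be exercised without a running broker using Kafka Streams' TopologyTestDriver. A minimal usage sketch; that the mirror topology materializes input records into the store named by STORE_NAME is an assumption based on the builder calls above:

// Sketch: drive the mirror topology in-process with TopologyTestDriver.
final Properties properties = new Properties();
properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "mirror-topology-test");
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
try (final TopologyTestDriver testDriver = new TopologyTestDriver(createTopology(properties), properties)) {
    final TestInputTopic<Integer, Integer> inputTopic = testDriver.createInputTopic(
        INPUT_TOPICS.get(0), new IntegerSerializer(), new IntegerSerializer());
    inputTopic.pipeInput(1, 42);
    // assumed: the record is materialized into the mirror's key-value store
    final KeyValueStore<Integer, Integer> store = testDriver.getKeyValueStore(STORE_NAME);
    assertThat(store.get(1)).isEqualTo(42);
}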