
Example 1 with TopicsConfig

Use of io.kestra.runner.kafka.configs.TopicsConfig in project kestra by kestra-io.

From the class KafkaFlowListenersTest, method invalidFlows.

@Test
public void invalidFlows() throws JsonProcessingException, InterruptedException {
    TopicsConfig topicsConfig = KafkaQueue.topicsConfig(applicationContext, Flow.class);
    KafkaProducerService.Producer<String> producer = kafkaProducerService.of(KafkaFlowListenersTest.class, Serdes.String());
    // two deliberately invalid flow payloads: the first carries an unparseable "level" value
    // on an Echo task, the second references a task type that does not exist
    producer.send(new ProducerRecord<>(topicsConfig.getName(), "", JacksonMapper.ofJson().writeValueAsString(
        Map.of("id", "invalid", "namespace", "io.kestra.unittest", "revision", 1,
            "tasks", List.of(Map.of("id", "invalid", "type", "io.kestra.core.tasks.debugs.Echo", "level", "invalid"))))));
    producer.send(new ProducerRecord<>(topicsConfig.getName(), "", JacksonMapper.ofJson().writeValueAsString(
        Map.of("id", "invalid", "namespace", "io.kestra.unittest", "revision", 1,
            "tasks", List.of(Map.of("id", "invalid", "type", "io.kestra.core.tasks.debugs.Invalid", "level", "invalid"))))));
    String flowId = IdUtils.create();
    Flow flow = create(flowId, IdUtils.create());
    flowRepository.create(flow);
    CountDownLatch countDownLatch = new CountDownLatch(1);
    flowListenersService.listen(flows -> {
        if (flows.stream().anyMatch(f -> f.getId().equals(flowId))) {
            countDownLatch.countDown();
        }
    });
    countDownLatch.await(1, TimeUnit.MINUTES);
    assertThat(countDownLatch.getCount(), is(0L));
}
Also used : KafkaProducerService(io.kestra.runner.kafka.services.KafkaProducerService) TopicsConfig(io.kestra.runner.kafka.configs.TopicsConfig) CountDownLatch(java.util.concurrent.CountDownLatch) Flow(io.kestra.core.models.flows.Flow) Test(org.junit.jupiter.api.Test) FlowListenersTest(io.kestra.core.runners.FlowListenersTest)

Example 2 with TopicsConfig

Use of io.kestra.runner.kafka.configs.TopicsConfig in project kestra by kestra-io.

From the class KafkaElasticIndexerTest, method run.

@Test
void run() throws IOException, InterruptedException {
    String topic = this.topicsConfig
        .stream()
        .filter(indicesConfig -> indicesConfig.getCls() == Execution.class)
        .findFirst()
        .orElseThrow()
        .getName();
    CountDownLatch countDownLatch = new CountDownLatch(1);
    RestHighLevelClient elasticClientSpy = spy(elasticClient);
    doAnswer(invocation -> {
        countDownLatch.countDown();
        return invocation.callRealMethod();
    }).when(elasticClientSpy).bulk(any(), any());
    KafkaConsumerService kafkaConsumerServiceSpy = mock(KafkaConsumerService.class);
    MockConsumer<String, String> mockConsumer = mockConsumer(topic);
    doReturn(mockConsumer).when(kafkaConsumerServiceSpy).of(any(), any(), any());
    ConsumerRecord<String, String> first = buildExecutionRecord(topic, 0);
    mockConsumer.addRecord(first);
    mockConsumer.addRecord(buildExecutionRecord(topic, 1));
    mockConsumer.addRecord(buildExecutionRecord(topic, 2));
    mockConsumer.addRecord(buildExecutionRecord(topic, 3));
    mockConsumer.addRecord(buildExecutionRecord(topic, 4));
    // a final record that re-uses the first record's key but carries a null value (a tombstone)
    mockConsumer.addRecord(buildRecord(topic, first.key(), null, 5));
    KafkaElasticIndexer indexer = new KafkaElasticIndexer(metricRegistry, elasticClientSpy, indexerConfig, topicsConfig, indicesConfigs, elasticSearchIndicesService, kafkaConsumerServiceSpy, executorsUtils);
    Thread thread = new Thread(indexer);
    thread.start();
    countDownLatch.await();
    assertThat(countDownLatch.getCount(), is(0L));
}
Also used : ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) MockConsumer(org.apache.kafka.clients.consumer.MockConsumer) JacksonMapper(io.kestra.core.serializers.JacksonMapper) TestsUtils(io.kestra.core.utils.TestsUtils) HashMap(java.util.HashMap) OffsetResetStrategy(org.apache.kafka.clients.consumer.OffsetResetStrategy) TopicsConfig(io.kestra.runner.kafka.configs.TopicsConfig) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) RestHighLevelClient(org.opensearch.client.RestHighLevelClient) MicronautTest(io.micronaut.test.extensions.junit5.annotation.MicronautTest) TopicPartition(org.apache.kafka.common.TopicPartition) ImmutableMap(com.google.common.collect.ImmutableMap) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) Execution(io.kestra.core.models.executions.Execution) KafkaConsumerService(io.kestra.runner.kafka.services.KafkaConsumerService) ElasticSearchIndicesService(io.kestra.repository.elasticsearch.ElasticSearchIndicesService) IndicesConfig(io.kestra.repository.elasticsearch.configs.IndicesConfig) Test(org.junit.jupiter.api.Test) CountDownLatch(java.util.concurrent.CountDownLatch) Mockito(org.mockito.Mockito) List(java.util.List) ExecutorsUtils(io.kestra.core.utils.ExecutorsUtils) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Matchers.is(org.hamcrest.Matchers.is) Flow(io.kestra.core.models.flows.Flow) MetricRegistry(io.kestra.core.metrics.MetricRegistry) Inject(jakarta.inject.Inject) Collections(java.util.Collections)
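The mockConsumer(topic) helper used in this test is not shown on this page. Purely as a hypothetical sketch (not the project's actual helper), and consistent with the MockConsumer, OffsetResetStrategy, TopicPartition and Collections imports listed above, a single-partition mock consumer could be prepared like this before records are added to it:

MockConsumer<String, String> consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
TopicPartition partition = new TopicPartition(topic, 0);
// MockConsumer only accepts addRecord() for partitions it has been assigned and given beginning offsets for
consumer.assign(Collections.singletonList(partition));
consumer.updateBeginningOffsets(Collections.singletonMap(partition, 0L));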

Example 3 with TopicsConfig

Use of io.kestra.runner.kafka.configs.TopicsConfig in project kestra by kestra-io.

From the class KafkaDeserializationExceptionHandler, method handle.

@Override
public DeserializationHandlerResponse handle(ProcessorContext context, ConsumerRecord<byte[], byte[]> record, Exception exception) {
    String message = "Exception caught during deserialization, stream will continue! applicationId: {}, taskId: {}, topic: {}, partition: {}, offset: {}";
    Object[] args = { context.applicationId(), context.taskId(), record.topic(), record.partition(), record.offset(), exception };
    TopicsConfig topicsConfig = KafkaQueue.topicsConfigByTopicName(applicationContext, record.topic());
    // Flow and Template topics can legitimately contain records that fail deserialization
    // (see the invalidFlows test above), so those failures are only logged at debug level;
    // deserialization failures on any other topic are logged as warnings.
    if (topicsConfig.getCls() == Flow.class || topicsConfig.getCls() == Template.class) {
        if (log.isDebugEnabled()) {
            log.debug(message, args);
        }
    } else {
        log.warn(message, args);
    }
    return DeserializationHandlerResponse.CONTINUE;
}
Also used : TopicsConfig(io.kestra.runner.kafka.configs.TopicsConfig) Flow(io.kestra.core.models.flows.Flow) Template(io.kestra.core.models.templates.Template)
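Kafka Streams invokes a handler like this one through its standard default.deserialization.exception.handler setting. This page does not show how kestra registers the class, but as a minimal sketch using the plain Streams API (org.apache.kafka.streams.StreamsConfig and java.util.Properties), registration looks like:

Properties streamsProperties = new Properties();
// tell the Streams runtime to call handle() instead of failing the task on undeserializable records
streamsProperties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, KafkaDeserializationExceptionHandler.class);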

Example 4 with TopicsConfig

Use of io.kestra.runner.kafka.configs.TopicsConfig in project kestra by kestra-io.

From the class KafkaAdminService, method createIfNotExist.

@SuppressWarnings("deprecation")
public void createIfNotExist(TopicsConfig topicConfig) {
    NewTopic newTopic = new NewTopic(
        topicConfig.getName(),
        topicConfig.getPartitions() != null ? topicConfig.getPartitions() : topicDefaultsConfig.getPartitions(),
        topicConfig.getReplicationFactor() != null ? topicConfig.getReplicationFactor() : topicDefaultsConfig.getReplicationFactor()
    );
    // merge the default topic properties with the per-topic overrides (topic-specific values win)
    Map<String, String> properties = new HashMap<>();
    if (topicDefaultsConfig.getProperties() != null) {
        properties.putAll(topicDefaultsConfig.getProperties());
    }
    if (topicConfig.getProperties() != null) {
        properties.putAll(topicConfig.getProperties());
    }
    newTopic.configs(properties);
    try {
        this.of().createTopics(Collections.singletonList(newTopic)).all().get();
        log.info("Topic '{}' created", newTopic.name());
    } catch (ExecutionException | InterruptedException | TimeoutException e) {
        if (e.getCause() instanceof TopicExistsException) {
            try {
                adminClient.alterConfigs(new HashMap<>() {
                    {
                        put(
                            new ConfigResource(ConfigResource.Type.TOPIC, newTopic.name()),
                            new org.apache.kafka.clients.admin.Config(newTopic.configs().entrySet().stream()
                                .map(config -> new ConfigEntry(config.getKey(), config.getValue()))
                                .collect(Collectors.toList()))
                        );
                    }
                }).all().get();
                log.info("Topic Config '{}' updated", newTopic.name());
            } catch (ExecutionException | InterruptedException exception) {
                if (!(exception.getCause() instanceof TopicExistsException)) {
                    log.warn("Unable to update topic '{}'", newTopic.name(), exception);
                }
            }
        } else {
            throw new RuntimeException(e);
        }
    }
}
Also used : Tag(io.micrometer.core.instrument.Tag) CommonClientConfigs(org.apache.kafka.clients.CommonClientConfigs) KafkaClientMetrics(io.micrometer.core.instrument.binder.kafka.KafkaClientMetrics) TimeoutException(org.apache.kafka.common.errors.TimeoutException) java.util(java.util) TopicDefaultsConfig(io.kestra.runner.kafka.configs.TopicDefaultsConfig) NewTopic(org.apache.kafka.clients.admin.NewTopic) Singleton(jakarta.inject.Singleton) Value(io.micronaut.context.annotation.Value) ConfigEntry(org.apache.kafka.clients.admin.ConfigEntry) ClientConfig(io.kestra.runner.kafka.configs.ClientConfig) Collectors(java.util.stream.Collectors) AdminClient(org.apache.kafka.clients.admin.AdminClient) ExecutionException(java.util.concurrent.ExecutionException) ConfigResource(org.apache.kafka.common.config.ConfigResource) PreDestroy(javax.annotation.PreDestroy) Slf4j(lombok.extern.slf4j.Slf4j) TopicExistsException(org.apache.kafka.common.errors.TopicExistsException) TopicsConfig(io.kestra.runner.kafka.configs.TopicsConfig) MetricRegistry(io.kestra.core.metrics.MetricRegistry) Inject(jakarta.inject.Inject)
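Combined with the topic lookup shown in the other examples, a caller would typically resolve the TopicsConfig for a queue class and pass it to this method. A minimal sketch, assuming an injected KafkaAdminService instance and an ApplicationContext as used elsewhere on this page:

// resolve the topic configuration for the Execution queue and make sure its topic exists
TopicsConfig executionTopicConfig = KafkaQueue.topicsConfig(applicationContext, Execution.class);
kafkaAdminService.createIfNotExist(executionTopicConfig);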

Example 5 with TopicsConfig

Use of io.kestra.runner.kafka.configs.TopicsConfig in project kestra by kestra-io.

From the class KafkaQueueTest, method topicsConfig.

@Test
void topicsConfig() {
    TopicsConfig topicsConfig = KafkaQueue.topicsConfig(applicationContext, Execution.class);
    assertThat(topicsConfig.getCls(), is(Execution.class));
    TopicsConfig byName = KafkaQueue.topicsConfigByTopicName(applicationContext, topicsConfig.getName());
    assertThat(byName, is(topicsConfig));
    topicsConfig = KafkaQueue.topicsConfig(applicationContext, KafkaStreamSourceService.TOPIC_FLOWLAST);
    assertThat(topicsConfig.getKey(), is(KafkaStreamSourceService.TOPIC_FLOWLAST));
    byName = KafkaQueue.topicsConfigByTopicName(applicationContext, topicsConfig.getName());
    assertThat(byName, is(topicsConfig));
}
Also used : Execution(io.kestra.core.models.executions.Execution) TopicsConfig(io.kestra.runner.kafka.configs.TopicsConfig) Test(org.junit.jupiter.api.Test) MicronautTest(io.micronaut.test.extensions.junit5.annotation.MicronautTest)

Aggregations

TopicsConfig (io.kestra.runner.kafka.configs.TopicsConfig) 5
Flow (io.kestra.core.models.flows.Flow) 3
Test (org.junit.jupiter.api.Test) 3
MetricRegistry (io.kestra.core.metrics.MetricRegistry) 2
Execution (io.kestra.core.models.executions.Execution) 2
MicronautTest (io.micronaut.test.extensions.junit5.annotation.MicronautTest) 2
Inject (jakarta.inject.Inject) 2
CountDownLatch (java.util.concurrent.CountDownLatch) 2
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException) 1
ImmutableMap (com.google.common.collect.ImmutableMap) 1
Template (io.kestra.core.models.templates.Template) 1
FlowListenersTest (io.kestra.core.runners.FlowListenersTest) 1
JacksonMapper (io.kestra.core.serializers.JacksonMapper) 1
ExecutorsUtils (io.kestra.core.utils.ExecutorsUtils) 1
TestsUtils (io.kestra.core.utils.TestsUtils) 1
ElasticSearchIndicesService (io.kestra.repository.elasticsearch.ElasticSearchIndicesService) 1
IndicesConfig (io.kestra.repository.elasticsearch.configs.IndicesConfig) 1
ClientConfig (io.kestra.runner.kafka.configs.ClientConfig) 1
TopicDefaultsConfig (io.kestra.runner.kafka.configs.TopicDefaultsConfig) 1
KafkaConsumerService (io.kestra.runner.kafka.services.KafkaConsumerService) 1