Example 1 with Record

Use of org.akhq.models.Record in project akhq by tchiotludo.

From class TopicControllerTest, method dataDelete:

@Test
@Order(5)
void dataDelete() {
    Record retrieve = this.retrieve(
        HttpRequest.DELETE(CREATE_TOPIC_URL + "/data", ImmutableMap.of(
            "key", new String(Base64.getEncoder().encode("my-key".getBytes())),
            "partition", 1
        )),
        Record.class
    );
    assertEquals(1, retrieve.getOffset());

    // get data
    // @TODO: Failed to see the message
    // records = this.retrieveNextList(HttpRequest.GET(CREATE_TOPIC_URL + "/data"), Record.class);
    // assertEquals(2, records.getResults().size());
    // assertEquals("my-value", records.getResults().get(0).getValue());
    // assertNull(records.getResults().get(1).getValue());
}
Also used : Record(org.akhq.models.Record) AbstractTest(org.akhq.AbstractTest)
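
For reference, the Base64 key parameter the test sends can be reproduced standalone. A minimal sketch using only java.util.Base64 (the endpoint shape comes from the test above; everything else is standard JDK):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class KeyEncoding {
    public static void main(String[] args) {
        // "my-key" encodes to "bXkta2V5"; this value is passed as the "key"
        // query parameter of DELETE {CREATE_TOPIC_URL}/data, with "partition"
        String encoded = Base64.getEncoder()
            .encodeToString("my-key".getBytes(StandardCharsets.UTF_8));
        System.out.println(encoded);
    }
}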

Example 2 with Record

Use of org.akhq.models.Record in project akhq by tchiotludo.

From class RecordRepository, method search:

public Flowable<Event<SearchEvent>> search(Topic topic, Options options) throws ExecutionException, InterruptedException {
    AtomicInteger matchesCount = new AtomicInteger();
    Properties properties = new Properties();
    properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, options.getSize());
    return Flowable.generate(() -> {
        KafkaConsumer<byte[], byte[]> consumer = this.kafkaModule.getConsumer(options.clusterId, properties);
        Map<TopicPartition, Long> partitions = getTopicPartitionForSortOldest(topic, options, consumer);
        if (partitions.size() == 0) {
            return new SearchState(consumer, null);
        }
        consumer.assign(partitions.keySet());
        partitions.forEach(consumer::seek);
        partitions.forEach((topicPartition, first) -> log.trace("Search [topic: {}] [partition: {}] [start: {}]", topicPartition.topic(), topicPartition.partition(), first));
        return new SearchState(consumer, new SearchEvent(topic));
    }, (searchState, emitter) -> {
        SearchEvent searchEvent = searchState.getSearchEvent();
        KafkaConsumer<byte[], byte[]> consumer = searchState.getConsumer();
        // emptyPoll == 666 is used as a sentinel meaning "stop emitting":
        // either the topic was exhausted or enough matches were already sent
        if (searchEvent == null || searchEvent.emptyPoll == 666) {
            emitter.onNext(new SearchEvent(topic).end());
            emitter.onComplete();
            consumer.close();
            return new SearchState(consumer, searchEvent);
        }
        SearchEvent currentEvent = new SearchEvent(searchEvent);
        ConsumerRecords<byte[], byte[]> records = this.poll(consumer);
        if (records.isEmpty()) {
            currentEvent.emptyPoll++;
        } else {
            currentEvent.emptyPoll = 0;
        }
        List<Record> list = new ArrayList<>();
        for (ConsumerRecord<byte[], byte[]> record : records) {
            currentEvent.updateProgress(record);
            Record current = newRecord(record, options, topic);
            if (searchFilter(options, current)) {
                list.add(current);
                matchesCount.getAndIncrement();
                log.trace("Record [topic: {}] [partition: {}] [offset: {}] [key: {}]", record.topic(), record.partition(), record.offset(), record.key());
            }
        }
        currentEvent.records = list;
        if (currentEvent.emptyPoll >= 1) {
            // a single empty poll means every partition was read to its end
            currentEvent.emptyPoll = 666;
            emitter.onNext(currentEvent.end());
        } else if (matchesCount.get() >= options.getSize()) {
            // enough matches collected; arm the sentinel so the next pass completes
            currentEvent.emptyPoll = 666;
            emitter.onNext(currentEvent.progress(options));
        } else {
            emitter.onNext(currentEvent.progress(options));
        }
        return new SearchState(consumer, currentEvent);
    });
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TopicPartition(org.apache.kafka.common.TopicPartition) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Record(org.akhq.models.Record) org.apache.kafka.clients.consumer(org.apache.kafka.clients.consumer)
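
The search method is built on Flowable.generate: the first lambda creates the initial SearchState, the second is invoked once per downstream request, emits at most one SearchEvent, and returns the next state. A minimal, self-contained sketch of that pattern (RxJava 2's io.reactivex.Flowable; the names below are illustrative, not from akhq):

import io.reactivex.Flowable;

public class GenerateSketch {
    public static void main(String[] args) {
        Flowable<Integer> counter = Flowable.<Integer, Integer>generate(
            // initial state, playing the role of SearchState above
            () -> 0,
            // called once per request: emit at most one item, return next state
            (state, emitter) -> {
                if (state >= 3) {
                    // terminal condition, analogous to emptyPoll == 666
                    emitter.onComplete();
                } else {
                    emitter.onNext(state);
                }
                return state + 1;
            }
        );
        counter.subscribe(System.out::println); // prints 0, 1, 2
    }
}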

Example 3 with Record

Use of org.akhq.models.Record in project akhq by tchiotludo.

From class RecordRepository, method getLastRecord:

public Map<String, Record> getLastRecord(String clusterId, List<String> topicsName) throws ExecutionException, InterruptedException {
    Map<String, Topic> topics = topicRepository.findByName(clusterId, topicsName)
        .stream()
        .collect(Collectors.toMap(Topic::getName, Function.identity()));
    List<TopicPartition> topicPartitions = topics.values()
        .stream()
        .flatMap(topic -> topic.getPartitions().stream())
        .map(partition -> new TopicPartition(partition.getTopic(), partition.getId()))
        .collect(Collectors.toList());
    KafkaConsumer<byte[], byte[]> consumer = kafkaModule.getConsumer(clusterId, new Properties() {

        {
            put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, topicPartitions.size() * 3);
        }
    });
    consumer.assign(topicPartitions);
    consumer.endOffsets(consumer.assignment()).forEach((topicPartition, offset) -> {
        consumer.seek(topicPartition, Math.max(0, offset - 2));
    });
    ConcurrentHashMap<String, Record> records = new ConcurrentHashMap<>();
    this.poll(consumer).forEach(record -> {
        if (!records.containsKey(record.topic())) {
            records.put(record.topic(), newRecord(record, clusterId, topics.get(record.topic())));
        } else {
            Record current = records.get(record.topic());
            if (current.getTimestamp().toInstant().toEpochMilli() < record.timestamp()) {
                records.put(record.topic(), newRecord(record, clusterId, topics.get(record.topic())));
            }
        }
    });
    consumer.close();
    return records;
}
Also used : JsonProperty(com.fasterxml.jackson.annotation.JsonProperty) Environment(io.micronaut.context.env.Environment) org.apache.kafka.clients.consumer(org.apache.kafka.clients.consumer) java.util(java.util) TopicController(org.akhq.controllers.TopicController) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Record(org.akhq.models.Record) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Debug(org.akhq.utils.Debug) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) Function(java.util.function.Function) Event(io.micronaut.http.sse.Event) RecordsToDelete(org.apache.kafka.clients.admin.RecordsToDelete) Topic(org.akhq.models.Topic) AvroToJsonSerializer(org.akhq.utils.AvroToJsonSerializer) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) Flowable(io.reactivex.Flowable) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) KeyValue(org.akhq.models.KeyValue) KafkaModule(org.akhq.modules.KafkaModule) SchemaSerializer(org.akhq.modules.schemaregistry.SchemaSerializer) URIBuilder(org.codehaus.httpcache4j.uri.URIBuilder) Splitter(com.google.common.base.Splitter) RecordWithSchemaSerializerFactory(org.akhq.modules.schemaregistry.RecordWithSchemaSerializerFactory) TopicPartition(org.apache.kafka.common.TopicPartition) ImmutableMap(com.google.common.collect.ImmutableMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Singleton(jakarta.inject.Singleton) Value(io.micronaut.context.annotation.Value) KafkaFuture(org.apache.kafka.common.KafkaFuture) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) Collectors(java.util.stream.Collectors) lombok(lombok) Partition(org.akhq.models.Partition) ExecutionException(java.util.concurrent.ExecutionException) StringUtils(io.micronaut.core.util.StringUtils) Slf4j(lombok.extern.slf4j.Slf4j) Stream(java.util.stream.Stream) SchemaRegistryType(org.akhq.configs.SchemaRegistryType) Inject(jakarta.inject.Inject) DeletedRecords(org.apache.kafka.clients.admin.DeletedRecords) TopicPartition(org.apache.kafka.common.TopicPartition) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Record(org.akhq.models.Record) Topic(org.akhq.models.Topic) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap)
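
The core trick here is the endOffsets/seek combination: back up two offsets from the end of each partition, then poll once and keep the newest record per topic. A hedged, standalone restatement (pollTail is a hypothetical helper and the one-second poll timeout is an assumption; akhq's own poll logic is not shown in these excerpts):

import java.time.Duration;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

public class TailReader {
    // Hypothetical helper: seek every assigned partition to endOffset - 2
    // (clamped at 0, mirroring Math.max above) and poll once for the tail.
    static ConsumerRecords<byte[], byte[]> pollTail(KafkaConsumer<byte[], byte[]> consumer) {
        Map<TopicPartition, Long> ends = consumer.endOffsets(consumer.assignment());
        ends.forEach((tp, end) -> consumer.seek(tp, Math.max(0, end - 2)));
        return consumer.poll(Duration.ofMillis(1_000));
    }
}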

Example 4 with Record

Use of org.akhq.models.Record in project akhq by tchiotludo.

From class RecordRepository, method consumeOldest:

private List<Record> consumeOldest(Topic topic, Options options) {
    KafkaConsumer<byte[], byte[]> consumer = this.kafkaModule.getConsumer(options.clusterId);
    Map<TopicPartition, Long> partitions = getTopicPartitionForSortOldest(topic, options, consumer);
    List<Record> list = new ArrayList<>();
    if (partitions.size() > 0) {
        consumer.assign(partitions.keySet());
        partitions.forEach(consumer::seek);
        if (log.isTraceEnabled()) {
            partitions.forEach((topicPartition, first) -> log.trace("Consume [topic: {}] [partition: {}] [start: {}]", topicPartition.topic(), topicPartition.partition(), first));
        }
        ConsumerRecords<byte[], byte[]> records = this.poll(consumer);
        for (ConsumerRecord<byte[], byte[]> record : records) {
            Record current = newRecord(record, options, topic);
            if (searchFilter(options, current)) {
                list.add(current);
            }
        }
    }
    consumer.close();
    list.sort(Comparator.comparing(Record::getTimestamp));
    return list;
}
Also used : TopicPartition(org.apache.kafka.common.TopicPartition) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Record(org.akhq.models.Record) org.apache.kafka.clients.consumer(org.apache.kafka.clients.consumer)
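
getTopicPartitionForSortOldest is not shown in these excerpts. As an assumption for illustration only, its essence for an "oldest first" consume can be approximated with beginningOffsets; the real helper also applies paging from Options:

import java.util.Collection;
import java.util.Map;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

public class OldestOffsets {
    // Illustrative stand-in: start every partition at its earliest available
    // offset, then assign and seek exactly as consumeOldest does above.
    static void seekToOldest(KafkaConsumer<byte[], byte[]> consumer,
                             Collection<TopicPartition> partitions) {
        Map<TopicPartition, Long> starts = consumer.beginningOffsets(partitions);
        consumer.assign(starts.keySet());
        starts.forEach(consumer::seek);
    }
}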

Example 5 with Record

Use of org.akhq.models.Record in project akhq by tchiotludo.

From class RecordRepository, method newRecord:

private Record newRecord(ConsumerRecord<byte[], byte[]> record, String clusterId, Topic topic) {
    SchemaRegistryType schemaRegistryType = this.schemaRegistryRepository.getSchemaRegistryType(clusterId);
    SchemaRegistryClient client = this.kafkaModule.getRegistryClient(clusterId);
    return new Record(
        client,
        record,
        this.schemaRegistryRepository.getSchemaRegistryType(clusterId),
        this.schemaRegistryRepository.getKafkaAvroDeserializer(clusterId),
        schemaRegistryType == SchemaRegistryType.CONFLUENT ? this.schemaRegistryRepository.getKafkaJsonDeserializer(clusterId) : null,
        schemaRegistryType == SchemaRegistryType.CONFLUENT ? this.schemaRegistryRepository.getKafkaProtoDeserializer(clusterId) : null,
        this.avroToJsonSerializer,
        this.customDeserializerRepository.getProtobufToJsonDeserializer(clusterId),
        this.customDeserializerRepository.getAvroToJsonDeserializer(clusterId),
        avroWireFormatConverter.convertValueToWireFormat(record, client, this.schemaRegistryRepository.getSchemaRegistryType(clusterId)),
        topic
    );
}
Also used : ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Record(org.akhq.models.Record) SchemaRegistryType(org.akhq.configs.SchemaRegistryType) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)
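
The two ternaries encode a design choice: only the CONFLUENT registry flavor supplies JSON and Protobuf deserializers; every other flavor passes null and falls back to the Avro and custom deserializers. Restated as a standalone sketch (the enum value OTHER and the helper are toy stand-ins, not akhq's API):

import java.util.function.Supplier;

public class ConfluentOnly {
    // Toy stand-in for org.akhq.configs.SchemaRegistryType
    enum SchemaRegistryType { CONFLUENT, OTHER }

    // Supply a capability only for the CONFLUENT flavor, otherwise null;
    // this is exactly the shape of the two ternaries in newRecord above
    static <T> T confluentOnly(SchemaRegistryType type, Supplier<T> supplier) {
        return type == SchemaRegistryType.CONFLUENT ? supplier.get() : null;
    }
}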

Aggregations

Record (org.akhq.models.Record) 10
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord) 6
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) 4
Inject (jakarta.inject.Inject) 4
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 4
SchemaRegistryType (org.akhq.configs.SchemaRegistryType) 4
org.apache.kafka.clients.consumer (org.apache.kafka.clients.consumer) 4
TopicPartition (org.apache.kafka.common.TopicPartition) 4
URIBuilder (org.codehaus.httpcache4j.uri.URIBuilder) 4
Environment (io.micronaut.context.env.Environment) 3
java.util (java.util) 3
ExecutionException (java.util.concurrent.ExecutionException) 3
Collectors (java.util.stream.Collectors) 3
Stream (java.util.stream.Stream) 3
Slf4j (lombok.extern.slf4j.Slf4j) 3
AbstractTest (org.akhq.AbstractTest) 3
Topic (org.akhq.models.Topic) 3
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata) 3
JsonProperty (com.fasterxml.jackson.annotation.JsonProperty) 2
Splitter (com.google.common.base.Splitter) 2