Search in sources :

Example 1 with QueryTopicOffsetSummary

use of io.confluent.ksql.rest.entity.QueryTopicOffsetSummary in project ksql by confluentinc.

From the class ListSourceExecutor, the method queryOffsetSummaries:

/**
 * Builds a per-query consumer-offset summary for the given running write queries.
 *
 * <p>For each query, the committed consumer-group offsets are fetched using the
 * query's Kafka Streams application id as the group id, then combined with the
 * topics' partition metadata and log start/end offsets.
 *
 * @param ksqlConfig config used to derive each query's application id
 * @param serviceContext provides the consumer-group and topic clients
 * @param writeQueries the queries writing to the source being described
 * @return one {@link QueryOffsetSummary} per query, each with per-topic offsets
 */
private static List<QueryOffsetSummary> queryOffsetSummaries(final KsqlConfig ksqlConfig, final ServiceContext serviceContext, final List<RunningQuery> writeQueries) {
    // Committed consumer offsets keyed by the query's application id.
    final Map<String, Map<TopicPartition, OffsetAndMetadata>> committedByAppId = new HashMap<>(writeQueries.size());
    // Topics each query has committed offsets for, plus the union across all queries.
    final Map<String, Set<String>> topicsByAppId = new HashMap<>();
    final Set<String> everyTopic = new HashSet<>();
    for (final RunningQuery runningQuery : writeQueries) {
        final QueryId id = runningQuery.getId();
        final String appId = QueryApplicationId.build(ksqlConfig, true, id);
        final Map<TopicPartition, OffsetAndMetadata> committed =
            serviceContext.getConsumerGroupClient().listConsumerGroupOffsets(appId);
        committedByAppId.put(appId, committed);
        final Set<String> topicNames = new HashSet<>();
        for (final TopicPartition partition : committed.keySet()) {
            topicNames.add(partition.topic());
        }
        topicsByAppId.put(appId, topicNames);
        everyTopic.addAll(topicNames);
    }
    // Resolve partition metadata and log start/end offsets for all topics in bulk.
    final Map<String, TopicDescription> descriptions =
        serviceContext.getTopicClient().describeTopics(everyTopic);
    final Map<TopicPartition, Long> startOffsets =
        serviceContext.getTopicClient().listTopicsStartOffsets(everyTopic);
    final Map<TopicPartition, Long> endOffsets =
        serviceContext.getTopicClient().listTopicsEndOffsets(everyTopic);
    // Assemble one summary per query, each with a per-topic offsets breakdown.
    final List<QueryOffsetSummary> summaries = new ArrayList<>();
    topicsByAppId.forEach((appId, topicNames) -> {
        final List<QueryTopicOffsetSummary> perTopic = new ArrayList<>();
        for (final String topicName : topicNames) {
            perTopic.add(new QueryTopicOffsetSummary(
                topicName,
                consumerPartitionOffsets(
                    descriptions.get(topicName),
                    startOffsets,
                    endOffsets,
                    committedByAppId.get(appId))));
        }
        summaries.add(new QueryOffsetSummary(appId, perTopic));
    });
    return summaries;
}
Also used : ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) HashSet(java.util.HashSet) HashMap(java.util.HashMap) QueryId(io.confluent.ksql.query.QueryId) ArrayList(java.util.ArrayList) RunningQuery(io.confluent.ksql.rest.entity.RunningQuery) TopicPartition(org.apache.kafka.common.TopicPartition) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) QueryTopicOffsetSummary(io.confluent.ksql.rest.entity.QueryTopicOffsetSummary) Map(java.util.Map) HashMap(java.util.HashMap) HashSet(java.util.HashSet)

Example 2 with QueryTopicOffsetSummary

use of io.confluent.ksql.rest.entity.QueryTopicOffsetSummary in project ksql by confluentinc.

From the class ConsoleTest, the test method shouldPrintTopicDescribeExtended:

/**
 * Verifies the extended DESCRIBE output for a source, including the
 * consumer-group offsets section, against the approved golden file.
 */
@Test
public void shouldPrintTopicDescribeExtended() {
    // Given: one read query and one write query attached to the source.
    final List<RunningQuery> readQueries = ImmutableList.of(new RunningQuery(
        "read query",
        ImmutableSet.of("sink1"),
        ImmutableSet.of("sink1 topic"),
        new QueryId("readId"),
        queryStatusCount,
        KsqlConstants.KsqlQueryType.PERSISTENT));
    final List<RunningQuery> writeQueries = ImmutableList.of(new RunningQuery(
        "write query",
        ImmutableSet.of("sink2"),
        ImmutableSet.of("sink2 topic"),
        new QueryId("writeId"),
        queryStatusCount,
        KsqlConstants.KsqlQueryType.PERSISTENT));
    // consumer1 has committed offsets on two topics; consumer2 has none yet.
    final QueryOffsetSummary consumer1Summary = new QueryOffsetSummary(
        "consumer1",
        ImmutableList.of(
            new QueryTopicOffsetSummary(
                "kafka-topic",
                ImmutableList.of(
                    new ConsumerPartitionOffsets(0, 100, 900, 800),
                    new ConsumerPartitionOffsets(1, 50, 900, 900))),
            new QueryTopicOffsetSummary(
                "kafka-topic-2",
                ImmutableList.of(
                    new ConsumerPartitionOffsets(0, 0, 90, 80),
                    new ConsumerPartitionOffsets(1, 10, 90, 90)))));
    final QueryOffsetSummary consumer2Summary =
        new QueryOffsetSummary("consumer2", ImmutableList.of());
    final SourceDescription description = new SourceDescription(
        "TestSource",
        Optional.empty(),
        readQueries,
        writeQueries,
        buildTestSchema(SqlTypes.STRING),
        DataSourceType.KTABLE.getKsqlType(),
        "2000-01-01",
        "stats",
        "errors",
        true,
        "json",
        "avro",
        "kafka-topic",
        2,
        1,
        "sql statement text",
        ImmutableList.of(consumer1Summary, consumer2Summary),
        ImmutableList.of("S1", "S2"));
    final KsqlEntityList entityList = new KsqlEntityList(ImmutableList.of(
        new SourceDescriptionEntity("e", description, Collections.emptyList())));
    // When:
    console.printKsqlEntityList(entityList);
    // Then: rendered output matches the approved snapshot.
    final String output = terminal.getOutputString();
    Approvals.verify(output, approvalOptions);
}
Also used : KsqlEntityList(io.confluent.ksql.rest.entity.KsqlEntityList) RunningQuery(io.confluent.ksql.rest.entity.RunningQuery) ConsumerPartitionOffsets(io.confluent.ksql.rest.entity.ConsumerPartitionOffsets) QueryId(io.confluent.ksql.query.QueryId) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) SourceDescriptionEntity(io.confluent.ksql.rest.entity.SourceDescriptionEntity) QueryTopicOffsetSummary(io.confluent.ksql.rest.entity.QueryTopicOffsetSummary) Matchers.containsString(org.hamcrest.Matchers.containsString) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) Test(org.junit.Test)

Example 3 with QueryTopicOffsetSummary

use of io.confluent.ksql.rest.entity.QueryTopicOffsetSummary in project ksql by confluentinc.

From the class Console, the method printSourceDescription:

/**
 * Writes a human-readable description of a stream or table to the console.
 *
 * <p>For a non-extended description only the name and schema are printed. For an
 * extended description this additionally prints topic info, the creating statement,
 * source constraints, read/write queries, runtime statistics and — when present —
 * a per-consumer-group summary of committed offsets with a lag table per topic.
 *
 * @param source the source description entity returned by the server
 */
private void printSourceDescription(final SourceDescription source) {
    // Type string comes from the server; case-insensitive compare to be safe.
    final boolean isTable = source.getType().equalsIgnoreCase("TABLE");
    writer().println(String.format("%-20s : %s", "Name", source.getName()));
    if (!source.isExtended()) {
        // Non-extended DESCRIBE: schema only, plus a hint on how to get more detail.
        printSchema(source.getWindowType(), source.getFields(), isTable);
        writer().println("For runtime statistics and query details run: DESCRIBE <Stream,Table> EXTENDED;");
        return;
    }
    writer().println(String.format("%-20s : %s", "Type", source.getType()));
    printTopicInfo(source);
    writer().println(String.format("%-20s : %s", "Statement", source.getStatement()));
    writer().println("");
    printSchema(source.getWindowType(), source.getFields(), isTable);
    printSourceConstraints(source.getSourceConstraints());
    printQueries(source.getReadQueries(), source.getType(), "read");
    printQueries(source.getWriteQueries(), source.getType(), "write");
    printStatistics(source);
    writer().println(String.format("(%s)", "Statistics of the local KSQL server interaction with the Kafka topic " + source.getTopic()));
    // Offsets section is omitted entirely when no summaries were returned.
    if (!source.getQueryOffsetSummaries().isEmpty()) {
        writer().println();
        writer().println("Consumer Groups summary:");
        for (QueryOffsetSummary entry : source.getQueryOffsetSummaries()) {
            writer().println();
            writer().println(String.format("%-20s : %s", "Consumer Group", entry.getGroupId()));
            if (entry.getTopicSummaries().isEmpty()) {
                // Group exists but has not committed any offsets yet.
                writer().println("<no offsets committed by this group yet>");
            }
            for (QueryTopicOffsetSummary topicSummary : entry.getTopicSummaries()) {
                writer().println();
                writer().println(String.format("%-20s : %s", "Kafka topic", topicSummary.getKafkaTopic()));
                // Max lag across partitions = max(logEndOffset - consumerOffset); 0 when no partitions.
                writer().println(String.format("%-20s : %s", "Max lag", topicSummary.getOffsets().stream().mapToLong(s -> s.getLogEndOffset() - s.getConsumerOffset()).max().orElse(0)));
                writer().println("");
                // One row per partition: start/end of the log, committed offset, and per-partition lag.
                final Table taskTable = new Table.Builder().withColumnHeaders(ImmutableList.of("Partition", "Start Offset", "End Offset", "Offset", "Lag")).withRows(topicSummary.getOffsets().stream().map(offset -> ImmutableList.of(String.valueOf(offset.getPartition()), String.valueOf(offset.getLogStartOffset()), String.valueOf(offset.getLogEndOffset()), String.valueOf(offset.getConsumerOffset()), String.valueOf(offset.getLogEndOffset() - offset.getConsumerOffset())))).build();
                taskTable.print(this);
            }
        }
    }
}
Also used : Table(io.confluent.ksql.cli.console.table.Table) Builder(io.confluent.ksql.cli.console.table.Table.Builder) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) QueryTopicOffsetSummary(io.confluent.ksql.rest.entity.QueryTopicOffsetSummary)

Aggregations

QueryOffsetSummary (io.confluent.ksql.rest.entity.QueryOffsetSummary)3 QueryTopicOffsetSummary (io.confluent.ksql.rest.entity.QueryTopicOffsetSummary)3 QueryId (io.confluent.ksql.query.QueryId)2 RunningQuery (io.confluent.ksql.rest.entity.RunningQuery)2 ImmutableSet (com.google.common.collect.ImmutableSet)1 Table (io.confluent.ksql.cli.console.table.Table)1 Builder (io.confluent.ksql.cli.console.table.Table.Builder)1 ConsumerPartitionOffsets (io.confluent.ksql.rest.entity.ConsumerPartitionOffsets)1 KsqlEntityList (io.confluent.ksql.rest.entity.KsqlEntityList)1 SourceDescription (io.confluent.ksql.rest.entity.SourceDescription)1 SourceDescriptionEntity (io.confluent.ksql.rest.entity.SourceDescriptionEntity)1 ArrayList (java.util.ArrayList)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 Map (java.util.Map)1 Set (java.util.Set)1 TopicDescription (org.apache.kafka.clients.admin.TopicDescription)1 OffsetAndMetadata (org.apache.kafka.clients.consumer.OffsetAndMetadata)1 TopicPartition (org.apache.kafka.common.TopicPartition)1 Matchers.containsString (org.hamcrest.Matchers.containsString)1