Search in sources :

Example 1 with QueryOffsetSummary

use of io.confluent.ksql.rest.entity.QueryOffsetSummary in project ksql by confluentinc.

From the class ListSourceExecutor, method describeSource.

/**
 * Builds a description of a single stream/table, together with any warnings produced
 * while gathering Kafka metadata.
 *
 * @param extended when {@code true}, also gathers per-query consumer offset summaries and
 *        merges cluster statistics/errors from the remote source descriptions.
 * @throws KsqlStatementException if {@code name} is not present in the metastore.
 */
private static SourceDescriptionWithWarnings describeSource(final KsqlConfig ksqlConfig, final KsqlExecutionContext ksqlExecutionContext, final ServiceContext serviceContext, final SourceName name, final boolean extended, final ConfiguredStatement<? extends StatementWithExtendedClause> statement, final SessionProperties sessionProperties, final Collection<SourceDescription> remoteSourceDescriptions) {
    final DataSource dataSource = ksqlExecutionContext.getMetaStore().getSource(name);
    if (dataSource == null) {
        throw new KsqlStatementException(String.format("Could not find STREAM/TABLE '%s' in the Metastore", name.text()), statement.getStatementText());
    }
    // Queries that consume from, and queries that write into, this source.
    final List<RunningQuery> readQueries = getQueries(ksqlExecutionContext, q -> q.getSourceNames().contains(dataSource.getName()));
    final List<RunningQuery> writeQueries = getQueries(ksqlExecutionContext, q -> q.getSinkName().equals(Optional.of(dataSource.getName())));
    Optional<TopicDescription> topicDescription = Optional.empty();
    List<String> sourceConstraints = Collections.emptyList();
    final List<KsqlWarning> warnings = new LinkedList<>();
    try {
        topicDescription = Optional.of(serviceContext.getTopicClient().describeTopic(dataSource.getKafkaTopicName()));
        sourceConstraints = getSourceConstraints(name, ksqlExecutionContext.getMetaStore());
    } catch (final KafkaException | KafkaResponseGetFailedException e) {
        // Kafka metadata is best-effort: surface the failure as a warning rather than
        // failing the whole DESCRIBE request.
        warnings.add(new KsqlWarning("Error from Kafka: " + e.getMessage()));
    }
    // Offset summaries and remote cluster statistics are only gathered for
    // DESCRIBE ... EXTENDED; the factory call itself is shared by both modes.
    final List<QueryOffsetSummary> queryOffsetSummaries = extended
        ? queryOffsetSummaries(ksqlConfig, serviceContext, writeQueries)
        : Collections.emptyList();
    return new SourceDescriptionWithWarnings(warnings, SourceDescriptionFactory.create(
        dataSource,
        extended,
        readQueries,
        writeQueries,
        topicDescription,
        queryOffsetSummaries,
        sourceConstraints,
        extended ? remoteSourceDescriptions.stream().flatMap(sd -> sd.getClusterStatistics().stream()) : java.util.stream.Stream.empty(),
        extended ? remoteSourceDescriptions.stream().flatMap(sd -> sd.getClusterErrorStats().stream()) : java.util.stream.Stream.empty(),
        sessionProperties.getKsqlHostInfo(),
        ksqlExecutionContext.metricCollectors()));
}
Also used : KsqlWarning(io.confluent.ksql.rest.entity.KsqlWarning) LinkedList(java.util.LinkedList) DataSource(io.confluent.ksql.metastore.model.DataSource) RunningQuery(io.confluent.ksql.rest.entity.RunningQuery) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) KsqlStatementException(io.confluent.ksql.util.KsqlStatementException) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) KafkaException(org.apache.kafka.common.KafkaException)

Example 2 with QueryOffsetSummary

use of io.confluent.ksql.rest.entity.QueryOffsetSummary in project ksql by confluentinc.

From the class ListSourceExecutor, method queryOffsetSummaries.

/**
 * Collects, for every persistent query writing to the source, the consumer-group
 * offsets committed against each topic it reads, paired with the topics' current
 * start/end offsets.
 */
private static List<QueryOffsetSummary> queryOffsetSummaries(final KsqlConfig ksqlConfig, final ServiceContext serviceContext, final List<RunningQuery> writeQueries) {
    // Committed consumer offsets, keyed by the query's consumer-group (application) id.
    final Map<String, Map<TopicPartition, OffsetAndMetadata>> committedByGroup = new HashMap<>(writeQueries.size());
    // Topics each consumer group has committed offsets for.
    final Map<String, Set<String>> topicsByGroup = new HashMap<>();
    final Set<String> distinctTopics = new HashSet<>();
    // Get topics and offsets per running query
    for (final RunningQuery writeQuery : writeQueries) {
        final String groupId = QueryApplicationId.build(ksqlConfig, true, writeQuery.getId());
        final Map<TopicPartition, OffsetAndMetadata> committed = serviceContext.getConsumerGroupClient().listConsumerGroupOffsets(groupId);
        committedByGroup.put(groupId, committed);
        final Set<String> groupTopics = new HashSet<>();
        for (final TopicPartition partition : committed.keySet()) {
            groupTopics.add(partition.topic());
        }
        topicsByGroup.put(groupId, groupTopics);
        distinctTopics.addAll(groupTopics);
    }
    // Get topics descriptions and start/end offsets
    final Map<String, TopicDescription> topicDescriptions = serviceContext.getTopicClient().describeTopics(distinctTopics);
    final Map<TopicPartition, Long> startOffsets = serviceContext.getTopicClient().listTopicsStartOffsets(distinctTopics);
    final Map<TopicPartition, Long> endOffsets = serviceContext.getTopicClient().listTopicsEndOffsets(distinctTopics);
    // Build consumer offsets summary
    final List<QueryOffsetSummary> summaries = new ArrayList<>();
    topicsByGroup.forEach((groupId, groupTopics) -> {
        final List<QueryTopicOffsetSummary> topicSummaries = new ArrayList<>();
        for (final String topic : groupTopics) {
            topicSummaries.add(new QueryTopicOffsetSummary(topic, consumerPartitionOffsets(topicDescriptions.get(topic), startOffsets, endOffsets, committedByGroup.get(groupId))));
        }
        summaries.add(new QueryOffsetSummary(groupId, topicSummaries));
    });
    return summaries;
}
Also used : ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) HashSet(java.util.HashSet) HashMap(java.util.HashMap) QueryId(io.confluent.ksql.query.QueryId) ArrayList(java.util.ArrayList) RunningQuery(io.confluent.ksql.rest.entity.RunningQuery) TopicPartition(org.apache.kafka.common.TopicPartition) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) QueryTopicOffsetSummary(io.confluent.ksql.rest.entity.QueryTopicOffsetSummary) Map(java.util.Map) HashMap(java.util.HashMap) HashSet(java.util.HashSet)

Example 3 with QueryOffsetSummary

use of io.confluent.ksql.rest.entity.QueryOffsetSummary in project ksql by confluentinc.

From the class ConsoleTest, method shouldPrintTopicDescribeExtended.

@Test
public void shouldPrintTopicDescribeExtended() {
    // Given: one reading and one writing query against the source
    final List<RunningQuery> readQueries = ImmutableList.of(new RunningQuery("read query", ImmutableSet.of("sink1"), ImmutableSet.of("sink1 topic"), new QueryId("readId"), queryStatusCount, KsqlConstants.KsqlQueryType.PERSISTENT));
    final List<RunningQuery> writeQueries = ImmutableList.of(new RunningQuery("write query", ImmutableSet.of("sink2"), ImmutableSet.of("sink2 topic"), new QueryId("writeId"), queryStatusCount, KsqlConstants.KsqlQueryType.PERSISTENT));
    // Offset summaries: one consumer group with two topics, one with no commits yet
    final QueryOffsetSummary consumer1Summary = new QueryOffsetSummary(
        "consumer1",
        ImmutableList.of(
            new QueryTopicOffsetSummary(
                "kafka-topic",
                ImmutableList.of(
                    new ConsumerPartitionOffsets(0, 100, 900, 800),
                    new ConsumerPartitionOffsets(1, 50, 900, 900))),
            new QueryTopicOffsetSummary(
                "kafka-topic-2",
                ImmutableList.of(
                    new ConsumerPartitionOffsets(0, 0, 90, 80),
                    new ConsumerPartitionOffsets(1, 10, 90, 90)))));
    final QueryOffsetSummary consumer2Summary = new QueryOffsetSummary("consumer2", ImmutableList.of());
    final SourceDescription description = new SourceDescription(
        "TestSource",
        Optional.empty(),
        readQueries,
        writeQueries,
        buildTestSchema(SqlTypes.STRING),
        DataSourceType.KTABLE.getKsqlType(),
        "2000-01-01",
        "stats",
        "errors",
        true,
        "json",
        "avro",
        "kafka-topic",
        2,
        1,
        "sql statement text",
        ImmutableList.of(consumer1Summary, consumer2Summary),
        ImmutableList.of("S1", "S2"));
    final KsqlEntityList entityList = new KsqlEntityList(ImmutableList.of(new SourceDescriptionEntity("e", description, Collections.emptyList())));
    // When:
    console.printKsqlEntityList(entityList);
    // Then:
    final String output = terminal.getOutputString();
    Approvals.verify(output, approvalOptions);
}
Also used : KsqlEntityList(io.confluent.ksql.rest.entity.KsqlEntityList) RunningQuery(io.confluent.ksql.rest.entity.RunningQuery) ConsumerPartitionOffsets(io.confluent.ksql.rest.entity.ConsumerPartitionOffsets) QueryId(io.confluent.ksql.query.QueryId) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) SourceDescriptionEntity(io.confluent.ksql.rest.entity.SourceDescriptionEntity) QueryTopicOffsetSummary(io.confluent.ksql.rest.entity.QueryTopicOffsetSummary) Matchers.containsString(org.hamcrest.Matchers.containsString) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) Test(org.junit.Test)

Example 4 with QueryOffsetSummary

use of io.confluent.ksql.rest.entity.QueryOffsetSummary in project ksql by confluentinc.

From the class Console, method printSourceDescription.

/**
 * Prints a stream/table description to the console. For a non-extended
 * description only the schema is shown; an extended description additionally
 * prints topic info, queries, statistics, and per-consumer-group offset lag.
 */
private void printSourceDescription(final SourceDescription source) {
    final boolean isTable = source.getType().equalsIgnoreCase("TABLE");
    writer().println(String.format("%-20s : %s", "Name", source.getName()));
    // Non-extended DESCRIBE: schema only, plus a hint at how to get more detail.
    if (!source.isExtended()) {
        printSchema(source.getWindowType(), source.getFields(), isTable);
        writer().println("For runtime statistics and query details run: DESCRIBE <Stream,Table> EXTENDED;");
        return;
    }
    writer().println(String.format("%-20s : %s", "Type", source.getType()));
    printTopicInfo(source);
    writer().println(String.format("%-20s : %s", "Statement", source.getStatement()));
    writer().println("");
    printSchema(source.getWindowType(), source.getFields(), isTable);
    printSourceConstraints(source.getSourceConstraints());
    printQueries(source.getReadQueries(), source.getType(), "read");
    printQueries(source.getWriteQueries(), source.getType(), "write");
    printStatistics(source);
    writer().println(String.format("(%s)", "Statistics of the local KSQL server interaction with the Kafka topic " + source.getTopic()));
    if (source.getQueryOffsetSummaries().isEmpty()) {
        return;
    }
    writer().println();
    writer().println("Consumer Groups summary:");
    for (final QueryOffsetSummary groupSummary : source.getQueryOffsetSummaries()) {
        writer().println();
        writer().println(String.format("%-20s : %s", "Consumer Group", groupSummary.getGroupId()));
        if (groupSummary.getTopicSummaries().isEmpty()) {
            writer().println("<no offsets committed by this group yet>");
        }
        for (final QueryTopicOffsetSummary topicSummary : groupSummary.getTopicSummaries()) {
            // Lag per partition is end-offset minus committed offset; report the worst one.
            final long maxLag = topicSummary.getOffsets().stream()
                .mapToLong(o -> o.getLogEndOffset() - o.getConsumerOffset())
                .max()
                .orElse(0);
            writer().println();
            writer().println(String.format("%-20s : %s", "Kafka topic", topicSummary.getKafkaTopic()));
            writer().println(String.format("%-20s : %s", "Max lag", maxLag));
            writer().println("");
            final Table lagTable = new Table.Builder()
                .withColumnHeaders(ImmutableList.of("Partition", "Start Offset", "End Offset", "Offset", "Lag"))
                .withRows(topicSummary.getOffsets().stream()
                    .map(o -> ImmutableList.of(
                        String.valueOf(o.getPartition()),
                        String.valueOf(o.getLogStartOffset()),
                        String.valueOf(o.getLogEndOffset()),
                        String.valueOf(o.getConsumerOffset()),
                        String.valueOf(o.getLogEndOffset() - o.getConsumerOffset()))))
                .build();
            lagTable.print(this);
        }
    }
}
Also used : Table(io.confluent.ksql.cli.console.table.Table) Builder(io.confluent.ksql.cli.console.table.Table.Builder) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) QueryTopicOffsetSummary(io.confluent.ksql.rest.entity.QueryTopicOffsetSummary)

Aggregations

QueryOffsetSummary (io.confluent.ksql.rest.entity.QueryOffsetSummary)4 QueryTopicOffsetSummary (io.confluent.ksql.rest.entity.QueryTopicOffsetSummary)3 RunningQuery (io.confluent.ksql.rest.entity.RunningQuery)3 QueryId (io.confluent.ksql.query.QueryId)2 TopicDescription (org.apache.kafka.clients.admin.TopicDescription)2 ImmutableSet (com.google.common.collect.ImmutableSet)1 Table (io.confluent.ksql.cli.console.table.Table)1 Builder (io.confluent.ksql.cli.console.table.Table.Builder)1 KafkaResponseGetFailedException (io.confluent.ksql.exception.KafkaResponseGetFailedException)1 DataSource (io.confluent.ksql.metastore.model.DataSource)1 ConsumerPartitionOffsets (io.confluent.ksql.rest.entity.ConsumerPartitionOffsets)1 KsqlEntityList (io.confluent.ksql.rest.entity.KsqlEntityList)1 KsqlWarning (io.confluent.ksql.rest.entity.KsqlWarning)1 SourceDescription (io.confluent.ksql.rest.entity.SourceDescription)1 SourceDescriptionEntity (io.confluent.ksql.rest.entity.SourceDescriptionEntity)1 KsqlStatementException (io.confluent.ksql.util.KsqlStatementException)1 ArrayList (java.util.ArrayList)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 LinkedList (java.util.LinkedList)1