Usage of io.confluent.ksql.rest.entity.QueryTopicOffsetSummary in the project ksql by confluentinc.
The method queryOffsetSummaries of the class ListSourceExecutor:
/**
 * Builds one {@link QueryOffsetSummary} per running write query.
 *
 * <p>For each query this resolves its Kafka Streams application id, fetches the
 * consumer group's committed offsets, then combines those with the source topics'
 * descriptions and log start/end offsets (fetched in bulk for all topics at once).
 *
 * @param ksqlConfig config used to derive each query's application id
 * @param serviceContext provides the consumer-group and topic clients
 * @param writeQueries the running queries writing into the source
 * @return a summary of committed consumer offsets per query application id
 */
private static List<QueryOffsetSummary> queryOffsetSummaries(
    final KsqlConfig ksqlConfig,
    final ServiceContext serviceContext,
    final List<RunningQuery> writeQueries
) {
  final Map<String, Map<TopicPartition, OffsetAndMetadata>> committedByAppId =
      new HashMap<>(writeQueries.size());
  final Map<String, Set<String>> topicsByAppId = new HashMap<>();
  final Set<String> allTopics = new HashSet<>();

  // Collect, per running query, its committed consumer offsets and the topics they cover.
  for (final RunningQuery runningQuery : writeQueries) {
    final QueryId queryId = runningQuery.getId();
    final String appId = QueryApplicationId.build(ksqlConfig, true, queryId);
    final Map<TopicPartition, OffsetAndMetadata> committed =
        serviceContext.getConsumerGroupClient().listConsumerGroupOffsets(appId);
    committedByAppId.put(appId, committed);
    final Set<String> queryTopics = committed.keySet().stream()
        .map(TopicPartition::topic)
        .collect(Collectors.toSet());
    topicsByAppId.put(appId, queryTopics);
    allTopics.addAll(queryTopics);
  }

  // Describe all involved topics and fetch their log start/end offsets in one pass.
  final Map<String, TopicDescription> sourceTopicDescriptions =
      serviceContext.getTopicClient().describeTopics(allTopics);
  final Map<TopicPartition, Long> startOffsets =
      serviceContext.getTopicClient().listTopicsStartOffsets(allTopics);
  final Map<TopicPartition, Long> endOffsets =
      serviceContext.getTopicClient().listTopicsEndOffsets(allTopics);

  // Assemble the per-application-id consumer offsets summary.
  final List<QueryOffsetSummary> offsetSummaries = new ArrayList<>(topicsByAppId.size());
  for (final Entry<String, Set<String>> appTopics : topicsByAppId.entrySet()) {
    final List<QueryTopicOffsetSummary> topicSummaries = new ArrayList<>();
    for (final String topic : appTopics.getValue()) {
      topicSummaries.add(new QueryTopicOffsetSummary(
          topic,
          consumerPartitionOffsets(
              sourceTopicDescriptions.get(topic),
              startOffsets,
              endOffsets,
              committedByAppId.get(appTopics.getKey()))));
    }
    offsetSummaries.add(new QueryOffsetSummary(appTopics.getKey(), topicSummaries));
  }
  return offsetSummaries;
}
Usage of io.confluent.ksql.rest.entity.QueryTopicOffsetSummary in the project ksql by confluentinc.
The method shouldPrintTopicDescribeExtended of the class ConsoleTest:
@Test
public void shouldPrintTopicDescribeExtended() {
  // Given: an extended source description carrying read/write queries and
  // per-consumer-group offset summaries, so every DESCRIBE EXTENDED section
  // appears in the rendered output.
  final RunningQuery readQuery = new RunningQuery(
      "read query",
      ImmutableSet.of("sink1"),
      ImmutableSet.of("sink1 topic"),
      new QueryId("readId"),
      queryStatusCount,
      KsqlConstants.KsqlQueryType.PERSISTENT);
  final RunningQuery writeQuery = new RunningQuery(
      "write query",
      ImmutableSet.of("sink2"),
      ImmutableSet.of("sink2 topic"),
      new QueryId("writeId"),
      queryStatusCount,
      KsqlConstants.KsqlQueryType.PERSISTENT);
  // consumer1 has committed offsets on two topics; consumer2 has none yet.
  final QueryOffsetSummary consumer1Summary = new QueryOffsetSummary(
      "consumer1",
      ImmutableList.of(
          new QueryTopicOffsetSummary(
              "kafka-topic",
              ImmutableList.of(
                  new ConsumerPartitionOffsets(0, 100, 900, 800),
                  new ConsumerPartitionOffsets(1, 50, 900, 900))),
          new QueryTopicOffsetSummary(
              "kafka-topic-2",
              ImmutableList.of(
                  new ConsumerPartitionOffsets(0, 0, 90, 80),
                  new ConsumerPartitionOffsets(1, 10, 90, 90)))));
  final QueryOffsetSummary consumer2Summary =
      new QueryOffsetSummary("consumer2", ImmutableList.of());
  final SourceDescription sourceDescription = new SourceDescription(
      "TestSource",
      Optional.empty(),
      ImmutableList.of(readQuery),
      ImmutableList.of(writeQuery),
      buildTestSchema(SqlTypes.STRING),
      DataSourceType.KTABLE.getKsqlType(),
      "2000-01-01",
      "stats",
      "errors",
      true,
      "json",
      "avro",
      "kafka-topic",
      2,
      1,
      "sql statement text",
      ImmutableList.of(consumer1Summary, consumer2Summary),
      ImmutableList.of("S1", "S2"));
  final KsqlEntityList entityList = new KsqlEntityList(ImmutableList.of(
      new SourceDescriptionEntity("e", sourceDescription, Collections.emptyList())));

  // When:
  console.printKsqlEntityList(entityList);

  // Then: the rendered text matches the approved snapshot.
  final String output = terminal.getOutputString();
  Approvals.verify(output, approvalOptions);
}
Usage of io.confluent.ksql.rest.entity.QueryTopicOffsetSummary in the project ksql by confluentinc.
The method printSourceDescription of the class Console:
/**
 * Renders a source's DESCRIBE output to the console writer.
 *
 * <p>The non-extended form prints only the name and schema plus a hint about
 * DESCRIBE EXTENDED. The extended form additionally prints the type, topic
 * info, statement, source constraints, read/write queries, statistics, and —
 * when present — a per-consumer-group summary of committed topic offsets.
 */
private void printSourceDescription(final SourceDescription source) {
  final boolean isTable = source.getType().equalsIgnoreCase("TABLE");

  writer().println(String.format("%-20s : %s", "Name", source.getName()));

  // Basic DESCRIBE: schema only, then bail out early.
  if (!source.isExtended()) {
    printSchema(source.getWindowType(), source.getFields(), isTable);
    writer().println("For runtime statistics and query details run: DESCRIBE <Stream,Table> EXTENDED;");
    return;
  }

  writer().println(String.format("%-20s : %s", "Type", source.getType()));
  printTopicInfo(source);
  writer().println(String.format("%-20s : %s", "Statement", source.getStatement()));
  writer().println("");
  printSchema(source.getWindowType(), source.getFields(), isTable);
  printSourceConstraints(source.getSourceConstraints());
  printQueries(source.getReadQueries(), source.getType(), "read");
  printQueries(source.getWriteQueries(), source.getType(), "write");
  printStatistics(source);
  writer().println(String.format("(%s)",
      "Statistics of the local KSQL server interaction with the Kafka topic " + source.getTopic()));

  // Consumer-group offsets section — skipped entirely when no summaries exist.
  if (source.getQueryOffsetSummaries().isEmpty()) {
    return;
  }
  writer().println();
  writer().println("Consumer Groups summary:");
  for (final QueryOffsetSummary groupSummary : source.getQueryOffsetSummaries()) {
    writer().println();
    writer().println(String.format("%-20s : %s", "Consumer Group", groupSummary.getGroupId()));
    if (groupSummary.getTopicSummaries().isEmpty()) {
      writer().println("<no offsets committed by this group yet>");
    }
    for (final QueryTopicOffsetSummary topicSummary : groupSummary.getTopicSummaries()) {
      writer().println();
      writer().println(String.format("%-20s : %s", "Kafka topic", topicSummary.getKafkaTopic()));
      // Max lag across partitions: max(logEndOffset - consumerOffset), 0 when no partitions.
      final long maxLag = topicSummary.getOffsets().stream()
          .mapToLong(offsets -> offsets.getLogEndOffset() - offsets.getConsumerOffset())
          .max()
          .orElse(0);
      writer().println(String.format("%-20s : %s", "Max lag", maxLag));
      writer().println("");
      final Table offsetsTable = new Table.Builder()
          .withColumnHeaders(ImmutableList.of("Partition", "Start Offset", "End Offset", "Offset", "Lag"))
          .withRows(topicSummary.getOffsets().stream()
              .map(offsets -> ImmutableList.of(
                  String.valueOf(offsets.getPartition()),
                  String.valueOf(offsets.getLogStartOffset()),
                  String.valueOf(offsets.getLogEndOffset()),
                  String.valueOf(offsets.getConsumerOffset()),
                  String.valueOf(offsets.getLogEndOffset() - offsets.getConsumerOffset()))))
          .build();
      offsetsTable.print(this);
    }
  }
}
Aggregations