Usage of io.confluent.ksql.rest.entity.Queries in the ksql project (confluentinc):
class Console, method printAsTable.
/**
 * Renders a single {@link KsqlEntity} to the console as a table.
 *
 * <p>Each supported entity subtype maps to its own column layout; the populated
 * headers/rows are handed to {@code printTable} at the end. Error messages short-circuit
 * through {@code printErrorMessage} instead of producing a table.
 *
 * @param ksqlEntity the entity to render; must be one of the supported subtypes
 * @throws RuntimeException if the entity is of an unrecognized subtype
 */
private void printAsTable(KsqlEntity ksqlEntity) {
  List<String> header = new ArrayList<>();
  List<String> footer = new ArrayList<>();
  List<String> columnHeaders = new ArrayList<>();
  List<List<String>> rowValues = new ArrayList<>();
  if (ksqlEntity instanceof CommandStatusEntity) {
    CommandStatusEntity commandStatusEntity = (CommandStatusEntity) ksqlEntity;
    columnHeaders = Collections.singletonList("Message");
    CommandStatus commandStatus = commandStatusEntity.getCommandStatus();
    // Only the first line of a (possibly multi-line) status message is shown.
    rowValues = Collections.singletonList(
        Collections.singletonList(commandStatus.getMessage().split("\n", 2)[0]));
  } else if (ksqlEntity instanceof ErrorMessageEntity) {
    // Errors are not tabular; delegate and skip printTable entirely.
    ErrorMessage errorMessage = ((ErrorMessageEntity) ksqlEntity).getErrorMessage();
    printErrorMessage(errorMessage);
    return;
  } else if (ksqlEntity instanceof PropertiesList) {
    // Merge server-side properties with any local CLI overrides before display.
    PropertiesList propertiesList = CliUtils.propertiesListWithOverrides(
        (PropertiesList) ksqlEntity, restClient.getLocalProperties());
    Map<String, Object> properties = propertiesList.getProperties();
    columnHeaders = Arrays.asList("Property", "Value");
    rowValues = properties.entrySet().stream()
        .map(propertyEntry -> Arrays.asList(
            propertyEntry.getKey(), Objects.toString(propertyEntry.getValue())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof Queries) {
    List<Queries.RunningQuery> runningQueries = ((Queries) ksqlEntity).getQueries();
    columnHeaders = Arrays.asList("Query ID", "Kafka Topic", "Query String");
    rowValues = runningQueries.stream()
        .map(runningQuery -> Arrays.asList(
            runningQuery.getId().toString(),
            runningQuery.getKafkaTopic(),
            runningQuery.getQueryString()))
        .collect(Collectors.toList());
    footer.add("For detailed information on a Query run: EXPLAIN <Query ID>;");
  } else if (ksqlEntity instanceof SourceDescription) {
    SourceDescription sourceDescription = (SourceDescription) ksqlEntity;
    List<SourceDescription.FieldSchemaInfo> fields = sourceDescription.getSchema();
    // A source with no fields still prints its extended information below.
    if (!fields.isEmpty()) {
      columnHeaders = Arrays.asList("Field", "Type");
      rowValues = fields.stream()
          .map(field -> Arrays.asList(
              field.getName(), formatFieldType(field, sourceDescription.getKey())))
          .collect(Collectors.toList());
    }
    printExtendedInformation(header, footer, sourceDescription);
  } else if (ksqlEntity instanceof TopicDescription) {
    // Headers stay mutable here: AVRO topics get an extra schema column appended.
    columnHeaders = new ArrayList<>();
    columnHeaders.add("Topic Name");
    columnHeaders.add("Kafka Topic");
    columnHeaders.add("Type");
    List<String> topicInfo = new ArrayList<>();
    TopicDescription topicDescription = (TopicDescription) ksqlEntity;
    topicInfo.add(topicDescription.getName());
    topicInfo.add(topicDescription.getKafkaTopic());
    topicInfo.add(topicDescription.getFormat());
    if (topicDescription.getFormat().equalsIgnoreCase("AVRO")) {
      columnHeaders.add("AvroSchema");
      topicInfo.add(topicDescription.getSchemaString());
    }
    rowValues = Collections.singletonList(topicInfo);
  } else if (ksqlEntity instanceof StreamsList) {
    List<SourceInfo.Stream> streamInfos = ((StreamsList) ksqlEntity).getStreams();
    columnHeaders = Arrays.asList("Stream Name", "Kafka Topic", "Format");
    rowValues = streamInfos.stream()
        .map(streamInfo -> Arrays.asList(
            streamInfo.getName(), streamInfo.getTopic(), streamInfo.getFormat()))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof TablesList) {
    List<SourceInfo.Table> tableInfos = ((TablesList) ksqlEntity).getTables();
    columnHeaders = Arrays.asList("Table Name", "Kafka Topic", "Format", "Windowed");
    rowValues = tableInfos.stream()
        .map(tableInfo -> Arrays.asList(
            tableInfo.getName(),
            tableInfo.getTopic(),
            tableInfo.getFormat(),
            Boolean.toString(tableInfo.getIsWindowed())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof KsqlTopicsList) {
    List<KsqlTopicInfo> topicInfos = ((KsqlTopicsList) ksqlEntity).getTopics();
    columnHeaders = Arrays.asList("Ksql Topic", "Kafka Topic", "Format");
    rowValues = topicInfos.stream()
        .map(topicInfo -> Arrays.asList(
            topicInfo.getName(), topicInfo.getKafkaTopic(), topicInfo.getFormat().name()))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof KafkaTopicsList) {
    List<KafkaTopicInfo> topicInfos = ((KafkaTopicsList) ksqlEntity).getTopics();
    columnHeaders = Arrays.asList(
        "Kafka Topic", "Registered", "Partitions", "Partition Replicas",
        "Consumers", "Consumer Groups");
    rowValues = topicInfos.stream()
        .map(topicInfo -> Arrays.asList(
            topicInfo.getName(),
            Boolean.toString(topicInfo.getRegistered()),
            // getReplicaInfo() holds one entry per partition, hence size() = partition count.
            Integer.toString(topicInfo.getReplicaInfo().size()),
            getTopicReplicaInfo(topicInfo.getReplicaInfo()),
            Integer.toString(topicInfo.getConsumerCount()),
            Integer.toString(topicInfo.getConsumerGroupCount())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof ExecutionPlan) {
    ExecutionPlan executionPlan = (ExecutionPlan) ksqlEntity;
    columnHeaders = Collections.singletonList("Execution Plan");
    rowValues = Collections.singletonList(
        Collections.singletonList(executionPlan.getExecutionPlan()));
  } else {
    throw new RuntimeException(String.format(
        "Unexpected KsqlEntity class: '%s'", ksqlEntity.getClass().getCanonicalName()));
  }
  printTable(columnHeaders, rowValues, header, footer);
}
Usage of io.confluent.ksql.rest.entity.Queries in the ksql project (confluentinc):
class KsqlResource, method describe.
/**
 * Describes the stream or table registered under the given name.
 *
 * <p>Looks the source up in the engine's metastore, gathers every persistent query
 * whose output topic matches the source's topic, and packages the result.
 *
 * @param name     the STREAM/TABLE name to describe
 * @param extended whether extended information should be included
 * @return the assembled description
 * @throws KsqlException if no source with that name exists in the metastore
 */
private SourceDescription describe(String name, boolean extended) throws KsqlException {
  final StructuredDataSource dataSource = ksqlEngine.getMetaStore().getSource(name);
  if (dataSource == null) {
    throw new KsqlException(String.format("Could not find STREAM/TABLE '%s' in the Metastore", name));
  }
  final String sourceTopicName = dataSource.getKsqlTopic().getTopicName();
  final List<PersistentQueryMetadata> matchingQueries = new ArrayList<>();
  for (PersistentQueryMetadata queryMetadata : ksqlEngine.getPersistentQueries().values()) {
    final KsqlStructuredDataOutputNode outputNode =
        (KsqlStructuredDataOutputNode) queryMetadata.getOutputNode();
    if (outputNode.getKafkaTopicName().equals(sourceTopicName)) {
      matchingQueries.add(queryMetadata);
    }
  }
  return new SourceDescription(
      dataSource,
      extended,
      dataSource.getKsqlTopic().getKsqlTopicSerDe().getSerDe().name(),
      "",
      "",
      getReadQueryIds(matchingQueries),
      getWriteQueryIds(matchingQueries),
      ksqlEngine.getTopicClient());
}
Usage of io.confluent.ksql.rest.entity.Queries in the ksql project (confluentinc):
class ConsoleTest, method testPrintKSqlEntityList.
/**
 * Smoke test: builds one instance of every KsqlEntity subtype the console can render
 * and prints the list five times (varying the SourceDescription schema size via
 * {@code buildTestSchema(i)}), verifying that no entity type throws during rendering.
 */
@Test
public void testPrintKSqlEntityList() throws IOException {
  Map<String, Object> properties = new HashMap<>();
  properties.put("k1", 1);
  properties.put("k2", "v2");
  properties.put("k3", true);
  List<Queries.RunningQuery> queries = new ArrayList<>();
  queries.add(new Queries.RunningQuery("select * from t1", "TestTopic", new QueryId("0")));
  for (int i = 0; i < 5; i++) {
    // One entity per supported subtype; Collections.emptyList() avoids the raw-typed
    // (unchecked) Collections.EMPTY_LIST constant.
    KsqlEntityList entityList = new KsqlEntityList(Arrays.asList(
        new CommandStatusEntity("e", "topic/1/create", "SUCCESS", "Success Message"),
        new ErrorMessageEntity("e", new FakeException()),
        new PropertiesList("e", properties),
        new Queries("e", queries),
        new SourceDescription(
            "e", "TestSource", Collections.emptyList(), Collections.emptyList(),
            buildTestSchema(i), DataSource.DataSourceType.KTABLE.getKqlType(),
            "key", "2000-01-01", "stats", "errors", false, "avro", "kadka-topic",
            "topology", "executionPlan", 1, 1),
        new TopicDescription("e", "TestTopic", "TestKafkaTopic", "AVRO", "schemaString"),
        new StreamsList("e", Arrays.asList(
            new SourceInfo.Stream("TestStream", "TestTopic", "AVRO"))),
        new TablesList("e", Arrays.asList(
            new SourceInfo.Table("TestTable", "TestTopic", "JSON", false))),
        new KsqlTopicsList("e", Arrays.asList(
            new KsqlTopicInfo("TestTopic", "TestKafkaTopic", DataSource.DataSourceSerDe.JSON))),
        new KafkaTopicsList("e", Arrays.asList(
            new KafkaTopicInfo("TestKafkaTopic", true, ImmutableList.of(1), 1, 1))),
        new ExecutionPlan("Test Execution Plan")));
    terminal.printKsqlEntityList(entityList);
  }
}
Usage of io.confluent.ksql.rest.entity.Queries in the ksql project (confluentinc):
class KsqlResourceTest, method testShowQueries.
/**
 * Verifies that SHOW QUERIES against a fresh engine (no persistent queries started)
 * returns a Queries entity with an empty query list.
 *
 * <p>The previously declared {@code testKafkaTopic}/{@code testQueryStatement} locals
 * were never used by any assertion and have been removed as dead code.
 */
@Test
public void testShowQueries() throws Exception {
  KsqlResource testResource = TestKsqlResourceUtil.get(ksqlEngine, ksqlRestConfig);
  final String ksqlString = "SHOW QUERIES;";
  final ListQueries ksqlStatement = new ListQueries(Optional.empty());
  Queries queries = makeSingleRequest(
      testResource, ksqlString, ksqlStatement, Collections.emptyMap(), Queries.class);
  List<Queries.RunningQuery> testQueries = queries.getQueries();
  assertEquals(0, testQueries.size());
}
Usage of io.confluent.ksql.rest.entity.Queries in the ksql project (confluentinc):
class KsqlResource, method showQueries.
/**
 * Builds the SHOW QUERIES response from this server's persistent queries.
 *
 * <p>Only shows queries running on the current machine, not across the entire cluster.
 *
 * @param statementText the original statement text, echoed back in the response
 * @return a Queries entity listing each local persistent query
 */
private Queries showQueries(String statementText) {
  final List<Queries.RunningQuery> runningQueries =
      ksqlEngine.getPersistentQueries().values().stream()
          .map(queryMetadata -> new Queries.RunningQuery(
              queryMetadata.getStatementString(),
              ((KsqlStructuredDataOutputNode) queryMetadata.getOutputNode()).getKafkaTopicName(),
              queryMetadata.getId()))
          .collect(Collectors.toList());
  return new Queries(statementText, runningQueries);
}
Aggregations