
Example 1 with SourceDescription

Use of io.confluent.ksql.rest.entity.SourceDescription in project ksql by confluentinc.

From class Console, method printAsTable:

private void printAsTable(KsqlEntity ksqlEntity) {
    List<String> header = new ArrayList<>();
    List<String> footer = new ArrayList<>();
    List<String> columnHeaders = new ArrayList<>();
    List<List<String>> rowValues = new ArrayList<>();
    if (ksqlEntity instanceof CommandStatusEntity) {
        CommandStatusEntity commandStatusEntity = (CommandStatusEntity) ksqlEntity;
        columnHeaders = Arrays.asList("Message");
        CommandStatus commandStatus = commandStatusEntity.getCommandStatus();
        rowValues = Collections.singletonList(Arrays.asList(commandStatus.getMessage().split("\n", 2)[0]));
    } else if (ksqlEntity instanceof ErrorMessageEntity) {
        ErrorMessage errorMessage = ((ErrorMessageEntity) ksqlEntity).getErrorMessage();
        printErrorMessage(errorMessage);
        return;
    } else if (ksqlEntity instanceof PropertiesList) {
        PropertiesList propertiesList = CliUtils.propertiesListWithOverrides((PropertiesList) ksqlEntity, restClient.getLocalProperties());
        Map<String, Object> properties = propertiesList.getProperties();
        columnHeaders = Arrays.asList("Property", "Value");
        rowValues = properties.entrySet().stream().map(propertyEntry -> Arrays.asList(propertyEntry.getKey(), Objects.toString(propertyEntry.getValue()))).collect(Collectors.toList());
    } else if (ksqlEntity instanceof Queries) {
        List<Queries.RunningQuery> runningQueries = ((Queries) ksqlEntity).getQueries();
        columnHeaders = Arrays.asList("Query ID", "Kafka Topic", "Query String");
        rowValues = runningQueries.stream().map(runningQuery -> Arrays.asList(runningQuery.getId().toString(), runningQuery.getKafkaTopic(), runningQuery.getQueryString())).collect(Collectors.toList());
        footer.add("For detailed information on a Query run: EXPLAIN <Query ID>;");
    } else if (ksqlEntity instanceof SourceDescription) {
        SourceDescription sourceDescription = (SourceDescription) ksqlEntity;
        List<SourceDescription.FieldSchemaInfo> fields = sourceDescription.getSchema();
        if (!fields.isEmpty()) {
            columnHeaders = Arrays.asList("Field", "Type");
            rowValues = fields.stream().map(field -> Arrays.asList(field.getName(), formatFieldType(field, sourceDescription.getKey()))).collect(Collectors.toList());
        }
        printExtendedInformation(header, footer, sourceDescription);
    } else if (ksqlEntity instanceof TopicDescription) {
        columnHeaders = new ArrayList<>();
        columnHeaders.add("Topic Name");
        columnHeaders.add("Kafka Topic");
        columnHeaders.add("Type");
        List<String> topicInfo = new ArrayList<>();
        TopicDescription topicDescription = (TopicDescription) ksqlEntity;
        topicInfo.add(topicDescription.getName());
        topicInfo.add(topicDescription.getKafkaTopic());
        topicInfo.add(topicDescription.getFormat());
        if (topicDescription.getFormat().equalsIgnoreCase("AVRO")) {
            columnHeaders.add("AvroSchema");
            topicInfo.add(topicDescription.getSchemaString());
        }
        rowValues = Arrays.asList(topicInfo);
    } else if (ksqlEntity instanceof StreamsList) {
        List<SourceInfo.Stream> streamInfos = ((StreamsList) ksqlEntity).getStreams();
        columnHeaders = Arrays.asList("Stream Name", "Kafka Topic", "Format");
        rowValues = streamInfos.stream().map(streamInfo -> Arrays.asList(streamInfo.getName(), streamInfo.getTopic(), streamInfo.getFormat())).collect(Collectors.toList());
    } else if (ksqlEntity instanceof TablesList) {
        List<SourceInfo.Table> tableInfos = ((TablesList) ksqlEntity).getTables();
        columnHeaders = Arrays.asList("Table Name", "Kafka Topic", "Format", "Windowed");
        rowValues = tableInfos.stream().map(tableInfo -> Arrays.asList(tableInfo.getName(), tableInfo.getTopic(), tableInfo.getFormat(), Boolean.toString(tableInfo.getIsWindowed()))).collect(Collectors.toList());
    } else if (ksqlEntity instanceof KsqlTopicsList) {
        List<KsqlTopicInfo> topicInfos = ((KsqlTopicsList) ksqlEntity).getTopics();
        columnHeaders = Arrays.asList("Ksql Topic", "Kafka Topic", "Format");
        rowValues = topicInfos.stream().map(topicInfo -> Arrays.asList(topicInfo.getName(), topicInfo.getKafkaTopic(), topicInfo.getFormat().name())).collect(Collectors.toList());
    } else if (ksqlEntity instanceof KafkaTopicsList) {
        List<KafkaTopicInfo> topicInfos = ((KafkaTopicsList) ksqlEntity).getTopics();
        columnHeaders = Arrays.asList("Kafka Topic", "Registered", "Partitions", "Partition Replicas", "Consumers", "Consumer Groups");
        rowValues = topicInfos.stream()
            .map(topicInfo -> Arrays.asList(
                topicInfo.getName(),
                Boolean.toString(topicInfo.getRegistered()),
                Integer.toString(topicInfo.getReplicaInfo().size()),
                getTopicReplicaInfo(topicInfo.getReplicaInfo()),
                Integer.toString(topicInfo.getConsumerCount()),
                Integer.toString(topicInfo.getConsumerGroupCount())))
            .collect(Collectors.toList());
    } else if (ksqlEntity instanceof ExecutionPlan) {
        ExecutionPlan executionPlan = (ExecutionPlan) ksqlEntity;
        columnHeaders = Arrays.asList("Execution Plan");
        rowValues = Collections.singletonList(Arrays.asList(executionPlan.getExecutionPlan()));
    } else {
        throw new RuntimeException(String.format("Unexpected KsqlEntity class: '%s'", ksqlEntity.getClass().getCanonicalName()));
    }
    printTable(columnHeaders, rowValues, header, footer);
}
Also used : Arrays(java.util.Arrays) StreamsList(io.confluent.ksql.rest.entity.StreamsList) StreamedRow(io.confluent.ksql.rest.entity.StreamedRow) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) LoggerFactory(org.slf4j.LoggerFactory) KsqlEntityList(io.confluent.ksql.rest.entity.KsqlEntityList) KafkaTopicsList(io.confluent.ksql.rest.entity.KafkaTopicsList) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) KsqlEntity(io.confluent.ksql.rest.entity.KsqlEntity) ErrorMessage(io.confluent.ksql.rest.entity.ErrorMessage) KsqlTopicInfo(io.confluent.ksql.rest.entity.KsqlTopicInfo) ServerInfo(io.confluent.ksql.rest.entity.ServerInfo) KsqlRestClient(io.confluent.ksql.rest.client.KsqlRestClient) InfoCmp(org.jline.utils.InfoCmp) CliUtils(io.confluent.ksql.util.CliUtils) Map(java.util.Map) ExecutionPlan(io.confluent.ksql.rest.entity.ExecutionPlan) Queries(io.confluent.ksql.rest.entity.Queries) TablesList(io.confluent.ksql.rest.entity.TablesList) SchemaMapper(io.confluent.ksql.rest.entity.SchemaMapper) Terminal(org.jline.terminal.Terminal) PrintWriter(java.io.PrintWriter) Logger(org.slf4j.Logger) CommandStatus(io.confluent.ksql.rest.entity.CommandStatus) PropertiesList(io.confluent.ksql.rest.entity.PropertiesList) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) CommandStatusEntity(io.confluent.ksql.rest.entity.CommandStatusEntity) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) KsqlTopicsList(io.confluent.ksql.rest.entity.KsqlTopicsList) SourceInfo(io.confluent.ksql.rest.entity.SourceInfo) ErrorMessageEntity(io.confluent.ksql.rest.entity.ErrorMessageEntity) List(java.util.List) TopicDescription(io.confluent.ksql.rest.entity.TopicDescription) StringUtil(io.confluent.ksql.util.StringUtil) EndOfFileException(org.jline.reader.EndOfFileException) GenericRow(io.confluent.ksql.GenericRow) Closeable(java.io.Closeable) KafkaTopicInfo(io.confluent.ksql.rest.entity.KafkaTopicInfo) Comparator(java.util.Comparator) Collections(java.util.Collections)
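
The SourceDescription branch above reduces the entity's schema to a two-column Field/Type table before handing it to printTable. The following self-contained sketch shows just that mapping with plain Java collections; the FieldInfo class, the sample fields, and the ad-hoc rendering are hypothetical stand-ins, not ksql API:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class SchemaTableSketch {

    // Hypothetical stand-in for SourceDescription.FieldSchemaInfo: a field name plus its type.
    static final class FieldInfo {
        final String name;
        final String type;
        FieldInfo(String name, String type) { this.name = name; this.type = type; }
    }

    public static void main(String[] args) {
        List<FieldInfo> schema = Arrays.asList(
            new FieldInfo("ROWKEY", "VARCHAR (key)"),
            new FieldInfo("USERID", "BIGINT"),
            new FieldInfo("REGION", "VARCHAR"));

        List<String> columnHeaders = Arrays.asList("Field", "Type");

        // Same shape as the printAsTable branch: one table row per schema field.
        List<List<String>> rowValues = schema.stream()
            .map(field -> Arrays.asList(field.name, field.type))
            .collect(Collectors.toList());

        // Minimal rendering in place of Console.printTable(columnHeaders, rowValues, header, footer).
        System.out.println(String.join(" | ", columnHeaders));
        rowValues.forEach(row -> System.out.println(String.join(" | ", row)));
    }
}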

Example 2 with SourceDescription

Use of io.confluent.ksql.rest.entity.SourceDescription in project ksql by confluentinc.

From class KsqlResource, method describe:

private SourceDescription describe(String name, boolean extended) throws KsqlException {
    StructuredDataSource dataSource = ksqlEngine.getMetaStore().getSource(name);
    if (dataSource == null) {
        throw new KsqlException(String.format("Could not find STREAM/TABLE '%s' in the Metastore", name));
    }
    List<PersistentQueryMetadata> queries = ksqlEngine.getPersistentQueries().values().stream()
        .filter(meta -> ((KsqlStructuredDataOutputNode) meta.getOutputNode()).getKafkaTopicName()
            .equals(dataSource.getKsqlTopic().getTopicName()))
        .collect(Collectors.toList());
    return new SourceDescription(
        dataSource,
        extended,
        dataSource.getKsqlTopic().getKsqlTopicSerDe().getSerDe().name(),
        "",
        "",
        getReadQueryIds(queries),
        getWriteQueryIds(queries),
        ksqlEngine.getTopicClient());
}
Also used : CreateTableCommand(io.confluent.ksql.ddl.commands.CreateTableCommand) Query(io.confluent.ksql.parser.tree.Query) CreateTableAsSelect(io.confluent.ksql.parser.tree.CreateTableAsSelect) Interval(org.antlr.v4.runtime.misc.Interval) StreamsList(io.confluent.ksql.rest.entity.StreamsList) Produces(javax.ws.rs.Produces) AbstractStreamCreateStatement(io.confluent.ksql.parser.tree.AbstractStreamCreateStatement) LoggerFactory(org.slf4j.LoggerFactory) Path(javax.ws.rs.Path) TimeoutException(java.util.concurrent.TimeoutException) KsqlEngine(io.confluent.ksql.KsqlEngine) KsqlParser(io.confluent.ksql.parser.KsqlParser) KafkaTopicsList(io.confluent.ksql.rest.entity.KafkaTopicsList) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MediaType(javax.ws.rs.core.MediaType) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic) CommandId(io.confluent.ksql.rest.server.computation.CommandId) Pair(io.confluent.ksql.util.Pair) Consumes(javax.ws.rs.Consumes) CreateTable(io.confluent.ksql.parser.tree.CreateTable) Explain(io.confluent.ksql.parser.tree.Explain) Map(java.util.Map) RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) KsqlStream(io.confluent.ksql.metastore.KsqlStream) QueryId(io.confluent.ksql.query.QueryId) DropTopic(io.confluent.ksql.parser.tree.DropTopic) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) DdlCommandResult(io.confluent.ksql.ddl.commands.DdlCommandResult) QueryMetadata(io.confluent.ksql.util.QueryMetadata) CommandStatus(io.confluent.ksql.rest.entity.CommandStatus) PropertiesList(io.confluent.ksql.rest.entity.PropertiesList) DropStream(io.confluent.ksql.parser.tree.DropStream) Collection(java.util.Collection) KsqlTable(io.confluent.ksql.metastore.KsqlTable) KafkaConsumerGroupClient(io.confluent.ksql.util.KafkaConsumerGroupClient) Collectors(java.util.stream.Collectors) ErrorMessageEntity(io.confluent.ksql.rest.entity.ErrorMessageEntity) List(java.util.List) Response(javax.ws.rs.core.Response) SetProperty(io.confluent.ksql.parser.tree.SetProperty) TopicDescription(io.confluent.ksql.rest.entity.TopicDescription) KafkaConsumerGroupClientImpl(io.confluent.ksql.util.KafkaConsumerGroupClientImpl) TerminateQuery(io.confluent.ksql.parser.tree.TerminateQuery) DdlCommandExec(io.confluent.ksql.ddl.commands.DdlCommandExec) DdlCommand(io.confluent.ksql.ddl.commands.DdlCommand) KsqlException(io.confluent.ksql.util.KsqlException) Statement(io.confluent.ksql.parser.tree.Statement) KsqlRequest(io.confluent.ksql.rest.entity.KsqlRequest) DdlStatement(io.confluent.ksql.parser.tree.DdlStatement) ListTopics(io.confluent.ksql.parser.tree.ListTopics) KsqlRestApplication(io.confluent.ksql.rest.server.KsqlRestApplication) HashMap(java.util.HashMap) KsqlEntityList(io.confluent.ksql.rest.entity.KsqlEntityList) RegisterTopicCommand(io.confluent.ksql.ddl.commands.RegisterTopicCommand) CreateStreamAsSelect(io.confluent.ksql.parser.tree.CreateStreamAsSelect) ListProperties(io.confluent.ksql.parser.tree.ListProperties) ArrayList(java.util.ArrayList) KsqlEntity(io.confluent.ksql.rest.entity.KsqlEntity) CreateStream(io.confluent.ksql.parser.tree.CreateStream) CharStream(org.antlr.v4.runtime.CharStream) ShowColumns(io.confluent.ksql.parser.tree.ShowColumns) KsqlStructuredDataOutputNode(io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) Queries(io.confluent.ksql.rest.entity.Queries) TablesList(io.confluent.ksql.rest.entity.TablesList) 
StatementExecutor(io.confluent.ksql.rest.server.computation.StatementExecutor) DropSourceCommand(io.confluent.ksql.ddl.commands.DropSourceCommand) ListTables(io.confluent.ksql.parser.tree.ListTables) DropTable(io.confluent.ksql.parser.tree.DropTable) POST(javax.ws.rs.POST) ListStreams(io.confluent.ksql.parser.tree.ListStreams) RunScript(io.confluent.ksql.parser.tree.RunScript) ListQueries(io.confluent.ksql.parser.tree.ListQueries) CommandStatusEntity(io.confluent.ksql.rest.entity.CommandStatusEntity) ListRegisteredTopics(io.confluent.ksql.parser.tree.ListRegisteredTopics) DropTopicCommand(io.confluent.ksql.ddl.commands.DropTopicCommand) CreateStreamCommand(io.confluent.ksql.ddl.commands.CreateStreamCommand) CommandStore(io.confluent.ksql.rest.server.computation.CommandStore) DataSource(io.confluent.ksql.serde.DataSource) TimeUnit(java.util.concurrent.TimeUnit) KsqlTopicsList(io.confluent.ksql.rest.entity.KsqlTopicsList) CreateAsSelect(io.confluent.ksql.parser.tree.CreateAsSelect) StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) AvroUtil(io.confluent.ksql.util.AvroUtil) Collections(java.util.Collections) SqlBaseParser(io.confluent.ksql.parser.SqlBaseParser)
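
The describe method above looks the source up in the metastore, fails fast when it is missing, and then keeps only the persistent queries whose output topic matches the source's Kafka topic. Below is a minimal sketch of that filter in plain Java, where QueryInfo is a hypothetical stand-in for PersistentQueryMetadata and the exception type is simplified:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class DescribeFilterSketch {

    // Hypothetical stand-in for PersistentQueryMetadata: a query id plus the topic it writes to.
    static final class QueryInfo {
        final String id;
        final String outputTopic;
        QueryInfo(String id, String outputTopic) { this.id = id; this.outputTopic = outputTopic; }
    }

    static List<QueryInfo> queriesWritingTo(String sourceTopic, List<QueryInfo> running) {
        if (sourceTopic == null) {
            // Mirrors the null check on the metastore lookup: fail with a clear message instead of an NPE later.
            throw new IllegalArgumentException("Could not find STREAM/TABLE in the metastore");
        }
        return running.stream()
            .filter(query -> sourceTopic.equals(query.outputTopic))
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<QueryInfo> running = Arrays.asList(
            new QueryInfo("CTAS_USERS_0", "users_topic"),
            new QueryInfo("CSAS_CLICKS_1", "clicks_topic"));
        // Prints 1: only the first query writes to users_topic.
        System.out.println(queriesWritingTo("users_topic", running).size());
    }
}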

Example 3 with SourceDescription

Use of io.confluent.ksql.rest.entity.SourceDescription in project ksql by confluentinc.

From class KsqlResource, method getStatementExecutionPlan:

private SourceDescription getStatementExecutionPlan(String queryId, Statement statement, String statementText, Map<String, Object> properties) throws KsqlException {
    if (queryId != null) {
        PersistentQueryMetadata metadata = ksqlEngine.getPersistentQueries().get(new QueryId(queryId));
        if (metadata == null) {
            throw new KsqlException(("Query with id:" + queryId + " does not exist, use SHOW QUERIES to view the full set of " + "queries."));
        }
        KsqlStructuredDataOutputNode outputNode = (KsqlStructuredDataOutputNode) metadata.getOutputNode();
        return new SourceDescription(outputNode, metadata.getStatementString(), metadata.getStatementString(), metadata.getTopologyDescription(), metadata.getExecutionPlan(), ksqlEngine.getTopicClient());
    }
    DdlCommandTask ddlCommandTask = ddlCommandTasks.get(statement.getClass());
    if (ddlCommandTask != null) {
        try {
            String executionPlan = ddlCommandTask.execute(statement, statementText, properties);
            return new SourceDescription("", "User-Evaluation", Collections.EMPTY_LIST, Collections.EMPTY_LIST, Collections.EMPTY_LIST, "QUERY", "", "", "", "", true, "", "", "", executionPlan, 0, 0);
        } catch (KsqlException ksqlException) {
            throw ksqlException;
        } catch (Throwable t) {
            throw new KsqlException("Cannot RUN execution plan for this statement, " + statement, t);
        }
    }
    throw new KsqlException("Cannot FIND execution plan for this statement:" + statement);
}
Also used : QueryId(io.confluent.ksql.query.QueryId) KsqlStructuredDataOutputNode(io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) KsqlException(io.confluent.ksql.util.KsqlException) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription)
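
getStatementExecutionPlan resolves a plan along two paths: by query id from the engine's persistent queries, or by dispatching the statement's class to a registered DdlCommandTask. The class-keyed dispatch can be sketched on its own; PlanTask and the two statement classes below are hypothetical, not ksql types:

import java.util.HashMap;
import java.util.Map;

public class PlanDispatchSketch {

    // Hypothetical counterpart of DdlCommandTask: turns a statement into an execution-plan string.
    interface PlanTask {
        String execute(Object statement);
    }

    // Hypothetical statement types standing in for the ksql parse-tree classes.
    static final class ShowColumnsStmt {}
    static final class CreateStreamStmt {}

    private static final Map<Class<?>, PlanTask> TASKS = new HashMap<>();
    static {
        TASKS.put(ShowColumnsStmt.class, statement -> "DESCRIBE plan");
        TASKS.put(CreateStreamStmt.class, statement -> "CREATE STREAM plan");
    }

    static String planFor(Object statement) {
        PlanTask task = TASKS.get(statement.getClass());
        if (task == null) {
            // Same shape as the final KsqlException: no task registered for this statement class.
            throw new IllegalStateException("Cannot FIND execution plan for this statement: " + statement);
        }
        return task.execute(statement);
    }

    public static void main(String[] args) {
        // Prints "DESCRIBE plan".
        System.out.println(planFor(new ShowColumnsStmt()));
    }
}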

Example 4 with SourceDescription

Use of io.confluent.ksql.rest.entity.SourceDescription in project ksql by confluentinc.

From class ConsoleTest, method testPrintKSqlEntityList:

@Test
public void testPrintKSqlEntityList() throws IOException {
    Map<String, Object> properties = new HashMap<>();
    properties.put("k1", 1);
    properties.put("k2", "v2");
    properties.put("k3", true);
    List<Queries.RunningQuery> queries = new ArrayList<>();
    queries.add(new Queries.RunningQuery("select * from t1", "TestTopic", new QueryId("0")));
    for (int i = 0; i < 5; i++) {
        KsqlEntityList entityList = new KsqlEntityList(Arrays.asList(
            new CommandStatusEntity("e", "topic/1/create", "SUCCESS", "Success Message"),
            new ErrorMessageEntity("e", new FakeException()),
            new PropertiesList("e", properties),
            new Queries("e", queries),
            new SourceDescription("e", "TestSource", Collections.EMPTY_LIST, Collections.EMPTY_LIST, buildTestSchema(i), DataSource.DataSourceType.KTABLE.getKqlType(), "key", "2000-01-01", "stats", "errors", false, "avro", "kadka-topic", "topology", "executionPlan", 1, 1),
            new TopicDescription("e", "TestTopic", "TestKafkaTopic", "AVRO", "schemaString"),
            new StreamsList("e", Arrays.asList(new SourceInfo.Stream("TestStream", "TestTopic", "AVRO"))),
            new TablesList("e", Arrays.asList(new SourceInfo.Table("TestTable", "TestTopic", "JSON", false))),
            new KsqlTopicsList("e", Arrays.asList(new KsqlTopicInfo("TestTopic", "TestKafkaTopic", DataSource.DataSourceSerDe.JSON))),
            new KafkaTopicsList("e", Arrays.asList(new KafkaTopicInfo("TestKafkaTopic", true, ImmutableList.of(1), 1, 1))),
            new ExecutionPlan("Test Execution Plan")));
        terminal.printKsqlEntityList(entityList);
    }
}
Also used : KsqlEntityList(io.confluent.ksql.rest.entity.KsqlEntityList) Queries(io.confluent.ksql.rest.entity.Queries) HashMap(java.util.HashMap) KafkaTopicsList(io.confluent.ksql.rest.entity.KafkaTopicsList) ArrayList(java.util.ArrayList) CommandStatusEntity(io.confluent.ksql.rest.entity.CommandStatusEntity) ExecutionPlan(io.confluent.ksql.rest.entity.ExecutionPlan) StreamsList(io.confluent.ksql.rest.entity.StreamsList) QueryId(io.confluent.ksql.query.QueryId) KsqlTopicInfo(io.confluent.ksql.rest.entity.KsqlTopicInfo) ErrorMessageEntity(io.confluent.ksql.rest.entity.ErrorMessageEntity) PropertiesList(io.confluent.ksql.rest.entity.PropertiesList) TablesList(io.confluent.ksql.rest.entity.TablesList) FakeException(io.confluent.ksql.FakeException) KafkaTopicInfo(io.confluent.ksql.rest.entity.KafkaTopicInfo) TopicDescription(io.confluent.ksql.rest.entity.TopicDescription) KsqlTopicsList(io.confluent.ksql.rest.entity.KsqlTopicsList) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) Test(org.junit.Test)
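
The test above drives Console.printKsqlEntityList once per loop iteration, feeding a schema that grows with i via buildTestSchema(i), so the table formatting is exercised against inputs of increasing width. That pattern looks roughly like the sketch below, where the format method and the synthetic field names are hypothetical:

import java.util.ArrayList;
import java.util.List;

public class GrowingSchemaSketch {

    // Hypothetical formatter standing in for the Console table printer.
    static String format(List<String> fields) {
        return fields.isEmpty() ? "<no fields>" : String.join(" | ", fields);
    }

    // Builds i synthetic fields, mirroring the role of buildTestSchema(i) in the test.
    static List<String> buildTestSchema(int i) {
        List<String> fields = new ArrayList<>();
        for (int n = 0; n < i; n++) {
            fields.add("FIELD_" + n + " VARCHAR");
        }
        return fields;
    }

    public static void main(String[] args) {
        for (int i = 0; i < 5; i++) {
            // Each pass pushes a wider schema through the formatter, as the test does with its entity list.
            System.out.println(format(buildTestSchema(i)));
        }
    }
}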

Example 5 with SourceDescription

Use of io.confluent.ksql.rest.entity.SourceDescription in project ksql by confluentinc.

From class KsqlResourceTest, method testDescribeStatement:

@Test
public void testDescribeStatement() throws Exception {
    KsqlResource testResource = TestKsqlResourceUtil.get(ksqlEngine, ksqlRestConfig);
    final String tableName = "TEST_TABLE";
    final String ksqlString = String.format("DESCRIBE %s;", tableName);
    final ShowColumns ksqlStatement = new ShowColumns(QualifiedName.of(tableName), false, false);
    SourceDescription testDescription = makeSingleRequest(testResource, ksqlString, ksqlStatement, Collections.emptyMap(), SourceDescription.class);
    SourceDescription expectedDescription = new SourceDescription(testResource.getKsqlEngine().getMetaStore().getSource(tableName), false, "serdes", "topo", "exec-plan", Collections.EMPTY_LIST, Collections.EMPTY_LIST, null);
    assertEquals(expectedDescription, testDescription);
}
Also used : ShowColumns(io.confluent.ksql.parser.tree.ShowColumns) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) Test(org.junit.Test)
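
testDescribeStatement sends DESCRIBE TEST_TABLE; through the resource and compares the returned SourceDescription with one built directly from the metastore, so the whole check rides on the entity's equals. The expected-versus-actual pattern is sketched below with a hypothetical Description value class in place of the ksql entity and a stubbed describe call in place of makeSingleRequest:

import static org.junit.Assert.assertEquals;

import java.util.Objects;
import org.junit.Test;

public class DescribeEqualitySketch {

    // Hypothetical value class; SourceDescription's own equals plays this role in the real test.
    static final class Description {
        final String name;
        final String format;
        Description(String name, String format) { this.name = name; this.format = format; }
        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Description)) {
                return false;
            }
            Description other = (Description) o;
            return name.equals(other.name) && format.equals(other.format);
        }
        @Override
        public int hashCode() {
            return Objects.hash(name, format);
        }
    }

    // Stubbed stand-in for makeSingleRequest(...): pretend the server described TEST_TABLE.
    static Description describe(String ksqlString) {
        return new Description("TEST_TABLE", "JSON");
    }

    @Test
    public void shouldReturnExpectedDescription() {
        Description expected = new Description("TEST_TABLE", "JSON");
        assertEquals(expected, describe("DESCRIBE TEST_TABLE;"));
    }
}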

Aggregations

SourceDescription (io.confluent.ksql.rest.entity.SourceDescription): 5
QueryId (io.confluent.ksql.query.QueryId): 3
CommandStatusEntity (io.confluent.ksql.rest.entity.CommandStatusEntity): 3
ErrorMessageEntity (io.confluent.ksql.rest.entity.ErrorMessageEntity): 3
KafkaTopicsList (io.confluent.ksql.rest.entity.KafkaTopicsList): 3
KsqlEntityList (io.confluent.ksql.rest.entity.KsqlEntityList): 3
KsqlTopicsList (io.confluent.ksql.rest.entity.KsqlTopicsList): 3
PropertiesList (io.confluent.ksql.rest.entity.PropertiesList): 3
Queries (io.confluent.ksql.rest.entity.Queries): 3
StreamsList (io.confluent.ksql.rest.entity.StreamsList): 3
TablesList (io.confluent.ksql.rest.entity.TablesList): 3
TopicDescription (io.confluent.ksql.rest.entity.TopicDescription): 3
ArrayList (java.util.ArrayList): 3
KsqlStructuredDataOutputNode (io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode): 2
CommandStatus (io.confluent.ksql.rest.entity.CommandStatus): 2
KsqlEntity (io.confluent.ksql.rest.entity.KsqlEntity): 2
KsqlException (io.confluent.ksql.util.KsqlException): 2
PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata): 2
Collections (java.util.Collections): 2
List (java.util.List): 2