Search in sources:

Example 1 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class KsqlResource method describe.

private SourceDescription describe(String name, boolean extended) throws KsqlException {
    StructuredDataSource dataSource = ksqlEngine.getMetaStore().getSource(name);
    if (dataSource == null) {
        throw new KsqlException(String.format("Could not find STREAM/TABLE '%s' in the Metastore", name));
    }
    List<PersistentQueryMetadata> queries = ksqlEngine.getPersistentQueries().values().stream()
            .filter(meta -> ((KsqlStructuredDataOutputNode) meta.getOutputNode())
                    .getKafkaTopicName()
                    .equals(dataSource.getKsqlTopic().getTopicName()))
            .collect(Collectors.toList());
    return new SourceDescription(
            dataSource,
            extended,
            dataSource.getKsqlTopic().getKsqlTopicSerDe().getSerDe().name(),
            "",
            "",
            getReadQueryIds(queries),
            getWriteQueryIds(queries),
            ksqlEngine.getTopicClient());
}
Also used : CreateTableCommand(io.confluent.ksql.ddl.commands.CreateTableCommand) Query(io.confluent.ksql.parser.tree.Query) CreateTableAsSelect(io.confluent.ksql.parser.tree.CreateTableAsSelect) Interval(org.antlr.v4.runtime.misc.Interval) StreamsList(io.confluent.ksql.rest.entity.StreamsList) Produces(javax.ws.rs.Produces) AbstractStreamCreateStatement(io.confluent.ksql.parser.tree.AbstractStreamCreateStatement) LoggerFactory(org.slf4j.LoggerFactory) Path(javax.ws.rs.Path) TimeoutException(java.util.concurrent.TimeoutException) KsqlEngine(io.confluent.ksql.KsqlEngine) KsqlParser(io.confluent.ksql.parser.KsqlParser) KafkaTopicsList(io.confluent.ksql.rest.entity.KafkaTopicsList) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MediaType(javax.ws.rs.core.MediaType) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic) CommandId(io.confluent.ksql.rest.server.computation.CommandId) Pair(io.confluent.ksql.util.Pair) Consumes(javax.ws.rs.Consumes) CreateTable(io.confluent.ksql.parser.tree.CreateTable) Explain(io.confluent.ksql.parser.tree.Explain) Map(java.util.Map) RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) KsqlStream(io.confluent.ksql.metastore.KsqlStream) QueryId(io.confluent.ksql.query.QueryId) DropTopic(io.confluent.ksql.parser.tree.DropTopic) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) DdlCommandResult(io.confluent.ksql.ddl.commands.DdlCommandResult) QueryMetadata(io.confluent.ksql.util.QueryMetadata) CommandStatus(io.confluent.ksql.rest.entity.CommandStatus) PropertiesList(io.confluent.ksql.rest.entity.PropertiesList) DropStream(io.confluent.ksql.parser.tree.DropStream) Collection(java.util.Collection) KsqlTable(io.confluent.ksql.metastore.KsqlTable) KafkaConsumerGroupClient(io.confluent.ksql.util.KafkaConsumerGroupClient) Collectors(java.util.stream.Collectors) ErrorMessageEntity(io.confluent.ksql.rest.entity.ErrorMessageEntity) List(java.util.List) Response(javax.ws.rs.core.Response) SetProperty(io.confluent.ksql.parser.tree.SetProperty) TopicDescription(io.confluent.ksql.rest.entity.TopicDescription) KafkaConsumerGroupClientImpl(io.confluent.ksql.util.KafkaConsumerGroupClientImpl) TerminateQuery(io.confluent.ksql.parser.tree.TerminateQuery) DdlCommandExec(io.confluent.ksql.ddl.commands.DdlCommandExec) DdlCommand(io.confluent.ksql.ddl.commands.DdlCommand) KsqlException(io.confluent.ksql.util.KsqlException) Statement(io.confluent.ksql.parser.tree.Statement) KsqlRequest(io.confluent.ksql.rest.entity.KsqlRequest) DdlStatement(io.confluent.ksql.parser.tree.DdlStatement) ListTopics(io.confluent.ksql.parser.tree.ListTopics) KsqlRestApplication(io.confluent.ksql.rest.server.KsqlRestApplication) HashMap(java.util.HashMap) KsqlEntityList(io.confluent.ksql.rest.entity.KsqlEntityList) RegisterTopicCommand(io.confluent.ksql.ddl.commands.RegisterTopicCommand) CreateStreamAsSelect(io.confluent.ksql.parser.tree.CreateStreamAsSelect) ListProperties(io.confluent.ksql.parser.tree.ListProperties) ArrayList(java.util.ArrayList) KsqlEntity(io.confluent.ksql.rest.entity.KsqlEntity) CreateStream(io.confluent.ksql.parser.tree.CreateStream) CharStream(org.antlr.v4.runtime.CharStream) ShowColumns(io.confluent.ksql.parser.tree.ShowColumns) KsqlStructuredDataOutputNode(io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) Queries(io.confluent.ksql.rest.entity.Queries) TablesList(io.confluent.ksql.rest.entity.TablesList) 
StatementExecutor(io.confluent.ksql.rest.server.computation.StatementExecutor) DropSourceCommand(io.confluent.ksql.ddl.commands.DropSourceCommand) ListTables(io.confluent.ksql.parser.tree.ListTables) DropTable(io.confluent.ksql.parser.tree.DropTable) POST(javax.ws.rs.POST) ListStreams(io.confluent.ksql.parser.tree.ListStreams) RunScript(io.confluent.ksql.parser.tree.RunScript) ListQueries(io.confluent.ksql.parser.tree.ListQueries) CommandStatusEntity(io.confluent.ksql.rest.entity.CommandStatusEntity) ListRegisteredTopics(io.confluent.ksql.parser.tree.ListRegisteredTopics) DropTopicCommand(io.confluent.ksql.ddl.commands.DropTopicCommand) CreateStreamCommand(io.confluent.ksql.ddl.commands.CreateStreamCommand) CommandStore(io.confluent.ksql.rest.server.computation.CommandStore) DataSource(io.confluent.ksql.serde.DataSource) TimeUnit(java.util.concurrent.TimeUnit) KsqlTopicsList(io.confluent.ksql.rest.entity.KsqlTopicsList) CreateAsSelect(io.confluent.ksql.parser.tree.CreateAsSelect) StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) AvroUtil(io.confluent.ksql.util.AvroUtil) Collections(java.util.Collections) SqlBaseParser(io.confluent.ksql.parser.SqlBaseParser) StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlException(io.confluent.ksql.util.KsqlException) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription)
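
The null-check-then-throw lookup at the top of describe can be read as a small reusable pattern. Below is a minimal sketch of that pattern only; requireSource is a hypothetical helper name, and the MetaStore parameter type is assumed from the ksqlEngine.getMetaStore() call above, while getSource(String), the null check, and the KsqlException message are taken directly from the example.

private StructuredDataSource requireSource(MetaStore metaStore, String name) {
    // Hypothetical helper mirroring the lookup-or-throw pattern used in describe() above.
    StructuredDataSource dataSource = metaStore.getSource(name);
    if (dataSource == null) {
        throw new KsqlException(String.format("Could not find STREAM/TABLE '%s' in the Metastore", name));
    }
    return dataSource;
}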

Example 2 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class AstBuilder method getResultDatasource.

private StructuredDataSource getResultDatasource(Select select, Table into) {
    SchemaBuilder dataSource = SchemaBuilder.struct().name(into.toString());
    for (SelectItem selectItem : select.getSelectItems()) {
        if (selectItem instanceof SingleColumn) {
            SingleColumn singleColumn = (SingleColumn) selectItem;
            String fieldName = singleColumn.getAlias().get();
            dataSource = dataSource.field(fieldName, Schema.BOOLEAN_SCHEMA);
        }
    }
    KsqlTopic ksqlTopic = new KsqlTopic(into.getName().toString(), into.getName().toString(), null);
    StructuredDataSource resultStream = new KsqlStream("AstBuilder-Into", into.getName().toString(), dataSource.schema(), dataSource.fields().get(0), null, ksqlTopic);
    return resultStream;
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlStream(io.confluent.ksql.metastore.KsqlStream) SelectItem(io.confluent.ksql.parser.tree.SelectItem) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) SingleColumn(io.confluent.ksql.parser.tree.SingleColumn) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic)
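
getResultDatasource only needs column names at this point, so every select item is registered with a BOOLEAN placeholder type. A minimal standalone sketch of the Kafka Connect SchemaBuilder pattern it relies on; the field names here are invented for illustration:

SchemaBuilder builder = SchemaBuilder.struct().name("OUTPUT");
// Each select alias becomes a field; BOOLEAN is only a placeholder type at this stage.
builder = builder.field("COL0", Schema.BOOLEAN_SCHEMA);
builder = builder.field("COL1", Schema.BOOLEAN_SCHEMA);
Schema schema = builder.schema();
// The example above passes schema.fields().get(0) into the new KsqlStream constructor.
Field firstField = schema.fields().get(0);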

Example 3 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class AstBuilder method getSelectStartItems.

private List<SelectItem> getSelectStartItems(final SelectItem selectItem, final Relation from) {
    List<SelectItem> selectItems = new ArrayList<>();
    AllColumns allColumns = (AllColumns) selectItem;
    if (from instanceof Join) {
        Join join = (Join) from;
        AliasedRelation left = (AliasedRelation) join.getLeft();
        StructuredDataSource leftDataSource = dataSourceExtractor.getMetaStore().getSource(left.getRelation().toString());
        if (leftDataSource == null) {
            throw new InvalidColumnReferenceException(left.getRelation().toString() + " does not exist.");
        }
        AliasedRelation right = (AliasedRelation) join.getRight();
        StructuredDataSource rightDataSource = dataSourceExtractor.getMetaStore().getSource(right.getRelation().toString());
        if (rightDataSource == null) {
            throw new InvalidColumnReferenceException(right.getRelation().toString() + " does not exist.");
        }
        for (Field field : leftDataSource.getSchema().fields()) {
            QualifiedNameReference qualifiedNameReference = new QualifiedNameReference(allColumns.getLocation().get(), QualifiedName.of(left.getAlias() + "." + field.name()));
            SingleColumn newSelectItem = new SingleColumn(qualifiedNameReference, left.getAlias() + "_" + field.name());
            selectItems.add(newSelectItem);
        }
        for (Field field : rightDataSource.getSchema().fields()) {
            QualifiedNameReference qualifiedNameReference = new QualifiedNameReference(allColumns.getLocation().get(), QualifiedName.of(right.getAlias() + "." + field.name()));
            SingleColumn newSelectItem = new SingleColumn(qualifiedNameReference, right.getAlias() + "_" + field.name());
            selectItems.add(newSelectItem);
        }
    } else {
        AliasedRelation fromRel = (AliasedRelation) from;
        StructuredDataSource fromDataSource = dataSourceExtractor.getMetaStore().getSource(((Table) fromRel.getRelation()).getName().getSuffix());
        if (fromDataSource == null) {
            throw new InvalidColumnReferenceException(((Table) fromRel.getRelation()).getName().getSuffix() + " does not exist.");
        }
        for (Field field : fromDataSource.getSchema().fields()) {
            QualifiedNameReference qualifiedNameReference = new QualifiedNameReference(allColumns.getLocation().get(), QualifiedName.of(fromDataSource.getName() + "." + field.name()));
            SingleColumn newSelectItem = new SingleColumn(qualifiedNameReference, field.name());
            selectItems.add(newSelectItem);
        }
    }
    return selectItems;
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) Field(org.apache.kafka.connect.data.Field) DropTable(io.confluent.ksql.parser.tree.DropTable) Table(io.confluent.ksql.parser.tree.Table) CreateTable(io.confluent.ksql.parser.tree.CreateTable) SelectItem(io.confluent.ksql.parser.tree.SelectItem) ArrayList(java.util.ArrayList) Join(io.confluent.ksql.parser.tree.Join) NaturalJoin(io.confluent.ksql.parser.tree.NaturalJoin) AllColumns(io.confluent.ksql.parser.tree.AllColumns) SingleColumn(io.confluent.ksql.parser.tree.SingleColumn) QualifiedNameReference(io.confluent.ksql.parser.tree.QualifiedNameReference) AliasedRelation(io.confluent.ksql.parser.tree.AliasedRelation)
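
Both branches above expand SELECT * the same way: iterate over the source's Connect schema and emit one column per field, qualified by the alias or source name. A hedged sketch of just that loop, using a hypothetical source variable and plain strings instead of parser-tree nodes; only getSchema().fields(), getName() and Field.name() come from the example:

List<String> expanded = new ArrayList<>();
for (Field field : source.getSchema().fields()) {
    // Each qualified name would become one SingleColumn in the rewritten select list.
    expanded.add(source.getName() + "." + field.name());
}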

Example 4 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class DataSourceExtractor method visitAliasedRelation.

@Override
public Node visitAliasedRelation(final SqlBaseParser.AliasedRelationContext context) {
    Table table = (Table) visit(context.relationPrimary());
    String alias = null;
    if (context.children.size() == 1) {
        alias = table.getName().getSuffix().toUpperCase();
    } else if (context.children.size() == 2) {
        alias = context.children.get(1).getText().toUpperCase();
    }
    if (!isJoin) {
        this.fromAlias = alias;
        StructuredDataSource fromDataSource = metaStore.getSource(table.getName().getSuffix());
        if (fromDataSource == null) {
            throw new KsqlException(table.getName().getSuffix() + " does not exist.");
        }
        return null;
    }
    // TODO: Figure out if the call to toUpperCase() here is really necessary
    return new AliasedRelation(getLocation(context), table, alias.toUpperCase(), getColumnAliases(context.columnAliases()));
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) Table(io.confluent.ksql.parser.tree.Table) AliasedRelation(io.confluent.ksql.parser.tree.AliasedRelation)
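
The alias rule in visitAliasedRelation is: with a single parse-tree child the table name doubles as the alias, with two children the second child's text supplies it, and either way the result is upper-cased. A standalone sketch of that rule under those assumptions; the helper name is invented:

static String resolveAlias(String tableName, String explicitAlias) {
    // One child: the table name is its own alias; two children: the second child's text is the alias.
    String alias = (explicitAlias == null) ? tableName : explicitAlias;
    return alias.toUpperCase();
}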

Example 5 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class Analyzer method visitJoin.

@Override
protected Node visitJoin(final Join node, final AnalysisContext context) {
    AliasedRelation left = (AliasedRelation) process(node.getLeft(), context);
    AliasedRelation right = (AliasedRelation) process(node.getRight(), context);
    String leftSideName = ((Table) left.getRelation()).getName().getSuffix();
    StructuredDataSource leftDataSource = metaStore.getSource(leftSideName);
    if (leftDataSource == null) {
        throw new KsqlException(format("Resource %s does not exist.", leftSideName));
    }
    leftDataSource = timestampColumn(left, leftDataSource);
    String rightSideName = ((Table) right.getRelation()).getName().getSuffix();
    StructuredDataSource rightDataSource = metaStore.getSource(rightSideName);
    if (rightDataSource == null) {
        throw new KsqlException(format("Resource %s does not exist.", rightSideName));
    }
    rightDataSource = timestampColumn(right, rightDataSource);
    String leftAlias = left.getAlias();
    String rightAlias = right.getAlias();
    JoinNode.Type joinType = getJoinType(node);
    if (!node.getCriteria().isPresent()) {
        throw new KsqlException(String.format("%s Join criteria is not set.", node.getLocation().isPresent() ? node.getLocation().get().toString() : ""));
    }
    JoinOn joinOn = (JoinOn) (node.getCriteria().get());
    ComparisonExpression comparisonExpression = (ComparisonExpression) joinOn.getExpression();
    Pair<String, String> leftSide = fetchKeyFieldName(comparisonExpression, leftAlias, leftDataSource.getSchema());
    Pair<String, String> rightSide = fetchKeyFieldName(comparisonExpression, rightAlias, rightDataSource.getSchema());
    String leftKeyFieldName = leftSide.getRight();
    String rightKeyFieldName = rightSide.getRight();
    if (comparisonExpression.getType() != ComparisonExpression.Type.EQUAL) {
        throw new KsqlException("Only equality join criteria is supported.");
    }
    StructuredDataSourceNode leftSourceKafkaTopicNode = new StructuredDataSourceNode(new PlanNodeId("KafkaTopic_Left"), leftDataSource, leftDataSource.getSchema());
    StructuredDataSourceNode rightSourceKafkaTopicNode = new StructuredDataSourceNode(new PlanNodeId("KafkaTopic_Right"), rightDataSource, rightDataSource.getSchema());
    JoinNode joinNode = new JoinNode(new PlanNodeId("Join"), joinType, leftSourceKafkaTopicNode, rightSourceKafkaTopicNode, leftKeyFieldName, rightKeyFieldName, leftAlias, rightAlias);
    analysis.setJoin(joinNode);
    return null;
}
Also used : PlanNodeId(io.confluent.ksql.planner.plan.PlanNodeId) StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) ComparisonExpression(io.confluent.ksql.parser.tree.ComparisonExpression) JoinNode(io.confluent.ksql.planner.plan.JoinNode) StructuredDataSourceNode(io.confluent.ksql.planner.plan.StructuredDataSourceNode) KsqlException(io.confluent.ksql.util.KsqlException) JoinOn(io.confluent.ksql.parser.tree.JoinOn) AliasedRelation(io.confluent.ksql.parser.tree.AliasedRelation)
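
visitJoin enforces two preconditions before building the JoinNode: the join must carry explicit criteria, and the criteria must be an equality comparison. A sketch of those guards pulled into a hypothetical helper, using only the calls visible in the example above:

private ComparisonExpression requireEquiJoinCriteria(Join node) {
    // Hypothetical helper: the same checks visitJoin performs, just isolated.
    if (!node.getCriteria().isPresent()) {
        throw new KsqlException("Join criteria is not set.");
    }
    ComparisonExpression comparison =
            (ComparisonExpression) ((JoinOn) node.getCriteria().get()).getExpression();
    if (comparison.getType() != ComparisonExpression.Type.EQUAL) {
        throw new KsqlException("Only equality join criteria is supported.");
    }
    return comparison;
}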

Aggregations

StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource) 17 usages
KsqlStream (io.confluent.ksql.metastore.KsqlStream) 8 usages
KsqlException (io.confluent.ksql.util.KsqlException) 7 usages
AliasedRelation (io.confluent.ksql.parser.tree.AliasedRelation) 4 usages
Table (io.confluent.ksql.parser.tree.Table) 4 usages
KsqlTable (io.confluent.ksql.metastore.KsqlTable) 3 usages
KsqlTopic (io.confluent.ksql.metastore.KsqlTopic) 3 usages
StructuredDataSourceNode (io.confluent.ksql.planner.plan.StructuredDataSourceNode) 3 usages
Test (org.junit.Test) 3 usages
CreateTable (io.confluent.ksql.parser.tree.CreateTable) 2 usages
DropTable (io.confluent.ksql.parser.tree.DropTable) 2 usages
SelectItem (io.confluent.ksql.parser.tree.SelectItem) 2 usages
SingleColumn (io.confluent.ksql.parser.tree.SingleColumn) 2 usages
PlanNode (io.confluent.ksql.planner.plan.PlanNode) 2 usages
PlanNodeId (io.confluent.ksql.planner.plan.PlanNodeId) 2 usages
SchemaKTable (io.confluent.ksql.structured.SchemaKTable) 2 usages
Pair (io.confluent.ksql.util.Pair) 2 usages
ArrayList (java.util.ArrayList) 2 usages
Topology (org.apache.kafka.streams.Topology) 2 usages
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) 1 usage