
Example 6 with KsqlStream

Use of io.confluent.ksql.metastore.KsqlStream in the ksql project by confluentinc.

From the class QueryEngine, method getResultDatasource.

StructuredDataSource getResultDatasource(final Select select, final String name) {
    SchemaBuilder dataSource = SchemaBuilder.struct().name(name);
    for (SelectItem selectItem : select.getSelectItems()) {
        if (selectItem instanceof SingleColumn) {
            SingleColumn singleColumn = (SingleColumn) selectItem;
            String fieldName = singleColumn.getAlias().get();
            dataSource = dataSource.field(fieldName, Schema.BOOLEAN_SCHEMA);
        }
    }
    KsqlTopic ksqlTopic = new KsqlTopic(name, name, null);
    return new KsqlStream("QueryEngine-DDLCommand-Not-Needed", name, dataSource.schema(), null, null, ksqlTopic);
}
Also used : KsqlStream(io.confluent.ksql.metastore.KsqlStream) SelectItem(io.confluent.ksql.parser.tree.SelectItem) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) SingleColumn(io.confluent.ksql.parser.tree.SingleColumn) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic)
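
The method above builds only a placeholder schema: every projected column is registered as a BOOLEAN field, since the real types are resolved later in planning. A minimal sketch of that SchemaBuilder pattern, using only the Kafka Connect API (the struct name and column aliases here are hypothetical):

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

public class PlaceholderSchemaSketch {
    public static void main(String[] args) {
        // Struct schema named after the result; every field gets a placeholder
        // BOOLEAN type, mirroring getResultDatasource above.
        SchemaBuilder builder = SchemaBuilder.struct().name("QUERY_RESULT"); // hypothetical name
        for (String alias : new String[]{"COL0", "COL1"}) { // hypothetical column aliases
            builder = builder.field(alias, Schema.BOOLEAN_SCHEMA);
        }
        Schema schema = builder.build();
        schema.fields().forEach(f -> System.out.println(f.name() + " : " + f.schema().type()));
    }
}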

Example 7 with KsqlStream

Use of io.confluent.ksql.metastore.KsqlStream in the ksql project by confluentinc.

From the class Analyzer, method analyzeNonStdOutSink.

private void analyzeNonStdOutSink() {
    List<Pair<StructuredDataSource, String>> fromDataSources = analysis.getFromDataSources();
    StructuredDataSource intoStructuredDataSource = analysis.getInto();
    String intoKafkaTopicName = analysis.getIntoKafkaTopicName();
    if (intoKafkaTopicName == null) {
        intoKafkaTopicName = intoStructuredDataSource.getName();
    }
    KsqlTopicSerDe intoTopicSerde = fromDataSources.get(0).getLeft().getKsqlTopic().getKsqlTopicSerDe();
    if (analysis.getIntoFormat() != null) {
        switch(analysis.getIntoFormat().toUpperCase()) {
            case DataSource.AVRO_SERDE_NAME:
                intoTopicSerde = new KsqlAvroTopicSerDe();
                break;
            case DataSource.JSON_SERDE_NAME:
                intoTopicSerde = new KsqlJsonTopicSerDe();
                break;
            case DataSource.DELIMITED_SERDE_NAME:
                intoTopicSerde = new KsqlDelimitedTopicSerDe();
                break;
            default:
                throw new KsqlException(String.format("Unsupported format: %s", analysis.getIntoFormat()));
        }
    } else {
        if (intoTopicSerde instanceof KsqlAvroTopicSerDe) {
            intoTopicSerde = new KsqlAvroTopicSerDe();
        }
    }
    KsqlTopic newIntoKsqlTopic = new KsqlTopic(intoKafkaTopicName, intoKafkaTopicName, intoTopicSerde);
    KsqlStream intoKsqlStream = new KsqlStream(sqlExpression, intoStructuredDataSource.getName(), null, null, null, newIntoKsqlTopic);
    analysis.setInto(intoKsqlStream);
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlStream(io.confluent.ksql.metastore.KsqlStream) KsqlDelimitedTopicSerDe(io.confluent.ksql.serde.delimited.KsqlDelimitedTopicSerDe) KsqlAvroTopicSerDe(io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe) KsqlTopicSerDe(io.confluent.ksql.serde.KsqlTopicSerDe) KsqlJsonTopicSerDe(io.confluent.ksql.serde.json.KsqlJsonTopicSerDe) KsqlException(io.confluent.ksql.util.KsqlException) Pair(io.confluent.ksql.util.Pair) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic)
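
The sink's serde is chosen from the explicit INTO format when one is given; otherwise it is inherited from the first source (with Avro getting a fresh KsqlAvroTopicSerDe instance). A hedged sketch of that dispatch rule, with the serde classes stood in by plain strings rather than the KsqlTopicSerDe hierarchy:

public class SinkSerdeSelection {
    // Stand-ins for DataSource.AVRO_SERDE_NAME / JSON_SERDE_NAME / DELIMITED_SERDE_NAME.
    static final String AVRO = "AVRO";
    static final String JSON = "JSON";
    static final String DELIMITED = "DELIMITED";

    // An explicit INTO format wins; otherwise the sink inherits the serde of the first source.
    static String chooseSinkSerde(String intoFormat, String sourceSerde) {
        if (intoFormat == null) {
            return sourceSerde;
        }
        switch (intoFormat.toUpperCase()) {
            case AVRO:
            case JSON:
            case DELIMITED:
                return intoFormat.toUpperCase();
            default:
                throw new IllegalArgumentException("Unsupported format: " + intoFormat);
        }
    }

    public static void main(String[] args) {
        System.out.println(chooseSinkSerde("json", AVRO));     // JSON: the explicit format wins
        System.out.println(chooseSinkSerde(null, DELIMITED));  // DELIMITED: inherited from the source
    }
}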

Example 8 with KsqlStream

Use of io.confluent.ksql.metastore.KsqlStream in the ksql project by confluentinc.

From the class Analyzer, method analyzeNonStdOutTable.

private StructuredDataSource analyzeNonStdOutTable(final Table node) {
    StructuredDataSource into = new KsqlStream(sqlExpression, node.getName().getSuffix(), null, null, null, null);
    setIntoProperties(into, node);
    return into;
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlStream(io.confluent.ksql.metastore.KsqlStream)
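
Across these examples the KsqlStream constructor is called as (sqlExpression, name, schema, keyField, timestampField, ksqlTopic), with nulls for the parts that are filled in later. A hedged construction sketch along those lines (the stream name, Kafka topic, and columns below are hypothetical):

import io.confluent.ksql.metastore.KsqlStream;
import io.confluent.ksql.metastore.KsqlTopic;
import io.confluent.ksql.serde.json.KsqlJsonTopicSerDe;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

public class KsqlStreamSketch {
    public static void main(String[] args) {
        // Hypothetical two-column schema for the stream.
        Schema schema = SchemaBuilder.struct()
                .field("ID", Schema.OPTIONAL_INT64_SCHEMA)
                .field("NAME", Schema.OPTIONAL_STRING_SCHEMA)
                .build();
        // KsqlTopic(name, kafkaTopicName, serde), as in the examples above.
        KsqlTopic topic = new KsqlTopic("ORDERS", "orders", new KsqlJsonTopicSerDe());
        // Key and timestamp fields left null, as in analyzeNonStdOutTable above.
        KsqlStream stream = new KsqlStream("CREATE STREAM ...", "ORDERS", schema, null, null, topic);
        System.out.println(stream.getName());
    }
}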

Example 9 with KsqlStream

Use of io.confluent.ksql.metastore.KsqlStream in the ksql project by confluentinc.

From the class CreateStreamCommand, method run.

@Override
public DdlCommandResult run(MetaStore metaStore) {
    if (registerTopicCommand != null) {
        registerTopicCommand.run(metaStore);
    }
    checkMetaData(metaStore, sourceName, topicName);
    KsqlStream ksqlStream = new KsqlStream(
        sqlExpression,
        sourceName,
        schema,
        (keyColumnName.length() == 0) ? null : SchemaUtil.getFieldByName(schema, keyColumnName).orElse(null),
        (timestampColumnName.length() == 0) ? null : SchemaUtil.getFieldByName(schema, timestampColumnName).orElse(null),
        metaStore.getTopic(topicName));
    // TODO: Need to check if the topic exists.
    // Add the topic to the metastore
    metaStore.putSource(ksqlStream.cloneWithTimeKeyColumns());
    return new DdlCommandResult(true, "Stream created");
}
Also used : KsqlStream(io.confluent.ksql.metastore.KsqlStream)
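
The key and timestamp columns are optional here: an empty column name means none was configured, otherwise the field is looked up by name in the schema. A hedged sketch of that lookup using only Kafka Connect types (SchemaUtil.getFieldByName is approximated by a plain stream over schema.fields()):

import java.util.Optional;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

public class OptionalColumnLookup {
    // An empty column name means "no key/timestamp column configured".
    static Field resolveColumn(Schema schema, String columnName) {
        if (columnName.isEmpty()) {
            return null;
        }
        Optional<Field> field = schema.fields().stream()
                .filter(f -> f.name().equalsIgnoreCase(columnName))
                .findFirst();
        return field.orElse(null);
    }

    public static void main(String[] args) {
        Schema schema = SchemaBuilder.struct()
                .field("ROWTIME", Schema.OPTIONAL_INT64_SCHEMA)  // hypothetical columns
                .field("USERID", Schema.OPTIONAL_STRING_SCHEMA)
                .build();
        System.out.println(resolveColumn(schema, "USERID"));  // the USERID field
        System.out.println(resolveColumn(schema, ""));        // null: no column configured
    }
}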

Example 10 with KsqlStream

Use of io.confluent.ksql.metastore.KsqlStream in the ksql project by confluentinc.

From the class PhysicalPlanBuilder, method buildPlanForStructuredOutputNode.

private QueryMetadata buildPlanForStructuredOutputNode(
        String sqlExpression,
        final SchemaKStream schemaKStream,
        final KsqlStructuredDataOutputNode outputNode,
        final String serviceId,
        final String persistanceQueryPrefix,
        final String statement) {
    if (metaStore.getTopic(outputNode.getKafkaTopicName()) == null) {
        metaStore.putTopic(outputNode.getKsqlTopic());
    }
    StructuredDataSource sinkDataSource;
    if (schemaKStream instanceof SchemaKTable) {
        SchemaKTable schemaKTable = (SchemaKTable) schemaKStream;
        sinkDataSource = new KsqlTable(
            sqlExpression,
            outputNode.getId().toString(),
            outputNode.getSchema(),
            schemaKStream.getKeyField(),
            outputNode.getTimestampField(),
            outputNode.getKsqlTopic(),
            outputNode.getId().toString() + ksqlConfig.get(KsqlConfig.KSQL_TABLE_STATESTORE_NAME_SUFFIX_CONFIG),
            schemaKTable.isWindowed());
    } else {
        sinkDataSource = new KsqlStream(
            sqlExpression,
            outputNode.getId().toString(),
            outputNode.getSchema(),
            schemaKStream.getKeyField(),
            outputNode.getTimestampField(),
            outputNode.getKsqlTopic());
    }
    if (updateMetastore) {
        metaStore.putSource(sinkDataSource.cloneWithTimeKeyColumns());
    }
    final QueryId queryId = sinkDataSource.getPersistentQueryId();
    final String applicationId = serviceId + persistanceQueryPrefix + queryId;
    KafkaStreams streams = buildStreams(builder, applicationId, ksqlConfig, overriddenStreamsProperties);
    Topology topology = builder.build();
    return new PersistentQueryMetadata(
        statement,
        streams,
        outputNode,
        schemaKStream.getExecutionPlan(""),
        queryId,
        (schemaKStream instanceof SchemaKTable) ? DataSource.DataSourceType.KTABLE : DataSource.DataSourceType.KSTREAM,
        applicationId,
        kafkaTopicClient,
        outputNode.getSchema(),
        sinkDataSource.getKsqlTopic(),
        topology);
}
Also used : SchemaKTable(io.confluent.ksql.structured.SchemaKTable) StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlStream(io.confluent.ksql.metastore.KsqlStream) KafkaStreams(org.apache.kafka.streams.KafkaStreams) KsqlTable(io.confluent.ksql.metastore.KsqlTable) QueryId(io.confluent.ksql.query.QueryId) Topology(org.apache.kafka.streams.Topology) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata)
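
The Kafka Streams application id for the persistent query is the concatenation of the service id, the persistent-query prefix, and the query id. A small sketch of that composition (all three values are hypothetical placeholders, not the actual KsqlConfig defaults):

public class ApplicationIdSketch {
    public static void main(String[] args) {
        String serviceId = "ksql_";                // hypothetical service id
        String persistenceQueryPrefix = "query_";  // hypothetical persistent query prefix
        String queryId = "CSAS_ORDERS_1";          // hypothetical QueryId of the sink

        // Mirrors: applicationId = serviceId + persistanceQueryPrefix + queryId
        String applicationId = serviceId + persistenceQueryPrefix + queryId;
        System.out.println(applicationId);         // ksql_query_CSAS_ORDERS_1
    }
}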

Aggregations

KsqlStream (io.confluent.ksql.metastore.KsqlStream): 11
StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource): 6
KsqlTopic (io.confluent.ksql.metastore.KsqlTopic): 5
KsqlTable (io.confluent.ksql.metastore.KsqlTable): 3
KsqlJsonTopicSerDe (io.confluent.ksql.serde.json.KsqlJsonTopicSerDe): 3
SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder): 3
SelectItem (io.confluent.ksql.parser.tree.SelectItem): 2
SingleColumn (io.confluent.ksql.parser.tree.SingleColumn): 2
Schema (org.apache.kafka.connect.data.Schema): 2
AggregateAnalysis (io.confluent.ksql.analyzer.AggregateAnalysis): 1
Analysis (io.confluent.ksql.analyzer.Analysis): 1
QueryAnalyzer (io.confluent.ksql.analyzer.QueryAnalyzer): 1
FunctionRegistry (io.confluent.ksql.function.FunctionRegistry): 1
MetaStore (io.confluent.ksql.metastore.MetaStore): 1
MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl): 1
ListStreams (io.confluent.ksql.parser.tree.ListStreams): 1
LogicalPlanner (io.confluent.ksql.planner.LogicalPlanner): 1
KsqlStructuredDataOutputNode (io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode): 1
PlanNode (io.confluent.ksql.planner.plan.PlanNode): 1
PlanNodeId (io.confluent.ksql.planner.plan.PlanNodeId): 1