Example 11 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class Analyzer method analyzeNonStdOutSink.

private void analyzeNonStdOutSink() {
    List<Pair<StructuredDataSource, String>> fromDataSources = analysis.getFromDataSources();
    StructuredDataSource intoStructuredDataSource = analysis.getInto();
    String intoKafkaTopicName = analysis.getIntoKafkaTopicName();
    if (intoKafkaTopicName == null) {
        intoKafkaTopicName = intoStructuredDataSource.getName();
    }
    KsqlTopicSerDe intoTopicSerde = fromDataSources.get(0).getLeft().getKsqlTopic().getKsqlTopicSerDe();
    if (analysis.getIntoFormat() != null) {
        switch(analysis.getIntoFormat().toUpperCase()) {
            case DataSource.AVRO_SERDE_NAME:
                intoTopicSerde = new KsqlAvroTopicSerDe();
                break;
            case DataSource.JSON_SERDE_NAME:
                intoTopicSerde = new KsqlJsonTopicSerDe();
                break;
            case DataSource.DELIMITED_SERDE_NAME:
                intoTopicSerde = new KsqlDelimitedTopicSerDe();
                break;
            default:
                throw new KsqlException(String.format("Unsupported format: %s", analysis.getIntoFormat()));
        }
    } else {
        if (intoTopicSerde instanceof KsqlAvroTopicSerDe) {
            intoTopicSerde = new KsqlAvroTopicSerDe();
        }
    }
    KsqlTopic newIntoKsqlTopic = new KsqlTopic(intoKafkaTopicName, intoKafkaTopicName, intoTopicSerde);
    KsqlStream intoKsqlStream = new KsqlStream(sqlExpression, intoStructuredDataSource.getName(), null, null, null, newIntoKsqlTopic);
    analysis.setInto(intoKsqlStream);
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlStream(io.confluent.ksql.metastore.KsqlStream) KsqlDelimitedTopicSerDe(io.confluent.ksql.serde.delimited.KsqlDelimitedTopicSerDe) KsqlAvroTopicSerDe(io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe) KsqlTopicSerDe(io.confluent.ksql.serde.KsqlTopicSerDe) KsqlJsonTopicSerDe(io.confluent.ksql.serde.json.KsqlJsonTopicSerDe) KsqlException(io.confluent.ksql.util.KsqlException) Pair(io.confluent.ksql.util.Pair) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic)
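
The switch above picks the serde for the sink topic from the format named in the query. Below is a minimal standalone sketch of that mapping; the SerdeResolver class and resolveSerde method are illustrative names, not part of ksql, and the DataSource import location is an assumption, while the serde constructors and *_SERDE_NAME constants are used exactly as in the snippet.

import io.confluent.ksql.serde.DataSource; // assumed location in this ksql version
import io.confluent.ksql.serde.KsqlTopicSerDe;
import io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe;
import io.confluent.ksql.serde.delimited.KsqlDelimitedTopicSerDe;
import io.confluent.ksql.serde.json.KsqlJsonTopicSerDe;
import io.confluent.ksql.util.KsqlException;

// Illustrative helper, not part of ksql.
public final class SerdeResolver {

    private SerdeResolver() {
    }

    // Maps a format name (e.g. from a WITH clause) to a topic serde,
    // mirroring the switch in analyzeNonStdOutSink.
    public static KsqlTopicSerDe resolveSerde(final String format) {
        switch (format.toUpperCase()) {
            case DataSource.AVRO_SERDE_NAME:
                return new KsqlAvroTopicSerDe();
            case DataSource.JSON_SERDE_NAME:
                return new KsqlJsonTopicSerDe();
            case DataSource.DELIMITED_SERDE_NAME:
                return new KsqlDelimitedTopicSerDe();
            default:
                throw new KsqlException(String.format("Unsupported format: %s", format));
        }
    }
}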

Example 12 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class Analyzer method visitTable.

@Override
protected Node visitTable(final Table node, final AnalysisContext context) {
    StructuredDataSource into;
    if (node.isStdOut) {
        into = new KsqlStdOut(KsqlStdOut.KSQL_STDOUT_NAME, null, null, null, StructuredDataSource.DataSourceType.KSTREAM);
    } else if (context.getParentType() == AnalysisContext.ParentType.INTO) {
        into = analyzeNonStdOutTable(node);
    } else {
        throw new KsqlException("INTO clause is not set correctly!");
    }
    analysis.setInto(into);
    return null;
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlStdOut(io.confluent.ksql.metastore.KsqlStdOut) KsqlException(io.confluent.ksql.util.KsqlException)
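
For the STDOUT branch, the sketch below builds the same placeholder sink on its own; the wrapper class and method are hypothetical, and the constructor arguments are copied from the snippet above (schema, key field and timestamp field are simply left null).

import io.confluent.ksql.metastore.KsqlStdOut;
import io.confluent.ksql.metastore.StructuredDataSource;

// Illustrative helper, not part of ksql: builds the placeholder sink that
// visitTable uses when the query writes to the console instead of a topic.
public final class StdOutSinkExample {

    public static StructuredDataSource stdOutSink() {
        // Schema, key field and timestamp field are left null, as in the snippet.
        return new KsqlStdOut(
            KsqlStdOut.KSQL_STDOUT_NAME,
            null,
            null,
            null,
            StructuredDataSource.DataSourceType.KSTREAM);
    }
}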

Example 13 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class Analyzer method analyzeNonStdOutTable.

private StructuredDataSource analyzeNonStdOutTable(final Table node) {
    StructuredDataSource into = new KsqlStream(sqlExpression, node.getName().getSuffix(), null, null, null, null);
    setIntoProperties(into, node);
    return into;
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlStream(io.confluent.ksql.metastore.KsqlStream)
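
The stream created here is only a named placeholder: its schema, key field, timestamp field and topic are null, and analyzeNonStdOutSink (Example 11) later supplies the real topic. A minimal sketch of building such a placeholder outside the Analyzer is shown below; the helper class and method names are hypothetical.

import io.confluent.ksql.metastore.KsqlStream;
import io.confluent.ksql.metastore.StructuredDataSource;

// Illustrative helper, not part of ksql: builds the same kind of named
// placeholder that analyzeNonStdOutTable returns. Everything except the
// statement text and the sink name stays null until later analysis fills it in.
public final class SinkPlaceholderExample {

    public static StructuredDataSource sinkPlaceholder(final String sqlExpression,
                                                       final String sinkName) {
        return new KsqlStream(sqlExpression, sinkName, null, null, null, null);
    }
}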

Example 14 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class Analyzer method visitAliasedRelation.

@Override
protected Node visitAliasedRelation(AliasedRelation node, AnalysisContext context) {
    String structuredDataSourceName = ((Table) node.getRelation()).getName().getSuffix();
    if (metaStore.getSource(structuredDataSourceName) == null) {
        throw new KsqlException(structuredDataSourceName + " does not exist.");
    }
    StructuredDataSource structuredDataSource = metaStore.getSource(structuredDataSourceName);
    if (((Table) node.getRelation()).getProperties() != null) {
        if (((Table) node.getRelation()).getProperties().get(DdlConfig.TIMESTAMP_NAME_PROPERTY) != null) {
            String timestampFieldName = ((Table) node.getRelation()).getProperties().get(DdlConfig.TIMESTAMP_NAME_PROPERTY).toString().toUpperCase();
            if (!timestampFieldName.startsWith("'") || !timestampFieldName.endsWith("'")) {
                throw new KsqlException("Property value should be a String enclosed in single quotes.");
            }
            timestampFieldName = timestampFieldName.substring(1, timestampFieldName.length() - 1);
            structuredDataSource = structuredDataSource.cloneWithTimeField(timestampFieldName);
        }
    }
    Pair<StructuredDataSource, String> fromDataSource = new Pair<>(structuredDataSource, node.getAlias());
    analysis.addDataSource(fromDataSource);
    return node;
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) Table(io.confluent.ksql.parser.tree.Table) KsqlException(io.confluent.ksql.util.KsqlException) Pair(io.confluent.ksql.util.Pair)
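
The TIMESTAMP property handling above expects a single-quoted value and strips the quotes before cloning the source with the new time field. The sketch below reproduces that parsing with plain-Java types so it runs on its own; the class, method and exception type are illustrative and not part of ksql.

// Illustrative helper, not part of ksql: mirrors the TIMESTAMP property
// parsing above using only plain Java types.
public final class TimestampPropertyExample {

    public static String parseTimestampField(final Object propertyValue) {
        final String raw = propertyValue.toString().toUpperCase();
        // The value must be enclosed in single quotes, e.g. 'EVENT_TIME'.
        if (raw.length() < 2 || !raw.startsWith("'") || !raw.endsWith("'")) {
            throw new IllegalArgumentException(
                "Property value should be a string enclosed in single quotes.");
        }
        return raw.substring(1, raw.length() - 1);
    }

    public static void main(final String[] args) {
        // Prints EVENT_TIME
        System.out.println(parseTimestampField("'event_time'"));
    }
}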

Example 15 with StructuredDataSource

use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.

the class DropSourceCommand method run.

@Override
public DdlCommandResult run(MetaStore metaStore) {
    StructuredDataSource dataSource = metaStore.getSource(sourceName);
    if (dataSource == null) {
        throw new KsqlException("Source " + sourceName + " does not exist.");
    }
    if (dataSource.getDataSourceType() != dataSourceType) {
        throw new KsqlException(String.format("Incompatible data source type is %s, but statement was DROP %s", dataSource.getDataSourceType() == DataSource.DataSourceType.KSTREAM ? "STREAM" : "TABLE", dataSourceType == DataSource.DataSourceType.KSTREAM ? "STREAM" : "TABLE"));
    }
    DropTopicCommand dropTopicCommand = new DropTopicCommand(dataSource.getKsqlTopic().getTopicName());
    dropTopicCommand.run(metaStore);
    metaStore.deleteSource(sourceName);
    queryTerminator.terminateQueryForEntity(sourceName);
    return new DdlCommandResult(true, "Source " + sourceName + " was dropped");
}
Also used : StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) KsqlException(io.confluent.ksql.util.KsqlException)
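
The error message turns the internal enum back into the STREAM or TABLE keyword a user would type in the DROP statement. The sketch below isolates that small piece of formatting, assuming the DataSourceType enum is reachable through StructuredDataSource as in Example 12; the helper class and method names are hypothetical.

import io.confluent.ksql.metastore.StructuredDataSource;

// Illustrative helper, not part of ksql: formats the data source type as the
// keyword used in DROP STREAM / DROP TABLE, as the error message above does.
public final class DropTypeCheckExample {

    public static String keywordFor(final StructuredDataSource.DataSourceType type) {
        return type == StructuredDataSource.DataSourceType.KSTREAM ? "STREAM" : "TABLE";
    }
}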

Aggregations

StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource) 17
KsqlStream (io.confluent.ksql.metastore.KsqlStream) 8
KsqlException (io.confluent.ksql.util.KsqlException) 7
AliasedRelation (io.confluent.ksql.parser.tree.AliasedRelation) 4
Table (io.confluent.ksql.parser.tree.Table) 4
KsqlTable (io.confluent.ksql.metastore.KsqlTable) 3
KsqlTopic (io.confluent.ksql.metastore.KsqlTopic) 3
StructuredDataSourceNode (io.confluent.ksql.planner.plan.StructuredDataSourceNode) 3
Test (org.junit.Test) 3
CreateTable (io.confluent.ksql.parser.tree.CreateTable) 2
DropTable (io.confluent.ksql.parser.tree.DropTable) 2
SelectItem (io.confluent.ksql.parser.tree.SelectItem) 2
SingleColumn (io.confluent.ksql.parser.tree.SingleColumn) 2
PlanNode (io.confluent.ksql.planner.plan.PlanNode) 2
PlanNodeId (io.confluent.ksql.planner.plan.PlanNodeId) 2
SchemaKTable (io.confluent.ksql.structured.SchemaKTable) 2
Pair (io.confluent.ksql.util.Pair) 2
ArrayList (java.util.ArrayList) 2
Topology (org.apache.kafka.streams.Topology) 2
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) 1