Example 26 with KsqlTopic

use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

the class AnalysisTest method givenNoneWindowedSource.

private static void givenNoneWindowedSource(final KsqlStream<?> dataSource) {
    final KsqlTopic topic = mock(KsqlTopic.class);
    when(topic.getKeyFormat()).thenReturn(KeyFormat.nonWindowed(A_FORMAT, SerdeFeatures.of()));
    when(dataSource.getKsqlTopic()).thenReturn(topic);
}
Also used : KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic)
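
A windowed counterpart to this helper is sometimes needed in the same test. The sketch below is a hypothetical companion (the method name and the mocked KeyFormat are assumptions, not part of AnalysisTest); it follows the same Mockito pattern but stubs a windowed key format instead of building one:

private static void givenWindowedSource(final KsqlStream<?> dataSource) {
    // Hypothetical companion to givenNoneWindowedSource: mock the KeyFormat
    // rather than constructing a real windowed one, to stay version-agnostic.
    final KsqlTopic topic = mock(KsqlTopic.class);
    final KeyFormat windowedKeyFormat = mock(KeyFormat.class);
    when(windowedKeyFormat.isWindowed()).thenReturn(true);
    when(topic.getKeyFormat()).thenReturn(windowedKeyFormat);
    when(dataSource.getKsqlTopic()).thenReturn(topic);
}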

Example 27 with KsqlTopic

use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

the class AnalyzerFunctionalTest method registerKafkaSource.

private void registerKafkaSource() {
    final LogicalSchema schema = LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(COL0, SqlTypes.BIGINT)
        .build();
    final KsqlTopic topic = new KsqlTopic("ks",
        KeyFormat.nonWindowed(FormatInfo.of(FormatFactory.KAFKA.name()), SerdeFeatures.of()),
        ValueFormat.of(FormatInfo.of(FormatFactory.KAFKA.name()), SerdeFeatures.of()));
    final KsqlStream<?> stream = new KsqlStream<>("sqlexpression", SourceName.of("KAFKA_SOURCE"),
        schema, Optional.empty(), false, topic, false);
    jsonMetaStore.putSource(stream, false);
}
Also used : KsqlStream(io.confluent.ksql.metastore.model.KsqlStream) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic)
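
Once registerKafkaSource has run, the stream can be read back from the metastore. A minimal follow-up check, assuming the standard MetaStore.getSource accessor and the Hamcrest matchers already used elsewhere in this test class:

// Illustrative lookup of the source registered above (not part of registerKafkaSource itself).
final DataSource registered = jsonMetaStore.getSource(SourceName.of("KAFKA_SOURCE"));
assertThat(registered.getKsqlTopic().getKafkaTopicName(), is("ks"));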

Example 28 with KsqlTopic

use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

the class AnalyzerFunctionalTest method shouldNotInheritNamespaceExplicitlySetUpstreamForAvro.

@Test
public void shouldNotInheritNamespaceExplicitlySetUpstreamForAvro() {
    final String simpleQuery = "create stream s1 as select * from S0;";
    final MutableMetaStore newAvroMetaStore = avroMetaStore.copy();
    final KsqlTopic ksqlTopic = new KsqlTopic("s0", KeyFormat.nonWindowed(FormatInfo.of(FormatFactory.KAFKA.name()), SerdeFeatures.of()), ValueFormat.of(FormatInfo.of(FormatFactory.AVRO.name(), ImmutableMap.of(ConnectProperties.FULL_SCHEMA_NAME, "org.ac.s1")), SerdeFeatures.of()));
    final LogicalSchema schema = LogicalSchema.builder().keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING).valueColumn(ColumnName.of("FIELD1"), SqlTypes.BIGINT).build();
    final KsqlStream<?> ksqlStream = new KsqlStream<>("create stream s0 with(KAFKA_TOPIC='s0', VALUE_AVRO_SCHEMA_FULL_NAME='org.ac.s1', VALUE_FORMAT='avro');", SourceName.of("S0"), schema, Optional.empty(), false, ksqlTopic, false);
    newAvroMetaStore.putSource(ksqlStream, false);
    final List<Statement> statements = parse(simpleQuery, newAvroMetaStore);
    final CreateStreamAsSelect createStreamAsSelect = (CreateStreamAsSelect) statements.get(0);
    final Query query = createStreamAsSelect.getQuery();
    final Analyzer analyzer = new Analyzer(newAvroMetaStore, "", ROWPARTITION_ROWOFFSET_ENABLED, PULL_LIMIT_CLAUSE_ENABLED);
    final Analysis analysis = analyzer.analyze(query, Optional.of(createStreamAsSelect.getSink()));
    assertThat(analysis.getInto(), is(not(Optional.empty())));
    assertThat(analysis.getInto().get().getNewTopic().get().getValueFormat(), is(FormatInfo.of(FormatFactory.AVRO.name())));
}
Also used : KsqlStream(io.confluent.ksql.metastore.model.KsqlStream) Query(io.confluent.ksql.parser.tree.Query) Statement(io.confluent.ksql.parser.tree.Statement) PreparedStatement(io.confluent.ksql.parser.KsqlParser.PreparedStatement) MutableMetaStore(io.confluent.ksql.metastore.MutableMetaStore) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Matchers.containsString(org.hamcrest.Matchers.containsString) CreateStreamAsSelect(io.confluent.ksql.parser.tree.CreateStreamAsSelect) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) Test(org.junit.Test)
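
The point of the test is that the sink's value format drops the Avro namespace set explicitly on the upstream source. For contrast, a hedged extra assertion one could add (the upstreamFormat local is illustrative and not in the original test):

// The upstream format carries the full schema name; the sink's format above must not.
final FormatInfo upstreamFormat = FormatInfo.of(
    FormatFactory.AVRO.name(),
    ImmutableMap.of(ConnectProperties.FULL_SCHEMA_NAME, "org.ac.s1"));
assertThat(analysis.getInto().get().getNewTopic().get().getValueFormat(), is(not(upstreamFormat)));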

Example 29 with KsqlTopic

use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

the class KsqlResourceTest method givenSource.

private void givenSource(final DataSourceType type, final String sourceName, final String topicName, final LogicalSchema schema, final Set<SourceName> sourceReferences) {
    final KsqlTopic ksqlTopic = new KsqlTopic(topicName,
        KeyFormat.nonWindowed(FormatInfo.of(FormatFactory.KAFKA.name()), SerdeFeatures.of()),
        ValueFormat.of(FormatInfo.of(FormatFactory.JSON.name()), SerdeFeatures.of()));
    givenKafkaTopicExists(topicName);
    final DataSource source;
    switch(type) {
        case KSTREAM:
            source = new KsqlStream<>("statementText", SourceName.of(sourceName), schema, Optional.empty(), false, ksqlTopic, false);
            break;
        case KTABLE:
            source = new KsqlTable<>("statementText", SourceName.of(sourceName), schema, Optional.empty(), false, ksqlTopic, false);
            break;
        default:
            throw new IllegalArgumentException(type.toString());
    }
    metaStore.putSource(source, false);
    metaStore.addSourceReferences(source.getName(), sourceReferences);
}
Also used : KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) DataSource(io.confluent.ksql.metastore.model.DataSource)
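
A typical call site for this helper might look as follows; the source name, topic name, and column are made up for illustration:

// Hypothetical usage of givenSource; "ORDERS", "orders" and the TOTAL column are examples only.
givenSource(
    DataSourceType.KSTREAM, "ORDERS", "orders",
    LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(ColumnName.of("TOTAL"), SqlTypes.BIGINT)
        .build(),
    ImmutableSet.of());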

Example 30 with KsqlTopic

use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

the class QueryBuilder method buildPersistentQueryInDedicatedRuntime.

@SuppressWarnings("ParameterNumber")
PersistentQueryMetadata buildPersistentQueryInDedicatedRuntime(
    final KsqlConfig ksqlConfig,
    final KsqlConstants.PersistentQueryType persistentQueryType,
    final String statementText,
    final QueryId queryId,
    final Optional<DataSource> sinkDataSource,
    final Set<DataSource> sources,
    final ExecutionStep<?> physicalPlan,
    final String planSummary,
    final QueryMetadata.Listener listener,
    final Supplier<List<PersistentQueryMetadata>> allPersistentQueries,
    final StreamsBuilder streamsBuilder,
    final MetricCollectors metricCollectors) {
    final String applicationId = QueryApplicationId.build(ksqlConfig, true, queryId);
    final Map<String, Object> streamsProperties = buildStreamsProperties(applicationId, Optional.of(queryId), metricCollectors, config.getConfig(true), processingLogContext);
    final LogicalSchema logicalSchema;
    final KeyFormat keyFormat;
    final ValueFormat valueFormat;
    final KsqlTopic ksqlTopic;
    switch(persistentQueryType) {
        // CREATE_SOURCE does not have a sink, so the schema is obtained from the query source
        case CREATE_SOURCE:
            final DataSource dataSource = Iterables.getOnlyElement(sources);
            logicalSchema = dataSource.getSchema();
            keyFormat = dataSource.getKsqlTopic().getKeyFormat();
            valueFormat = dataSource.getKsqlTopic().getValueFormat();
            ksqlTopic = dataSource.getKsqlTopic();
            break;
        default:
            logicalSchema = sinkDataSource.get().getSchema();
            keyFormat = sinkDataSource.get().getKsqlTopic().getKeyFormat();
            valueFormat = sinkDataSource.get().getKsqlTopic().getValueFormat();
            ksqlTopic = sinkDataSource.get().getKsqlTopic();
            break;
    }
    final PhysicalSchema querySchema = PhysicalSchema.from(logicalSchema, keyFormat.getFeatures(), valueFormat.getFeatures());
    final RuntimeBuildContext runtimeBuildContext = buildContext(applicationId, queryId, streamsBuilder);
    final Object result = buildQueryImplementation(physicalPlan, runtimeBuildContext);
    final Topology topology = streamsBuilder.build(PropertiesUtil.asProperties(streamsProperties));
    final Optional<MaterializationProviderBuilderFactory.MaterializationProviderBuilder> materializationProviderBuilder =
        getMaterializationInfo(result).map(info ->
            materializationProviderBuilderFactory.materializationProviderBuilder(
                info, querySchema, keyFormat, streamsProperties, applicationId, queryId.toString()));
    final Optional<ScalablePushRegistry> scalablePushRegistry = applyScalablePushProcessor(
        querySchema.logicalSchema(), result, allPersistentQueries, streamsProperties,
        applicationId, ksqlConfig, ksqlTopic, serviceContext);
    return new PersistentQueryMetadataImpl(
        persistentQueryType, statementText, querySchema,
        sources.stream().map(DataSource::getName).collect(Collectors.toSet()),
        sinkDataSource, planSummary, queryId, materializationProviderBuilder,
        applicationId, topology, kafkaStreamsBuilder, runtimeBuildContext.getSchemas(),
        streamsProperties, config.getOverrides(),
        ksqlConfig.getLong(KSQL_SHUTDOWN_TIMEOUT_MS_CONFIG),
        getConfiguredQueryErrorClassifier(ksqlConfig, applicationId), physicalPlan,
        ksqlConfig.getInt(KsqlConfig.KSQL_QUERY_ERROR_MAX_QUEUE_SIZE),
        getUncaughtExceptionProcessingLogger(queryId),
        ksqlConfig.getLong(KsqlConfig.KSQL_QUERY_RETRY_BACKOFF_INITIAL_MS),
        ksqlConfig.getLong(KsqlConfig.KSQL_QUERY_RETRY_BACKOFF_MAX_MS),
        listener, scalablePushRegistry);
}
Also used : ValueFormat(io.confluent.ksql.serde.ValueFormat) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Topology(org.apache.kafka.streams.Topology) NamedTopology(org.apache.kafka.streams.processor.internals.namedtopology.NamedTopology) KeyFormat(io.confluent.ksql.serde.KeyFormat) DataSource(io.confluent.ksql.metastore.model.DataSource) ScalablePushRegistry(io.confluent.ksql.physical.scalablepush.ScalablePushRegistry) RuntimeBuildContext(io.confluent.ksql.execution.runtime.RuntimeBuildContext) PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) BinPackedPersistentQueryMetadataImpl(io.confluent.ksql.util.BinPackedPersistentQueryMetadataImpl) SandboxedBinPackedPersistentQueryMetadataImpl(io.confluent.ksql.util.SandboxedBinPackedPersistentQueryMetadataImpl) PersistentQueryMetadataImpl(io.confluent.ksql.util.PersistentQueryMetadataImpl) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic)
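
The switch above only decides where the schema and serde formats come from: the single query source for CREATE_SOURCE queries, the sink for everything else. A condensed sketch of that resolution, using the same variables as the method above (illustrative only, not a separate helper in QueryBuilder):

final DataSource schemaSource =
    persistentQueryType == KsqlConstants.PersistentQueryType.CREATE_SOURCE
        ? Iterables.getOnlyElement(sources)
        : sinkDataSource.get();
final PhysicalSchema resolved = PhysicalSchema.from(
    schemaSource.getSchema(),
    schemaSource.getKsqlTopic().getKeyFormat().getFeatures(),
    schemaSource.getKsqlTopic().getValueFormat().getFeatures());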

Aggregations

KsqlTopic (io.confluent.ksql.execution.ddl.commands.KsqlTopic): 33 examples
DataSource (io.confluent.ksql.metastore.model.DataSource): 10 examples
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 10 examples
KsqlStream (io.confluent.ksql.metastore.model.KsqlStream): 7 examples
KeyFormat (io.confluent.ksql.serde.KeyFormat): 6 examples
Test (org.junit.Test): 6 examples
MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl): 5 examples
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 5 examples
Before (org.junit.Before): 5 examples
KsqlTable (io.confluent.ksql.metastore.model.KsqlTable): 4 examples
KsqlStructuredDataOutputNode (io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode): 4 examples
Matchers.containsString (org.hamcrest.Matchers.containsString): 4 examples
InternalFunctionRegistry (io.confluent.ksql.function.InternalFunctionRegistry): 3 examples
ValueFormat (io.confluent.ksql.serde.ValueFormat): 3 examples
PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata): 3 examples
ImmutableMap (com.google.common.collect.ImmutableMap): 2 examples
SuppressFBWarnings (edu.umd.cs.findbugs.annotations.SuppressFBWarnings): 2 examples
CreateTableCommand (io.confluent.ksql.execution.ddl.commands.CreateTableCommand): 2 examples
RuntimeBuildContext (io.confluent.ksql.execution.runtime.RuntimeBuildContext): 2 examples
MutableMetaStore (io.confluent.ksql.metastore.MutableMetaStore): 2 examples