Example 26 with DataSource

Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class QueryBuilder, the method buildPersistentQueryInSharedRuntime:

@SuppressWarnings("ParameterNumber")
PersistentQueryMetadata buildPersistentQueryInSharedRuntime(
        final KsqlConfig ksqlConfig,
        final KsqlConstants.PersistentQueryType persistentQueryType,
        final String statementText,
        final QueryId queryId,
        final Optional<DataSource> sinkDataSource,
        final Set<DataSource> sources,
        final ExecutionStep<?> physicalPlan,
        final String planSummary,
        final QueryMetadata.Listener listener,
        final Supplier<List<PersistentQueryMetadata>> allPersistentQueries,
        final String applicationId,
        final MetricCollectors metricCollectors) {
    final SharedKafkaStreamsRuntime sharedKafkaStreamsRuntime = getKafkaStreamsInstance(
        applicationId,
        sources.stream().map(DataSource::getName).collect(Collectors.toSet()),
        queryId,
        metricCollectors);
    final Map<String, Object> queryOverrides = sharedKafkaStreamsRuntime.getStreamProperties();
    final LogicalSchema logicalSchema;
    final KeyFormat keyFormat;
    final ValueFormat valueFormat;
    final KsqlTopic ksqlTopic;
    switch(persistentQueryType) {
        // CREATE_SOURCE does not have a sink, so the schema is obtained from the query source
        case CREATE_SOURCE:
            final DataSource dataSource = Iterables.getOnlyElement(sources);
            logicalSchema = dataSource.getSchema();
            keyFormat = dataSource.getKsqlTopic().getKeyFormat();
            valueFormat = dataSource.getKsqlTopic().getValueFormat();
            ksqlTopic = dataSource.getKsqlTopic();
            break;
        default:
            logicalSchema = sinkDataSource.get().getSchema();
            keyFormat = sinkDataSource.get().getKsqlTopic().getKeyFormat();
            valueFormat = sinkDataSource.get().getKsqlTopic().getValueFormat();
            ksqlTopic = sinkDataSource.get().getKsqlTopic();
            break;
    }
    final PhysicalSchema querySchema = PhysicalSchema.from(logicalSchema, keyFormat.getFeatures(), valueFormat.getFeatures());
    final NamedTopologyBuilder namedTopologyBuilder = sharedKafkaStreamsRuntime.getKafkaStreams()
        .newNamedTopologyBuilder(queryId.toString(), PropertiesUtil.asProperties(queryOverrides));
    final RuntimeBuildContext runtimeBuildContext = buildContext(applicationId, queryId, namedTopologyBuilder);
    final Object result = buildQueryImplementation(physicalPlan, runtimeBuildContext);
    final NamedTopology topology = namedTopologyBuilder.build();
    final Optional<MaterializationProviderBuilderFactory.MaterializationProviderBuilder> materializationProviderBuilder =
        getMaterializationInfo(result).map(info ->
            materializationProviderBuilderFactory.materializationProviderBuilder(
                info, querySchema, keyFormat, queryOverrides, applicationId, queryId.toString()));
    final Optional<ScalablePushRegistry> scalablePushRegistry = applyScalablePushProcessor(
        querySchema.logicalSchema(), result, allPersistentQueries, queryOverrides,
        applicationId, ksqlConfig, ksqlTopic, serviceContext);
    final BinPackedPersistentQueryMetadataImpl binPackedPersistentQueryMetadata = new BinPackedPersistentQueryMetadataImpl(
        persistentQueryType,
        statementText,
        querySchema,
        sources.stream().map(DataSource::getName).collect(Collectors.toSet()),
        planSummary,
        applicationId,
        topology,
        sharedKafkaStreamsRuntime,
        runtimeBuildContext.getSchemas(),
        config.getOverrides(),
        queryId,
        materializationProviderBuilder,
        physicalPlan,
        getUncaughtExceptionProcessingLogger(queryId),
        sinkDataSource,
        listener,
        queryOverrides,
        scalablePushRegistry,
        streamsRuntime -> getNamedTopology(streamsRuntime, queryId, applicationId, queryOverrides, physicalPlan));
    // 'real' is a QueryBuilder field: the engine's sandbox (used to validate
    // statements before execution) receives a sandboxed wrapper instead of
    // the real query metadata.
    if (real) {
        return binPackedPersistentQueryMetadata;
    } else {
        return SandboxedBinPackedPersistentQueryMetadataImpl.of(binPackedPersistentQueryMetadata, listener);
    }
}
Also used: io.confluent.ksql.serde.ValueFormat, io.confluent.ksql.schema.ksql.LogicalSchema, io.confluent.ksql.serde.KeyFormat, io.confluent.ksql.metastore.model.DataSource, io.confluent.ksql.physical.scalablepush.ScalablePushRegistry, io.confluent.ksql.util.SharedKafkaStreamsRuntime, io.confluent.ksql.execution.runtime.RuntimeBuildContext, io.confluent.ksql.schema.ksql.PhysicalSchema, org.apache.kafka.streams.processor.internals.namedtopology.NamedTopologyBuilder, org.apache.kafka.streams.processor.internals.namedtopology.NamedTopology, io.confluent.ksql.execution.ddl.commands.KsqlTopic, io.confluent.ksql.util.BinPackedPersistentQueryMetadataImpl, io.confluent.ksql.util.SandboxedBinPackedPersistentQueryMetadataImpl
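
Worth noting: the CREATE_SOURCE branch above leans on Guava's Iterables.getOnlyElement, which is safe only because a CREATE_SOURCE query reads from exactly one source. A minimal, self-contained sketch of that contract (class name and set contents are illustrative, not from ksql):

import com.google.common.collect.Iterables;
import java.util.Set;

public class OnlyElementSketch {
    public static void main(String[] args) {
        // A CREATE_SOURCE query has a single upstream source, so taking
        // the only element of the source set cannot be ambiguous.
        Set<String> sources = Set.of("pageviews");
        System.out.println(Iterables.getOnlyElement(sources)); // pageviews

        // An empty set makes getOnlyElement throw NoSuchElementException, and a
        // multi-element set makes it throw IllegalArgumentException, so a
        // planner bug would surface immediately rather than silently.
    }
}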

Example 27 with DataSource

Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class KsqlAuthorizationValidatorImpl, the method getCreateAsSelectSinkTopic:

private KsqlTopic getCreateAsSelectSinkTopic(final MetaStore metaStore, final CreateAsSelect createAsSelect) {
    final CreateSourceAsProperties properties = createAsSelect.getProperties();
    final String sinkTopicName;
    final KeyFormat sinkKeyFormat;
    final ValueFormat sinkValueFormat;
    if (!properties.getKafkaTopic().isPresent()) {
        final DataSource dataSource = metaStore.getSource(createAsSelect.getName());
        if (dataSource != null) {
            sinkTopicName = dataSource.getKafkaTopicName();
            sinkKeyFormat = dataSource.getKsqlTopic().getKeyFormat();
            sinkValueFormat = dataSource.getKsqlTopic().getValueFormat();
        } else {
            throw new KsqlException("Cannot validate for topic access from an unknown stream/table: " + createAsSelect.getName());
        }
    } else {
        sinkTopicName = properties.getKafkaTopic().get();
        // If no format is specified for the sink topic, then use the format from the primary
        // source topic.
        final SourceTopicsExtractor extractor = new SourceTopicsExtractor(metaStore);
        extractor.process(createAsSelect.getQuery(), null);
        final KsqlTopic primaryKsqlTopic = extractor.getPrimarySourceTopic();
        final Optional<Format> keyFormat = properties.getKeyFormat()
            .map(formatName -> FormatFactory.fromName(formatName));
        final Optional<Format> valueFormat = properties.getValueFormat()
            .map(formatName -> FormatFactory.fromName(formatName));
        sinkKeyFormat = keyFormat
            .map(format -> KeyFormat.of(
                FormatInfo.of(format.name()),
                format.supportsFeature(SerdeFeature.SCHEMA_INFERENCE)
                    ? SerdeFeatures.of(SerdeFeature.SCHEMA_INFERENCE)
                    : SerdeFeatures.of(),
                Optional.empty()))
            .orElse(primaryKsqlTopic.getKeyFormat());
        sinkValueFormat = valueFormat
            .map(format -> ValueFormat.of(
                FormatInfo.of(format.name()),
                format.supportsFeature(SerdeFeature.SCHEMA_INFERENCE)
                    ? SerdeFeatures.of(SerdeFeature.SCHEMA_INFERENCE)
                    : SerdeFeatures.of()))
            .orElse(primaryKsqlTopic.getValueFormat());
    }
    return new KsqlTopic(sinkTopicName, sinkKeyFormat, sinkValueFormat);
}
Also used: io.confluent.ksql.serde.ValueFormat, io.confluent.ksql.serde.KeyFormat, io.confluent.ksql.serde.Format, io.confluent.ksql.parser.properties.with.CreateSourceAsProperties, io.confluent.ksql.topic.SourceTopicsExtractor, io.confluent.ksql.util.KsqlException, io.confluent.ksql.metastore.model.DataSource, io.confluent.ksql.execution.ddl.commands.KsqlTopic
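
The format resolution above is an Optional.map(...).orElse(...) fallback: a KEY_FORMAT or VALUE_FORMAT declared in the WITH clause wins, otherwise the primary source topic's format is reused. One subtlety: orElse evaluates its argument eagerly, so primaryKsqlTopic is dereferenced even when a format was declared. A minimal sketch of the pattern with plain strings (all names here are illustrative):

import java.util.Optional;

public class FormatFallbackSketch {
    public static void main(String[] args) {
        // No format declared in the WITH clause: fall back to the source's format.
        Optional<String> declared = Optional.empty();
        String primarySourceFormat = "KAFKA";
        System.out.println(declared.orElse(primarySourceFormat)); // KAFKA

        // orElse(x) computes x even when the Optional is present; prefer
        // orElseGet(supplier) when the fallback is costly or can fail.
        Optional<String> declaredJson = Optional.of("JSON");
        System.out.println(declaredJson.orElseGet(() -> primarySourceFormat)); // JSON
    }
}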

Example 28 with DataSource

Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class TopicDeleteInjectorTest, the method shouldThrowIfTopicDoesNotExist:

@Test
public void shouldThrowIfTopicDoesNotExist() {
    // Given:
    final SourceName STREAM_1 = SourceName.of("stream1");
    final DataSource other1 = givenSource(STREAM_1, "topicName");
    when(metaStore.getSource(STREAM_1)).thenAnswer(inv -> other1);
    when(other1.getKafkaTopicName()).thenReturn("topicName");
    final ConfiguredStatement<DropStream> dropStatement = givenStatement(
        "DROP stream1 DELETE TOPIC;",
        new DropStream(SourceName.of("stream1"), true, true));
    doThrow(RuntimeException.class).when(topicClient).deleteTopics(ImmutableList.of("topicName"));
    // When:
    final Exception e = assertThrows(RuntimeException.class, () -> deleteInjector.inject(dropStatement));
    // Then:
    assertThat(e.getMessage(), containsString(
        "Could not delete the corresponding kafka topic: topicName"));
}
Also used: io.confluent.ksql.name.SourceName, io.confluent.ksql.parser.tree.DropStream, io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException, java.io.IOException, io.confluent.ksql.util.KsqlException, io.confluent.ksql.metastore.model.DataSource, org.junit.Test
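
The test drives the failure path by stubbing the topic client to throw and then asserting on the propagated exception. The same doThrow-then-assertThrows shape, reduced to a self-contained sketch (the TopicClient interface here is a hypothetical stand-in for ksql's, and assertThrows requires JUnit 4.13+):

import static org.junit.Assert.assertThrows;

import java.util.List;

public class AssertThrowsSketch {
    // Hypothetical stand-in for the mocked topic client in the test above.
    interface TopicClient {
        void deleteTopics(List<String> topics);
    }

    public static void main(String[] args) {
        TopicClient failing = topics -> {
            throw new RuntimeException("Could not delete the corresponding kafka topic: topicName");
        };
        // assertThrows returns the thrown exception so its message can be checked.
        RuntimeException e = assertThrows(
            RuntimeException.class,
            () -> failing.deleteTopics(List.of("topicName")));
        System.out.println(e.getMessage());
    }
}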

Example 29 with DataSource

Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class TopicDeleteInjectorTest, the method givenSource:

private static DataSource givenSource(final SourceName name, final String topicName) {
    final DataSource source = mock(DataSource.class);
    when(source.getName()).thenReturn(name);
    when(source.getKafkaTopicName()).thenReturn(topicName);
    return source;
}
Also used: io.confluent.ksql.metastore.model.DataSource
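
givenSource is the usual Mockito "given" helper: build a stub once and reuse it across tests. A minimal version of the same pattern against a hypothetical two-method interface (mocking the real DataSource would drag in much of the ksql model):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class GivenSourceSketch {
    // Hypothetical slice of the DataSource surface that the tests touch.
    interface Source {
        String getName();
        String getKafkaTopicName();
    }

    static Source givenSource(String name, String topicName) {
        Source source = mock(Source.class);
        when(source.getName()).thenReturn(name);
        when(source.getKafkaTopicName()).thenReturn(topicName);
        return source;
    }

    public static void main(String[] args) {
        Source s = givenSource("stream1", "topicName");
        System.out.println(s.getName() + " -> " + s.getKafkaTopicName());
    }
}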

Example 30 with DataSource

Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class MetaStoreImpl, the method putSource:

@Override
public void putSource(final DataSource dataSource, final boolean allowReplace) {
    final SourceInfo existing = dataSources.get(dataSource.getName());
    if (existing != null && !allowReplace) {
        final SourceName name = dataSource.getName();
        final String newType = dataSource.getDataSourceType().getKsqlType().toLowerCase();
        final String existingType = existing.source.getDataSourceType().getKsqlType().toLowerCase();
        throw new KsqlException(String.format(
            "Cannot add %s '%s': A %s with the same name already exists",
            newType, name.text(), existingType));
    } else if (existing != null) {
        existing.source.canUpgradeTo(dataSource).ifPresent(msg -> {
            throw new KsqlException("Cannot upgrade data source: " + msg);
        });
    }
    // If the source already exists (possibly with schema changes), replace it with a
    // copy of the previous source info updated to the new data source; otherwise
    // register a fresh SourceInfo.
    dataSources.put(
        dataSource.getName(),
        (existing != null) ? existing.copyWith(dataSource) : new SourceInfo(dataSource));
    LOG.info("Source {} created on the metastore", dataSource.getName().text());
    // Re-build the DROP constraints if existing sources have references to this new source.
    // This logic makes sure that drop constraints are set back if sources were deleted during
    // the metastore restoration (See deleteSource()).
    dataSources.forEach((name, info) -> {
        info.references.forEach(ref -> {
            if (ref.equals(dataSource.getName())) {
                LOG.debug("Add a drop constraint reference back to source '{}' from source '{}'", dataSource.getName().text(), name.text());
                addConstraint(dataSource.getName(), name);
            }
        });
    });
}
Also used: io.confluent.ksql.metastore.model.DataSource, com.google.common.collect.Iterables, io.confluent.ksql.function.AggregateFunctionInitArguments, io.confluent.ksql.function.UdfFactory, io.confluent.ksql.name.SourceName, org.slf4j.LoggerFactory, io.confluent.ksql.util.KsqlReferentialIntegrityException, io.confluent.ksql.function.KsqlTableFunction, java.util.Map, io.confluent.ksql.function.KsqlAggregateFunction, io.confluent.ksql.schema.ksql.types.SqlType, org.slf4j.Logger, java.util.Iterator, io.confluent.ksql.name.FunctionName, io.vertx.core.impl.ConcurrentHashSet, io.confluent.ksql.function.FunctionRegistry, io.confluent.ksql.function.TableFunctionFactory, io.confluent.ksql.schema.ksql.SqlArgument, java.util.concurrent.ConcurrentHashMap, java.util.Set, javax.annotation.concurrent.ThreadSafe, java.util.stream.Collectors, java.util.Objects, java.util.List, java.util.stream.Stream, io.confluent.ksql.util.KsqlException, java.util.Optional, io.confluent.ksql.function.AggregateFunctionFactory, java.util.Collections
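
putSource boils down to add-or-replace over a concurrent map: a duplicate name is rejected unless allowReplace is set (the CREATE OR REPLACE path), and replacements must pass the canUpgradeTo check. A toy model of just the map semantics, with strings standing in for sources (all names and messages illustrative):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class PutSourceSketch {
    static final Map<String, String> SOURCES = new ConcurrentHashMap<>();

    static void putSource(String name, String source, boolean allowReplace) {
        String existing = SOURCES.get(name);
        if (existing != null && !allowReplace) {
            throw new IllegalStateException(
                "Cannot add '" + name + "': a source with the same name already exists");
        }
        // The real implementation keeps a copy of the previous SourceInfo here;
        // this sketch just overwrites the entry.
        SOURCES.put(name, source);
    }

    public static void main(String[] args) {
        putSource("pageviews", "v1", false);
        putSource("pageviews", "v2", true);  // CREATE OR REPLACE path succeeds
        System.out.println(SOURCES.get("pageviews")); // v2
        putSource("pageviews", "v3", false); // throws IllegalStateException
    }
}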

Aggregations

DataSource (io.confluent.ksql.metastore.model.DataSource): 70 usages
Test (org.junit.Test): 25 usages
KsqlException (io.confluent.ksql.util.KsqlException): 24 usages
SourceName (io.confluent.ksql.name.SourceName): 21 usages
KsqlTopic (io.confluent.ksql.execution.ddl.commands.KsqlTopic): 12 usages
ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement): 12 usages
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 10 usages
MetricCollectors (io.confluent.ksql.metrics.MetricCollectors): 9 usages
Collectors (java.util.stream.Collectors): 9 usages
PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata): 8 usages
PreparedStatement (io.confluent.ksql.parser.KsqlParser.PreparedStatement): 7 usages
KsqlStatementException (io.confluent.ksql.util.KsqlStatementException): 7 usages
Optional (java.util.Optional): 7 usages
ImmutableList (com.google.common.collect.ImmutableList): 6 usages
GenericKey (io.confluent.ksql.GenericKey): 6 usages
QueryId (io.confluent.ksql.query.QueryId): 6 usages
ServiceContext (io.confluent.ksql.services.ServiceContext): 6 usages
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 6 usages
Collections (java.util.Collections): 6 usages
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 6 usages