
Example 6 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

In class ListSourceExecutor, the method describeSource:

private static SourceDescriptionWithWarnings describeSource(
    final KsqlConfig ksqlConfig,
    final KsqlExecutionContext ksqlExecutionContext,
    final ServiceContext serviceContext,
    final SourceName name,
    final boolean extended,
    final ConfiguredStatement<? extends StatementWithExtendedClause> statement,
    final SessionProperties sessionProperties,
    final Collection<SourceDescription> remoteSourceDescriptions) {
    final DataSource dataSource = ksqlExecutionContext.getMetaStore().getSource(name);
    if (dataSource == null) {
        throw new KsqlStatementException(String.format("Could not find STREAM/TABLE '%s' in the Metastore", name.text()), statement.getStatementText());
    }
    final List<RunningQuery> readQueries = getQueries(ksqlExecutionContext, q -> q.getSourceNames().contains(dataSource.getName()));
    final List<RunningQuery> writeQueries = getQueries(ksqlExecutionContext, q -> q.getSinkName().equals(Optional.of(dataSource.getName())));
    Optional<TopicDescription> topicDescription = Optional.empty();
    List<QueryOffsetSummary> queryOffsetSummaries = Collections.emptyList();
    List<String> sourceConstraints = Collections.emptyList();
    final List<KsqlWarning> warnings = new LinkedList<>();
    try {
        topicDescription = Optional.of(serviceContext.getTopicClient().describeTopic(dataSource.getKafkaTopicName()));
        sourceConstraints = getSourceConstraints(name, ksqlExecutionContext.getMetaStore());
    } catch (final KafkaException | KafkaResponseGetFailedException e) {
        warnings.add(new KsqlWarning("Error from Kafka: " + e.getMessage()));
    }
    if (extended) {
        queryOffsetSummaries = queryOffsetSummaries(ksqlConfig, serviceContext, writeQueries);
        return new SourceDescriptionWithWarnings(
            warnings,
            SourceDescriptionFactory.create(
                dataSource,
                extended,
                readQueries,
                writeQueries,
                topicDescription,
                queryOffsetSummaries,
                sourceConstraints,
                remoteSourceDescriptions.stream().flatMap(sd -> sd.getClusterStatistics().stream()),
                remoteSourceDescriptions.stream().flatMap(sd -> sd.getClusterErrorStats().stream()),
                sessionProperties.getKsqlHostInfo(),
                ksqlExecutionContext.metricCollectors()));
    }
    return new SourceDescriptionWithWarnings(
        warnings,
        SourceDescriptionFactory.create(
            dataSource,
            extended,
            readQueries,
            writeQueries,
            topicDescription,
            queryOffsetSummaries,
            sourceConstraints,
            java.util.stream.Stream.empty(),
            java.util.stream.Stream.empty(),
            sessionProperties.getKsqlHostInfo(),
            ksqlExecutionContext.metricCollectors()));
}
Also used : KsqlWarning(io.confluent.ksql.rest.entity.KsqlWarning) LinkedList(java.util.LinkedList) DataSource(io.confluent.ksql.metastore.model.DataSource) RunningQuery(io.confluent.ksql.rest.entity.RunningQuery) QueryOffsetSummary(io.confluent.ksql.rest.entity.QueryOffsetSummary) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) KsqlStatementException(io.confluent.ksql.util.KsqlStatementException) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) KafkaException(org.apache.kafka.common.KafkaException)
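
The lookup at the top of describeSource, resolving a SourceName against the metastore and failing fast when no source is registered, is the same pattern Examples 9 and 10 below rely on. The following is a minimal standalone sketch of that pattern; the MetaStore and DataSource interfaces here are simplified stand-ins for illustration, not the real ksql classes.

// Minimal sketch of the lookup-then-fail pattern; these interfaces are simplified
// stand-ins for io.confluent.ksql.metastore.MetaStore and
// io.confluent.ksql.metastore.model.DataSource, not the real classes.
import java.util.HashMap;
import java.util.Map;

final class SourceLookupSketch {

    interface DataSource {
        String getKafkaTopicName();
    }

    interface MetaStore {
        DataSource getSource(String name);
    }

    // Mirrors the null check in describeSource: a missing source is a statement error.
    static DataSource getSourceOrThrow(final MetaStore metaStore, final String name) {
        final DataSource dataSource = metaStore.getSource(name);
        if (dataSource == null) {
            throw new IllegalArgumentException(
                String.format("Could not find STREAM/TABLE '%s' in the Metastore", name));
        }
        return dataSource;
    }

    public static void main(final String[] args) {
        final Map<String, DataSource> registered = new HashMap<>();
        registered.put("ORDERS", () -> "orders-topic");
        final MetaStore metaStore = registered::get;

        // Prints "orders-topic"; looking up an unknown name would throw instead.
        System.out.println(getSourceOrThrow(metaStore, "ORDERS").getKafkaTopicName());
    }
}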

Example 7 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

In class ClusterTerminatorTest, the method givenSourceRegisteredWithTopic:

private void givenSourceRegisteredWithTopic(final Format format, final String kafkaTopicName, final boolean sink) {
    final String sourceName = "SOURCE_" + kafkaTopicName;
    final KsqlTopic topic = mock(KsqlTopic.class);
    when(topic.getKafkaTopicName()).thenReturn(kafkaTopicName);
    when(topic.getKeyFormat()).thenReturn(KeyFormat.of(FormatInfo.of(format.name()), SerdeFeatures.of(), Optional.empty()));
    when(topic.getValueFormat()).thenReturn(ValueFormat.of(FormatInfo.of(format.name()), SerdeFeatures.of()));
    final DataSource source = mock(DataSource.class);
    when(source.getKafkaTopicName()).thenReturn(kafkaTopicName);
    when(source.getKsqlTopic()).thenReturn(topic);
    when(source.isCasTarget()).thenReturn(sink);
    assertThat("topic already registered", dataSources.put(SourceName.of(sourceName), source), is(nullValue()));
}
Also used : Matchers.containsString(org.hamcrest.Matchers.containsString) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) DataSource(io.confluent.ksql.metastore.model.DataSource)

Example 8 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

In class EngineExecutor, the method maybeCreateSinkDdl:

private Optional<DdlCommand> maybeCreateSinkDdl(final ConfiguredStatement<?> cfgStatement, final KsqlStructuredDataOutputNode outputNode) {
    if (!outputNode.createInto()) {
        validateExistingSink(outputNode);
        return Optional.empty();
    }
    final Statement statement = cfgStatement.getStatement();
    final SourceName intoSource = outputNode.getSinkName().get();
    final boolean orReplace = statement instanceof CreateAsSelect && ((CreateAsSelect) statement).isOrReplace();
    final boolean ifNotExists = statement instanceof CreateAsSelect && ((CreateAsSelect) statement).isNotExists();
    final DataSource dataSource = engineContext.getMetaStore().getSource(intoSource);
    if (dataSource != null && !ifNotExists && !orReplace) {
        final String failedSourceType = outputNode.getNodeOutputType().getKsqlType();
        final String foundSourceType = dataSource.getDataSourceType().getKsqlType();
        throw new KsqlException(String.format("Cannot add %s '%s': A %s with the same name already exists", failedSourceType.toLowerCase(), intoSource.text(), foundSourceType.toLowerCase()));
    }
    return Optional.of(engineContext.createDdlCommand(outputNode));
}
Also used : ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) ExecutableDdlStatement(io.confluent.ksql.parser.tree.ExecutableDdlStatement) Statement(io.confluent.ksql.parser.tree.Statement) SourceName(io.confluent.ksql.name.SourceName) CreateAsSelect(io.confluent.ksql.parser.tree.CreateAsSelect) KsqlException(io.confluent.ksql.util.KsqlException) DataSource(io.confluent.ksql.metastore.model.DataSource)
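
The guard in maybeCreateSinkDdl only rejects a CREATE ... AS SELECT when the target name is already registered and the statement carries neither OR REPLACE nor IF NOT EXISTS. Below is a distilled standalone sketch of that decision; the boolean flags are simplified stand-ins for the CreateAsSelect#isOrReplace() and #isNotExists() checks above, not the real ksql parser or metastore types.

// Sketch of the name-collision rule from maybeCreateSinkDdl.
final class SinkCollisionSketch {

    static void checkSinkName(
        final boolean sourceAlreadyExists,
        final boolean orReplace,
        final boolean ifNotExists) {
        if (sourceAlreadyExists && !ifNotExists && !orReplace) {
            throw new IllegalStateException(
                "Cannot add sink: a source with the same name already exists");
        }
    }

    public static void main(final String[] args) {
        checkSinkName(false, false, false); // new name: allowed
        checkSinkName(true, true, false);   // CREATE OR REPLACE over an existing name: allowed
        checkSinkName(true, false, true);   // IF NOT EXISTS over an existing name: allowed (later becomes a no-op)
        try {
            checkSinkName(true, false, false); // plain CREATE over an existing name: rejected
        } catch (final IllegalStateException e) {
            System.out.println("rejected as expected: " + e.getMessage());
        }
    }
}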

Example 9 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

In class EngineExecutor, the method getSources:

private Set<DataSource> getSources(final QueryPlan queryPlan) {
    final ImmutableSet.Builder<DataSource> sources = ImmutableSet.builder();
    for (final SourceName name : queryPlan.getSources()) {
        final DataSource dataSource = engineContext.getMetaStore().getSource(name);
        if (dataSource == null) {
            throw new KsqlException("Unknown source: " + name.toString(FormatOptions.noEscape()));
        }
        sources.add(dataSource);
    }
    return sources.build();
}
Also used : ImmutableSet(com.google.common.collect.ImmutableSet) SourceName(io.confluent.ksql.name.SourceName) KsqlException(io.confluent.ksql.util.KsqlException) DataSource(io.confluent.ksql.metastore.model.DataSource)
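
getSources resolves every SourceName in the query plan and collects the results into an immutable set, failing on the first unknown name. The same logic can also be written as a stream pipeline; the sketch below uses plain strings as stand-ins for SourceName and DataSource, and assumes Guava (already used above for ImmutableSet) is on the classpath.

// Stream-based equivalent of getSources, with plain strings standing in for the ksql types.
import com.google.common.collect.ImmutableSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

final class ResolveSourcesSketch {

    static Set<String> resolveSources(final Map<String, String> metaStore, final List<String> names) {
        return names.stream()
            .map(name -> {
                final String source = metaStore.get(name);
                if (source == null) {
                    // Mirrors the KsqlException thrown for an unknown source above.
                    throw new IllegalArgumentException("Unknown source: " + name);
                }
                return source;
            })
            .collect(ImmutableSet.toImmutableSet());
    }

    public static void main(final String[] args) {
        final Map<String, String> metaStore = Map.of("ORDERS", "orders-topic", "USERS", "users-topic");
        // Prints [orders-topic, users-topic]; resolving an unregistered name would throw.
        System.out.println(resolveSources(metaStore, List.of("ORDERS", "USERS")));
    }
}

Both forms preserve encounter order and produce an immutable result; the explicit ImmutableSet.Builder loop in the original is simply the non-stream spelling of the same resolution.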

Example 10 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

In class EngineExecutor, the method execute:

ExecuteResult execute(final KsqlPlan plan, final boolean restoreInProgress) {
    if (!plan.getQueryPlan().isPresent()) {
        final String ddlResult = plan.getDdlCommand()
            .map(ddl -> executeDdl(ddl, plan.getStatementText(), false, Collections.emptySet(), restoreInProgress))
            .orElseThrow(() -> new IllegalStateException("DdlResult should be present if there is no physical plan."));
        return ExecuteResult.of(ddlResult);
    }
    final QueryPlan queryPlan = plan.getQueryPlan().get();
    final KsqlConstants.PersistentQueryType persistentQueryType = plan.getPersistentQueryType().get();
    // The read-only sink check below applies only to queries
    // that attempt to write to a sink (i.e. INSERT or CREATE_AS).
    if (persistentQueryType != KsqlConstants.PersistentQueryType.CREATE_SOURCE) {
        final DataSource sinkSource = engineContext.getMetaStore().getSource(queryPlan.getSink().get());
        if (sinkSource != null && sinkSource.isSource()) {
            throw new KsqlException(String.format("Cannot insert into read-only %s: %s", sinkSource.getDataSourceType().getKsqlType().toLowerCase(), sinkSource.getName().text()));
        }
    }
    final Optional<String> ddlResult = plan.getDdlCommand().map(ddl -> executeDdl(ddl, plan.getStatementText(), true, queryPlan.getSources(), restoreInProgress));
    // Return if the source to create already exists.
    if (ddlResult.isPresent() && ddlResult.get().contains("already exists")) {
        return ExecuteResult.of(ddlResult.get());
    }
    // If this is a source table query and source table materialization is disabled,
    // only the DDL command must be executed; the query itself is skipped.
    if (persistentQueryType == KsqlConstants.PersistentQueryType.CREATE_SOURCE && !isSourceTableMaterializationEnabled()) {
        LOG.info(String.format("Source table query '%s' won't be materialized because '%s' is disabled.", plan.getStatementText(), KsqlConfig.KSQL_SOURCE_TABLE_MATERIALIZATION_ENABLED));
        return ExecuteResult.of(ddlResult.get());
    }
    return ExecuteResult.of(executePersistentQuery(queryPlan, plan.getStatementText(), persistentQueryType));
}
Also used : DataSource(io.confluent.ksql.metastore.model.DataSource) PushPhysicalPlanCreator(io.confluent.ksql.physical.scalablepush.PushPhysicalPlanCreator) CreateTableAsSelect(io.confluent.ksql.parser.tree.CreateTableAsSelect) Arrays(java.util.Arrays) InternalFunctionRegistry(io.confluent.ksql.function.InternalFunctionRegistry) SourceName(io.confluent.ksql.name.SourceName) RoutingOptions(io.confluent.ksql.execution.streams.RoutingOptions) PushPhysicalPlanManager(io.confluent.ksql.physical.scalablepush.PushPhysicalPlanManager) PushPhysicalPlanBuilder(io.confluent.ksql.physical.scalablepush.PushPhysicalPlanBuilder) RoutingNodeType(io.confluent.ksql.util.KsqlConstants.RoutingNodeType) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) ExecuteResult(io.confluent.ksql.KsqlExecutionContext.ExecuteResult) Map(java.util.Map) KsqlBareOutputNode(io.confluent.ksql.planner.plan.KsqlBareOutputNode) QueryId(io.confluent.ksql.query.QueryId) ExecutionStep(io.confluent.ksql.execution.plan.ExecutionStep) RefinementInfo(io.confluent.ksql.serde.RefinementInfo) ImmutableAnalysis(io.confluent.ksql.analyzer.ImmutableAnalysis) Sink(io.confluent.ksql.parser.tree.Sink) Set(java.util.Set) Relation(io.confluent.ksql.parser.tree.Relation) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) MetaStoreImpl(io.confluent.ksql.metastore.MetaStoreImpl) KsqlException(io.confluent.ksql.util.KsqlException) TransientQueryQueue(io.confluent.ksql.query.TransientQueryQueue) PullQueryResult(io.confluent.ksql.physical.pull.PullQueryResult) Iterables(com.google.common.collect.Iterables) FormatOptions(io.confluent.ksql.schema.utils.FormatOptions) PushRouting(io.confluent.ksql.physical.scalablepush.PushRouting) UnqualifiedColumnReferenceExp(io.confluent.ksql.execution.expression.tree.UnqualifiedColumnReferenceExp) CreateStreamAsSelect(io.confluent.ksql.parser.tree.CreateStreamAsSelect) SessionConfig(io.confluent.ksql.config.SessionConfig) CreateStream(io.confluent.ksql.parser.tree.CreateStream) SingleColumn(io.confluent.ksql.parser.tree.SingleColumn) MetaStore(io.confluent.ksql.metastore.MetaStore) KsqlStructuredDataOutputNode(io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) PushRoutingOptions(io.confluent.ksql.physical.scalablepush.PushRoutingOptions) PlanInfoExtractor(io.confluent.ksql.execution.plan.PlanInfoExtractor) DataSourceNode(io.confluent.ksql.planner.plan.DataSourceNode) QueryContainer(io.confluent.ksql.parser.tree.QueryContainer) OutputNode(io.confluent.ksql.planner.plan.OutputNode) Throwables(com.google.common.base.Throwables) PushQueryMetadata(io.confluent.ksql.util.PushQueryMetadata) PushQueryQueuePopulator(io.confluent.ksql.physical.scalablepush.PushQueryQueuePopulator) ValueFormat(io.confluent.ksql.serde.ValueFormat) Table(io.confluent.ksql.parser.tree.Table) KsqlStatementException(io.confluent.ksql.util.KsqlStatementException) CreateAsSelect(io.confluent.ksql.parser.tree.CreateAsSelect) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) OutputRefinement(io.confluent.ksql.parser.OutputRefinement) LogicalPlanNode(io.confluent.ksql.planner.LogicalPlanNode) Query(io.confluent.ksql.parser.tree.Query) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) AliasedRelation(io.confluent.ksql.parser.tree.AliasedRelation) Formats(io.confluent.ksql.execution.plan.Formats) MutableMetaStore(io.confluent.ksql.metastore.MutableMetaStore) Context(io.vertx.core.Context) CreateTable(io.confluent.ksql.parser.tree.CreateTable) 
Locale(java.util.Locale) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) KsqlTable(io.confluent.ksql.metastore.model.KsqlTable) TopicPartition(org.apache.kafka.common.TopicPartition) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Collection(java.util.Collection) ScalablePushQueryMetadata(io.confluent.ksql.util.ScalablePushQueryMetadata) ScalablePushQueryMetrics(io.confluent.ksql.internal.ScalablePushQueryMetrics) KsqlConfig(io.confluent.ksql.util.KsqlConfig) ExecutableDdlStatement(io.confluent.ksql.parser.tree.ExecutableDdlStatement) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Collectors(java.util.stream.Collectors) DdlCommand(io.confluent.ksql.execution.ddl.commands.DdlCommand) Objects(java.util.Objects) PullQueryExecutorMetrics(io.confluent.ksql.internal.PullQueryExecutorMetrics) QueryPlannerOptions(io.confluent.ksql.planner.QueryPlannerOptions) ConsistencyOffsetVector(io.confluent.ksql.util.ConsistencyOffsetVector) Optional(java.util.Optional) Statement(io.confluent.ksql.parser.tree.Statement) KsqlConstants(io.confluent.ksql.util.KsqlConstants) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) PullQueryQueuePopulator(io.confluent.ksql.physical.pull.PullQueryQueuePopulator) PullQueryQueue(io.confluent.ksql.query.PullQueryQueue) PushPhysicalPlan(io.confluent.ksql.physical.scalablepush.PushPhysicalPlan) HARouting(io.confluent.ksql.physical.pull.HARouting) PlanInfo(io.confluent.ksql.execution.plan.PlanInfo) PullPhysicalPlanBuilder(io.confluent.ksql.physical.pull.PullPhysicalPlanBuilder) KeyFormat(io.confluent.ksql.serde.KeyFormat) ResultType(io.confluent.ksql.util.PushQueryMetadata.ResultType) CompletableFuture(java.util.concurrent.CompletableFuture) DataSourceType(io.confluent.ksql.metastore.model.DataSource.DataSourceType) OptionalInt(java.util.OptionalInt) PushOffsetRange(io.confluent.ksql.util.PushOffsetRange) LogicalPlanner(io.confluent.ksql.planner.LogicalPlanner) Logger(org.slf4j.Logger) PhysicalPlan(io.confluent.ksql.physical.PhysicalPlan) PlanSummary(io.confluent.ksql.util.PlanSummary) PullPhysicalPlan(io.confluent.ksql.physical.pull.PullPhysicalPlan) PlanNode(io.confluent.ksql.planner.plan.PlanNode) QueryRegistry(io.confluent.ksql.query.QueryRegistry) Collections(java.util.Collections) CreateTableCommand(io.confluent.ksql.execution.ddl.commands.CreateTableCommand) Select(io.confluent.ksql.parser.tree.Select) PushQueryPreparer(io.confluent.ksql.physical.scalablepush.PushQueryPreparer) KsqlConstants(io.confluent.ksql.util.KsqlConstants) KsqlException(io.confluent.ksql.util.KsqlException) DataSource(io.confluent.ksql.metastore.model.DataSource)
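
The early check in execute prevents INSERT INTO and CREATE ... AS statements from writing into a source stream or table, which is read-only. Below is a distilled standalone sketch of that guard; SinkInfo is a simplified stand-in for the DataSource returned by the metastore, not a real ksql class.

// Sketch of the read-only sink guard from execute(); SinkInfo stands in for the DataSource
// whose isSource() flag marks read-only source streams/tables.
final class ReadOnlySinkGuardSketch {

    static final class SinkInfo {
        final String name;
        final boolean isSource; // true for read-only source streams/tables

        SinkInfo(final String name, final boolean isSource) {
            this.name = name;
            this.isSource = isSource;
        }
    }

    static void checkSinkIsWritable(final SinkInfo sink) {
        // A null sink means the target is being created by this statement, so there is nothing to check.
        if (sink != null && sink.isSource) {
            throw new IllegalStateException("Cannot insert into read-only source: " + sink.name);
        }
    }

    public static void main(final String[] args) {
        checkSinkIsWritable(new SinkInfo("ORDERS", false)); // regular sink: allowed
        checkSinkIsWritable(null);                          // sink not yet registered: allowed
        try {
            checkSinkIsWritable(new SinkInfo("SOURCE_TABLE", true)); // source table: rejected
        } catch (final IllegalStateException e) {
            System.out.println("rejected as expected: " + e.getMessage());
        }
    }
}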

Aggregations

DataSource (io.confluent.ksql.metastore.model.DataSource) 70
Test (org.junit.Test) 25
KsqlException (io.confluent.ksql.util.KsqlException) 24
SourceName (io.confluent.ksql.name.SourceName) 21
KsqlTopic (io.confluent.ksql.execution.ddl.commands.KsqlTopic) 12
ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement) 12
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema) 10
MetricCollectors (io.confluent.ksql.metrics.MetricCollectors) 9
Collectors (java.util.stream.Collectors) 9
PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata) 8
PreparedStatement (io.confluent.ksql.parser.KsqlParser.PreparedStatement) 7
KsqlStatementException (io.confluent.ksql.util.KsqlStatementException) 7
Optional (java.util.Optional) 7
ImmutableList (com.google.common.collect.ImmutableList) 6
GenericKey (io.confluent.ksql.GenericKey) 6
QueryId (io.confluent.ksql.query.QueryId) 6
ServiceContext (io.confluent.ksql.services.ServiceContext) 6
KsqlConfig (io.confluent.ksql.util.KsqlConfig) 6
Collections (java.util.Collections) 6
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString) 6