Search in sources :

Example 16 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class QueryRegistryImplTest, method givenCreate.

/**
 * Builds a mocked persistent query (both the dedicated-runtime and shared-runtime flavors),
 * wires the query-builder mocks to return them, and registers the query with the given
 * registry via {@code createOrReplacePersistentQuery}.
 *
 * @param registry the registry under test that the query is created in
 * @param id the query id to assign
 * @param source the name of the (single) source the query reads from
 * @param sink if present, the sink name stubbed on the mocked queries
 * @param persistentQueryType the persistent query type stubbed on the mocked queries
 * @return the metadata returned by the registry's create call
 */
private PersistentQueryMetadata givenCreate(final QueryRegistry registry, final String id, final String source, final Optional<String> sink, KsqlConstants.PersistentQueryType persistentQueryType) {
    final QueryId queryId = new QueryId(id);
    final PersistentQueryMetadata query = mock(PersistentQueryMetadataImpl.class);
    final PersistentQueryMetadata newQuery = mock(BinPackedPersistentQueryMetadataImpl.class);
    final DataSource sinkSource = mock(DataSource.class);
    final ExecutionStep physicalPlan = mock(ExecutionStep.class);
    sink.ifPresent(s -> {
        when(sinkSource.getName()).thenReturn(SourceName.of(s));
        when(query.getSinkName()).thenReturn(Optional.of(SourceName.of(s)));
        when(newQuery.getSinkName()).thenReturn(Optional.of(SourceName.of(s)));
    });
    when(newQuery.getOverriddenProperties()).thenReturn(new HashMap<>());
    when(newQuery.getQueryId()).thenReturn(queryId);
    when(newQuery.getSink()).thenReturn(Optional.of(sinkSource));
    when(newQuery.getSourceNames()).thenReturn(ImmutableSet.of(SourceName.of(source)));
    when(newQuery.getPersistentQueryType()).thenReturn(persistentQueryType);
    when(newQuery.getPhysicalPlan()).thenReturn(physicalPlan);
    final SharedKafkaStreamsRuntime runtime = mock(SharedKafkaStreamsRuntimeImpl.class);
    // The shared runtime is a plain field on the mocked implementation class, so it has to be
    // injected via reflection rather than stubbing.
    try {
        final Field sharedRuntime = BinPackedPersistentQueryMetadataImpl.class.getDeclaredField("sharedKafkaStreamsRuntime");
        sharedRuntime.setAccessible(true);
        sharedRuntime.set(newQuery, runtime);
    } catch (NoSuchFieldException | IllegalAccessException e) {
        // Fail fast instead of printing and continuing: a silently-missing runtime field
        // would surface later as a confusing NPE deep inside the registry under test.
        throw new AssertionError("Failed to inject sharedKafkaStreamsRuntime via reflection", e);
    }
    when(runtime.getNewQueryErrorQueue()).thenReturn(mock(QueryMetadataImpl.TimeBoundedQueue.class));
    when(query.getQueryId()).thenReturn(queryId);
    when(query.getSink()).thenReturn(Optional.of(sinkSource));
    when(query.getSourceNames()).thenReturn(ImmutableSet.of(SourceName.of(source)));
    when(query.getPersistentQueryType()).thenReturn(persistentQueryType);
    when(query.getPhysicalPlan()).thenReturn(physicalPlan);
    when(queryBuilder.buildPersistentQueryInSharedRuntime(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any())).thenReturn(newQuery);
    when(queryBuilder.buildPersistentQueryInDedicatedRuntime(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any())).thenReturn(query);
    when(config.getConfig(true)).thenReturn(ksqlConfig);
    return registry.createOrReplacePersistentQuery(config, serviceContext, logContext, metaStore, "sql", queryId, Optional.of(sinkSource), ImmutableSet.of(toSource(source)), mock(ExecutionStep.class), "plan-summary", persistentQueryType, sharedRuntimes ? Optional.of("applicationId") : Optional.empty());
}
Also used : ExecutionStep(io.confluent.ksql.execution.plan.ExecutionStep) Field(java.lang.reflect.Field) SharedKafkaStreamsRuntime(io.confluent.ksql.util.SharedKafkaStreamsRuntime) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) DataSource(io.confluent.ksql.metastore.model.DataSource)

Example 17 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class TestExecutorUtil, method buildStreamsTopologyTestDrivers.

/**
 * Builds one {@link TopologyTestDriverContainer} per persistent query produced by the test
 * case: each container wraps a {@code TopologyTestDriver} together with the query's source
 * topics and (optional) sink topic.
 *
 * @param testCase the test case whose statements are compiled into queries
 * @param serviceContext provides the schema registry client for sink-topic construction
 * @param ksqlEngine the engine used to build queries and resolve sink sources
 * @param ksqlConfig base config; test-case persisted properties are applied on top
 * @param stubKafkaService stub broker that must already contain every source topic
 * @param listener notified as queries are built
 * @return the list of driver containers, one per persistent query
 */
static List<TopologyTestDriverContainer> buildStreamsTopologyTestDrivers(final TestCase testCase, final ServiceContext serviceContext, final KsqlEngine ksqlEngine, final KsqlConfig ksqlConfig, final StubKafkaService stubKafkaService, final TestExecutionListener listener) {
    final KsqlConfig effectiveConfig = testCase.applyPersistedProperties(ksqlConfig);
    final List<PersistentQueryAndSources> builtQueries = doBuildQueries(testCase, serviceContext, ksqlEngine, effectiveConfig, stubKafkaService, listener);
    final List<TopologyTestDriverContainer> drivers = new ArrayList<>();
    for (final PersistentQueryAndSources queryAndSources : builtQueries) {
        final PersistentQueryMetadata metadata = queryAndSources.getPersistentQueryMetadata();
        // The driver requires a Properties instance, so copy the streams config over.
        final Properties driverProperties = new Properties();
        driverProperties.putAll(metadata.getStreamsProperties());
        final Topology topology = metadata.getTopology();
        final TopologyTestDriver driver = new TopologyTestDriver(topology, driverProperties, Instant.EPOCH);
        // Every source topic must already exist in the stub broker; resolve each one.
        final List<Topic> inputTopics = queryAndSources.getSources().stream().map(dataSource -> {
            final String topicName = dataSource.getKafkaTopicName();
            stubKafkaService.requireTopicExists(topicName);
            return stubKafkaService.getTopic(topicName);
        }).collect(Collectors.toList());
        // Queries without a sink (e.g. INSERT INTO-less types) yield an empty sink topic.
        final Optional<Topic> outputTopic = metadata.getSinkName().map(sinkName -> buildSinkTopic(ksqlEngine.getMetaStore().getSource(sinkName), stubKafkaService, serviceContext.getSchemaRegistryClient()));
        testCase.setGeneratedTopologies(ImmutableList.of(metadata.getTopologyDescription()));
        testCase.setGeneratedSchemas(metadata.getQuerySchemas().getLoggerSchemaInfo());
        drivers.add(TopologyTestDriverContainer.of(driver, inputTopics, outputTopic));
    }
    return drivers;
}
Also used : DataSource(io.confluent.ksql.metastore.model.DataSource) KsqlPlan(io.confluent.ksql.engine.KsqlPlan) SessionProperties(io.confluent.ksql.rest.SessionProperties) StringDescription(org.hamcrest.StringDescription) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) SourceName(io.confluent.ksql.name.SourceName) URL(java.net.URL) Matchers.not(org.hamcrest.Matchers.not) ServiceContext(io.confluent.ksql.services.ServiceContext) SchemaRegistryTopicSchemaSupplier(io.confluent.ksql.schema.ksql.inference.SchemaRegistryTopicSchemaSupplier) InjectorChain(io.confluent.ksql.statement.InjectorChain) KsqlConstants.getSRSubject(io.confluent.ksql.util.KsqlConstants.getSRSubject) ExecuteResult(io.confluent.ksql.KsqlExecutionContext.ExecuteResult) InsertValues(io.confluent.ksql.parser.tree.InsertValues) Map(java.util.Map) DefaultFormatInjector(io.confluent.ksql.format.DefaultFormatInjector) QueryId(io.confluent.ksql.query.QueryId) SchemaRegisterInjector(io.confluent.ksql.schema.ksql.inference.SchemaRegisterInjector) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) PlanJsonMapper(io.confluent.ksql.execution.json.PlanJsonMapper) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) Collection(java.util.Collection) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) KsqlConfig(io.confluent.ksql.util.KsqlConfig) Instant(java.time.Instant) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) DefaultSchemaInjector(io.confluent.ksql.schema.ksql.inference.DefaultSchemaInjector) Collectors(java.util.stream.Collectors) List(java.util.List) KsqlExecutionContext(io.confluent.ksql.KsqlExecutionContext) KsqlException(io.confluent.ksql.util.KsqlException) Optional(java.util.Optional) Matchers.is(org.hamcrest.Matchers.is) Builder(com.google.common.collect.ImmutableList.Builder) KsqlConstants(io.confluent.ksql.util.KsqlConstants) Topology(org.apache.kafka.streams.Topology) 
FormatFactory(io.confluent.ksql.serde.FormatFactory) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) KsqlHostInfo(io.confluent.ksql.util.KsqlHostInfo) ArrayList(java.util.ArrayList) SessionConfig(io.confluent.ksql.config.SessionConfig) LinkedHashMap(java.util.LinkedHashMap) ImmutableList(com.google.common.collect.ImmutableList) Objects.requireNonNull(java.util.Objects.requireNonNull) ConfiguredKsqlPlan(io.confluent.ksql.planner.plan.ConfiguredKsqlPlan) MetaStore(io.confluent.ksql.metastore.MetaStore) ParsedStatement(io.confluent.ksql.parser.KsqlParser.ParsedStatement) StubKafkaService(io.confluent.ksql.test.tools.stubs.StubKafkaService) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Matchers.empty(org.hamcrest.Matchers.empty) Properties(java.util.Properties) Iterator(java.util.Iterator) KsqlEngine(io.confluent.ksql.engine.KsqlEngine) MalformedURLException(java.net.MalformedURLException) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) SerdeFeature(io.confluent.ksql.serde.SerdeFeature) StubInsertValuesExecutor(io.confluent.ksql.engine.StubInsertValuesExecutor) IOException(java.io.IOException) KsqlStatementException(io.confluent.ksql.util.KsqlStatementException) Format(io.confluent.ksql.serde.Format) SqlFormatInjector(io.confluent.ksql.engine.SqlFormatInjector) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) VisibleForTesting(com.google.common.annotations.VisibleForTesting) SchemaMetadata(io.confluent.kafka.schemaregistry.client.SchemaMetadata) PreparedStatement(io.confluent.ksql.parser.KsqlParser.PreparedStatement) ArrayList(java.util.ArrayList) KsqlConfig(io.confluent.ksql.util.KsqlConfig) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Topology(org.apache.kafka.streams.Topology) SessionProperties(io.confluent.ksql.rest.SessionProperties) Properties(java.util.Properties) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) 
PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata)

Example 18 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class InsertsStreamEndpoint, method createInsertsSubscriber.

/**
 * Creates a subscriber that feeds INSERT ... VALUES rows into the named target source.
 *
 * @param caseInsensitiveTarget raw target identifier as received from the client
 * @param properties client-supplied insert properties
 * @param acksSubscriber receives an ack per inserted row
 * @param context Vert.x context the subscriber operates on
 * @param workerExecutor executor for blocking work
 * @param serviceContext service context used by the subscriber
 * @return the inserts subscriber for the resolved data source
 * @throws KsqlApiException if inserts are disabled or the target name is invalid
 */
public InsertsStreamSubscriber createInsertsSubscriber(final String caseInsensitiveTarget, final JsonObject properties, final Subscriber<InsertResult> acksSubscriber, final Context context, final WorkerExecutor workerExecutor, final ServiceContext serviceContext) {
    // Must run on a worker thread: source lookup and subscriber setup may block.
    VertxUtils.checkIsWorker();
    final boolean insertsEnabled = ksqlConfig.getBoolean(KsqlConfig.KSQL_INSERT_INTO_VALUES_ENABLED);
    if (!insertsEnabled) {
        throw new KsqlApiException("The server has disabled INSERT INTO ... VALUES functionality. " + "To enable it, restart your ksqlDB server " + "with 'ksql.insert.into.values.enabled'=true", ERROR_CODE_BAD_REQUEST);
    }
    // Normalize the client-supplied identifier (handles quoting/case rules).
    final String target;
    try {
        target = Identifiers.getIdentifierText(caseInsensitiveTarget);
    } catch (IllegalArgumentException e) {
        throw new KsqlApiException("Invalid target name: " + e.getMessage(), ERROR_CODE_BAD_STATEMENT);
    }
    final SourceName targetName = SourceName.of(target);
    final DataSource targetSource = getDataSource(ksqlEngine.getMetaStore(), targetName);
    return InsertsSubscriber.createInsertsSubscriber(serviceContext, properties, targetSource, ksqlConfig, context, acksSubscriber, workerExecutor);
}
Also used : KsqlApiException(io.confluent.ksql.api.server.KsqlApiException) DataSource(io.confluent.ksql.metastore.model.DataSource)

Example 19 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class TopicDeleteInjector, method checkTopicRefs.

/**
 * Ensures no other registered data source still reads from the Kafka topic backing
 * {@code source}; deleting the topic would otherwise break those sources.
 *
 * @param source the data source whose backing topic is about to be deleted
 * @throws RuntimeException if any other data source uses the same Kafka topic
 */
private void checkTopicRefs(final DataSource source) {
    final String topicName = source.getKafkaTopicName();
    final SourceName sourceName = source.getName();
    final Map<SourceName, DataSource> sources = metastore.getAllDataSources();
    // Collect every OTHER source on the same topic, sorted for a deterministic message.
    final String otherUsers = sources.values().stream()
        .filter(candidate -> candidate.getKafkaTopicName().equals(topicName))
        .filter(candidate -> !sourceName.equals(candidate.getName()))
        .map(candidate -> candidate.getName().text())
        .sorted()
        .collect(Collectors.joining(", "));
    if (!otherUsers.isEmpty()) {
        throw new RuntimeException(String.format("Refusing to delete topic. Found other data sources (%s) using topic %s", otherUsers, topicName));
    }
}
Also used : DataSource(io.confluent.ksql.metastore.model.DataSource) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) SourceName(io.confluent.ksql.name.SourceName) FormatFactory(io.confluent.ksql.serde.FormatFactory) ServiceContext(io.confluent.ksql.services.ServiceContext) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) SchemaRegistryUtil(io.confluent.ksql.schema.registry.SchemaRegistryUtil) ImmutableList(com.google.common.collect.ImmutableList) Closer(com.google.common.io.Closer) Injector(io.confluent.ksql.statement.Injector) ExecutorUtil(io.confluent.ksql.util.ExecutorUtil) Map(java.util.Map) MetaStore(io.confluent.ksql.metastore.MetaStore) SerdeFeature(io.confluent.ksql.serde.SerdeFeature) SqlFormatter(io.confluent.ksql.parser.SqlFormatter) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) KsqlExecutionContext(io.confluent.ksql.KsqlExecutionContext) Format(io.confluent.ksql.serde.Format) KsqlException(io.confluent.ksql.util.KsqlException) DropStatement(io.confluent.ksql.parser.tree.DropStatement) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Statement(io.confluent.ksql.parser.tree.Statement) KsqlConstants(io.confluent.ksql.util.KsqlConstants) SourceName(io.confluent.ksql.name.SourceName) DataSource(io.confluent.ksql.metastore.model.DataSource)

Example 20 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

From the class JsonFormatTest, method readNormalResults.

/**
 * Reads back the rows produced to {@code resultTopic}, deserializing them with the
 * physical schema of the stream under test.
 *
 * @param resultTopic topic the query wrote its results to
 * @param expectedNumMessages number of unique rows expected to be available
 * @return the unique key/row pairs read from the topic
 */
private Map<GenericKey, GenericRow> readNormalResults(final String resultTopic, final int expectedNumMessages) {
    final DataSource resultSource = metaStore.getSource(SourceName.of(streamName));
    // Key and value serde features come from the source's backing topic formats.
    final PhysicalSchema schema = PhysicalSchema.from(
        resultSource.getSchema(),
        resultSource.getKsqlTopic().getKeyFormat().getFeatures(),
        resultSource.getKsqlTopic().getValueFormat().getFeatures());
    return TEST_HARNESS.verifyAvailableUniqueRows(resultTopic, expectedNumMessages, KAFKA, JSON, schema);
}
Also used : PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) DataSource(io.confluent.ksql.metastore.model.DataSource)

Aggregations

DataSource (io.confluent.ksql.metastore.model.DataSource)70 Test (org.junit.Test)25 KsqlException (io.confluent.ksql.util.KsqlException)24 SourceName (io.confluent.ksql.name.SourceName)21 KsqlTopic (io.confluent.ksql.execution.ddl.commands.KsqlTopic)12 ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement)12 LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema)10 MetricCollectors (io.confluent.ksql.metrics.MetricCollectors)9 Collectors (java.util.stream.Collectors)9 PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata)8 PreparedStatement (io.confluent.ksql.parser.KsqlParser.PreparedStatement)7 KsqlStatementException (io.confluent.ksql.util.KsqlStatementException)7 Optional (java.util.Optional)7 ImmutableList (com.google.common.collect.ImmutableList)6 GenericKey (io.confluent.ksql.GenericKey)6 QueryId (io.confluent.ksql.query.QueryId)6 ServiceContext (io.confluent.ksql.services.ServiceContext)6 KsqlConfig (io.confluent.ksql.util.KsqlConfig)6 Collections (java.util.Collections)6 ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString)6