Search in sources :

Example 1 with SharedKafkaStreamsRuntime

use of io.confluent.ksql.util.SharedKafkaStreamsRuntime in project ksql by confluentinc.

In the class QueryRegistryImplTest, the method givenCreate:

/**
 * Test helper: wires up mocks for a persistent query (both the dedicated-runtime and the
 * shared-runtime flavors) and registers it via
 * {@code registry.createOrReplacePersistentQuery}.
 *
 * @param registry            the registry under test
 * @param id                  the query id to register under
 * @param source              name of the (single) source the query reads from
 * @param sink                optional sink name; when present, sink-related getters are stubbed
 * @param persistentQueryType the persistent query type to report from the mocks
 * @return the {@link PersistentQueryMetadata} returned by the registry
 */
private PersistentQueryMetadata givenCreate(final QueryRegistry registry, final String id, final String source, final Optional<String> sink, final KsqlConstants.PersistentQueryType persistentQueryType) {
    final QueryId queryId = new QueryId(id);
    // "query" is handed back by the dedicated-runtime builder, "newQuery" by the shared-runtime one.
    final PersistentQueryMetadata query = mock(PersistentQueryMetadataImpl.class);
    final PersistentQueryMetadata newQuery = mock(BinPackedPersistentQueryMetadataImpl.class);
    final DataSource sinkSource = mock(DataSource.class);
    final ExecutionStep physicalPlan = mock(ExecutionStep.class);
    sink.ifPresent(s -> {
        when(sinkSource.getName()).thenReturn(SourceName.of(s));
        when(query.getSinkName()).thenReturn(Optional.of(SourceName.of(s)));
        when(newQuery.getSinkName()).thenReturn(Optional.of(SourceName.of(s)));
    });
    when(newQuery.getOverriddenProperties()).thenReturn(new HashMap<>());
    when(newQuery.getQueryId()).thenReturn(queryId);
    when(newQuery.getSink()).thenReturn(Optional.of(sinkSource));
    when(newQuery.getSourceNames()).thenReturn(ImmutableSet.of(SourceName.of(source)));
    when(newQuery.getPersistentQueryType()).thenReturn(persistentQueryType);
    when(newQuery.getPhysicalPlan()).thenReturn(physicalPlan);
    final SharedKafkaStreamsRuntime runtime = mock(SharedKafkaStreamsRuntimeImpl.class);
    try {
        // The shared runtime is a private field with no setter, so inject it reflectively.
        final Field sharedRuntime = BinPackedPersistentQueryMetadataImpl.class.getDeclaredField("sharedKafkaStreamsRuntime");
        sharedRuntime.setAccessible(true);
        sharedRuntime.set(newQuery, runtime);
    } catch (final NoSuchFieldException | IllegalAccessException e) {
        // Fail fast: silently continuing would leave the mock without a runtime and
        // surface later as a confusing NPE deep inside the registry call.
        throw new AssertionError("Could not inject sharedKafkaStreamsRuntime into mock", e);
    }
    when(runtime.getNewQueryErrorQueue()).thenReturn(mock(QueryMetadataImpl.TimeBoundedQueue.class));
    when(query.getQueryId()).thenReturn(queryId);
    when(query.getSink()).thenReturn(Optional.of(sinkSource));
    when(query.getSourceNames()).thenReturn(ImmutableSet.of(SourceName.of(source)));
    when(query.getPersistentQueryType()).thenReturn(persistentQueryType);
    when(query.getPhysicalPlan()).thenReturn(physicalPlan);
    when(queryBuilder.buildPersistentQueryInSharedRuntime(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any())).thenReturn(newQuery);
    when(queryBuilder.buildPersistentQueryInDedicatedRuntime(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any())).thenReturn(query);
    when(config.getConfig(true)).thenReturn(ksqlConfig);
    // "sharedRuntimes" (a test-class field) decides whether an application id is supplied,
    // which in turn selects the shared- vs dedicated-runtime build path.
    return registry.createOrReplacePersistentQuery(config, serviceContext, logContext, metaStore, "sql", queryId, Optional.of(sinkSource), ImmutableSet.of(toSource(source)), mock(ExecutionStep.class), "plan-summary", persistentQueryType, sharedRuntimes ? Optional.of("applicationId") : Optional.empty());
}
Also used : ExecutionStep(io.confluent.ksql.execution.plan.ExecutionStep) Field(java.lang.reflect.Field) SharedKafkaStreamsRuntime(io.confluent.ksql.util.SharedKafkaStreamsRuntime) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) DataSource(io.confluent.ksql.metastore.model.DataSource)

Example 2 with SharedKafkaStreamsRuntime

use of io.confluent.ksql.util.SharedKafkaStreamsRuntime in project ksql by confluentinc.

In the class QueryBuilder, the method buildPersistentQueryInSharedRuntime:

@SuppressWarnings("ParameterNumber")
/**
 * Builds a persistent query that runs inside a shared (bin-packed) Kafka Streams runtime,
 * as opposed to a dedicated runtime per query.
 *
 * <p>Obtains (or lazily creates) the {@link SharedKafkaStreamsRuntime} for the given
 * application id, builds the query's topology as a named topology inside that runtime,
 * and wraps the result in a {@link BinPackedPersistentQueryMetadataImpl} (sandboxed when
 * this builder is not "real").
 */
@SuppressWarnings("ParameterNumber")
PersistentQueryMetadata buildPersistentQueryInSharedRuntime(final KsqlConfig ksqlConfig, final KsqlConstants.PersistentQueryType persistentQueryType, final String statementText, final QueryId queryId, final Optional<DataSource> sinkDataSource, final Set<DataSource> sources, final ExecutionStep<?> physicalPlan, final String planSummary, final QueryMetadata.Listener listener, final Supplier<List<PersistentQueryMetadata>> allPersistentQueries, final String applicationId, final MetricCollectors metricCollectors) {
    final SharedKafkaStreamsRuntime sharedKafkaStreamsRuntime = getKafkaStreamsInstance(applicationId, sources.stream().map(DataSource::getName).collect(Collectors.toSet()), queryId, metricCollectors);
    final Map<String, Object> queryOverrides = sharedKafkaStreamsRuntime.getStreamProperties();
    final LogicalSchema logicalSchema;
    final KeyFormat keyFormat;
    final ValueFormat valueFormat;
    final KsqlTopic ksqlTopic;
    switch(persistentQueryType) {
        // CREATE_SOURCE does not have a sink, so the schema is obtained from the query source
        case CREATE_SOURCE:
            final DataSource dataSource = Iterables.getOnlyElement(sources);
            logicalSchema = dataSource.getSchema();
            keyFormat = dataSource.getKsqlTopic().getKeyFormat();
            valueFormat = dataSource.getKsqlTopic().getValueFormat();
            ksqlTopic = dataSource.getKsqlTopic();
            break;
        default:
            // NOTE(review): unguarded Optional.get() — assumes every non-CREATE_SOURCE
            // persistent query type always has a sink; verify against callers.
            logicalSchema = sinkDataSource.get().getSchema();
            keyFormat = sinkDataSource.get().getKsqlTopic().getKeyFormat();
            valueFormat = sinkDataSource.get().getKsqlTopic().getValueFormat();
            ksqlTopic = sinkDataSource.get().getKsqlTopic();
            break;
    }
    final PhysicalSchema querySchema = PhysicalSchema.from(logicalSchema, keyFormat.getFeatures(), valueFormat.getFeatures());
    // Each query becomes a named topology within the shared runtime, keyed by its query id.
    final NamedTopologyBuilder namedTopologyBuilder = sharedKafkaStreamsRuntime.getKafkaStreams().newNamedTopologyBuilder(queryId.toString(), PropertiesUtil.asProperties(queryOverrides));
    final RuntimeBuildContext runtimeBuildContext = buildContext(applicationId, queryId, namedTopologyBuilder);
    final Object result = buildQueryImplementation(physicalPlan, runtimeBuildContext);
    final NamedTopology topology = namedTopologyBuilder.build();
    // Materialization is only available when the built query exposes materialization info.
    final Optional<MaterializationProviderBuilderFactory.MaterializationProviderBuilder> materializationProviderBuilder = getMaterializationInfo(result).map(info -> materializationProviderBuilderFactory.materializationProviderBuilder(info, querySchema, keyFormat, queryOverrides, applicationId, queryId.toString()));
    final Optional<ScalablePushRegistry> scalablePushRegistry = applyScalablePushProcessor(querySchema.logicalSchema(), result, allPersistentQueries, queryOverrides, applicationId, ksqlConfig, ksqlTopic, serviceContext);
    // The trailing lambda lets the metadata rebuild its named topology from the physical plan
    // on demand (e.g. after a runtime restart).
    final BinPackedPersistentQueryMetadataImpl binPackedPersistentQueryMetadata = new BinPackedPersistentQueryMetadataImpl(persistentQueryType, statementText, querySchema, sources.stream().map(DataSource::getName).collect(Collectors.toSet()), planSummary, applicationId, topology, sharedKafkaStreamsRuntime, runtimeBuildContext.getSchemas(), config.getOverrides(), queryId, materializationProviderBuilder, physicalPlan, getUncaughtExceptionProcessingLogger(queryId), sinkDataSource, listener, queryOverrides, scalablePushRegistry, (streamsRuntime) -> getNamedTopology(streamsRuntime, queryId, applicationId, queryOverrides, physicalPlan));
    // "real" distinguishes the live engine from a sandbox (validation) engine.
    if (real) {
        return binPackedPersistentQueryMetadata;
    } else {
        return SandboxedBinPackedPersistentQueryMetadataImpl.of(binPackedPersistentQueryMetadata, listener);
    }
}
Also used : ValueFormat(io.confluent.ksql.serde.ValueFormat) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) KeyFormat(io.confluent.ksql.serde.KeyFormat) DataSource(io.confluent.ksql.metastore.model.DataSource) ScalablePushRegistry(io.confluent.ksql.physical.scalablepush.ScalablePushRegistry) SharedKafkaStreamsRuntime(io.confluent.ksql.util.SharedKafkaStreamsRuntime) RuntimeBuildContext(io.confluent.ksql.execution.runtime.RuntimeBuildContext) PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) NamedTopologyBuilder(org.apache.kafka.streams.processor.internals.namedtopology.NamedTopologyBuilder) NamedTopology(org.apache.kafka.streams.processor.internals.namedtopology.NamedTopology) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) BinPackedPersistentQueryMetadataImpl(io.confluent.ksql.util.BinPackedPersistentQueryMetadataImpl) SandboxedBinPackedPersistentQueryMetadataImpl(io.confluent.ksql.util.SandboxedBinPackedPersistentQueryMetadataImpl)

Example 3 with SharedKafkaStreamsRuntime

use of io.confluent.ksql.util.SharedKafkaStreamsRuntime in project ksql by confluentinc.

In the class QueryRegistryImpl, the method unregisterQuery:

/**
 * Removes a query from the registry's bookkeeping and notifies listeners.
 *
 * <p>For persistent queries this also drops the query from the per-type indexes
 * (create-as / insert) and closes any shared runtime left with no collocated queries.
 */
private void unregisterQuery(final QueryMetadata query) {
    if (query instanceof PersistentQueryMetadata) {
        final PersistentQueryMetadata persistent = (PersistentQueryMetadata) query;
        final QueryId id = persistent.getQueryId();
        persistentQueries.remove(id);

        // Reap shared runtimes that no longer host any query: detach first, then close.
        final Set<SharedKafkaStreamsRuntime> emptyRuntimes =
            streams.stream()
                .filter(runtime -> runtime.getCollocatedQueries().isEmpty())
                .collect(Collectors.toSet());
        streams.removeAll(emptyRuntimes);
        emptyRuntimes.forEach(SharedKafkaStreamsRuntime::close);

        switch (persistent.getPersistentQueryType()) {
            case CREATE_SOURCE:
                // Source-creating queries are indexed by their single source name.
                createAsQueries.remove(Iterables.getOnlyElement(persistent.getSourceNames()));
                break;
            case CREATE_AS:
                createAsQueries.remove(persistent.getSinkName().get());
                break;
            case INSERT:
                // Drop this id from every involved source/sink entry; prune empty entries.
                sinkAndSources(persistent).forEach(sourceName ->
                    insertQueries.computeIfPresent(sourceName, (name, ids) -> {
                        ids.remove(id);
                        return ids.isEmpty() ? null : ids;
                    }));
                break;
            default:
        }
    }
    allLiveQueries.remove(query.getQueryId());
    notifyDeregister(query);
}
Also used : DataSource(io.confluent.ksql.metastore.model.DataSource) Iterables(com.google.common.collect.Iterables) SandboxedSharedKafkaStreamsRuntimeImpl(io.confluent.ksql.util.SandboxedSharedKafkaStreamsRuntimeImpl) SourceName(io.confluent.ksql.name.SourceName) SharedKafkaStreamsRuntime(io.confluent.ksql.util.SharedKafkaStreamsRuntime) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) ProcessingLogContext(io.confluent.ksql.logging.processing.ProcessingLogContext) SandboxedTransientQueryMetadata(io.confluent.ksql.util.SandboxedTransientQueryMetadata) MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) OptionalInt(java.util.OptionalInt) WindowInfo(io.confluent.ksql.serde.WindowInfo) ArrayList(java.util.ArrayList) SessionConfig(io.confluent.ksql.config.SessionConfig) HashSet(java.util.HashSet) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) BiPredicate(java.util.function.BiPredicate) ImmutableList(com.google.common.collect.ImmutableList) Map(java.util.Map) MetaStore(io.confluent.ksql.metastore.MetaStore) SandboxedPersistentQueryMetadataImpl(io.confluent.ksql.util.SandboxedPersistentQueryMetadataImpl) BinPackedPersistentQueryMetadataImpl(io.confluent.ksql.util.BinPackedPersistentQueryMetadataImpl) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) QueryMetadata(io.confluent.ksql.util.QueryMetadata) TopicPartition(org.apache.kafka.common.TopicPartition) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) ExecutionStep(io.confluent.ksql.execution.plan.ExecutionStep) ImmutableSet(com.google.common.collect.ImmutableSet) PropertiesList(io.confluent.ksql.rest.entity.PropertiesList) Logger(org.slf4j.Logger) ImmutableMap(com.google.common.collect.ImmutableMap) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) KsqlConfig(io.confluent.ksql.util.KsqlConfig) 
SandboxedBinPackedPersistentQueryMetadataImpl(io.confluent.ksql.util.SandboxedBinPackedPersistentQueryMetadataImpl) State(org.apache.kafka.streams.KafkaStreams.State) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Collectors(java.util.stream.Collectors) PersistentQueryMetadataImpl(io.confluent.ksql.util.PersistentQueryMetadataImpl) Objects(java.util.Objects) List(java.util.List) QueryEventListener(io.confluent.ksql.engine.QueryEventListener) Optional(java.util.Optional) KsqlConstants(io.confluent.ksql.util.KsqlConstants) Collections(java.util.Collections) SharedKafkaStreamsRuntime(io.confluent.ksql.util.SharedKafkaStreamsRuntime) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata)

Example 4 with SharedKafkaStreamsRuntime

use of io.confluent.ksql.util.SharedKafkaStreamsRuntime in project ksql by confluentinc.

In the class QueryBuilder, the method getKafkaStreamsInstance:

/**
 * Returns the shared Kafka Streams runtime for the given application id, creating and
 * registering one if it does not exist yet.
 *
 * <p>Sandbox (validation) builders match runtimes whose application id carries the
 * {@code -validation} suffix, and create {@link SandboxedSharedKafkaStreamsRuntimeImpl}
 * instances instead of real ones.
 */
private SharedKafkaStreamsRuntime getKafkaStreamsInstance(final String applicationId, final Set<SourceName> sources, final QueryId queryId, final MetricCollectors metricCollectors) {
    for (final SharedKafkaStreamsRuntime sharedKafkaStreamsRuntime : streams) {
        if (sharedKafkaStreamsRuntime.getApplicationId().equals(applicationId) || (sharedKafkaStreamsRuntime.getApplicationId().equals(applicationId + "-validation") && !real)) {
            return sharedKafkaStreamsRuntime;
        }
    }
    final SharedKafkaStreamsRuntime stream;
    final KsqlConfig ksqlConfig = config.getConfig(true);
    if (real) {
        // Reuse the ksqlConfig captured above rather than re-resolving config.getConfig(true).
        stream = new SharedKafkaStreamsRuntimeImpl(kafkaStreamsBuilder, getConfiguredQueryErrorClassifier(ksqlConfig, applicationId), ksqlConfig.getInt(KsqlConfig.KSQL_QUERY_ERROR_MAX_QUEUE_SIZE), ksqlConfig.getLong(KsqlConfig.KSQL_SHUTDOWN_TIMEOUT_MS_CONFIG), buildStreamsProperties(applicationId, Optional.empty(), metricCollectors, ksqlConfig, processingLogContext));
    } else {
        stream = new SandboxedSharedKafkaStreamsRuntimeImpl(kafkaStreamsBuilder, buildStreamsProperties(applicationId + "-validation", Optional.empty(), metricCollectors, ksqlConfig, processingLogContext));
    }
    streams.add(stream);
    return stream;
}
Also used : SharedKafkaStreamsRuntime(io.confluent.ksql.util.SharedKafkaStreamsRuntime) SandboxedSharedKafkaStreamsRuntimeImpl(io.confluent.ksql.util.SandboxedSharedKafkaStreamsRuntimeImpl) SandboxedSharedKafkaStreamsRuntimeImpl(io.confluent.ksql.util.SandboxedSharedKafkaStreamsRuntimeImpl) SharedKafkaStreamsRuntimeImpl(io.confluent.ksql.util.SharedKafkaStreamsRuntimeImpl) KsqlConfig(io.confluent.ksql.util.KsqlConfig)

Example 5 with SharedKafkaStreamsRuntime

use of io.confluent.ksql.util.SharedKafkaStreamsRuntime in project ksql by confluentinc.

In the class QueryRegistryImpl, the method updateStreamsPropertiesAndRestartRuntime:

@Override
/**
 * Pushes the latest streams properties into every shared runtime and restarts each one
 * so the new configuration takes effect.
 *
 * @param config     the config to derive the new streams properties from
 * @param logContext the processing log context used when rebuilding properties
 */
@Override
public void updateStreamsPropertiesAndRestartRuntime(final KsqlConfig config, final ProcessingLogContext logContext) {
    // Loop variable made final for consistency with the rest of the file.
    for (final SharedKafkaStreamsRuntime stream : streams) {
        updateStreamsProperties(stream, config, logContext);
        stream.restartStreamsRuntime();
    }
}
Also used : SharedKafkaStreamsRuntime(io.confluent.ksql.util.SharedKafkaStreamsRuntime)

Aggregations

SharedKafkaStreamsRuntime (io.confluent.ksql.util.SharedKafkaStreamsRuntime)5 DataSource (io.confluent.ksql.metastore.model.DataSource)3 ExecutionStep (io.confluent.ksql.execution.plan.ExecutionStep)2 LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema)2 BinPackedPersistentQueryMetadataImpl (io.confluent.ksql.util.BinPackedPersistentQueryMetadataImpl)2 KsqlConfig (io.confluent.ksql.util.KsqlConfig)2 PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata)2 SandboxedBinPackedPersistentQueryMetadataImpl (io.confluent.ksql.util.SandboxedBinPackedPersistentQueryMetadataImpl)2 SandboxedSharedKafkaStreamsRuntimeImpl (io.confluent.ksql.util.SandboxedSharedKafkaStreamsRuntimeImpl)2 ImmutableList (com.google.common.collect.ImmutableList)1 ImmutableMap (com.google.common.collect.ImmutableMap)1 ImmutableSet (com.google.common.collect.ImmutableSet)1 Iterables (com.google.common.collect.Iterables)1 SessionConfig (io.confluent.ksql.config.SessionConfig)1 QueryEventListener (io.confluent.ksql.engine.QueryEventListener)1 KsqlTopic (io.confluent.ksql.execution.ddl.commands.KsqlTopic)1 RuntimeBuildContext (io.confluent.ksql.execution.runtime.RuntimeBuildContext)1 FunctionRegistry (io.confluent.ksql.function.FunctionRegistry)1 ProcessingLogContext (io.confluent.ksql.logging.processing.ProcessingLogContext)1 MetaStore (io.confluent.ksql.metastore.MetaStore)1