
Example 6 with ServiceContext

Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

The class DescribeConnectorExecutor, method execute.

@SuppressWarnings("OptionalGetWithoutIsPresent")
public StatementExecutorResponse execute(
        final ConfiguredStatement<DescribeConnector> configuredStatement,
        final SessionProperties sessionProperties,
        final KsqlExecutionContext ksqlExecutionContext,
        final ServiceContext serviceContext) {
    final String connectorName = configuredStatement.getStatement().getConnectorName();
    final ConnectResponse<ConnectorStateInfo> statusResponse = serviceContext.getConnectClient().status(connectorName);
    if (statusResponse.error().isPresent()) {
        return StatementExecutorResponse.handled(connectErrorHandler.handle(configuredStatement, statusResponse));
    }
    final ConnectResponse<ConnectorInfo> infoResponse = serviceContext.getConnectClient().describe(connectorName);
    if (infoResponse.error().isPresent()) {
        return StatementExecutorResponse.handled(connectErrorHandler.handle(configuredStatement, infoResponse));
    }
    final ConnectorStateInfo status = statusResponse.datum().get();
    final ConnectorInfo info = infoResponse.datum().get();
    final Optional<Connector> connector = connectorFactory.apply(info);
    final List<KsqlWarning> warnings;
    final List<String> topics;
    if (connector.isPresent()) {
        // Small optimization. If a connector's info is not found in the response, don't query for
        // active topics with the given connectorName
        final ConnectResponse<Map<String, Map<String, List<String>>>> topicsResponse = serviceContext.getConnectClient().topics(connectorName);
        // The topics endpoint is relatively new, so a 404 from an older Connect cluster is
        // expected; in that case, log a warning to the server logs instead of returning a
        // warning to the user.
        if (topicsResponse.error().isPresent() && topicsResponse.httpCode() == HttpStatus.SC_NOT_FOUND) {
            topics = ImmutableList.of();
            warnings = ImmutableList.of();
            LOG.warn("Could not list related topics due to error: " + topicsResponse.error().get());
        } else if (topicsResponse.error().isPresent()) {
            topics = ImmutableList.of();
            warnings = ImmutableList.of(new KsqlWarning("Could not list related topics due to error: " + topicsResponse.error().get()));
        } else {
            topics = topicsResponse.datum().get().get(connectorName).getOrDefault(TOPICS_KEY, ImmutableList.of());
            warnings = ImmutableList.of();
        }
    } else {
        topics = ImmutableList.of();
        warnings = ImmutableList.of();
    }
    final List<SourceDescription> sources;
    if (connector.isPresent()) {
        sources = ksqlExecutionContext.getMetaStore().getAllDataSources().values().stream()
            .filter(source -> topics.contains(source.getKafkaTopicName()))
            .map(source -> SourceDescriptionFactory.create(
                source,
                false,
                ImmutableList.of(),
                ImmutableList.of(),
                Optional.empty(),
                ImmutableList.of(),
                ImmutableList.of(),
                ksqlExecutionContext.metricCollectors()))
            .collect(Collectors.toList());
    } else {
        sources = ImmutableList.of();
    }
    final ConnectorDescription description = new ConnectorDescription(
        configuredStatement.getStatementText(),
        info.config().get(ConnectorConfig.CONNECTOR_CLASS_CONFIG),
        status,
        sources,
        topics,
        warnings);
    return StatementExecutorResponse.handled(Optional.of(description));
}
Also used : Connectors(io.confluent.ksql.connect.supported.Connectors) SessionProperties(io.confluent.ksql.rest.SessionProperties) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) SourceDescription(io.confluent.ksql.rest.entity.SourceDescription) Function(java.util.function.Function) ConnectorInfo(org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo) ImmutableList(com.google.common.collect.ImmutableList) Map(java.util.Map) DescribeConnector(io.confluent.ksql.parser.tree.DescribeConnector) Logger(org.slf4j.Logger) Connector(io.confluent.ksql.connect.Connector) SourceDescriptionFactory(io.confluent.ksql.rest.entity.SourceDescriptionFactory) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) Collectors(java.util.stream.Collectors) ConnectorStateInfo(org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo) List(java.util.List) KsqlExecutionContext(io.confluent.ksql.KsqlExecutionContext) ConnectorDescription(io.confluent.ksql.rest.entity.ConnectorDescription) ConnectResponse(io.confluent.ksql.services.ConnectClient.ConnectResponse) Optional(java.util.Optional) VisibleForTesting(com.google.common.annotations.VisibleForTesting) HttpStatus(org.apache.hc.core5.http.HttpStatus) KsqlWarning(io.confluent.ksql.rest.entity.KsqlWarning) ConnectorConfig(org.apache.kafka.connect.runtime.ConnectorConfig)
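The executor above leans on a single convention of the Connect client responses: check error() first and return, and only then unwrap datum(), which is what makes the OptionalGetWithoutIsPresent suppression safe. Below is a minimal, standalone Java sketch of that same check-then-unwrap shape; EitherResponse and ResponseHandlingSketch are invented names for illustration and are not ksql's ConnectResponse API.

import java.util.Optional;

// Hypothetical, simplified stand-in for a response that carries either a value or an error,
// mirroring the error()/datum() checks in the executor above. Not the real ConnectResponse.
final class EitherResponse<T> {
    private final T datum;
    private final String error;

    private EitherResponse(final T datum, final String error) {
        this.datum = datum;
        this.error = error;
    }

    static <T> EitherResponse<T> ok(final T datum) {
        return new EitherResponse<>(datum, null);
    }

    static <T> EitherResponse<T> failed(final String error) {
        return new EitherResponse<>(null, error);
    }

    Optional<T> datum() {
        return Optional.ofNullable(datum);
    }

    Optional<String> error() {
        return Optional.ofNullable(error);
    }
}

public final class ResponseHandlingSketch {
    public static void main(final String[] args) {
        final EitherResponse<String> status = EitherResponse.ok("RUNNING");

        // Same shape as the executor: bail out early on error, only then unwrap the datum.
        if (status.error().isPresent()) {
            System.out.println("error: " + status.error().get());
            return;
        }
        // Safe because the error branch above returned; this is why the executor can
        // suppress the OptionalGetWithoutIsPresent warning.
        System.out.println("state: " + status.datum().get());
    }
}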

Example 7 with ServiceContext

Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

The class ListFunctionsExecutor, method execute.

public static StatementExecutorResponse execute(
        final ConfiguredStatement<ListFunctions> statement,
        final SessionProperties sessionProperties,
        final KsqlExecutionContext executionContext,
        final ServiceContext serviceContext) {
    final FunctionRegistry functionRegistry = executionContext.getMetaStore();
    final List<SimpleFunctionInfo> all = functionRegistry.listFunctions().stream()
        .map(factory -> new SimpleFunctionInfo(
            factory.getName().toUpperCase(), FunctionType.SCALAR, factory.getMetadata().getCategory()))
        .collect(Collectors.toList());
    functionRegistry.listTableFunctions().stream()
        .map(factory -> new SimpleFunctionInfo(
            factory.getName().toUpperCase(), FunctionType.TABLE, factory.getMetadata().getCategory()))
        .forEach(all::add);
    functionRegistry.listAggregateFunctions().stream()
        .map(factory -> new SimpleFunctionInfo(
            factory.getName().toUpperCase(), FunctionType.AGGREGATE, factory.getMetadata().getCategory()))
        .forEach(all::add);
    return StatementExecutorResponse.handled(Optional.of(new FunctionNameList(statement.getStatementText(), all)));
}
Also used : FunctionNameList(io.confluent.ksql.rest.entity.FunctionNameList) FunctionType(io.confluent.ksql.rest.entity.FunctionType) List(java.util.List) SessionProperties(io.confluent.ksql.rest.SessionProperties) KsqlExecutionContext(io.confluent.ksql.KsqlExecutionContext) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) ServiceContext(io.confluent.ksql.services.ServiceContext) ListFunctions(io.confluent.ksql.parser.tree.ListFunctions) Optional(java.util.Optional) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) SimpleFunctionInfo(io.confluent.ksql.rest.entity.SimpleFunctionInfo) Collectors(java.util.stream.Collectors)
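One detail worth noting in the snippet above is that the first stream is collected with Collectors.toList() and then mutated via forEach(all::add), which relies on the returned list being mutable. Here is a minimal, standalone sketch of the same merge pattern, with plain strings standing in for SimpleFunctionInfo and an explicit ArrayList so the mutation is guaranteed by contract; class and sample names are invented for the example.

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Collect one stream into a mutable list, then append the results of the others, as the
// executor above does across scalar, table, and aggregate function registries.
public final class ListMergeSketch {
    public static void main(final String[] args) {
        final List<String> all = Stream.of("abs", "lcase")
            .map(name -> name.toUpperCase() + " (SCALAR)")
            .collect(Collectors.toCollection(ArrayList::new));

        Stream.of("explode")
            .map(name -> name.toUpperCase() + " (TABLE)")
            .forEach(all::add);

        Stream.of("sum", "count")
            .map(name -> name.toUpperCase() + " (AGGREGATE)")
            .forEach(all::add);

        all.forEach(System.out::println);
    }
}

Collecting into ArrayList::new is a hedge rather than a claim about the ksql code: the JDK's Collectors.toList() currently returns a mutable ArrayList, but its documentation makes no guarantee about mutability.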

Example 8 with ServiceContext

Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

The class LocalCommands, method processLocalCommandFiles.

public void processLocalCommandFiles(final ServiceContext serviceContext) {
    final FilenameFilter filter = (dir, fileName) -> fileName.endsWith(LOCAL_COMMANDS_FILE_SUFFIX);
    final File[] files = directory.listFiles(filter);
    if (files == null) {
        throw new KsqlServerException("Bad local commands directory " + directory.getAbsolutePath() + ". Please check your configuration for " + KsqlRestConfig.KSQL_LOCAL_COMMANDS_LOCATION_CONFIG);
    }
    for (final File file : files) {
        if (file.equals(currentLocalCommands.getFile())) {
            continue;
        }
        try (LocalCommandsFile localCommandsFile = LocalCommandsFile.createReadonly(file)) {
            final List<LocalCommand> localCommands = localCommandsFile.readRecords();
            cleanUpTransientQueryState(localCommands, serviceContext);
            markFileAsProcessed(file);
        } catch (Exception e) {
            LOG.error("Error processing local commands " + file.getAbsolutePath() + ". There may be orphaned transient topics or abandoned state stores.", e);
        }
    }
}
Also used : FilenameFilter(java.io.FilenameFilter) Logger(org.slf4j.Logger) KsqlEngine(io.confluent.ksql.engine.KsqlEngine) Files(java.nio.file.Files) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) Set(java.util.Set) IOException(java.io.IOException) Random(java.util.Random) Collectors(java.util.stream.Collectors) File(java.io.File) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) KsqlServerException(io.confluent.ksql.util.KsqlServerException) PosixFilePermissions(java.nio.file.attribute.PosixFilePermissions) List(java.util.List) Closeable(java.io.Closeable) KsqlException(io.confluent.ksql.util.KsqlException) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings)
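The method above combines a FilenameFilter, a null check on listFiles() (which is how java.io.File signals a missing or unreadable directory), per-file processing, and a "mark as processed" step so files are not re-read on the next pass. The following is a standalone sketch of that sweep pattern using only the JDK; the class name, the .cmds/.processed suffixes, and the rename-based marking are invented for illustration and are not how LocalCommands itself marks files.

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.file.Files;
import java.util.List;

// Scan a directory for command files by suffix, treat a null listFiles() result as a
// configuration error, process each file, then rename it so a later sweep skips it.
public final class CommandFileSweep {
    private static final String SUFFIX = ".cmds";

    public static void sweep(final File directory) throws IOException {
        final FilenameFilter filter = (dir, fileName) -> fileName.endsWith(SUFFIX);
        final File[] files = directory.listFiles(filter);
        if (files == null) {
            // listFiles() returns null if the path is not a readable directory.
            throw new IOException("Bad local commands directory " + directory.getAbsolutePath());
        }
        for (final File file : files) {
            final List<String> lines = Files.readAllLines(file.toPath());
            System.out.println(file.getName() + ": " + lines.size() + " record(s)");
            // Stand-in for markFileAsProcessed: rename so the file is not picked up again.
            final File processed = new File(file.getParentFile(), file.getName() + ".processed");
            Files.move(file.toPath(), processed.toPath());
        }
    }

    public static void main(final String[] args) throws IOException {
        sweep(new File(args.length > 0 ? args[0] : "."));
    }
}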

Example 9 with ServiceContext

Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

The class EngineExecutor, method executeScalablePushQuery.

ScalablePushQueryMetadata executeScalablePushQuery(
        final ImmutableAnalysis analysis,
        final ConfiguredStatement<Query> statement,
        final PushRouting pushRouting,
        final PushRoutingOptions pushRoutingOptions,
        final QueryPlannerOptions queryPlannerOptions,
        final Context context,
        final Optional<ScalablePushQueryMetrics> scalablePushQueryMetrics) {
    final SessionConfig sessionConfig = statement.getSessionConfig();
    // If we ever change how many hops a request can do, we'll need to update this for correct
    // metrics.
    final RoutingNodeType routingNodeType = pushRoutingOptions.getHasBeenForwarded() ? RoutingNodeType.REMOTE_NODE : RoutingNodeType.SOURCE_NODE;
    PushPhysicalPlan plan = null;
    try {
        final KsqlConfig ksqlConfig = sessionConfig.getConfig(false);
        final LogicalPlanNode logicalPlan = buildAndValidateLogicalPlan(statement, analysis, ksqlConfig, queryPlannerOptions, true);
        final PushPhysicalPlanCreator pushPhysicalPlanCreator = (offsetRange, catchupConsumerGroup) -> buildScalablePushPhysicalPlan(logicalPlan, analysis, context, offsetRange, catchupConsumerGroup);
        final Optional<PushOffsetRange> offsetRange = pushRoutingOptions.getContinuationToken().map(PushOffsetRange::deserialize);
        final Optional<String> catchupConsumerGroup = pushRoutingOptions.getCatchupConsumerGroup();
        final PushPhysicalPlanManager physicalPlanManager = new PushPhysicalPlanManager(pushPhysicalPlanCreator, catchupConsumerGroup, offsetRange);
        final PushPhysicalPlan physicalPlan = physicalPlanManager.getPhysicalPlan();
        plan = physicalPlan;
        final TransientQueryQueue transientQueryQueue = new TransientQueryQueue(analysis.getLimitClause());
        final PushQueryMetadata.ResultType resultType =
            physicalPlan.getScalablePushRegistry().isTable()
                ? physicalPlan.getScalablePushRegistry().isWindowed()
                    ? ResultType.WINDOWED_TABLE
                    : ResultType.TABLE
                : ResultType.STREAM;
        final PushQueryQueuePopulator populator = () -> pushRouting.handlePushQuery(
            serviceContext,
            physicalPlanManager,
            statement,
            pushRoutingOptions,
            physicalPlan.getOutputSchema(),
            transientQueryQueue,
            scalablePushQueryMetrics,
            offsetRange);
        final PushQueryPreparer preparer = () ->
            pushRouting.preparePushQuery(physicalPlanManager, statement, pushRoutingOptions);
        final ScalablePushQueryMetadata metadata = new ScalablePushQueryMetadata(
            physicalPlan.getOutputSchema(),
            physicalPlan.getQueryId(),
            transientQueryQueue,
            scalablePushQueryMetrics,
            resultType,
            populator,
            preparer,
            physicalPlan.getSourceType(),
            routingNodeType,
            physicalPlan::getRowsReadFromDataSource);
        return metadata;
    } catch (final Exception e) {
        if (plan == null) {
            scalablePushQueryMetrics.ifPresent(m -> m.recordErrorRateForNoResult(1));
        } else {
            final PushPhysicalPlan pushPhysicalPlan = plan;
            scalablePushQueryMetrics.ifPresent(metrics -> metrics.recordErrorRate(1, pushPhysicalPlan.getSourceType(), routingNodeType));
        }
        final String stmtLower = statement.getStatementText().toLowerCase(Locale.ROOT);
        final String messageLower = e.getMessage().toLowerCase(Locale.ROOT);
        final String stackLower = Throwables.getStackTraceAsString(e).toLowerCase(Locale.ROOT);
        // Do not include the statement text in the standard error logs if the exception message or
        // stack trace contains it, since the query may hold sensitive information; the exception
        // rethrown below still carries the contents of the query back to the caller.
        if (messageLower.contains(stmtLower) || stackLower.contains(stmtLower)) {
            final StackTraceElement loc = Iterables.getLast(Throwables.getCausalChain(e)).getStackTrace()[0];
            LOG.error("Failure to execute push query V2 {} {}, not logging the error message since it " + "contains the query string, which may contain sensitive information." + " If you see this LOG message, please submit a GitHub ticket and" + " we will scrub the statement text from the error at {}", pushRoutingOptions.debugString(), queryPlannerOptions.debugString(), loc);
        } else {
            LOG.error("Failure to execute push query V2. {} {}", pushRoutingOptions.debugString(), queryPlannerOptions.debugString(), e);
        }
        LOG.debug("Failed push query V2 text {}, {}", statement.getStatementText(), e);
        throw new KsqlStatementException(e.getMessage() == null ? "Server Error" + Arrays.toString(e.getStackTrace()) : e.getMessage(), statement.getStatementText(), e);
    }
}
Also used : DataSource(io.confluent.ksql.metastore.model.DataSource) PushPhysicalPlanCreator(io.confluent.ksql.physical.scalablepush.PushPhysicalPlanCreator) CreateTableAsSelect(io.confluent.ksql.parser.tree.CreateTableAsSelect) Arrays(java.util.Arrays) InternalFunctionRegistry(io.confluent.ksql.function.InternalFunctionRegistry) SourceName(io.confluent.ksql.name.SourceName) RoutingOptions(io.confluent.ksql.execution.streams.RoutingOptions) PushPhysicalPlanManager(io.confluent.ksql.physical.scalablepush.PushPhysicalPlanManager) PushPhysicalPlanBuilder(io.confluent.ksql.physical.scalablepush.PushPhysicalPlanBuilder) RoutingNodeType(io.confluent.ksql.util.KsqlConstants.RoutingNodeType) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) ExecuteResult(io.confluent.ksql.KsqlExecutionContext.ExecuteResult) Map(java.util.Map) KsqlBareOutputNode(io.confluent.ksql.planner.plan.KsqlBareOutputNode) QueryId(io.confluent.ksql.query.QueryId) ExecutionStep(io.confluent.ksql.execution.plan.ExecutionStep) RefinementInfo(io.confluent.ksql.serde.RefinementInfo) ImmutableAnalysis(io.confluent.ksql.analyzer.ImmutableAnalysis) Sink(io.confluent.ksql.parser.tree.Sink) Set(java.util.Set) Relation(io.confluent.ksql.parser.tree.Relation) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) MetaStoreImpl(io.confluent.ksql.metastore.MetaStoreImpl) KsqlException(io.confluent.ksql.util.KsqlException) TransientQueryQueue(io.confluent.ksql.query.TransientQueryQueue) PullQueryResult(io.confluent.ksql.physical.pull.PullQueryResult) Iterables(com.google.common.collect.Iterables) FormatOptions(io.confluent.ksql.schema.utils.FormatOptions) PushRouting(io.confluent.ksql.physical.scalablepush.PushRouting) UnqualifiedColumnReferenceExp(io.confluent.ksql.execution.expression.tree.UnqualifiedColumnReferenceExp) CreateStreamAsSelect(io.confluent.ksql.parser.tree.CreateStreamAsSelect) SessionConfig(io.confluent.ksql.config.SessionConfig) CreateStream(io.confluent.ksql.parser.tree.CreateStream) SingleColumn(io.confluent.ksql.parser.tree.SingleColumn) MetaStore(io.confluent.ksql.metastore.MetaStore) KsqlStructuredDataOutputNode(io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) PushRoutingOptions(io.confluent.ksql.physical.scalablepush.PushRoutingOptions) PlanInfoExtractor(io.confluent.ksql.execution.plan.PlanInfoExtractor) DataSourceNode(io.confluent.ksql.planner.plan.DataSourceNode) QueryContainer(io.confluent.ksql.parser.tree.QueryContainer) OutputNode(io.confluent.ksql.planner.plan.OutputNode) Throwables(com.google.common.base.Throwables) PushQueryMetadata(io.confluent.ksql.util.PushQueryMetadata) PushQueryQueuePopulator(io.confluent.ksql.physical.scalablepush.PushQueryQueuePopulator) ValueFormat(io.confluent.ksql.serde.ValueFormat) Table(io.confluent.ksql.parser.tree.Table) KsqlStatementException(io.confluent.ksql.util.KsqlStatementException) CreateAsSelect(io.confluent.ksql.parser.tree.CreateAsSelect) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) OutputRefinement(io.confluent.ksql.parser.OutputRefinement) LogicalPlanNode(io.confluent.ksql.planner.LogicalPlanNode) Query(io.confluent.ksql.parser.tree.Query) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) AliasedRelation(io.confluent.ksql.parser.tree.AliasedRelation) Formats(io.confluent.ksql.execution.plan.Formats) MutableMetaStore(io.confluent.ksql.metastore.MutableMetaStore) Context(io.vertx.core.Context) CreateTable(io.confluent.ksql.parser.tree.CreateTable) 
Locale(java.util.Locale) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) KsqlTable(io.confluent.ksql.metastore.model.KsqlTable) TopicPartition(org.apache.kafka.common.TopicPartition) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Collection(java.util.Collection) ScalablePushQueryMetadata(io.confluent.ksql.util.ScalablePushQueryMetadata) ScalablePushQueryMetrics(io.confluent.ksql.internal.ScalablePushQueryMetrics) KsqlConfig(io.confluent.ksql.util.KsqlConfig) ExecutableDdlStatement(io.confluent.ksql.parser.tree.ExecutableDdlStatement) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Collectors(java.util.stream.Collectors) DdlCommand(io.confluent.ksql.execution.ddl.commands.DdlCommand) Objects(java.util.Objects) PullQueryExecutorMetrics(io.confluent.ksql.internal.PullQueryExecutorMetrics) QueryPlannerOptions(io.confluent.ksql.planner.QueryPlannerOptions) ConsistencyOffsetVector(io.confluent.ksql.util.ConsistencyOffsetVector) Optional(java.util.Optional) Statement(io.confluent.ksql.parser.tree.Statement) KsqlConstants(io.confluent.ksql.util.KsqlConstants) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) PullQueryQueuePopulator(io.confluent.ksql.physical.pull.PullQueryQueuePopulator) PullQueryQueue(io.confluent.ksql.query.PullQueryQueue) PushPhysicalPlan(io.confluent.ksql.physical.scalablepush.PushPhysicalPlan) HARouting(io.confluent.ksql.physical.pull.HARouting) PlanInfo(io.confluent.ksql.execution.plan.PlanInfo) PullPhysicalPlanBuilder(io.confluent.ksql.physical.pull.PullPhysicalPlanBuilder) KeyFormat(io.confluent.ksql.serde.KeyFormat) ResultType(io.confluent.ksql.util.PushQueryMetadata.ResultType) CompletableFuture(java.util.concurrent.CompletableFuture) DataSourceType(io.confluent.ksql.metastore.model.DataSource.DataSourceType) OptionalInt(java.util.OptionalInt) PushOffsetRange(io.confluent.ksql.util.PushOffsetRange) LogicalPlanner(io.confluent.ksql.planner.LogicalPlanner) Logger(org.slf4j.Logger) PhysicalPlan(io.confluent.ksql.physical.PhysicalPlan) PlanSummary(io.confluent.ksql.util.PlanSummary) PullPhysicalPlan(io.confluent.ksql.physical.pull.PullPhysicalPlan) PlanNode(io.confluent.ksql.planner.plan.PlanNode) QueryRegistry(io.confluent.ksql.query.QueryRegistry) Collections(java.util.Collections) CreateTableCommand(io.confluent.ksql.execution.ddl.commands.CreateTableCommand) Select(io.confluent.ksql.parser.tree.Select) PushQueryPreparer(io.confluent.ksql.physical.scalablepush.PushQueryPreparer)
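The catch block above is careful not to write the statement text to the standard server logs when the exception message or stack trace embeds it, since the query can contain sensitive values; the text still reaches the caller through the rethrown KsqlStatementException. Below is a simplified, standalone sketch of that redaction check, reduced to the message only and using plain stderr instead of a logger; the class name and sample statement are invented.

import java.util.Locale;

// Only log the exception verbatim when its message does not contain the statement text,
// which may hold sensitive values, mirroring the check in the catch block above.
public final class SensitiveLogSketch {
    public static void logFailure(final String statementText, final Exception e) {
        final String stmtLower = statementText.toLowerCase(Locale.ROOT);
        final String messageLower = String.valueOf(e.getMessage()).toLowerCase(Locale.ROOT);

        if (messageLower.contains(stmtLower)) {
            // The message embeds the query text; log only a generic marker.
            System.err.println("Query failed; message withheld because it contains the statement text.");
        } else {
            System.err.println("Query failed: " + e.getMessage());
        }
    }

    public static void main(final String[] args) {
        logFailure("SELECT * FROM orders WHERE card='4111'",
            new IllegalStateException("Cannot run: SELECT * FROM orders WHERE card='4111'"));
    }
}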

Example 10 with ServiceContext

Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

The class OrphanedTransientQueryCleaner, method cleanupOrphanedInternalTopics.

/**
 * Cleans up any internal topics that may exist for the given set of query application
 * ids, since it's assumed that they are completed.
 * @param serviceContext The service context
 * @param queryApplicationIds The set of completed query application ids
 */
public void cleanupOrphanedInternalTopics(final ServiceContext serviceContext, final Set<String> queryApplicationIds) {
    final KafkaTopicClient topicClient = serviceContext.getTopicClient();
    final Set<String> topicNames;
    try {
        topicNames = topicClient.listTopicNames();
    } catch (KafkaResponseGetFailedException e) {
        LOG.error("Couldn't fetch topic names", e);
        return;
    }
    // Find any transient query topics
    final Set<String> orphanedQueryApplicationIds = topicNames.stream()
        .map(topicName -> queryApplicationIds.stream()
            .filter(topicName::startsWith)
            .findFirst())
        .filter(Optional::isPresent)
        .map(Optional::get)
        .collect(Collectors.toSet());
    for (final String queryApplicationId : orphanedQueryApplicationIds) {
        cleanupService.addCleanupTask(new QueryCleanupService.QueryCleanupTask(
            serviceContext,
            queryApplicationId,
            Optional.empty(),
            true,
            ksqlConfig.getKsqlStreamConfigProps()
                .getOrDefault(
                    StreamsConfig.STATE_DIR_CONFIG,
                    StreamsConfig.configDef().defaultValues().get(StreamsConfig.STATE_DIR_CONFIG))
                .toString(),
            ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG),
            ksqlConfig.getString(KsqlConfig.KSQL_PERSISTENT_QUERY_NAME_PREFIX_CONFIG)));
    }
}
Also used : StreamsConfig(org.apache.kafka.streams.StreamsConfig) Logger(org.slf4j.Logger) Objects.requireNonNull(java.util.Objects.requireNonNull) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) Optional(java.util.Optional) Set(java.util.Set) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) Collectors(java.util.stream.Collectors) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings)
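The core of the cleanup above is the prefix match between existing topic names and the application ids of completed transient queries. Here is a standalone sketch of just that matching step using only the JDK; the topic and id strings are invented and only illustrate the startsWith convention for transient query topics.

import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

// Given the topics that exist and the application ids of completed transient queries,
// collect the ids whose internal topics are still present and therefore need cleanup.
public final class OrphanMatchSketch {
    public static void main(final String[] args) {
        final Set<String> topicNames = Set.of(
            "_confluent-ksql-default_transient_123-repartition",
            "orders");
        final Set<String> queryApplicationIds = Set.of(
            "_confluent-ksql-default_transient_123",
            "_confluent-ksql-default_transient_456");

        final Set<String> orphaned = topicNames.stream()
            .map(topicName -> queryApplicationIds.stream()
                .filter(topicName::startsWith)
                .findFirst())
            .filter(Optional::isPresent)
            .map(Optional::get)
            .collect(Collectors.toSet());

        // Prints only the id whose topics still exist: ..._transient_123
        orphaned.forEach(System.out::println);
    }
}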

Aggregations

ServiceContext (io.confluent.ksql.services.ServiceContext): 44 uses
Optional (java.util.Optional): 25 uses
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 23 uses
Collectors (java.util.stream.Collectors): 23 uses
List (java.util.List): 22 uses
ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement): 21 uses
KsqlException (io.confluent.ksql.util.KsqlException): 21 uses
Map (java.util.Map): 21 uses
ImmutableMap (com.google.common.collect.ImmutableMap): 18 uses
TestServiceContext (io.confluent.ksql.services.TestServiceContext): 15 uses
Collections (java.util.Collections): 15 uses
ImmutableList (com.google.common.collect.ImmutableList): 13 uses
KsqlExecutionContext (io.confluent.ksql.KsqlExecutionContext): 13 uses
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 13 uses
KsqlEngine (io.confluent.ksql.engine.KsqlEngine): 12 uses
KsqlStatementException (io.confluent.ksql.util.KsqlStatementException): 12 uses
Test (org.junit.Test): 12 uses
Logger (org.slf4j.Logger): 10 uses
LoggerFactory (org.slf4j.LoggerFactory): 10 uses
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 9 uses