
Example 1 with KafkaTopicClient

Use of io.confluent.ksql.services.KafkaTopicClient in project ksql by confluentinc.

From class ListTopicsExecutor, method execute:

public static StatementExecutorResponse execute(
    final ConfiguredStatement<ListTopics> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext) {
    final KafkaTopicClient client = serviceContext.getTopicClient();
    final Map<String, TopicDescription> topicDescriptions = listTopics(client, statement);
    if (statement.getStatement().getShowExtended()) {
        final KafkaConsumerGroupClient consumerGroupClient =
            new KafkaConsumerGroupClientImpl(serviceContext::getAdminClient);
        final Map<String, List<Integer>> topicConsumersAndGroupCount =
            getTopicConsumerAndGroupCounts(consumerGroupClient);
        final List<KafkaTopicInfoExtended> topicInfoExtendedList = topicDescriptions.values().stream()
            .map(desc -> topicDescriptionToTopicInfoExtended(desc, topicConsumersAndGroupCount))
            .collect(Collectors.toList());
        return StatementExecutorResponse.handled(Optional.of(
            new KafkaTopicsListExtended(statement.getStatementText(), topicInfoExtendedList)));
    } else {
        final List<KafkaTopicInfo> topicInfoList = topicDescriptions.values().stream()
            .map(ListTopicsExecutor::topicDescriptionToTopicInfo)
            .collect(Collectors.toList());
        return StatementExecutorResponse.handled(Optional.of(
            new KafkaTopicsList(statement.getStatementText(), topicInfoList)));
    }
}
Also used: Arrays(java.util.Arrays) SessionProperties(io.confluent.ksql.rest.SessionProperties) ListTopics(io.confluent.ksql.parser.tree.ListTopics) ServiceContext(io.confluent.ksql.services.ServiceContext) ConsumerSummary(io.confluent.ksql.services.KafkaConsumerGroupClient.ConsumerSummary) HashMap(java.util.HashMap) KafkaTopicsList(io.confluent.ksql.rest.entity.KafkaTopicsList) KafkaTopicsListExtended(io.confluent.ksql.rest.entity.KafkaTopicsListExtended) ReservedInternalTopics(io.confluent.ksql.util.ReservedInternalTopics) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Map(java.util.Map) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) TopicPartition(org.apache.kafka.common.TopicPartition) KafkaConsumerGroupClientImpl(io.confluent.ksql.services.KafkaConsumerGroupClientImpl) Collection(java.util.Collection) Set(java.util.Set) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) Collectors(java.util.stream.Collectors) List(java.util.List) TreeMap(java.util.TreeMap) KsqlExecutionContext(io.confluent.ksql.KsqlExecutionContext) KafkaConsumerGroupClient(io.confluent.ksql.services.KafkaConsumerGroupClient) KafkaTopicInfoExtended(io.confluent.ksql.rest.entity.KafkaTopicInfoExtended) KafkaTopicInfo(io.confluent.ksql.rest.entity.KafkaTopicInfo) Optional(java.util.Optional)
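The private listTopics helper is not shown above; it boils down to two KafkaTopicClient calls, listing topic names and then describing them. A minimal self-contained sketch of that pattern (the helper name and the sorting with TreeMap are assumptions, not the project's exact code):

import io.confluent.ksql.services.KafkaTopicClient;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.kafka.clients.admin.TopicDescription;

public final class TopicLookupSketch {
    // Hypothetical helper: fetch a description for every topic visible to the client.
    static Map<String, TopicDescription> describeAllTopics(final KafkaTopicClient client) {
        final Set<String> names = client.listTopicNames();   // one admin round trip for the names
        return new TreeMap<>(client.describeTopics(names));  // another for partition/replica details, sorted by name
    }
}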

Example 2 with KafkaTopicClient

Use of io.confluent.ksql.services.KafkaTopicClient in project ksql by confluentinc.

From class OrphanedTransientQueryCleaner, method cleanupOrphanedInternalTopics:

/**
 * Cleans up any internal topics that may exist for the given set of query application
 * ids, since it's assumed that they are completed.
 * @param serviceContext The service context
 * @param queryApplicationIds The set of completed query application ids
 */
public void cleanupOrphanedInternalTopics(
    final ServiceContext serviceContext,
    final Set<String> queryApplicationIds) {
    final KafkaTopicClient topicClient = serviceContext.getTopicClient();
    final Set<String> topicNames;
    try {
        topicNames = topicClient.listTopicNames();
    } catch (KafkaResponseGetFailedException e) {
        LOG.error("Couldn't fetch topic names", e);
        return;
    }
    // Find any transient query topics
    final Set<String> orphanedQueryApplicationIds = topicNames.stream()
        .map(topicName -> queryApplicationIds.stream().filter(topicName::startsWith).findFirst())
        .filter(Optional::isPresent)
        .map(Optional::get)
        .collect(Collectors.toSet());
    for (final String queryApplicationId : orphanedQueryApplicationIds) {
        cleanupService.addCleanupTask(new QueryCleanupService.QueryCleanupTask(
            serviceContext,
            queryApplicationId,
            Optional.empty(),
            true,
            ksqlConfig.getKsqlStreamConfigProps()
                .getOrDefault(
                    StreamsConfig.STATE_DIR_CONFIG,
                    StreamsConfig.configDef().defaultValues().get(StreamsConfig.STATE_DIR_CONFIG))
                .toString(),
            ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG),
            ksqlConfig.getString(KsqlConfig.KSQL_PERSISTENT_QUERY_NAME_PREFIX_CONFIG)));
    }
}
Also used: StreamsConfig(org.apache.kafka.streams.StreamsConfig) Logger(org.slf4j.Logger) Objects.requireNonNull(java.util.Objects.requireNonNull) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) Optional(java.util.Optional) Set(java.util.Set) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaResponseGetFailedException(io.confluent.ksql.exception.KafkaResponseGetFailedException) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) Collectors(java.util.stream.Collectors) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings)
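The prefix match above is what classifies a topic as belonging to an orphaned transient query. A small standalone sketch of the same stream pipeline with made-up topic and application-id values (none of these names come from the project):

import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

public final class OrphanMatchSketch {
    public static void main(String[] args) {
        // Hypothetical internal topics left behind by two transient queries, plus a user topic.
        final Set<String> topicNames = Set.of(
            "_confluent-ksql-default_transient_123-Aggregate-Materialize-changelog",
            "_confluent-ksql-default_transient_456-Join-repartition",
            "orders");
        final Set<String> queryApplicationIds = Set.of(
            "_confluent-ksql-default_transient_123",
            "_confluent-ksql-default_transient_456");

        // Same pipeline as the cleaner: keep each application id that prefixes at least one topic name.
        final Set<String> orphaned = topicNames.stream()
            .map(name -> queryApplicationIds.stream().filter(name::startsWith).findFirst())
            .filter(Optional::isPresent)
            .map(Optional::get)
            .collect(Collectors.toSet());

        System.out.println(orphaned); // both application ids; "orders" matches neither
    }
}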

Example 3 with KafkaTopicClient

Use of io.confluent.ksql.services.KafkaTopicClient in project ksql by confluentinc.

From class KsqlContextTestUtil, method create:

public static KsqlContext create(
    final KsqlConfig ksqlConfig,
    final SchemaRegistryClient schemaRegistryClient,
    final FunctionRegistry functionRegistry) {
    final KafkaClientSupplier clientSupplier = new DefaultKafkaClientSupplier();
    final Admin adminClient = clientSupplier.getAdmin(ksqlConfig.getKsqlAdminClientConfigProps());
    final KafkaTopicClient kafkaTopicClient = new KafkaTopicClientImpl(() -> adminClient);
    final ServiceContext serviceContext = TestServiceContext.create(
        clientSupplier,
        adminClient,
        kafkaTopicClient,
        () -> schemaRegistryClient,
        new DefaultConnectClientFactory(ksqlConfig).get(Optional.empty(), Collections.emptyList(), Optional.empty()));
    final String metricsPrefix = "instance-" + COUNTER.getAndIncrement() + "-";
    final KsqlEngine engine = new KsqlEngine(
        serviceContext,
        ProcessingLogContext.create(),
        functionRegistry,
        ServiceInfo.create(ksqlConfig, metricsPrefix),
        new SequentialQueryIdGenerator(),
        ksqlConfig,
        Collections.emptyList(),
        new MetricCollectors());
    return new KsqlContext(serviceContext, ksqlConfig, engine, Injectors.DEFAULT);
}
Also used: DefaultConnectClientFactory(io.confluent.ksql.services.DefaultConnectClientFactory) KsqlEngine(io.confluent.ksql.engine.KsqlEngine) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) DefaultKafkaClientSupplier(org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier) KafkaClientSupplier(org.apache.kafka.streams.KafkaClientSupplier) ServiceContext(io.confluent.ksql.services.ServiceContext) TestServiceContext(io.confluent.ksql.services.TestServiceContext) MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) SequentialQueryIdGenerator(io.confluent.ksql.query.id.SequentialQueryIdGenerator) Admin(org.apache.kafka.clients.admin.Admin) KafkaTopicClientImpl(io.confluent.ksql.services.KafkaTopicClientImpl)
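A hedged sketch of how this test factory might be called; the broker address, the MockSchemaRegistryClient, the InternalFunctionRegistry, and the CREATE STREAM statement are illustrative assumptions rather than details taken from the snippet above:

import com.google.common.collect.ImmutableMap;
import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import io.confluent.ksql.KsqlContext;
import io.confluent.ksql.function.InternalFunctionRegistry;
import io.confluent.ksql.util.KsqlConfig;

public final class KsqlContextUsageSketch {
    public static void main(String[] args) {
        // Hypothetical config pointing at a local test broker.
        final KsqlConfig ksqlConfig = new KsqlConfig(
            ImmutableMap.of("bootstrap.servers", "localhost:9092"));
        final KsqlContext ksql = KsqlContextTestUtil.create(
            ksqlConfig,
            new MockSchemaRegistryClient(),
            new InternalFunctionRegistry());
        try {
            // Hypothetical statement run against the embedded context.
            ksql.sql("CREATE STREAM orders (id INT KEY, total DOUBLE) "
                + "WITH (kafka_topic='orders', value_format='JSON', partitions=1);");
        } finally {
            ksql.close();
        }
    }
}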

Example 4 with KafkaTopicClient

Use of io.confluent.ksql.services.KafkaTopicClient in project ksql by confluentinc.

From class IntegrationTestHarness, method ensureTopics:

/**
 * Ensure topics with the given {@code topicNames} exist.
 *
 * <p>Topics will be created, if they do not already exist, with the specified
 * {@code partitionCount}.
 *
 * @param partitionCount the number of partitions to create each topic with.
 * @param topicNames the names of the topics to ensure exist.
 */
public void ensureTopics(final int partitionCount, final String... topicNames) {
    final KafkaTopicClient topicClient = serviceContext.get().getTopicClient();
    Arrays.stream(topicNames)
        .filter(name -> !topicClient.isTopicExists(name))
        .forEach(name -> topicClient.createTopic(name, partitionCount, DEFAULT_REPLICATION_FACTOR));
}
Also used: PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) Arrays(java.util.Arrays) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) AssertEventually.assertThatEventually(io.confluent.ksql.test.util.AssertEventually.assertThatEventually) ServiceContext(io.confluent.ksql.services.ServiceContext) ProcessingLogContext(io.confluent.ksql.logging.processing.ProcessingLogContext) ConnectProperties(io.confluent.ksql.serde.connect.ConnectProperties) Serde(org.apache.kafka.common.serialization.Serde) Duration(java.time.Duration) Map(java.util.Map) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) AvroFormat(io.confluent.ksql.serde.avro.AvroFormat) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) ImmutableMap(com.google.common.collect.ImmutableMap) Collection(java.util.Collection) SchemaTranslator(io.confluent.ksql.serde.SchemaTranslator) KsqlConfig(io.confluent.ksql.util.KsqlConfig) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) TestServiceContext(io.confluent.ksql.services.TestServiceContext) Objects(java.util.Objects) List(java.util.List) ExternalResource(org.junit.rules.ExternalResource) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) MapMatchers.mapHasSize(io.confluent.ksql.test.util.MapMatchers.mapHasSize) Entry(java.util.Map.Entry) Optional(java.util.Optional) Matchers.is(org.hamcrest.Matchers.is) FormatInfo(io.confluent.ksql.serde.FormatInfo) KsqlConstants(io.confluent.ksql.util.KsqlConstants) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) EmbeddedSingleNodeKafkaCluster(io.confluent.ksql.test.util.EmbeddedSingleNodeKafkaCluster) HashMap(java.util.HashMap) Multimap(com.google.common.collect.Multimap) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) AtomicReference(java.util.concurrent.atomic.AtomicReference) Supplier(java.util.function.Supplier) ImmutableList(com.google.common.collect.ImmutableList) Matchers.hasSize(org.hamcrest.Matchers.hasSize) GenericRowSerDe(io.confluent.ksql.serde.GenericRowSerDe) Deserializer(org.apache.kafka.common.serialization.Deserializer) TestDataProvider(io.confluent.ksql.util.TestDataProvider) ConsumerTestUtil.hasUniqueRecords(io.confluent.ksql.test.util.ConsumerTestUtil.hasUniqueRecords) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) TimeUnit(java.util.concurrent.TimeUnit) KsqlConfigTestUtil(io.confluent.ksql.KsqlConfigTestUtil) GenericKeySerDe(io.confluent.ksql.serde.GenericKeySerDe) ConsumerTestUtil(io.confluent.ksql.test.util.ConsumerTestUtil) ConsumerTestUtil.toUniqueRecords(io.confluent.ksql.test.util.ConsumerTestUtil.toUniqueRecords) GenericRow(io.confluent.ksql.GenericRow) Format(io.confluent.ksql.serde.Format) Serializer(org.apache.kafka.common.serialization.Serializer) Matcher(org.hamcrest.Matcher) SchemaMetadata(io.confluent.kafka.schemaregistry.client.SchemaMetadata) GenericKey(io.confluent.ksql.GenericKey) PersistenceSchema(io.confluent.ksql.schema.ksql.PersistenceSchema) Collections(java.util.Collections)
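The helper reduces to two KafkaTopicClient calls per topic: an existence check followed by an idempotent create. A minimal standalone sketch of that pattern (the method name and the replication factor of 1 are assumptions, chosen for a single-broker test cluster):

import io.confluent.ksql.services.KafkaTopicClient;

public final class EnsureTopicSketch {
    // Hypothetical helper: create the topic only if it does not already exist.
    static void ensureTopic(final KafkaTopicClient client, final String name, final int partitions) {
        if (!client.isTopicExists(name)) {
            client.createTopic(name, partitions, (short) 1); // replication factor 1 for a single-broker test cluster
        }
    }
}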

Example 5 with KafkaTopicClient

Use of io.confluent.ksql.services.KafkaTopicClient in project ksql by confluentinc.

From class KsqlRestoreCommandTopic, method maybeCleanUpQuery:

private static void maybeCleanUpQuery(final byte[] command, final KsqlConfig ksqlConfig) {
    boolean queryIdFound = false;
    final Map<String, Object> streamsProperties =
        new HashMap<>(ksqlConfig.getKsqlStreamConfigProps());
    boolean sharedRuntimeQuery = false;
    String queryId = "";
    final JSONObject jsonObject = new JSONObject(new String(command, StandardCharsets.UTF_8));
    if (hasKey(jsonObject, "plan")) {
        final JSONObject plan = jsonObject.getJSONObject("plan");
        if (hasKey(plan, "queryPlan")) {
            final JSONObject queryPlan = plan.getJSONObject("queryPlan");
            queryId = queryPlan.getString("queryId");
            if (hasKey(queryPlan, "runtimeId") && ((Optional<String>) queryPlan.get("runtimeId")).isPresent()) {
                streamsProperties.put(
                    StreamsConfig.APPLICATION_ID_CONFIG,
                    ((Optional<String>) queryPlan.get("runtimeId")).get());
                sharedRuntimeQuery = true;
            } else {
                streamsProperties.put(
                    StreamsConfig.APPLICATION_ID_CONFIG,
                    QueryApplicationId.build(ksqlConfig, true, new QueryId(queryId)));
            }
            queryIdFound = true;
        }
    }
    // The command contains a query: clean up its internal state store and internal topics.
    if (queryIdFound) {
        final StreamsConfig streamsConfig = new StreamsConfig(streamsProperties);
        final String topicPrefix = sharedRuntimeQuery
            ? streamsConfig.getString(StreamsConfig.APPLICATION_ID_CONFIG)
            : QueryApplicationId.buildInternalTopicPrefix(ksqlConfig, sharedRuntimeQuery) + queryId;
        try {
            final Admin admin = new DefaultKafkaClientSupplier()
                .getAdmin(ksqlConfig.getKsqlAdminClientConfigProps());
            final KafkaTopicClient topicClient = new KafkaTopicClientImpl(() -> admin);
            topicClient.deleteInternalTopics(topicPrefix);
            new StateDirectory(
                streamsConfig,
                Time.SYSTEM,
                true,
                ksqlConfig.getBoolean(KsqlConfig.KSQL_SHARED_RUNTIME_ENABLED)
            ).clean();
            System.out.println(
                String.format("Cleaned up internal state store and internal topics for query %s", topicPrefix));
        } catch (final Exception e) {
            System.out.println(String.format("Failed to clean up query %s ", topicPrefix));
        }
    }
}
Also used: HashMap(java.util.HashMap) DefaultKafkaClientSupplier(org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier) QueryId(io.confluent.ksql.query.QueryId) Admin(org.apache.kafka.clients.admin.Admin) NoSuchFileException(java.nio.file.NoSuchFileException) SerializationException(org.apache.kafka.common.errors.SerializationException) IncompatibleKsqlCommandVersionException(io.confluent.ksql.rest.server.resources.IncompatibleKsqlCommandVersionException) OutOfOrderSequenceException(org.apache.kafka.common.errors.OutOfOrderSequenceException) ProducerFencedException(org.apache.kafka.common.errors.ProducerFencedException) KsqlException(io.confluent.ksql.util.KsqlException) TimeoutException(org.apache.kafka.common.errors.TimeoutException) AuthorizationException(org.apache.kafka.common.errors.AuthorizationException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) JSONObject(org.json.JSONObject) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) KafkaTopicClientImpl(io.confluent.ksql.services.KafkaTopicClientImpl) StreamsConfig(org.apache.kafka.streams.StreamsConfig) StateDirectory(org.apache.kafka.streams.processor.internals.StateDirectory)
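A minimal standalone sketch of the clean-up calls at the heart of the method; the bootstrap address and the application-id prefix are made-up values, not taken from the snippet above:

import io.confluent.ksql.services.KafkaTopicClient;
import io.confluent.ksql.services.KafkaTopicClientImpl;
import java.util.Map;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;

public final class InternalTopicCleanupSketch {
    public static void main(String[] args) {
        // Hypothetical connection to the cluster that hosts the query's internal topics.
        final Admin admin = Admin.create(
            Map.of(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"));
        try {
            final KafkaTopicClient topicClient = new KafkaTopicClientImpl(() -> admin);
            // Deletes every repartition/changelog topic whose name starts with the given application-id prefix.
            topicClient.deleteInternalTopics("_confluent-ksql-default_query_CSAS_FOO_1");
        } finally {
            admin.close();
        }
    }
}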

Aggregations

KafkaTopicClient (io.confluent.ksql.services.KafkaTopicClient): 10
ServiceContext (io.confluent.ksql.services.ServiceContext): 4
HashMap (java.util.HashMap): 3
Optional (java.util.Optional): 3
SuppressFBWarnings (edu.umd.cs.findbugs.annotations.SuppressFBWarnings): 2
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 2
KsqlEngine (io.confluent.ksql.engine.KsqlEngine): 2
ProcessingLogContext (io.confluent.ksql.logging.processing.ProcessingLogContext): 2
MetricCollectors (io.confluent.ksql.metrics.MetricCollectors): 2
KafkaTopicClientImpl (io.confluent.ksql.services.KafkaTopicClientImpl): 2
TestServiceContext (io.confluent.ksql.services.TestServiceContext): 2
ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement): 2
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 2
Arrays (java.util.Arrays): 2
Collection (java.util.Collection): 2
List (java.util.List): 2
Map (java.util.Map): 2
Set (java.util.Set): 2
Collectors (java.util.stream.Collectors): 2
Admin (org.apache.kafka.clients.admin.Admin): 2