
Example 36 with AdminClient

Use of org.apache.kafka.clients.admin.AdminClient in project strimzi by strimzi.

In the class BaseKafkaImpl, the method topicMetadata:

/**
 * Get topic metadata (the topic's description and config) via the Kafka AdminClient API,
 * calling the given handler (in a different thread) with the result.
 */
@Override
public void topicMetadata(TopicName topicName, Handler<AsyncResult<TopicMetadata>> handler) {
    LOGGER.debug("Getting metadata for topic {}", topicName);
    ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName.toString());
    KafkaFuture<TopicDescription> descriptionFuture = adminClient.describeTopics(Collections.singleton(topicName.toString())).values().get(topicName.toString());
    KafkaFuture<Config> configFuture = adminClient.describeConfigs(Collections.singleton(resource)).values().get(resource);
    queueWork(new MetadataWork(descriptionFuture, configFuture, result -> handler.handle(result)));
}
Also used : Config(org.apache.kafka.clients.admin.Config) Logger(org.slf4j.Logger) Vertx(io.vertx.core.Vertx) LoggerFactory(org.slf4j.LoggerFactory) Set(java.util.Set) KafkaFuture(org.apache.kafka.common.KafkaFuture) Future(io.vertx.core.Future) AdminClient(org.apache.kafka.clients.admin.AdminClient) ExecutionException(java.util.concurrent.ExecutionException) ConfigResource(org.apache.kafka.common.config.ConfigResource) Map(java.util.Map) ListTopicsResult(org.apache.kafka.clients.admin.ListTopicsResult) UnknownTopicOrPartitionException(org.apache.kafka.common.errors.UnknownTopicOrPartitionException) AsyncResult(io.vertx.core.AsyncResult) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) Handler(io.vertx.core.Handler) Collections(java.util.Collections)
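
For orientation, the two KafkaFutures above are queued as a strimzi-internal MetadataWork and completed on another thread. The sketch below is a minimal, blocking equivalent using only the public AdminClient API, with a placeholder broker address and topic name; it is not the operator's actual callback wiring.

import java.util.Collections;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.config.ConfigResource;

public class TopicMetadataSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        String topic = "my-topic"; // placeholder topic name
        try (AdminClient adminClient = AdminClient.create(
                Collections.<String, Object>singletonMap(
                        AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"))) {
            // Same two AdminClient calls as topicMetadata() above, resolved with blocking get()
            TopicDescription description = adminClient
                    .describeTopics(Collections.singleton(topic)).values().get(topic).get();
            ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topic);
            Config config = adminClient
                    .describeConfigs(Collections.singleton(resource)).values().get(resource).get();
            System.out.println(description.partitions().size() + " partitions, config: " + config);
        }
    }
}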

Example 37 with AdminClient

Use of org.apache.kafka.clients.admin.AdminClient in project ksql by confluentinc.

In the class KsqlContext, the method create:

public static KsqlContext create(KsqlConfig ksqlConfig, SchemaRegistryClient schemaRegistryClient) {
    if (ksqlConfig == null) {
        ksqlConfig = new KsqlConfig(Collections.emptyMap());
    }
    Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
    if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
        streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID_OPTION_DEFAULT);
    }
    if (!streamsProperties.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BOOTSTRAP_SERVER_OPTION_DEFAULT);
    }
    AdminClient adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    if (schemaRegistryClient == null) {
        return new KsqlContext(adminClient, topicClient, new KsqlEngine(ksqlConfig, topicClient));
    } else {
        return new KsqlContext(adminClient, topicClient, new KsqlEngine(ksqlConfig, topicClient, schemaRegistryClient, new MetaStoreImpl()));
    }
}
Also used : KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MetaStoreImpl(io.confluent.ksql.metastore.MetaStoreImpl) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient)
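
Once created, a KsqlContext is typically driven through its sql() method (as Example 39 does with ksqlContext.sql(queryString)). A minimal sketch of that usage, assuming KsqlContext lives in io.confluent.ksql alongside KsqlEngine, a broker at localhost:9092, and a placeholder topic and stream name:

import java.util.HashMap;
import java.util.Map;
import io.confluent.ksql.KsqlContext;
import io.confluent.ksql.util.KsqlConfig;
import org.apache.kafka.streams.StreamsConfig;

public class KsqlContextSketch {
    public static void main(String[] args) throws Exception {
        // Assumed: a broker on localhost:9092; the statement references a placeholder topic
        Map<String, Object> props = new HashMap<>();
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        KsqlConfig ksqlConfig = new KsqlConfig(props);
        // Passing null takes the no-SchemaRegistryClient branch shown in create() above
        KsqlContext ksqlContext = KsqlContext.create(ksqlConfig, null);
        ksqlContext.sql("CREATE STREAM pageviews_json (viewtime BIGINT, userid VARCHAR, pageid VARCHAR) "
                + "WITH (kafka_topic='pageviews', value_format='JSON');");
    }
}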

Example 38 with AdminClient

Use of org.apache.kafka.clients.admin.AdminClient in project ksql by confluentinc.

In the class EndToEndIntegrationTest, the method before:

@Before
public void before() throws Exception {
    testHarness = new IntegrationTestHarness();
    testHarness.start();
    Map<String, Object> streamsConfig = testHarness.ksqlConfig.getKsqlStreamConfigProps();
    streamsConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    KsqlConfig ksqlconfig = new KsqlConfig(streamsConfig);
    AdminClient adminClient = AdminClient.create(ksqlconfig.getKsqlAdminClientConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    ksqlEngine = new KsqlEngine(ksqlconfig, topicClient);
    testHarness.createTopic(pageViewTopic);
    testHarness.createTopic(usersTopic);
    pageViewDataProvider = new PageViewDataProvider();
    testHarness.publishTestData(usersTopic, new UserDataProvider(), System.currentTimeMillis() - 10000);
    testHarness.publishTestData(pageViewTopic, pageViewDataProvider, System.currentTimeMillis());
    ksqlEngine.buildMultipleQueries(format("CREATE TABLE %s (registertime bigint, gender varchar, regionid varchar, " + "userid varchar) WITH (kafka_topic='%s', value_format='JSON', key = 'userid');", userTable, usersTopic), Collections.emptyMap());
    ksqlEngine.buildMultipleQueries(format("CREATE STREAM %s (viewtime bigint, userid varchar, pageid varchar) " + "WITH (kafka_topic='%s', value_format='JSON');", pageViewStream, pageViewTopic), Collections.emptyMap());
}
Also used : KsqlEngine(io.confluent.ksql.KsqlEngine) UserDataProvider(io.confluent.ksql.util.UserDataProvider) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) PageViewDataProvider(io.confluent.ksql.util.PageViewDataProvider) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient) Before(org.junit.Before)
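
The same AdminClient that backs KafkaTopicClientImpl can also confirm directly that the topics created by the harness exist. A minimal sketch using only the public AdminClient API; the bootstrap address and the topic names (standing in for pageViewTopic and usersTopic) are placeholders:

import java.util.Collections;
import java.util.Set;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class TopicExistenceCheckSketch {
    public static void main(String[] args) throws Exception {
        try (AdminClient adminClient = AdminClient.create(
                Collections.<String, Object>singletonMap(
                        AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"))) {
            // listTopics() returns every non-internal topic name known to the cluster
            Set<String> topics = adminClient.listTopics().names().get();
            // "pageviews" and "users" stand in for pageViewTopic and usersTopic above
            System.out.println("pageviews present: " + topics.contains("pageviews"));
            System.out.println("users present: " + topics.contains("users"));
        }
    }
}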

Example 39 with AdminClient

Use of org.apache.kafka.clients.admin.AdminClient in project ksql by confluentinc.

In the class WindowingIntTest, the method shouldAggregateTumblingWindow:

@Test
public void shouldAggregateTumblingWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);
    final String streamName = "TUMBLING_AGGTEST";
    final String queryString = String.format("CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;", streamName, "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)", "TUMBLING ( SIZE 10 SECONDS)");
    ksqlContext.sql(queryString);
    Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    final GenericRow expected = new GenericRow(Arrays.asList(null, null, "ITEM_1", 2, /* 2 x items */ 20.0));
    final Map<String, GenericRow> results = new HashMap<>();
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(streamName, resultSchema, 1, new TimeWindowedDeserializer<>(new StringDeserializer()), MAX_POLL_PER_ITERATION);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");
    AdminClient adminClient = AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    Set<String> topicBeforeCleanup = topicClient.listTopicNames();
    assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(), topicBeforeCleanup.size(), equalTo(5));
    QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
    queryMetadata.close();
    Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
    assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(), topicsAfterCleanUp.size(), equalTo(3));
    assertThat(topicClient.getTopicCleanupPolicy(streamName), equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema) GenericRow(io.confluent.ksql.GenericRow) Windowed(org.apache.kafka.streams.kstream.Windowed) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)
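
The final assertion above verifies the cleanup policy of the sink topic through KafkaTopicClient. A hedged sketch of the same check done directly with the AdminClient describeConfigs API; the topic name mirrors the streamName used by the test and the broker address is a placeholder:

import java.util.Collections;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.config.TopicConfig;

public class CleanupPolicyCheckSketch {
    public static void main(String[] args) throws Exception {
        try (AdminClient adminClient = AdminClient.create(
                Collections.<String, Object>singletonMap(
                        AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"))) {
            // "TUMBLING_AGGTEST" stands in for the streamName created by the test
            ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, "TUMBLING_AGGTEST");
            Config config = adminClient.describeConfigs(Collections.singleton(resource))
                    .all().get().get(resource);
            // The test above expects the cleanup.policy of this sink topic to be "delete"
            System.out.println(config.get(TopicConfig.CLEANUP_POLICY_CONFIG).value());
        }
    }
}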

Example 40 with AdminClient

Use of org.apache.kafka.clients.admin.AdminClient in project apache-kafka-on-k8s by banzaicloud.

In the class ClientAuthenticationFailureTest, the method testAdminClientWithInvalidCredentials:

@Test
public void testAdminClientWithInvalidCredentials() {
    Map<String, Object> props = new HashMap<>(saslClientConfigs);
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + server.port());
    try (AdminClient client = AdminClient.create(props)) {
        DescribeTopicsResult result = client.describeTopics(Collections.singleton("test"));
        result.all().get();
        fail("Expected an authentication error!");
    } catch (Exception e) {
        assertTrue("Expected SaslAuthenticationException, got " + e.getCause().getClass(), e.getCause() instanceof SaslAuthenticationException);
    }
}
Also used : HashMap(java.util.HashMap) DescribeTopicsResult(org.apache.kafka.clients.admin.DescribeTopicsResult) SaslAuthenticationException(org.apache.kafka.common.errors.SaslAuthenticationException) AdminClient(org.apache.kafka.clients.admin.AdminClient) Test(org.junit.Test)
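
Worth noting: the authentication error does not surface directly from describeTopics() but as the cause of the ExecutionException thrown by the blocking get(), which is why the test inspects e.getCause(). A minimal sketch of that unwrapping, with a placeholder broker address standing in for the test's SASL-configured server:

import java.util.Collections;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.common.errors.SaslAuthenticationException;

public class AuthFailureUnwrapSketch {
    public static void main(String[] args) throws InterruptedException {
        // Placeholder config: in the test, saslClientConfigs carries deliberately bad credentials
        try (AdminClient client = AdminClient.create(
                Collections.<String, Object>singletonMap(
                        AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"))) {
            client.describeTopics(Collections.singleton("test")).all().get();
        } catch (ExecutionException e) {
            // The KafkaFuture wraps the broker-side failure; unwrap to see the real exception
            if (e.getCause() instanceof SaslAuthenticationException) {
                System.out.println("Authentication failed: " + e.getCause().getMessage());
            } else {
                System.out.println("Other failure: " + e.getCause());
            }
        }
    }
}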

Aggregations

AdminClient (org.apache.kafka.clients.admin.AdminClient): 70 usages
Test (org.junit.Test): 38 usages
KafkaFutureImpl (org.apache.kafka.common.internals.KafkaFutureImpl): 31 usages
NewTopic (org.apache.kafka.clients.admin.NewTopic): 30 usages
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 29 usages
MockAdminClient (org.apache.kafka.clients.admin.MockAdminClient): 27 usages
HashMap (java.util.HashMap): 24 usages
TopicMetadataAndConfig (org.apache.kafka.clients.admin.CreateTopicsResult.TopicMetadataAndConfig): 18 usages
TopicDescription (org.apache.kafka.clients.admin.TopicDescription): 18 usages
Config (org.apache.kafka.clients.admin.Config): 15 usages
Map (java.util.Map): 14 usages
ConsumerConfig (org.apache.kafka.clients.consumer.ConsumerConfig): 14 usages
ProducerConfig (org.apache.kafka.clients.producer.ProducerConfig): 14 usages
TopicConfig (org.apache.kafka.common.config.TopicConfig): 13 usages
MockTime (org.apache.kafka.common.utils.MockTime): 13 usages
TopicExistsException (org.apache.kafka.common.errors.TopicExistsException): 11 usages
ArrayList (java.util.ArrayList): 10 usages
TopicPartitionInfo (org.apache.kafka.common.TopicPartitionInfo): 10 usages
ConfigResource (org.apache.kafka.common.config.ConfigResource): 10 usages
UnknownTopicOrPartitionException (org.apache.kafka.common.errors.UnknownTopicOrPartitionException): 10 usages
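
As the aggregation counts suggest, AdminClient most often appears next to NewTopic for topic creation. A minimal, self-contained sketch of that common pairing, assuming a single-broker development cluster at localhost:9092 and a placeholder topic name:

import java.util.Collections;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.errors.TopicExistsException;

public class CreateTopicSketch {
    public static void main(String[] args) throws Exception {
        try (AdminClient adminClient = AdminClient.create(
                Collections.<String, Object>singletonMap(
                        AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"))) {
            // One partition, replication factor 1: suitable only for a single-broker dev cluster
            NewTopic topic = new NewTopic("example-topic", 1, (short) 1);
            try {
                adminClient.createTopics(Collections.singleton(topic)).all().get();
            } catch (ExecutionException e) {
                // Creating a topic that already exists is not an error for this sketch
                if (!(e.getCause() instanceof TopicExistsException)) {
                    throw e;
                }
            }
        }
    }
}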