Use of org.apache.kafka.clients.admin.AdminClient in project strimzi by strimzi.
The class BaseKafkaImpl, method topicMetadata.
/**
 * Get the metadata (topic description and topic config) via the Kafka AdminClient API,
 * calling the given handler (in a different thread) with the result.
 */
@Override
public void topicMetadata(TopicName topicName, Handler<AsyncResult<TopicMetadata>> handler) {
    LOGGER.debug("Getting metadata for topic {}", topicName);
    ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName.toString());
    KafkaFuture<TopicDescription> descriptionFuture = adminClient.describeTopics(Collections.singleton(topicName.toString())).values().get(topicName.toString());
    KafkaFuture<Config> configFuture = adminClient.describeConfigs(Collections.singleton(resource)).values().get(resource);
    queueWork(new MetadataWork(descriptionFuture, configFuture, result -> handler.handle(result)));
}
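For context, a minimal sketch of what the queued work might do with the two futures: block on both and complete the handler with either a combined result or the underlying failure. This is an illustration, not the strimzi MetadataWork implementation; the TopicMetadata(TopicDescription, Config) constructor is assumed from the surrounding project.

import java.util.concurrent.ExecutionException;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.KafkaFuture;

// Sketch only: waits for both AdminClient futures on a worker thread,
// then completes the Vert.x handler. TopicMetadata(description, config) is assumed.
static void completeMetadata(KafkaFuture<TopicDescription> descriptionFuture,
                             KafkaFuture<Config> configFuture,
                             Handler<AsyncResult<TopicMetadata>> handler) {
    try {
        TopicDescription description = descriptionFuture.get(); // partitions, replicas, leader
        Config config = configFuture.get();                     // topic-level configs
        handler.handle(Future.succeededFuture(new TopicMetadata(description, config)));
    } catch (ExecutionException e) {
        handler.handle(Future.failedFuture(e.getCause()));
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        handler.handle(Future.failedFuture(e));
    }
}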
Use of org.apache.kafka.clients.admin.AdminClient in project ksql by confluentinc.
The class KsqlContext, method create.
public static KsqlContext create(KsqlConfig ksqlConfig, SchemaRegistryClient schemaRegistryClient) {
    if (ksqlConfig == null) {
        ksqlConfig = new KsqlConfig(Collections.emptyMap());
    }
    Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
    if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
        streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID_OPTION_DEFAULT);
    }
    if (!streamsProperties.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BOOTSTRAP_SERVER_OPTION_DEFAULT);
    }
    AdminClient adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    if (schemaRegistryClient == null) {
        return new KsqlContext(adminClient, topicClient, new KsqlEngine(ksqlConfig, topicClient));
    } else {
        return new KsqlContext(adminClient, topicClient, new KsqlEngine(ksqlConfig, topicClient, schemaRegistryClient, new MetaStoreImpl()));
    }
}
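A minimal usage sketch of this factory, reusing the classes from the snippet above and assuming a local broker with no schema registry (so the two-argument KsqlEngine branch is taken); the broker address and the statement text are placeholders:

// Assumed example; "localhost:9092" and the CREATE STREAM statement are placeholders.
Map<String, Object> props = new HashMap<>();
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
KsqlContext ksqlContext = KsqlContext.create(new KsqlConfig(props), null);
ksqlContext.sql("CREATE STREAM pageviews (viewtime bigint, userid varchar, pageid varchar) " + "WITH (kafka_topic='pageviews', value_format='JSON');");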
Use of org.apache.kafka.clients.admin.AdminClient in project ksql by confluentinc.
The class EndToEndIntegrationTest, method before.
@Before
public void before() throws Exception {
    testHarness = new IntegrationTestHarness();
    testHarness.start();
    Map<String, Object> streamsConfig = testHarness.ksqlConfig.getKsqlStreamConfigProps();
    streamsConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    KsqlConfig ksqlconfig = new KsqlConfig(streamsConfig);
    AdminClient adminClient = AdminClient.create(ksqlconfig.getKsqlAdminClientConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    ksqlEngine = new KsqlEngine(ksqlconfig, topicClient);
    testHarness.createTopic(pageViewTopic);
    testHarness.createTopic(usersTopic);
    pageViewDataProvider = new PageViewDataProvider();
    testHarness.publishTestData(usersTopic, new UserDataProvider(), System.currentTimeMillis() - 10000);
    testHarness.publishTestData(pageViewTopic, pageViewDataProvider, System.currentTimeMillis());
    ksqlEngine.buildMultipleQueries(format("CREATE TABLE %s (registertime bigint, gender varchar, regionid varchar, " + "userid varchar) WITH (kafka_topic='%s', value_format='JSON', key = 'userid');", userTable, usersTopic), Collections.emptyMap());
    ksqlEngine.buildMultipleQueries(format("CREATE STREAM %s (viewtime bigint, userid varchar, pageid varchar) " + "WITH (kafka_topic='%s', value_format='JSON');", pageViewStream, pageViewTopic), Collections.emptyMap());
}
Use of org.apache.kafka.clients.admin.AdminClient in project ksql by confluentinc.
The class WindowingIntTest, method shouldAggregateTumblingWindow.
@Test
public void shouldAggregateTumblingWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);
    final String streamName = "TUMBLING_AGGTEST";
    final String queryString = String.format("CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;", streamName, "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)", "TUMBLING ( SIZE 10 SECONDS)");
    ksqlContext.sql(queryString);
    Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    final GenericRow expected = new GenericRow(Arrays.asList(null, null, "ITEM_1", 2, /* 2 x items */ 20.0));
    final Map<String, GenericRow> results = new HashMap<>();
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(streamName, resultSchema, 1, new TimeWindowedDeserializer<>(new StringDeserializer()), MAX_POLL_PER_ITERATION);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");
    AdminClient adminClient = AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    Set<String> topicBeforeCleanup = topicClient.listTopicNames();
    assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(), topicBeforeCleanup.size(), equalTo(5));
    QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
    queryMetadata.close();
    Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
    assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(), topicsAfterCleanUp.size(), equalTo(3));
    assertThat(topicClient.getTopicCleanupPolicy(streamName), equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
}
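The KafkaTopicClientImpl.listTopicNames() call used above presumably delegates to the plain AdminClient; a minimal sketch of the equivalent direct call using only the public AdminClient API:

import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.admin.AdminClient;

// Lists topic names directly via the AdminClient, blocking until the broker responds.
static Set<String> listTopicNames(AdminClient adminClient) throws InterruptedException, ExecutionException {
    return adminClient.listTopics().names().get();
}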
Use of org.apache.kafka.clients.admin.AdminClient in project apache-kafka-on-k8s by banzaicloud.
The class ClientAuthenticationFailureTest, method testAdminClientWithInvalidCredentials.
@Test
public void testAdminClientWithInvalidCredentials() {
    Map<String, Object> props = new HashMap<>(saslClientConfigs);
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + server.port());
    try (AdminClient client = AdminClient.create(props)) {
        DescribeTopicsResult result = client.describeTopics(Collections.singleton("test"));
        result.all().get();
        fail("Expected an authentication error!");
    } catch (Exception e) {
        assertTrue("Expected SaslAuthenticationException, got " + e.getCause().getClass(), e.getCause() instanceof SaslAuthenticationException);
    }
}
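The saslClientConfigs map is built by the test's base class; a sketch of what such a client configuration typically looks like for SASL/PLAIN, with placeholder (deliberately invalid) credentials:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.config.SaslConfigs;

// Assumed example config; the real test derives these values from its test harness.
Map<String, Object> saslClientConfigs = new HashMap<>();
saslClientConfigs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
saslClientConfigs.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
saslClientConfigs.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required " + "username=\"user\" password=\"wrong-password\";");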