use of org.apache.kafka.clients.admin.TopicDescription in project strimzi by strimzi.
the class BaseKafkaImpl method topicMetadata.
/**
 * Get the topic metadata (its description and config) via the Kafka AdminClient API,
 * calling the given handler (in a different thread) with the result.
 */
@Override
public void topicMetadata(TopicName topicName, Handler<AsyncResult<TopicMetadata>> handler) {
    LOGGER.debug("Getting metadata for topic {}", topicName);
    ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName.toString());
    KafkaFuture<TopicDescription> descriptionFuture =
            adminClient.describeTopics(Collections.singleton(topicName.toString()))
                    .values().get(topicName.toString());
    KafkaFuture<Config> configFuture =
            adminClient.describeConfigs(Collections.singleton(resource)).values().get(resource);
    queueWork(new MetadataWork(descriptionFuture, configFuture, result -> handler.handle(result)));
}
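For orientation, a minimal sketch of what the queued work might do once both futures complete. The blocking style and the TopicMetadata(description, config) constructor are assumptions for illustration, not the project's actual MetadataWork implementation.

// Sketch only (assumed shape, not Strimzi's MetadataWork): wait for both
// AdminClient futures, then pass the combined result to the Vert.x handler.
// Checked-exception handling is omitted for brevity.
KafkaFuture.allOf(descriptionFuture, configFuture).get();
TopicDescription description = descriptionFuture.get();
Config config = configFuture.get();
handler.handle(Future.succeededFuture(new TopicMetadata(description, config)));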
use of org.apache.kafka.clients.admin.TopicDescription in project ksql by confluentinc.
the class KafkaTopicClientImplIntegrationTest method shouldCreateTopic.
@Test
public void shouldCreateTopic() {
    // Given:
    final String topicName = UUID.randomUUID().toString();

    // When:
    client.createTopic(topicName, 3, (short) 1);

    // Then:
    assertThatEventually(() -> topicExists(topicName), is(true));
    final TopicDescription topicDescription = getTopicDescription(topicName);
    assertThat(topicDescription.partitions(), hasSize(3));
    assertThat(topicDescription.partitions().get(0).replicas(), hasSize(1));
}
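The test relies on two helpers, topicExists and getTopicDescription. A hedged sketch of how they could be written directly against a Kafka AdminClient is shown below; the adminClient field and the broad throws clause are assumptions, not the actual test code.

// Sketch only: possible shapes for the helpers used in the test above.
private boolean topicExists(final String topicName) throws Exception {
    // listTopics().names() resolves to the set of existing topic names.
    return adminClient.listTopics().names().get().contains(topicName);
}

private TopicDescription getTopicDescription(final String topicName) throws Exception {
    // describeTopics(...).all() resolves to a map of topic name -> TopicDescription.
    return adminClient.describeTopics(Collections.singleton(topicName))
            .all().get()
            .get(topicName);
}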
use of org.apache.kafka.clients.admin.TopicDescription in project ksql by confluentinc.
the class KafkaTopicClientImplTest method getDescribeTopicsResult.
private DescribeTopicsResult getDescribeTopicsResult() {
    TopicPartitionInfo topicPartitionInfo =
            new TopicPartitionInfo(0, node, Collections.singletonList(node), Collections.singletonList(node));
    TopicDescription topicDescription =
            new TopicDescription(topicName1, false, Collections.singletonList(topicPartitionInfo));
    DescribeTopicsResult describeTopicsResult = mock(DescribeTopicsResult.class);
    expect(describeTopicsResult.all())
            .andReturn(KafkaFuture.completedFuture(Collections.singletonMap(topicName1, topicDescription)));
    replay(describeTopicsResult);
    return describeTopicsResult;
}
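For context, one way a stub like this is typically consumed: wiring it into a mocked AdminClient with EasyMock and exercising the client under test. The KafkaTopicClientImpl constructor and the describeTopics signature shown here are assumptions for illustration, not the full test.

// Illustrative wiring only.
AdminClient adminClient = mock(AdminClient.class);
expect(adminClient.describeTopics(Collections.singletonList(topicName1)))
        .andReturn(getDescribeTopicsResult());
replay(adminClient);

KafkaTopicClient kafkaTopicClient = new KafkaTopicClientImpl(adminClient);
Map<String, TopicDescription> topics =
        kafkaTopicClient.describeTopics(Collections.singletonList(topicName1));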
use of org.apache.kafka.clients.admin.TopicDescription in project ksql by confluentinc.
the class KafkaTopicClientImpl method validateTopicProperties.
private void validateTopicProperties(final String topic, final int numPartitions, final short replicationFactor) {
    Map<String, TopicDescription> topicDescriptions = describeTopics(Collections.singletonList(topic));
    TopicDescription topicDescription = topicDescriptions.get(topic);
    if (topicDescription.partitions().size() != numPartitions
            || topicDescription.partitions().get(0).replicas().size() < replicationFactor) {
        throw new KafkaTopicException(String.format(
                "Topic '%s' does not conform to the requirements Partitions:%d v %d. Replication: %d v %d",
                topic,
                topicDescription.partitions().size(), numPartitions,
                topicDescription.partitions().get(0).replicas().size(), replicationFactor));
    }
    // Topic with the partitions and replicas exists, reuse it!
    log.debug("Did not create topic {} with {} partitions and replication-factor {} since it already exists",
            topic, numPartitions, replicationFactor);
}
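The describeTopics call above presumably delegates to the AdminClient; a hedged sketch of such a wrapper follows (field name, exception type, and interrupt handling are simplified assumptions, not the project's exact code).

// Sketch only: fetch descriptions for the given topics and unwrap the KafkaFuture.
private Map<String, TopicDescription> describeTopics(final Collection<String> topicNames) {
    try {
        return adminClient.describeTopics(topicNames).all().get();
    } catch (final InterruptedException | ExecutionException e) {
        throw new RuntimeException("Failed to describe Kafka topics " + topicNames, e);
    }
}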
use of org.apache.kafka.clients.admin.TopicDescription in project ksql by confluentinc.
the class KafkaTopicsListTest method shouldBuildValidTopicList.
@Test
public void shouldBuildValidTopicList() {
    Collection<KsqlTopic> ksqlTopics = Collections.emptyList();

    // Represent the full list of topics.
    Map<String, TopicDescription> topicDescriptions = new HashMap<>();
    TopicPartitionInfo topicPartitionInfo =
            new TopicPartitionInfo(1, new Node(1, "", 8088), Collections.emptyList(), Collections.emptyList());
    topicDescriptions.put("test-topic",
            new TopicDescription("test-topic", false, Collections.singletonList(topicPartitionInfo)));

    // Return POJO for consumerGroupClient.
    TopicPartition topicPartition = new TopicPartition("test-topic", 1);
    KafkaConsumerGroupClientImpl.ConsumerSummary consumerSummary =
            new KafkaConsumerGroupClientImpl.ConsumerSummary("consumer-id");
    consumerSummary.addPartition(topicPartition);
    KafkaConsumerGroupClientImpl.ConsumerGroupSummary consumerGroupSummary =
            new KafkaConsumerGroupClientImpl.ConsumerGroupSummary();
    consumerGroupSummary.addConsumerSummary(consumerSummary);
    KafkaConsumerGroupClient consumerGroupClient = mock(KafkaConsumerGroupClient.class);
    expect(consumerGroupClient.listGroups()).andReturn(Collections.singletonList("test-topic"));
    expect(consumerGroupClient.describeConsumerGroup("test-topic")).andReturn(consumerGroupSummary);
    replay(consumerGroupClient);

    // Test
    KafkaTopicsList topicsList = KafkaTopicsList.build("statement test", ksqlTopics, topicDescriptions,
            new KsqlConfig(Collections.EMPTY_MAP), consumerGroupClient);
    assertThat(topicsList.getTopics().size(), equalTo(1));
    KafkaTopicInfo first = topicsList.getTopics().iterator().next();
    assertThat(first.getConsumerGroupCount(), equalTo(1));
    assertThat(first.getConsumerCount(), equalTo(1));
    assertThat(first.getReplicaInfo().size(), equalTo(1));
}
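As a usage note, additional topics would be registered the same way, one TopicDescription per entry in the descriptions map; a hypothetical extension of the setup above (the expected list size would presumably grow accordingly):

// Hypothetical second entry, reusing the same partition info for brevity.
topicDescriptions.put("other-topic",
        new TopicDescription("other-topic", false, Collections.singletonList(topicPartitionInfo)));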