Use of org.apache.kafka.common.TopicPartitionInfo in the Apache "kafka" project.
From class WorkerUtilsTest, method testCreateTopicsFailsIfAtLeastOneTopicExists:
@Test
public void testCreateTopicsFailsIfAtLeastOneTopicExists() throws Throwable {
    // Pre-register TEST_TOPIC so the createTopics call below collides with it.
    adminClient.addTopic(
        false,
        TEST_TOPIC,
        Collections.singletonList(new TopicPartitionInfo(0, broker1, singleReplica, Collections.<Node>emptyList())),
        null);

    // Request three topics, one of which (TEST_TOPIC) already exists on the broker.
    final Map<String, NewTopic> requestedTopics = new HashMap<>();
    requestedTopics.put(TEST_TOPIC, NEW_TEST_TOPIC);
    for (final String name : new String[] {"another-topic", "one-more-topic"}) {
        requestedTopics.put(name, new NewTopic(name, TEST_PARTITIONS, TEST_REPLICATION_FACTOR));
    }

    // With failOnExisting = true, the collision must surface as a TopicExistsException.
    assertThrows(TopicExistsException.class,
        () -> WorkerUtils.createTopics(log, adminClient, requestedTopics, true));
}
Use of org.apache.kafka.common.TopicPartitionInfo in the Apache "kafka" project.
From class TopicAdminTest, method verifyingGettingTopicCleanupPolicies:
/**
 * Verifies that {@code TopicAdmin.topicCleanupPolicy} returns exactly the
 * cleanup policy configured on the topic (here: {@code compact}).
 */
@Test
public void verifyingGettingTopicCleanupPolicies() {
    String topicName = "myTopic";
    Map<String, String> topicConfigs = Collections.singletonMap("cleanup.policy", "compact");
    Cluster cluster = createCluster(1);
    try (MockAdminClient mockAdminClient = new MockAdminClient(cluster.nodes(), cluster.nodeById(0))) {
        // Register a single-partition topic whose only config is the compact cleanup policy.
        TopicPartitionInfo topicPartitionInfo =
            new TopicPartitionInfo(0, cluster.nodeById(0), cluster.nodes(), Collections.emptyList());
        mockAdminClient.addTopic(false, topicName, Collections.singletonList(topicPartitionInfo), topicConfigs);
        TopicAdmin admin = new TopicAdmin(null, mockAdminClient);
        // Use the topicName variable rather than repeating the "myTopic" literal,
        // so the test stays consistent if the name ever changes.
        Set<String> policies = admin.topicCleanupPolicy(topicName);
        assertEquals(1, policies.size());
        assertEquals(TopicConfig.CLEANUP_POLICY_COMPACT, policies.iterator().next());
    }
}
Use of org.apache.kafka.common.TopicPartitionInfo in the Apache "kafka" project.
From class StreamsResetterTest, method shouldDeleteTopic:
@Test
public void shouldDeleteTopic() throws InterruptedException, ExecutionException {
    final Cluster cluster = createCluster(1);
    try (final MockAdminClient adminClient = new MockAdminClient(cluster.nodes(), cluster.nodeById(0))) {
        // Seed the mock broker with a single-partition topic.
        final TopicPartitionInfo partitionInfo =
            new TopicPartitionInfo(0, cluster.nodeById(0), cluster.nodes(), Collections.<Node>emptyList());
        adminClient.addTopic(false, TOPIC, Collections.singletonList(partitionInfo), null);

        // Deleting the topic should leave the broker with no topics at all.
        streamsResetter.doDelete(Collections.singletonList(TOPIC), adminClient);
        assertEquals(Collections.emptySet(), adminClient.listTopics().names().get());
    }
}
Use of org.apache.kafka.common.TopicPartitionInfo in the Confluent "ksql" project.
From class KafkaTopicClientImplTest, method getDescribeTopicsResult:
/**
 * Builds an EasyMock {@code DescribeTopicsResult} whose {@code all()} future
 * resolves to a description of {@code topicName1} with one partition led by
 * {@code node}, which is also the only replica and the only in-sync replica.
 */
private DescribeTopicsResult getDescribeTopicsResult() {
    final List<Node> nodes = Collections.singletonList(node);
    final TopicPartitionInfo partitionInfo = new TopicPartitionInfo(0, node, nodes, nodes);
    final TopicDescription description =
        new TopicDescription(topicName1, false, Collections.singletonList(partitionInfo));

    final DescribeTopicsResult result = mock(DescribeTopicsResult.class);
    expect(result.all())
        .andReturn(KafkaFuture.completedFuture(Collections.singletonMap(topicName1, description)));
    replay(result);
    return result;
}
Use of org.apache.kafka.common.TopicPartitionInfo in the Confluent "ksql" project.
From class KafkaTopicsListTest, method shouldBuildValidTopicList:
/**
 * Verifies that {@code KafkaTopicsList.build} produces one entry for a single
 * described topic, with the consumer-group and consumer counts and replica
 * info derived from the mocked consumer-group client.
 */
@Test
public void shouldBuildValidTopicList() {
    Collection<KsqlTopic> ksqlTopics = Collections.emptyList();

    // Represent the full list of topics: one topic, one partition, leader on node 1.
    Map<String, TopicDescription> topicDescriptions = new HashMap<>();
    TopicPartitionInfo topicPartitionInfo =
        new TopicPartitionInfo(1, new Node(1, "", 8088), Collections.emptyList(), Collections.emptyList());
    topicDescriptions.put("test-topic",
        new TopicDescription("test-topic", false, Collections.singletonList(topicPartitionInfo)));

    // Return POJO for consumerGroupClient: one group with one consumer on one partition.
    TopicPartition topicPartition = new TopicPartition("test-topic", 1);
    KafkaConsumerGroupClientImpl.ConsumerSummary consumerSummary =
        new KafkaConsumerGroupClientImpl.ConsumerSummary("consumer-id");
    consumerSummary.addPartition(topicPartition);
    KafkaConsumerGroupClientImpl.ConsumerGroupSummary consumerGroupSummary =
        new KafkaConsumerGroupClientImpl.ConsumerGroupSummary();
    consumerGroupSummary.addConsumerSummary(consumerSummary);
    KafkaConsumerGroupClient consumerGroupClient = mock(KafkaConsumerGroupClient.class);
    expect(consumerGroupClient.listGroups()).andReturn(Collections.singletonList("test-topic"));
    expect(consumerGroupClient.describeConsumerGroup("test-topic")).andReturn(consumerGroupSummary);
    replay(consumerGroupClient);

    // Test: use Collections.emptyMap() instead of the raw-typed Collections.EMPTY_MAP
    // to avoid an unchecked-conversion warning.
    KafkaTopicsList topicsList = KafkaTopicsList.build(
        "statement test", ksqlTopics, topicDescriptions,
        new KsqlConfig(Collections.emptyMap()), consumerGroupClient);
    assertThat(topicsList.getTopics().size(), equalTo(1));
    KafkaTopicInfo first = topicsList.getTopics().iterator().next();
    assertThat(first.getConsumerGroupCount(), equalTo(1));
    assertThat(first.getConsumerCount(), equalTo(1));
    assertThat(first.getReplicaInfo().size(), equalTo(1));
}
End of aggregated TopicPartitionInfo usage examples.