Use of org.apache.kafka.clients.admin.TopicDescription in the cruise-control project (LinkedIn):
class ExecutorTest, method testBrokerDiesWhenMovePartitions.
// Verifies executor behavior when a broker dies before partition movement:
// the broker that is NOT the current leader of TOPIC_0's partition 0 is shut
// down, then proposals that would move replicas onto / share replicas with
// that dead broker are executed. No rollback is expected.
@Test
public void testBrokerDiesWhenMovePartitions() throws Exception {
ZkUtils zkUtils = KafkaCruiseControlUnitTestUtils.zkUtils(zookeeper().getConnectionString());
// Describe the test topics so we can discover the current partition leaders.
Map<String, TopicDescription> topicDescriptions = createTopics();
int initialLeader0 = topicDescriptions.get(TOPIC_0).partitions().get(0).leader().id();
int initialLeader1 = topicDescriptions.get(TOPIC_1).partitions().get(0).leader().id();
// Kill the broker that is not TOPIC_0's leader (the 2-broker cluster's "other" broker).
_brokers.get(initialLeader0 == 0 ? 1 : 0).shutdown();
// Proposal 0: move TP0's single replica from the live leader to the dead broker.
ExecutionProposal proposal0 = new ExecutionProposal(TP0, 0, initialLeader0, Collections.singletonList(initialLeader0), Collections.singletonList(initialLeader0 == 0 ? 1 : 0));
// Proposal 1: swap replica order for TP1 so the (possibly dead) other broker becomes preferred leader.
ExecutionProposal proposal1 = new ExecutionProposal(TP1, 0, initialLeader1, Arrays.asList(initialLeader1, initialLeader1 == 0 ? 1 : 0), Arrays.asList(initialLeader1 == 0 ? 1 : 0, initialLeader1));
Collection<ExecutionProposal> proposalsToExecute = Arrays.asList(proposal0, proposal1);
executeAndVerifyProposals(zkUtils, proposalsToExecute, Collections.emptyList());
// We are not doing the rollback.
assertEquals(Collections.singletonList(initialLeader0 == 0 ? 1 : 0), ExecutorUtils.newAssignmentForPartition(zkUtils, TP0));
// NOTE(review): this compares initialLeader0 against TOPIC_1's leader — it looks
// like a copy/paste mix-up (initialLeader1, or TOPIC_0, may have been intended);
// confirm against the upstream cruise-control test before relying on it.
assertEquals(initialLeader0, zkUtils.getLeaderForPartition(TOPIC_1, PARTITION).get());
}
Use of org.apache.kafka.clients.admin.TopicDescription in the ksql project (Confluent):
class JoinNodeTest, method setupTopicClientExpectations.
/**
 * Primes the {@code topicClient} mock: describing topic "test1" reports
 * {@code streamPartitions} partitions and describing topic "test2" reports
 * {@code tablePartitions} partitions, each partition led by the same single
 * node with empty replica and ISR lists. Switches the mock to replay mode.
 */
private void setupTopicClientExpectations(int streamPartitions, int tablePartitions) {
final Node node = new Node(0, "localhost", 9091);

final List<TopicPartitionInfo> streamInfo = IntStream.range(0, streamPartitions)
    .mapToObj(i -> new TopicPartitionInfo(i, node, Collections.emptyList(), Collections.emptyList()))
    .collect(Collectors.toList());
EasyMock.expect(topicClient.describeTopics(Collections.singletonList("test1")))
    .andReturn(Collections.singletonMap("test1", new TopicDescription("test1", false, streamInfo)));

final List<TopicPartitionInfo> tableInfo = IntStream.range(0, tablePartitions)
    .mapToObj(i -> new TopicPartitionInfo(i, node, Collections.emptyList(), Collections.emptyList()))
    .collect(Collectors.toList());
EasyMock.expect(topicClient.describeTopics(Collections.singletonList("test2")))
    .andReturn(Collections.singletonMap("test2", new TopicDescription("test2", false, tableInfo)));

EasyMock.replay(topicClient);
}
Use of org.apache.kafka.clients.admin.TopicDescription in the ksql project (Confluent):
class KafkaTopicClientImplIntegrationTest, method shouldCreateTopicWithConfig.
/**
 * Integration test: creating a topic with an explicit compression config
 * results in a topic with the requested partition count, replication factor,
 * and config entry.
 */
@Test
public void shouldCreateTopicWithConfig() {
// Given: a unique topic name and a snappy-compression config entry
final String name = UUID.randomUUID().toString();
final Map<String, String> requestedConfig = ImmutableMap.of(TopicConfig.COMPRESSION_TYPE_CONFIG, "snappy");

// When: the topic is created with 2 partitions and replication factor 1
client.createTopic(name, 2, (short) 1, requestedConfig);

// Then: the topic eventually exists with the expected layout and config
assertThatEventually(() -> topicExists(name), is(true));
final TopicDescription description = getTopicDescription(name);
assertThat(description.partitions(), hasSize(2));
assertThat(description.partitions().get(0).replicas(), hasSize(1));
final Map<String, String> actualConfig = client.getTopicConfig(name);
assertThat(actualConfig.get(TopicConfig.COMPRESSION_TYPE_CONFIG), is("snappy"));
}
Use of org.apache.kafka.clients.admin.TopicDescription in the ksql project (Confluent):
class SourceDescription, method getReplication.
/**
 * Returns the replication factor of the given Kafka topic, taken as the
 * replica count of the first partition in the topic's description.
 *
 * @param topicClient    client used to describe the topic
 * @param kafkaTopicName name of the topic to inspect
 * @return the number of replicas of the topic's first partition
 * @throws IllegalStateException if the topic cannot be described or reports no partitions
 */
private static int getReplication(KafkaTopicClient topicClient, String kafkaTopicName) {
Map<String, TopicDescription> stringTopicDescriptionMap = topicClient.describeTopics(Arrays.asList(kafkaTopicName));
// Look up by key rather than values().iterator().next(): the previous form
// picked an arbitrary entry and threw a bare NoSuchElementException when the
// description was missing, hiding which topic failed.
TopicDescription topicDescription = stringTopicDescriptionMap.get(kafkaTopicName);
if (topicDescription == null || topicDescription.partitions().isEmpty()) {
throw new IllegalStateException("Could not get replication for topic: " + kafkaTopicName);
}
return topicDescription.partitions().get(0).replicas().size();
}
Use of org.apache.kafka.clients.admin.TopicDescription in the ksql project (Confluent):
class KafkaTopicsList, method build.
/**
 * Builds a {@link KafkaTopicsList} describing each (non-internal) Kafka topic:
 * its name, whether it is registered with KSQL, per-partition replica counts,
 * and its consumer / consumer-group counts.
 *
 * @param statementText          the originating statement text, echoed into the result
 * @param ksqlTopics             topics registered with KSQL (used to flag registration)
 * @param kafkaTopicDescriptions descriptions of all Kafka topics; not modified
 * @param ksqlConfig             config used to filter out KSQL-internal topics
 * @param consumerGroupClient    source of per-topic consumer and group counts
 * @return the assembled topics list
 */
public static KafkaTopicsList build(String statementText, Collection<KsqlTopic> ksqlTopics, Map<String, TopicDescription> kafkaTopicDescriptions, KsqlConfig ksqlConfig, KafkaConsumerGroupClient consumerGroupClient) {
Set<String> registeredNames = getRegisteredKafkaTopicNames(ksqlTopics);
// Sort by topic name and drop KSQL-internal topics. Use a local instead of
// reassigning the kafkaTopicDescriptions parameter (previous code mutated the
// parameter binding, which is confusing and hides the caller's view).
Map<String, TopicDescription> filteredDescriptions = new TreeMap<>(filterKsqlInternalTopics(kafkaTopicDescriptions, ksqlConfig));
Map<String, List<Integer>> topicConsumersAndGroupCount = getTopicConsumerAndGroupCounts(consumerGroupClient);
List<KafkaTopicInfo> kafkaTopicInfoList = new ArrayList<>();
for (TopicDescription desc : filteredDescriptions.values()) {
// Fetch the [consumerCount, consumerGroupCount] pair once instead of twice.
List<Integer> counts = topicConsumersAndGroupCount.getOrDefault(desc.name(), Arrays.asList(0, 0));
List<Integer> replicaCounts = desc.partitions().stream()
    .map(partition -> partition.replicas().size())
    .collect(Collectors.toList());
kafkaTopicInfoList.add(new KafkaTopicInfo(
    desc.name(),
    registeredNames.contains(desc.name()),
    replicaCounts,
    counts.get(0),
    counts.get(1)));
}
return new KafkaTopicsList(statementText, kafkaTopicInfoList);
}
Aggregations