
Example 1 with TopicDescription

Use of org.apache.kafka.clients.admin.TopicDescription in the project cruise-control by LinkedIn.

From the class ExecutorTest, method testBrokerDiesWhenMovePartitions:

@Test
public void testBrokerDiesWhenMovePartitions() throws Exception {
    ZkUtils zkUtils = KafkaCruiseControlUnitTestUtils.zkUtils(zookeeper().getConnectionString());
    Map<String, TopicDescription> topicDescriptions = createTopics();
    int initialLeader0 = topicDescriptions.get(TOPIC_0).partitions().get(0).leader().id();
    int initialLeader1 = topicDescriptions.get(TOPIC_1).partitions().get(0).leader().id();
    // Kill the broker that is NOT the leader of TOPIC_0's partition 0.
    _brokers.get(initialLeader0 == 0 ? 1 : 0).shutdown();
    // proposal0 moves TP0's single replica onto the dead broker, so it cannot complete.
    ExecutionProposal proposal0 =
        new ExecutionProposal(TP0, 0, initialLeader0,
                              Collections.singletonList(initialLeader0),
                              Collections.singletonList(initialLeader0 == 0 ? 1 : 0));
    // proposal1 swaps TP1's replica order so the other broker becomes the preferred leader.
    ExecutionProposal proposal1 =
        new ExecutionProposal(TP1, 0, initialLeader1,
                              Arrays.asList(initialLeader1, initialLeader1 == 0 ? 1 : 0),
                              Arrays.asList(initialLeader1 == 0 ? 1 : 0, initialLeader1));
    Collection<ExecutionProposal> proposalsToExecute = Arrays.asList(proposal0, proposal1);
    executeAndVerifyProposals(zkUtils, proposalsToExecute, Collections.emptyList());
    // We are not doing the rollback: the new assignment stays in ZooKeeper, and
    // leadership of TOPIC_1 ends up on the surviving broker (initialLeader0).
    assertEquals(Collections.singletonList(initialLeader0 == 0 ? 1 : 0),
                 ExecutorUtils.newAssignmentForPartition(zkUtils, TP0));
    assertEquals(initialLeader0, zkUtils.getLeaderForPartition(TOPIC_1, PARTITION).get());
}
Also used: TopicDescription (org.apache.kafka.clients.admin.TopicDescription), ZkUtils (kafka.utils.ZkUtils), Test (org.junit.Test)
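
For orientation, here is a minimal, self-contained sketch of how a Map<String, TopicDescription> like the one returned by the test's createTopics() helper can be obtained from a plain AdminClient (the helper itself is not shown in this excerpt; the bootstrap address and topic names below are placeholders):

import java.util.Arrays;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.TopicDescription;

public final class DescribeTopicsSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Placeholder address; point this at a real broker.
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        try (AdminClient admin = AdminClient.create(props)) {
            // describeTopics(...).all() resolves to Map<String, TopicDescription>,
            // the same shape createTopics() returns in the test above.
            Map<String, TopicDescription> descriptions =
                admin.describeTopics(Arrays.asList("topic0", "topic1")).all().get();
            int leader = descriptions.get("topic0").partitions().get(0).leader().id();
            System.out.println("topic0 partition 0 leader: " + leader);
        }
    }
}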

Example 2 with TopicDescription

Use of org.apache.kafka.clients.admin.TopicDescription in the project ksql by Confluent.

From the class JoinNodeTest, method setupTopicClientExpectations:

private void setupTopicClientExpectations(int streamPartitions, int tablePartitions) {
    Node node = new Node(0, "localhost", 9091);
    // Fabricate partition metadata for the stream-side topic "test1".
    List<TopicPartitionInfo> streamPartitionInfoList = IntStream.range(0, streamPartitions)
        .mapToObj(p -> new TopicPartitionInfo(p, node, Collections.emptyList(), Collections.emptyList()))
        .collect(Collectors.toList());
    EasyMock.expect(topicClient.describeTopics(Arrays.asList("test1")))
        .andReturn(Collections.singletonMap("test1", new TopicDescription("test1", false, streamPartitionInfoList)));
    // Fabricate partition metadata for the table-side topic "test2".
    List<TopicPartitionInfo> tablePartitionInfoList = IntStream.range(0, tablePartitions)
        .mapToObj(p -> new TopicPartitionInfo(p, node, Collections.emptyList(), Collections.emptyList()))
        .collect(Collectors.toList());
    EasyMock.expect(topicClient.describeTopics(Arrays.asList("test2")))
        .andReturn(Collections.singletonMap("test2", new TopicDescription("test2", false, tablePartitionInfoList)));
    EasyMock.replay(topicClient);
}
Also used: IntStream (java.util.stream.IntStream), Arrays (java.util.Arrays), SchemaKStream (io.confluent.ksql.structured.SchemaKStream), SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient), SchemaKTable (io.confluent.ksql.structured.SchemaKTable), LogicalPlanBuilder (io.confluent.ksql.structured.LogicalPlanBuilder), CoreMatchers.equalTo (org.hamcrest.CoreMatchers.equalTo), EasyMock.mock (org.easymock.EasyMock.mock), HashMap (java.util.HashMap), KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient), Schema (org.apache.kafka.connect.data.Schema), TopologyDescription (org.apache.kafka.streams.TopologyDescription), Map (java.util.Map), MetaStore (io.confluent.ksql.metastore.MetaStore), TopicDescription (org.apache.kafka.clients.admin.TopicDescription), MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat), LinkedList (java.util.LinkedList), Before (org.junit.Before), Utils (org.apache.kafka.common.utils.Utils), TopicPartitionInfo (org.apache.kafka.common.TopicPartitionInfo), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), SOURCE_NODE (io.confluent.ksql.planner.plan.PlanTestUtil.SOURCE_NODE), Field (org.apache.kafka.connect.data.Field), FunctionRegistry (io.confluent.ksql.function.FunctionRegistry), Set (java.util.Set), ConsumerConfig (org.apache.kafka.clients.consumer.ConsumerConfig), Test (org.junit.Test), EasyMock (org.easymock.EasyMock), KsqlConfig (io.confluent.ksql.util.KsqlConfig), MetaStoreFixture (io.confluent.ksql.util.MetaStoreFixture), Collectors (java.util.stream.Collectors), List (java.util.List), MAPVALUES_NODE (io.confluent.ksql.planner.plan.PlanTestUtil.MAPVALUES_NODE), KsqlException (io.confluent.ksql.util.KsqlException), Node (org.apache.kafka.common.Node), StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource), Assert (org.junit.Assert), Collections (java.util.Collections), Topology (org.apache.kafka.streams.Topology), PlanTestUtil.getNodeByName (io.confluent.ksql.planner.plan.PlanTestUtil.getNodeByName)
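
A hypothetical way to exercise this helper from a test body in the same class (a sketch only; the real test presumably drives the mock indirectly through the logical plan builder, and the partition counts here are arbitrary):

// Sketch: assumes it runs inside JoinNodeTest, after the helper above has been defined.
setupTopicClientExpectations(4, 2);
TopicDescription stream = topicClient.describeTopics(Arrays.asList("test1")).get("test1");
TopicDescription table = topicClient.describeTopics(Arrays.asList("test2")).get("test2");
assertThat(stream.partitions().size(), equalTo(4));
assertThat(table.partitions().size(), equalTo(2));
// Both expectations were consumed exactly once, so verification passes.
EasyMock.verify(topicClient);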

Example 3 with TopicDescription

Use of org.apache.kafka.clients.admin.TopicDescription in the project ksql by Confluent.

From the class KafkaTopicClientImplIntegrationTest, method shouldCreateTopicWithConfig:

@Test
public void shouldCreateTopicWithConfig() {
    // Given:
    final String topicName = UUID.randomUUID().toString();
    final Map<String, String> config = ImmutableMap.of(TopicConfig.COMPRESSION_TYPE_CONFIG, "snappy");
    // When:
    client.createTopic(topicName, 2, (short) 1, config);
    // Then:
    assertThatEventually(() -> topicExists(topicName), is(true));
    final TopicDescription topicDescription = getTopicDescription(topicName);
    assertThat(topicDescription.partitions(), hasSize(2));
    assertThat(topicDescription.partitions().get(0).replicas(), hasSize(1));
    final Map<String, String> configs = client.getTopicConfig(topicName);
    assertThat(configs.get(TopicConfig.COMPRESSION_TYPE_CONFIG), is("snappy"));
}
Also used: TopicDescription (org.apache.kafka.clients.admin.TopicDescription), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
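
The getTopicDescription(...) helper is not shown in this excerpt. A minimal sketch of what it could look like over a raw AdminClient field (an assumption, not the test's actual implementation; adminClient is a hypothetical name):

// Sketch, assuming an AdminClient field named adminClient.
private TopicDescription getTopicDescription(String topicName) {
    try {
        // values() maps each topic name to a KafkaFuture<TopicDescription>.
        return adminClient.describeTopics(Collections.singletonList(topicName))
            .values().get(topicName).get();
    } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException("Failed to describe topic " + topicName, e);
    }
}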

Example 4 with TopicDescription

Use of org.apache.kafka.clients.admin.TopicDescription in the project ksql by Confluent.

From the class SourceDescription, method getReplication:

private static int getReplication(KafkaTopicClient topicClient, String kafkaTopicName) {
    // Describe a single topic and read the replica count of its first partition;
    // this assumes every partition of the topic has the same replication factor.
    Map<String, TopicDescription> stringTopicDescriptionMap =
        topicClient.describeTopics(Arrays.asList(kafkaTopicName));
    TopicDescription topicDescription = stringTopicDescriptionMap.values().iterator().next();
    return topicDescription.partitions().iterator().next().replicas().size();
}
Also used: TopicDescription (org.apache.kafka.clients.admin.TopicDescription)
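
Because the method reads only the first partition, it also assumes the topic has at least one partition. A slightly more defensive variant (a sketch, not from the ksql codebase) could guard the empty case:

// Sketch: returns 0 for a topic with no partition metadata instead of throwing.
private static int getReplicationOrZero(KafkaTopicClient topicClient, String kafkaTopicName) {
    TopicDescription description = topicClient
        .describeTopics(Collections.singletonList(kafkaTopicName))
        .values().iterator().next();
    return description.partitions().isEmpty()
        ? 0
        : description.partitions().get(0).replicas().size();
}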

Example 5 with TopicDescription

Use of org.apache.kafka.clients.admin.TopicDescription in the project ksql by Confluent.

From the class KafkaTopicsList, method build:

public static KafkaTopicsList build(String statementText,
                                    Collection<KsqlTopic> ksqlTopics,
                                    Map<String, TopicDescription> kafkaTopicDescriptions,
                                    KsqlConfig ksqlConfig,
                                    KafkaConsumerGroupClient consumerGroupClient) {
    Set<String> registeredNames = getRegisteredKafkaTopicNames(ksqlTopics);
    List<KafkaTopicInfo> kafkaTopicInfoList = new ArrayList<>();
    // Drop KSQL-internal topics and sort the remainder by name.
    kafkaTopicDescriptions = new TreeMap<>(filterKsqlInternalTopics(kafkaTopicDescriptions, ksqlConfig));
    Map<String, List<Integer>> topicConsumersAndGroupCount = getTopicConsumerAndGroupCounts(consumerGroupClient);
    for (TopicDescription desp : kafkaTopicDescriptions.values()) {
        kafkaTopicInfoList.add(new KafkaTopicInfo(
            desp.name(),
            registeredNames.contains(desp.name()),
            desp.partitions().stream().map(partition -> partition.replicas().size()).collect(Collectors.toList()),
            // Consumer count and consumer-group count, defaulting to zero for unconsumed topics.
            topicConsumersAndGroupCount.getOrDefault(desp.name(), Arrays.asList(0, 0)).get(0),
            topicConsumersAndGroupCount.getOrDefault(desp.name(), Arrays.asList(0, 0)).get(1)));
    }
    return new KafkaTopicsList(statementText, kafkaTopicInfoList);
}
Also used: JsonProperty (com.fasterxml.jackson.annotation.JsonProperty), TopicPartition (org.apache.kafka.common.TopicPartition), Arrays (java.util.Arrays), JsonSubTypes (com.fasterxml.jackson.annotation.JsonSubTypes), Collection (java.util.Collection), Set (java.util.Set), KafkaConsumerGroupClient (io.confluent.ksql.util.KafkaConsumerGroupClient), HashMap (java.util.HashMap), KsqlConfig (io.confluent.ksql.util.KsqlConfig), Collectors (java.util.stream.Collectors), ArrayList (java.util.ArrayList), HashSet (java.util.HashSet), Objects (java.util.Objects), JsonTypeName (com.fasterxml.jackson.annotation.JsonTypeName), List (java.util.List), KsqlTopic (io.confluent.ksql.metastore.KsqlTopic), TreeMap (java.util.TreeMap), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), KafkaConsumerGroupClientImpl (io.confluent.ksql.util.KafkaConsumerGroupClientImpl), Map (java.util.Map), JsonCreator (com.fasterxml.jackson.annotation.JsonCreator), Preconditions (com.google.common.base.Preconditions), TopicDescription (org.apache.kafka.clients.admin.TopicDescription), KsqlConstants (io.confluent.ksql.util.KsqlConstants)
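
A hypothetical call site for build(...) (a sketch only: topicClient, ksqlTopics, ksqlConfig, and consumerGroupClient are assumed to be wired up elsewhere, and listTopicNames() is assumed to be the KafkaTopicClient listing method):

// Sketch; every variable here is assumed to exist in scope.
Map<String, TopicDescription> descriptions =
    topicClient.describeTopics(new ArrayList<>(topicClient.listTopicNames()));
KafkaTopicsList topicsList = KafkaTopicsList.build(
    "LIST TOPICS;", ksqlTopics, descriptions, ksqlConfig, consumerGroupClient);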

Aggregations

TopicDescription (org.apache.kafka.clients.admin.TopicDescription): 22 uses
Test (org.junit.Test): 10 uses
TopicPartitionInfo (org.apache.kafka.common.TopicPartitionInfo): 9 uses
Node (org.apache.kafka.common.Node): 8 uses
Map (java.util.Map): 6 uses
HashMap (java.util.HashMap): 4 uses
AdminClient (org.apache.kafka.clients.admin.AdminClient): 4 uses
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 3 uses
ArrayList (java.util.ArrayList): 3 uses
Set (java.util.Set): 3 uses
ExecutionException (java.util.concurrent.ExecutionException): 3 uses
DescribeTopicsResult (org.apache.kafka.clients.admin.DescribeTopicsResult): 3 uses
NewTopic (org.apache.kafka.clients.admin.NewTopic): 3 uses
KafkaFuture (org.apache.kafka.common.KafkaFuture): 3 uses
KsqlTopic (io.confluent.ksql.metastore.KsqlTopic): 2 uses
KafkaConsumerGroupClient (io.confluent.ksql.util.KafkaConsumerGroupClient): 2 uses
KafkaConsumerGroupClientImpl (io.confluent.ksql.util.KafkaConsumerGroupClientImpl): 2 uses
Arrays (java.util.Arrays): 2 uses
Collections (java.util.Collections): 2 uses
List (java.util.List): 2 uses