Example 11 with ProducerMetadata

Usage of org.apache.kafka.clients.producer.internals.ProducerMetadata in project kafka by apache, from the class KafkaProducerTest, method testInitTransactionTimeout.

@Test
public void testInitTransactionTimeout() {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "bad-transaction");
    configs.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 500);
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
    Time time = new MockTime(1);
    MetadataResponse initialUpdateResponse = RequestTestUtils.metadataUpdateWith(1, singletonMap("topic", 1));
    ProducerMetadata metadata = newMetadata(0, Long.MAX_VALUE);
    metadata.updateWithCurrentRequestVersion(initialUpdateResponse, false, time.milliseconds());
    MockClient client = new MockClient(time, metadata);
    try (Producer<String, String> producer = kafkaProducer(configs, new StringSerializer(), new StringSerializer(), metadata, client, null, time)) {
        client.prepareResponse(request -> request instanceof FindCoordinatorRequest && ((FindCoordinatorRequest) request).data().keyType() == FindCoordinatorRequest.CoordinatorType.TRANSACTION.id(), FindCoordinatorResponse.prepareResponse(Errors.NONE, "bad-transaction", NODE));
        assertThrows(TimeoutException.class, producer::initTransactions);
        client.prepareResponse(request -> request instanceof FindCoordinatorRequest && ((FindCoordinatorRequest) request).data().keyType() == FindCoordinatorRequest.CoordinatorType.TRANSACTION.id(), FindCoordinatorResponse.prepareResponse(Errors.NONE, "bad-transaction", NODE));
        client.prepareResponse(initProducerIdResponse(1L, (short) 5, Errors.NONE));
        // Retrying initialization should now succeed using the prepared responses
        producer.initTransactions();
    }
}
Also used: ProducerMetadata (org.apache.kafka.clients.producer.internals.ProducerMetadata), HashMap (java.util.HashMap), MockTime (org.apache.kafka.common.utils.MockTime), Time (org.apache.kafka.common.utils.Time), FindCoordinatorRequest (org.apache.kafka.common.requests.FindCoordinatorRequest), MetadataResponse (org.apache.kafka.common.requests.MetadataResponse), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), MockClient (org.apache.kafka.clients.MockClient), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest), Test (org.junit.jupiter.api.Test)
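
For context, the behavior this test exercises also shows up in application code: KafkaProducer.initTransactions() throws org.apache.kafka.common.errors.TimeoutException when the transaction coordinator cannot be found within max.block.ms, and the call may simply be retried. Below is a minimal sketch of that retry loop, assuming a plain KafkaProducer rather than the test helpers (newMetadata, kafkaProducer, NODE) used above; the bootstrap address, transactional id, and retry budget are illustrative placeholders.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.serialization.StringSerializer;

public class InitTransactionsRetrySketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "sketch-transactional-id"); // assumed id
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 500);
        try (KafkaProducer<String, String> producer =
                new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
            int attempts = 3; // arbitrary retry budget for the sketch
            for (int i = 0; i < attempts; i++) {
                try {
                    producer.initTransactions();
                    break; // initialization succeeded
                } catch (TimeoutException e) {
                    // Coordinator lookup or InitProducerId timed out within max.block.ms;
                    // as the test above verifies, the call is safe to retry.
                    if (i == attempts - 1) {
                        throw e;
                    }
                }
            }
        }
    }
}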

Example 12 with ProducerMetadata

Usage of org.apache.kafka.clients.producer.internals.ProducerMetadata in project kafka by apache, from the class KafkaProducerTest, method testMetadataExpiry.

@ParameterizedTest
@ValueSource(booleans = { true, false })
public void testMetadataExpiry(boolean isIdempotenceEnabled) throws InterruptedException {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    configs.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, isIdempotenceEnabled);
    ProducerMetadata metadata = mock(ProducerMetadata.class);
    when(metadata.fetch()).thenReturn(onePartitionCluster, emptyCluster, onePartitionCluster);
    KafkaProducer<String, String> producer = producerWithOverrideNewSender(configs, metadata);
    ProducerRecord<String, String> record = new ProducerRecord<>(topic, "value");
    producer.send(record);
    // Verify the topic's metadata isn't requested since it's already present.
    verify(metadata, times(0)).requestUpdateForTopic(topic);
    verify(metadata, times(0)).awaitUpdate(anyInt(), anyLong());
    verify(metadata, times(1)).fetch();
    // The second fetch returns an empty cluster, simulating expired metadata, so the producer should request an update for the topic.
    producer.send(record, null);
    verify(metadata, times(1)).requestUpdateForTopic(topic);
    verify(metadata, times(1)).awaitUpdate(anyInt(), anyLong());
    verify(metadata, times(3)).fetch();
    producer.close(Duration.ofMillis(0));
}
Also used : ProducerMetadata(org.apache.kafka.clients.producer.internals.ProducerMetadata) HashMap(java.util.HashMap) ValueSource(org.junit.jupiter.params.provider.ValueSource) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)
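
This test drives metadata expiry through a mocked ProducerMetadata. On a real producer, how long cached cluster metadata is considered fresh is governed by metadata.max.age.ms; the sketch below shows that configuration, with the broker address, topic name, and 30-second age chosen purely for illustration.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class MetadataAgeSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        // Refresh cluster metadata at least every 30 seconds, even without send failures.
        props.put(ProducerConfig.METADATA_MAX_AGE_CONFIG, 30_000);
        try (KafkaProducer<String, String> producer =
                new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
            // Sends issued after the age limit trigger a metadata refresh for the topic,
            // much like the requestUpdateForTopic/awaitUpdate calls verified above.
            producer.send(new ProducerRecord<>("example-topic", "value"));
        }
    }
}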

Example 13 with ProducerMetadata

Usage of org.apache.kafka.clients.producer.internals.ProducerMetadata in project kafka by apache, from the class KafkaProducerTest, method verifyInvalidGroupMetadata.

private void verifyInvalidGroupMetadata(ConsumerGroupMetadata groupMetadata) {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "some.id");
    configs.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 10000);
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
    Time time = new MockTime(1);
    MetadataResponse initialUpdateResponse = RequestTestUtils.metadataUpdateWith(1, singletonMap("topic", 1));
    ProducerMetadata metadata = newMetadata(0, Long.MAX_VALUE);
    MockClient client = new MockClient(time, metadata);
    client.updateMetadata(initialUpdateResponse);
    Node node = metadata.fetch().nodes().get(0);
    client.throttle(node, 5000);
    client.prepareResponse(FindCoordinatorResponse.prepareResponse(Errors.NONE, "some.id", NODE));
    client.prepareResponse(initProducerIdResponse(1L, (short) 5, Errors.NONE));
    try (Producer<String, String> producer = kafkaProducer(configs, new StringSerializer(), new StringSerializer(), metadata, client, null, time)) {
        producer.initTransactions();
        producer.beginTransaction();
        assertThrows(IllegalArgumentException.class, () -> producer.sendOffsetsToTransaction(Collections.emptyMap(), groupMetadata));
    }
}
Also used: ProducerMetadata (org.apache.kafka.clients.producer.internals.ProducerMetadata), HashMap (java.util.HashMap), Node (org.apache.kafka.common.Node), MetadataResponse (org.apache.kafka.common.requests.MetadataResponse), MockTime (org.apache.kafka.common.utils.MockTime), Time (org.apache.kafka.common.utils.Time), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), MockClient (org.apache.kafka.clients.MockClient)
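
By contrast, a valid call to sendOffsetsToTransaction passes the ConsumerGroupMetadata obtained from a live consumer via groupMetadata(). The sketch below is a rough outline of that consume-transform-produce step, not code from the test: the topic name, partition, and offset are placeholders, and the producer and consumer are assumed to be configured and initialized elsewhere.

import java.util.Collections;
import java.util.Map;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.TopicPartition;

public class SendOffsetsSketch {
    // 'producer' is assumed to be a transactional KafkaProducer with initTransactions()
    // already called, and 'consumer' a KafkaConsumer subscribed to the input topic.
    static void commitConsumedOffsets(KafkaProducer<String, String> producer,
                                      KafkaConsumer<String, String> consumer) {
        Map<TopicPartition, OffsetAndMetadata> offsets = Collections.singletonMap(
                new TopicPartition("input-topic", 0), new OffsetAndMetadata(42L)); // placeholder offsets
        producer.beginTransaction();
        // ... produce the transformed records here ...
        // Passing consumer.groupMetadata() rather than a hand-built ConsumerGroupMetadata
        // avoids the IllegalArgumentException asserted in the test above.
        producer.sendOffsetsToTransaction(offsets, consumer.groupMetadata());
        producer.commitTransaction();
    }
}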

Example 14 with ProducerMetadata

Usage of org.apache.kafka.clients.producer.internals.ProducerMetadata in project kafka by apache, from the class KafkaProducerTest, method shouldCloseProperlyAndThrowIfInterrupted.

@Test
public void shouldCloseProperlyAndThrowIfInterrupted() throws Exception {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    configs.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, MockPartitioner.class.getName());
    configs.put(ProducerConfig.BATCH_SIZE_CONFIG, "1");
    Time time = new MockTime();
    MetadataResponse initialUpdateResponse = RequestTestUtils.metadataUpdateWith(1, singletonMap("topic", 1));
    ProducerMetadata metadata = newMetadata(0, Long.MAX_VALUE);
    MockClient client = new MockClient(time, metadata);
    client.updateMetadata(initialUpdateResponse);
    final Producer<String, String> producer = kafkaProducer(configs, new StringSerializer(), new StringSerializer(), metadata, client, null, time);
    ExecutorService executor = Executors.newSingleThreadExecutor();
    final AtomicReference<Exception> closeException = new AtomicReference<>();
    try {
        Future<?> future = executor.submit(() -> {
            producer.send(new ProducerRecord<>("topic", "key", "value"));
            try {
                producer.close();
                fail("Close should block and throw.");
            } catch (Exception e) {
                closeException.set(e);
            }
        });
        // close() should not complete until the pending send finishes
        try {
            future.get(100, TimeUnit.MILLISECONDS);
            fail("Close completed without waiting for send");
        } catch (java.util.concurrent.TimeoutException expected) {
        /* ignore */
        }
        // Ensure send has started
        client.waitForRequests(1, 1000);
        assertTrue(future.cancel(true), "Close terminated prematurely");
        TestUtils.waitForCondition(() -> closeException.get() != null, "InterruptException did not occur within timeout.");
        assertTrue(closeException.get() instanceof InterruptException, "Expected exception not thrown " + closeException);
    } finally {
        executor.shutdownNow();
    }
}
Also used: ProducerMetadata (org.apache.kafka.clients.producer.internals.ProducerMetadata), HashMap (java.util.HashMap), InterruptException (org.apache.kafka.common.errors.InterruptException), MockTime (org.apache.kafka.common.utils.MockTime), Time (org.apache.kafka.common.utils.Time), AtomicReference (java.util.concurrent.atomic.AtomicReference), KafkaException (org.apache.kafka.common.KafkaException), InvalidTopicException (org.apache.kafka.common.errors.InvalidTopicException), ExecutionException (java.util.concurrent.ExecutionException), RecordTooLargeException (org.apache.kafka.common.errors.RecordTooLargeException), TimeoutException (org.apache.kafka.common.errors.TimeoutException), ConfigException (org.apache.kafka.common.config.ConfigException), MockPartitioner (org.apache.kafka.test.MockPartitioner), MetadataResponse (org.apache.kafka.common.requests.MetadataResponse), ExecutorService (java.util.concurrent.ExecutorService), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), MockClient (org.apache.kafka.clients.MockClient), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest), Test (org.junit.jupiter.api.Test)
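
The behavior pinned down here also matters in application shutdown paths: close() blocks until buffered records are delivered, and an interrupted thread sees the unchecked org.apache.kafka.common.errors.InterruptException. Below is a minimal sketch of a bounded shutdown that tolerates interruption; the 10-second timeout is an arbitrary choice and the producer is assumed to exist elsewhere.

import java.time.Duration;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.errors.InterruptException;

public class ProducerShutdownSketch {
    // 'producer' is any live Producer instance (hypothetical here).
    static void shutdown(Producer<String, String> producer) {
        try {
            // Wait up to 10 seconds for buffered records to be delivered.
            producer.close(Duration.ofSeconds(10));
        } catch (InterruptException e) {
            // The calling thread was interrupted while close() was blocking; Kafka rethrows
            // this as an unchecked InterruptException, as the test above asserts.
            Thread.currentThread().interrupt(); // keep the interrupt flag set for callers
        }
    }
}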

Example 15 with ProducerMetadata

Usage of org.apache.kafka.clients.producer.internals.ProducerMetadata in project kafka by apache, from the class KafkaProducerTest, method testCommitTransactionWithSendToInvalidTopic.

@Test
public void testCommitTransactionWithSendToInvalidTopic() throws Exception {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "some.id");
    configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000");
    configs.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, "15000");
    Time time = new MockTime();
    MetadataResponse initialUpdateResponse = RequestTestUtils.metadataUpdateWith(1, emptyMap());
    ProducerMetadata metadata = newMetadata(0, Long.MAX_VALUE);
    metadata.updateWithCurrentRequestVersion(initialUpdateResponse, false, time.milliseconds());
    MockClient client = new MockClient(time, metadata);
    client.prepareResponse(FindCoordinatorResponse.prepareResponse(Errors.NONE, "some.id", NODE));
    client.prepareResponse(initProducerIdResponse(1L, (short) 5, Errors.NONE));
    // Invalid topic name due to space
    String invalidTopicName = "topic abc";
    ProducerRecord<String, String> record = new ProducerRecord<>(invalidTopicName, "HelloKafka");
    List<MetadataResponse.TopicMetadata> topicMetadata = new ArrayList<>();
    topicMetadata.add(new MetadataResponse.TopicMetadata(Errors.INVALID_TOPIC_EXCEPTION, invalidTopicName, false, Collections.emptyList()));
    MetadataResponse updateResponse = RequestTestUtils.metadataResponse(new ArrayList<>(initialUpdateResponse.brokers()), initialUpdateResponse.clusterId(), initialUpdateResponse.controller().id(), topicMetadata);
    client.prepareMetadataUpdate(updateResponse);
    try (Producer<String, String> producer = kafkaProducer(configs, new StringSerializer(), new StringSerializer(), metadata, client, null, time)) {
        producer.initTransactions();
        producer.beginTransaction();
        TestUtils.assertFutureError(producer.send(record), InvalidTopicException.class);
        assertThrows(KafkaException.class, producer::commitTransaction);
    }
}
Also used: ProducerMetadata (org.apache.kafka.clients.producer.internals.ProducerMetadata), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), MockTime (org.apache.kafka.common.utils.MockTime), Time (org.apache.kafka.common.utils.Time), MetadataResponse (org.apache.kafka.common.requests.MetadataResponse), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), MockClient (org.apache.kafka.clients.MockClient), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest), Test (org.junit.jupiter.api.Test)
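
The commit failure asserted here follows the usual transactional error-handling pattern: once a send inside the transaction fails with a fatal error such as InvalidTopicException, commitTransaction() throws and the application should abort. A sketch of that pattern follows; the broker address, transactional id, and topic name are placeholders, and the error handling is a simplified outline rather than a complete production recipe.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.StringSerializer;

public class TransactionalSendSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "sketch.id");       // assumed id
        try (KafkaProducer<String, String> producer =
                new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
            producer.initTransactions();
            try {
                producer.beginTransaction();
                producer.send(new ProducerRecord<>("valid-topic", "HelloKafka"));
                producer.commitTransaction();
            } catch (ProducerFencedException e) {
                // Another producer with the same transactional id took over; this instance must stop.
                producer.close();
            } catch (KafkaException e) {
                // Covers cases like the InvalidTopicException send failure in the test above:
                // the transaction cannot be committed, so abort and optionally retry with new records.
                producer.abortTransaction();
            }
        }
    }
}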

Aggregations

HashMap (java.util.HashMap): 34
ProducerMetadata (org.apache.kafka.clients.producer.internals.ProducerMetadata): 34
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 32
MockTime (org.apache.kafka.common.utils.MockTime): 30
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 28
MockClient (org.apache.kafka.clients.MockClient): 27
MetadataResponse (org.apache.kafka.common.requests.MetadataResponse): 27
Test (org.junit.jupiter.api.Test): 27
Time (org.apache.kafka.common.utils.Time): 25
ExecutorService (java.util.concurrent.ExecutorService): 8
ExecutionException (java.util.concurrent.ExecutionException): 7
TimeoutException (org.apache.kafka.common.errors.TimeoutException): 7
ValueSource (org.junit.jupiter.params.provider.ValueSource): 7
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 6
Node (org.apache.kafka.common.Node): 6
ArrayList (java.util.ArrayList): 5
CountDownLatch (java.util.concurrent.CountDownLatch): 5
ConsumerGroupMetadata (org.apache.kafka.clients.consumer.ConsumerGroupMetadata): 5
KafkaException (org.apache.kafka.common.KafkaException): 5
TopicPartition (org.apache.kafka.common.TopicPartition): 5