Example 16 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the apache-kafka-on-k8s project by banzaicloud.

From the class ClientAuthenticationFailureTest, method testConsumerWithInvalidCredentials.

@Test
public void testConsumerWithInvalidCredentials() {
    Map<String, Object> props = new HashMap<>(saslClientConfigs);
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + server.port());
    StringDeserializer deserializer = new StringDeserializer();
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props, deserializer, deserializer)) {
        consumer.subscribe(Arrays.asList(topic));
        // poll() forces a connection attempt, which in turn triggers the SASL handshake
        consumer.poll(100);
        fail("Expected an authentication error!");
    } catch (SaslAuthenticationException e) {
        // Expected: invalid credentials should produce exactly this authentication error.
    } catch (Exception e) {
        fail("Expected only an authentication error, but another error occurred: " + e.getMessage());
    }
}
Also used : HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) SaslAuthenticationException(org.apache.kafka.common.errors.SaslAuthenticationException) Test(org.junit.Test)
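
The saslClientConfigs map above is assembled by the test harness and is not shown here. A minimal sketch of what such a map could contain, assuming a SASL/PLAIN listener; the mechanism, username, and password below are illustrative, not the test's actual values:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.config.SaslConfigs;

Map<String, Object> saslClientConfigs = new HashMap<>();
// Assumed: the broker exposes a SASL_PLAINTEXT listener with the PLAIN mechanism.
saslClientConfigs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
saslClientConfigs.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
// A deliberately wrong password makes the handshake fail, which is what the test expects.
saslClientConfigs.put(SaslConfigs.SASL_JAAS_CONFIG,
        "org.apache.kafka.common.security.plain.PlainLoginModule required "
                + "username=\"client\" password=\"wrong-password\";");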

Example 17 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the apache-kafka-on-k8s project by banzaicloud.

From the class KafkaConsumerTest, method testInterceptorConstructorClose.

@Test
public void testInterceptorConstructorClose() throws Exception {
    try {
        Properties props = new Properties();
        // test with client ID assigned by KafkaConsumer
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
        props.setProperty(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, MockConsumerInterceptor.class.getName());
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer());
        assertEquals(1, MockConsumerInterceptor.INIT_COUNT.get());
        assertEquals(0, MockConsumerInterceptor.CLOSE_COUNT.get());
        consumer.close();
        assertEquals(1, MockConsumerInterceptor.INIT_COUNT.get());
        assertEquals(1, MockConsumerInterceptor.CLOSE_COUNT.get());
        // Cluster metadata will only be updated on calling poll.
        Assert.assertNull(MockConsumerInterceptor.CLUSTER_META.get());
    } finally {
        // cleanup since we are using mutable static variables in MockConsumerInterceptor
        MockConsumerInterceptor.resetCounters();
    }
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Properties(java.util.Properties) MockConsumerInterceptor(org.apache.kafka.test.MockConsumerInterceptor) Test(org.junit.Test)
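
MockConsumerInterceptor is a Kafka test utility that records its lifecycle calls in mutable static counters, which is why the finally block resets them. A simplified interceptor in the same spirit, as a hedged sketch rather than the actual test class:

import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.kafka.clients.consumer.ConsumerInterceptor;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;

public class CountingConsumerInterceptor implements ConsumerInterceptor<String, String> {

    public static final AtomicInteger INIT_COUNT = new AtomicInteger();
    public static final AtomicInteger CLOSE_COUNT = new AtomicInteger();

    // KafkaConsumer instantiates interceptor classes reflectively via the no-arg
    // constructor, so construction is a natural place to count initializations.
    public CountingConsumerInterceptor() {
        INIT_COUNT.incrementAndGet();
    }

    @Override
    public ConsumerRecords<String, String> onConsume(ConsumerRecords<String, String> records) {
        return records; // pass records through unchanged
    }

    @Override
    public void onCommit(Map<TopicPartition, OffsetAndMetadata> offsets) {
        // no-op in this sketch
    }

    @Override
    public void close() {
        CLOSE_COUNT.incrementAndGet();
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // no-op in this sketch
    }

    // Cleanup hook analogous to MockConsumerInterceptor.resetCounters().
    public static void resetCounters() {
        INIT_COUNT.set(0);
        CLOSE_COUNT.set(0);
    }
}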

Example 18 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the apache-kafka-on-k8s project by banzaicloud.

From the class EosTestDriver, method verifyMin.

private static void verifyMin(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> minPerTopicPerPartition) {
    final StringDeserializer stringDeserializer = new StringDeserializer();
    final IntegerDeserializer integerDeserializer = new IntegerDeserializer();
    final HashMap<String, Integer> currentMinPerKey = new HashMap<>();
    // For each output partition, replay the matching input partition and check that
    // every emitted record equals the running minimum for its key.
    for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : minPerTopicPerPartition.entrySet()) {
        final TopicPartition inputTopicPartition = new TopicPartition("data", partitionRecords.getKey().partition());
        final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition);
        final List<ConsumerRecord<byte[], byte[]>> partitionMin = partitionRecords.getValue();
        if (partitionInput.size() != partitionMin.size()) {
            throw new RuntimeException("Result verification failed: expected " + partitionInput.size() + " records for " + partitionRecords.getKey() + " but received " + partitionMin.size());
        }
        final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator();
        for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionMin) {
            final ConsumerRecord<byte[], byte[]> input = inputRecords.next();
            final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key());
            final int receivedValue = integerDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value());
            final String key = stringDeserializer.deserialize(input.topic(), input.key());
            final int value = integerDeserializer.deserialize(input.topic(), input.value());
            // Seed the running minimum on first sight of a key; otherwise fold in the new value.
            Integer min = currentMinPerKey.get(key);
            if (min == null) {
                min = value;
            } else {
                min = Math.min(min, value);
            }
            currentMinPerKey.put(key, min);
            if (!receivedKey.equals(key) || receivedValue != min) {
                throw new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + key + "," + min + "> but was <" + receivedKey + "," + receivedValue + ">");
            }
        }
    }
}
Also used : IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) TopicPartition(org.apache.kafka.common.TopicPartition) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map)
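
The deserialize(topic, bytes) calls are the heart of the verification above: they turn raw byte[] keys and values back into typed data. A standalone round trip, as a minimal sketch with an illustrative topic name and values:

import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

// Serialize a key/value pair the way a producer would...
byte[] rawKey = new StringSerializer().serialize("data", "k1");
byte[] rawValue = new IntegerSerializer().serialize("data", 42);
// ...then deserialize it back; the topic argument provides context only.
String key = new StringDeserializer().deserialize("data", rawKey); // "k1"
int value = new IntegerDeserializer().deserialize("data", rawValue); // 42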

Example 19 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the apache-kafka-on-k8s project by banzaicloud.

From the class FetcherTest, method testReadCommittedAbortMarkerWithNoData.

@Test
public void testReadCommittedAbortMarkerWithNoData() {
    Fetcher<String, String> fetcher = createFetcher(subscriptions, new Metrics(), new StringDeserializer(), new StringDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    long producerId = 1L;
    abortTransaction(buffer, producerId, 5L);
    appendTransactionalRecords(buffer, producerId, 6L, new SimpleRecord("6".getBytes(), null), new SimpleRecord("7".getBytes(), null), new SimpleRecord("8".getBytes(), null));
    commitTransaction(buffer, producerId, 9L);
    buffer.flip();
    // send the fetch
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, fetcher.sendFetches());
    // Prepare the response. The aborted transactions begin at offsets that are no longer in the log.
    List<FetchResponse.AbortedTransaction> abortedTransactions = new ArrayList<>();
    abortedTransactions.add(new FetchResponse.AbortedTransaction(producerId, 0L));
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(MemoryRecords.readableRecords(buffer), abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(0);
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<String, String>>> allFetchedRecords = fetcher.fetchedRecords();
    assertTrue(allFetchedRecords.containsKey(tp0));
    List<ConsumerRecord<String, String>> fetchedRecords = allFetchedRecords.get(tp0);
    assertEquals(3, fetchedRecords.size());
    assertEquals(Arrays.asList(6L, 7L, 8L), collectRecordOffsets(fetchedRecords));
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ArrayList(java.util.ArrayList) FetchResponse(org.apache.kafka.common.requests.FetchResponse) ByteBuffer(java.nio.ByteBuffer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Metrics(org.apache.kafka.common.metrics.Metrics) TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) List(java.util.List) Test(org.junit.Test)
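
collectRecordOffsets is a FetcherTest helper whose body is not shown on this page. A plausible reconstruction (an assumption, not the verbatim source): it simply gathers each record's offset in fetch order, which is what the final assertion compares against.

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRecord;

// Hypothetical reconstruction of the helper used in the assertions above.
private static List<Long> collectRecordOffsets(List<ConsumerRecord<String, String>> records) {
    final List<Long> offsets = new ArrayList<>(records.size());
    for (final ConsumerRecord<String, String> record : records)
        offsets.add(record.offset());
    return offsets;
}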

Example 20 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the apache-kafka-on-k8s project by banzaicloud.

From the class FetcherTest, method testReadCommittedWithCompactedTopic.

@Test
public void testReadCommittedWithCompactedTopic() {
    Fetcher<String, String> fetcher = createFetcher(subscriptions, new Metrics(), new StringDeserializer(), new StringDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    long pid1 = 1L;
    long pid2 = 2L;
    long pid3 = 3L;
    appendTransactionalRecords(buffer, pid3, 3L, new SimpleRecord("3".getBytes(), "value".getBytes()), new SimpleRecord("4".getBytes(), "value".getBytes()));
    appendTransactionalRecords(buffer, pid2, 15L, new SimpleRecord("15".getBytes(), "value".getBytes()), new SimpleRecord("16".getBytes(), "value".getBytes()), new SimpleRecord("17".getBytes(), "value".getBytes()));
    appendTransactionalRecords(buffer, pid1, 22L, new SimpleRecord("22".getBytes(), "value".getBytes()), new SimpleRecord("23".getBytes(), "value".getBytes()));
    abortTransaction(buffer, pid2, 28L);
    appendTransactionalRecords(buffer, pid3, 30L, new SimpleRecord("30".getBytes(), "value".getBytes()), new SimpleRecord("31".getBytes(), "value".getBytes()), new SimpleRecord("32".getBytes(), "value".getBytes()));
    commitTransaction(buffer, pid3, 35L);
    appendTransactionalRecords(buffer, pid1, 39L, new SimpleRecord("39".getBytes(), "value".getBytes()), new SimpleRecord("40".getBytes(), "value".getBytes()));
    // transaction from pid1 is aborted, but the marker is not included in the fetch
    buffer.flip();
    // send the fetch
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, fetcher.sendFetches());
    // Prepare the response. The aborted transactions begin at offsets that are no longer in the log.
    List<FetchResponse.AbortedTransaction> abortedTransactions = new ArrayList<>();
    abortedTransactions.add(new FetchResponse.AbortedTransaction(pid2, 6L));
    abortedTransactions.add(new FetchResponse.AbortedTransaction(pid1, 0L));
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(MemoryRecords.readableRecords(buffer), abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(0);
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<String, String>>> allFetchedRecords = fetcher.fetchedRecords();
    assertTrue(allFetchedRecords.containsKey(tp0));
    List<ConsumerRecord<String, String>> fetchedRecords = allFetchedRecords.get(tp0);
    assertEquals(5, fetchedRecords.size());
    assertEquals(Arrays.asList(3L, 4L, 30L, 31L, 32L), collectRecordOffsets(fetchedRecords));
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ArrayList(java.util.ArrayList) FetchResponse(org.apache.kafka.common.requests.FetchResponse) ByteBuffer(java.nio.ByteBuffer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Metrics(org.apache.kafka.common.metrics.Metrics) TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) List(java.util.List) Test(org.junit.Test)
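
Outside the Fetcher-level test harness, an application requests the same READ_COMMITTED filtering through consumer configuration. A minimal sketch; the broker address and group id below are placeholders:

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

Properties props = new Properties();
props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "read-committed-readers"); // placeholder
// Only records from committed transactions (plus non-transactional records) are returned.
props.setProperty(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
KafkaConsumer<String, String> consumer =
        new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer());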

Aggregations

Usage counts across the indexed examples:

StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 152
Test (org.junit.Test): 91
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 59
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 46
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 35
HashMap (java.util.HashMap): 33
Properties (java.util.Properties): 32
IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer): 31
Windowed (org.apache.kafka.streams.kstream.Windowed): 31
List (java.util.List): 29
KeyValue (org.apache.kafka.streams.KeyValue): 29
IntegrationTest (org.apache.kafka.test.IntegrationTest): 27
ArrayList (java.util.ArrayList): 26
LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer): 25
Map (java.util.Map): 20
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 20
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 17
Serdes (org.apache.kafka.common.serialization.Serdes): 17
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 17
KStream (org.apache.kafka.streams.kstream.KStream): 17