Search in sources:

Example 91 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project apache-kafka-on-k8s by banzaicloud.

From class KafkaConsumerTest, method newConsumer:

/**
 * Builds a {@link KafkaConsumer} around the supplied mock {@link KafkaClient} so the
 * consumer's internal components can be exercised deterministically in tests.
 *
 * @param time               clock used by all internal components
 * @param client             (mock) network client the consumer talks through
 * @param metadata           cluster metadata shared with the coordinator and fetcher
 * @param assignor           the single partition assignor to register
 * @param resetStrategy      offset reset strategy for the subscription state
 * @param autoCommitEnabled  whether the coordinator auto-commits offsets
 * @return a fully wired consumer using String deserializers for keys and values
 */
private KafkaConsumer<String, String> newConsumer(Time time, KafkaClient client, Metadata metadata, PartitionAssignor assignor, OffsetResetStrategy resetStrategy, boolean autoCommitEnabled) {
    // Fixed identity and timing configuration for the consumer under test.
    final String clientId = "mock-consumer";
    final String groupId = "mock-group";
    final String metricGroupPrefix = "consumer";
    final long retryBackoffMs = 100;
    final long requestTimeoutMs = 30000;
    final int rebalanceTimeoutMs = 60000;
    final boolean excludeInternalTopics = true;
    // Fetch tuning: minimal fetch threshold, effectively unbounded sizes/record counts.
    final int minBytes = 1;
    final int maxBytes = Integer.MAX_VALUE;
    final int maxWaitMs = 500;
    final int fetchSize = 1024 * 1024;
    final int maxPollRecords = Integer.MAX_VALUE;
    final boolean checkCrcs = true;
    final Deserializer<String> keyDeserializer = new StringDeserializer();
    final Deserializer<String> valueDeserializer = new StringDeserializer();
    final List<PartitionAssignor> assignors = singletonList(assignor);
    // No interceptors are installed; an empty chain keeps the code path active but inert.
    final ConsumerInterceptors<String, String> interceptors = new ConsumerInterceptors<>(Collections.<ConsumerInterceptor<String, String>>emptyList());
    final Metrics metrics = new Metrics();
    final ConsumerMetrics metricsRegistry = new ConsumerMetrics(metricGroupPrefix);
    final SubscriptionState subscriptions = new SubscriptionState(resetStrategy);
    final LogContext logContext = new LogContext();
    // Wire the internal machinery together around the mock network client.
    // NOTE(review): heartbeatIntervalMs, sessionTimeoutMs and autoCommitIntervalMs are
    // fields of the enclosing test class (not visible in this snippet).
    final ConsumerNetworkClient consumerClient = new ConsumerNetworkClient(logContext, client, metadata, time, retryBackoffMs, requestTimeoutMs, heartbeatIntervalMs);
    final ConsumerCoordinator consumerCoordinator = new ConsumerCoordinator(logContext, consumerClient, groupId, rebalanceTimeoutMs, sessionTimeoutMs, heartbeatIntervalMs, assignors, metadata, subscriptions, metrics, metricGroupPrefix, time, retryBackoffMs, autoCommitEnabled, autoCommitIntervalMs, interceptors, excludeInternalTopics, true);
    final Fetcher<String, String> fetcher = new Fetcher<>(logContext, consumerClient, minBytes, maxBytes, maxWaitMs, fetchSize, maxPollRecords, checkCrcs, keyDeserializer, valueDeserializer, metadata, subscriptions, metrics, metricsRegistry.fetcherMetrics, time, retryBackoffMs, requestTimeoutMs, IsolationLevel.READ_UNCOMMITTED);
    return new KafkaConsumer<>(logContext, clientId, consumerCoordinator, keyDeserializer, valueDeserializer, fetcher, interceptors, time, consumerClient, metrics, subscriptions, metadata, retryBackoffMs, requestTimeoutMs, assignors);
}
Also used : ConsumerInterceptors(org.apache.kafka.clients.consumer.internals.ConsumerInterceptors) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ConsumerCoordinator(org.apache.kafka.clients.consumer.internals.ConsumerCoordinator) LogContext(org.apache.kafka.common.utils.LogContext) ConsumerMetrics(org.apache.kafka.clients.consumer.internals.ConsumerMetrics) Metrics(org.apache.kafka.common.metrics.Metrics) ConsumerNetworkClient(org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient) SubscriptionState(org.apache.kafka.clients.consumer.internals.SubscriptionState) Fetcher(org.apache.kafka.clients.consumer.internals.Fetcher) PartitionAssignor(org.apache.kafka.clients.consumer.internals.PartitionAssignor) ConsumerMetrics(org.apache.kafka.clients.consumer.internals.ConsumerMetrics)

Example 92 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project apache-kafka-on-k8s by banzaicloud.

From class EosTestDriver, method verifyMax:

/**
 * Verifies the "max" output topic: for every input record, the corresponding output
 * record must carry the running per-key maximum of all input values seen so far.
 * Input and output are matched positionally, partition by partition (output partition i
 * corresponds to partition i of the "repartition" input topic).
 *
 * @param inputPerTopicPerPartition records read from the "repartition" topic, keyed by partition
 * @param maxPerTopicPerPartition   records read from the "max" output topic, keyed by partition
 * @throws RuntimeException if record counts differ or any key/value pair mismatches
 */
private static void verifyMax(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> maxPerTopicPerPartition) {
    final StringDeserializer stringDeserializer = new StringDeserializer();
    final IntegerDeserializer integerDeserializer = new IntegerDeserializer();
    // Running maximum observed so far for each key.
    // (Renamed from the misleading "currentMinPerKey" — it tracks maxima, not minima.)
    final Map<String, Integer> currentMaxPerKey = new HashMap<>();
    for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : maxPerTopicPerPartition.entrySet()) {
        final TopicPartition inputTopicPartition = new TopicPartition("repartition", partitionRecords.getKey().partition());
        final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition);
        final List<ConsumerRecord<byte[], byte[]>> partitionMax = partitionRecords.getValue();
        // The max topic must echo exactly one record per input record.
        if (partitionInput.size() != partitionMax.size()) {
            throw new RuntimeException("Result verification failed: expected " + partitionInput.size() + " records for " + partitionRecords.getKey() + " but received " + partitionMax.size());
        }
        final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator();
        for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionMax) {
            final ConsumerRecord<byte[], byte[]> input = inputRecords.next();
            final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key());
            final int receivedValue = integerDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value());
            final String key = stringDeserializer.deserialize(input.topic(), input.key());
            final int value = integerDeserializer.deserialize(input.topic(), input.value());
            // Fold the input value into the running maximum (primitive int avoids boxing churn).
            final int max = Math.max(currentMaxPerKey.getOrDefault(key, Integer.MIN_VALUE), value);
            currentMaxPerKey.put(key, max);
            if (!receivedKey.equals(key) || receivedValue != max) {
                throw new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + key + "," + max + "> but was <" + receivedKey + "," + receivedValue + ">");
            }
        }
    }
}
Also used : IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) TopicPartition(org.apache.kafka.common.TopicPartition) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map)

Example 93 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project apache-kafka-on-k8s by banzaicloud.

From class EosTestDriver, method verifyCnt:

/**
 * Verifies the "cnt" output topic: for every input record, the corresponding output
 * record must carry the running per-key count of input records seen so far.
 * Input and output are matched positionally, partition by partition (output partition i
 * corresponds to partition i of the "repartition" input topic).
 *
 * @param inputPerTopicPerPartition records read from the "repartition" topic, keyed by partition
 * @param cntPerTopicPerPartition   records read from the "cnt" output topic, keyed by partition
 * @throws RuntimeException if record counts differ or any key/value pair mismatches
 */
private static void verifyCnt(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> cntPerTopicPerPartition) {
    final StringDeserializer stringDeserializer = new StringDeserializer();
    final LongDeserializer longDeserializer = new LongDeserializer();
    // Number of input records observed so far for each key.
    // (Renamed from the misleading "currentSumPerKey" — it tracks counts, not sums.)
    final Map<String, Long> currentCountPerKey = new HashMap<>();
    for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : cntPerTopicPerPartition.entrySet()) {
        final TopicPartition inputTopicPartition = new TopicPartition("repartition", partitionRecords.getKey().partition());
        final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition);
        final List<ConsumerRecord<byte[], byte[]>> partitionCnt = partitionRecords.getValue();
        // The cnt topic must echo exactly one record per input record.
        if (partitionInput.size() != partitionCnt.size()) {
            throw new RuntimeException("Result verification failed: expected " + partitionInput.size() + " records for " + partitionRecords.getKey() + " but received " + partitionCnt.size());
        }
        final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator();
        for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionCnt) {
            final ConsumerRecord<byte[], byte[]> input = inputRecords.next();
            final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key());
            final long receivedValue = longDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value());
            final String key = stringDeserializer.deserialize(input.topic(), input.key());
            // Increment the running count (primitive long avoids the boxed ++cnt of the original).
            final long cnt = currentCountPerKey.getOrDefault(key, 0L) + 1L;
            currentCountPerKey.put(key, cnt);
            if (!receivedKey.equals(key) || receivedValue != cnt) {
                throw new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + key + "," + cnt + "> but was <" + receivedKey + "," + receivedValue + ">");
            }
        }
    }
}
Also used : HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) TopicPartition(org.apache.kafka.common.TopicPartition) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map)

Example 94 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project apache-kafka-on-k8s by banzaicloud.

From class EosTestDriver, method verifyReceivedAllRecords:

/**
 * Checks that every record written to the "data" input topic was received back,
 * unchanged and in order. Partitions are matched one-to-one (received partition i
 * against partition i of "data") and records are compared positionally by
 * deserialized key and value.
 *
 * @param expectedRecords records originally produced, keyed by partition of "data"
 * @param receivedRecords records read back, keyed by partition
 * @throws RuntimeException if the partition counts differ or any record mismatches
 */
private static void verifyReceivedAllRecords(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> expectedRecords, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> receivedRecords) {
    // Both maps must cover the same set of partitions before record-level comparison.
    if (expectedRecords.size() != receivedRecords.size()) {
        throw new RuntimeException("Result verification failed. Received " + receivedRecords.size() + " records but expected " + expectedRecords.size());
    }
    final StringDeserializer keyDeserializer = new StringDeserializer();
    final IntegerDeserializer valueDeserializer = new IntegerDeserializer();
    for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> entry : receivedRecords.entrySet()) {
        // Received partition i corresponds to partition i of the "data" input topic.
        final TopicPartition dataPartition = new TopicPartition("data", entry.getKey().partition());
        final Iterator<ConsumerRecord<byte[], byte[]>> expectedIterator = expectedRecords.get(dataPartition).iterator();
        for (final ConsumerRecord<byte[], byte[]> received : entry.getValue()) {
            final ConsumerRecord<byte[], byte[]> expected = expectedIterator.next();
            final String receivedKey = keyDeserializer.deserialize(received.topic(), received.key());
            final int receivedValue = valueDeserializer.deserialize(received.topic(), received.value());
            final String expectedKey = keyDeserializer.deserialize(expected.topic(), expected.key());
            final int expectedValue = valueDeserializer.deserialize(expected.topic(), expected.value());
            if (!receivedKey.equals(expectedKey) || receivedValue != expectedValue) {
                throw new RuntimeException("Result verification failed for " + received + " expected <" + expectedKey + "," + expectedValue + "> but was <" + receivedKey + "," + receivedValue + ">");
            }
        }
    }
}
Also used : IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) TopicPartition(org.apache.kafka.common.TopicPartition) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord)

Example 95 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project apache-kafka-on-k8s by banzaicloud.

From class EosTestDriver, method verifyAllTransactionFinished:

/**
 * Verifies that no transaction is left open on any output topic partition: it seeks the
 * consumer to the end of every partition, writes one marker record <"key","value"> to each
 * partition with an idempotent producer, and then polls until that marker is read back from
 * every partition. If an open transaction were blocking a partition, the marker would never
 * become readable there and the method would time out.
 *
 * @param consumer           consumer (byte[] key/value) used to read back the marker records
 * @param kafka              bootstrap servers for the verification producer
 * @param withRepartitioning whether the repartitioning topology variant ran, which adds the
 *                           "repartition", "max" and "cnt" topics to the set to verify
 * @throws RuntimeException if an unexpected record is read, a record fails to deserialize,
 *                          or no new record arrives within MAX_IDLE_TIME_MS
 */
private static void verifyAllTransactionFinished(final KafkaConsumer<byte[], byte[]> consumer, final String kafka, final boolean withRepartitioning) {
    final String[] topics;
    if (withRepartitioning) {
        topics = new String[] { "echo", "min", "sum", "repartition", "max", "cnt" };
    } else {
        topics = new String[] { "echo", "min", "sum" };
    }
    final List<TopicPartition> partitions = getAllPartitions(consumer, topics);
    // Position at the log end so only the marker records written below are consumed.
    consumer.assign(partitions);
    consumer.seekToEnd(partitions);
    for (final TopicPartition tp : partitions) {
        System.out.println(tp + " at position " + consumer.position(tp));
    }
    final Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, "VerifyProducer");
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producerProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
    // Write one marker record per partition; try-with-resources closes (and thus flushes)
    // the producer before polling starts.
    try (final KafkaProducer<String, String> producer = new KafkaProducer<>(producerProps)) {
        for (final TopicPartition tp : partitions) {
            final ProducerRecord<String, String> record = new ProducerRecord<>(tp.topic(), tp.partition(), "key", "value");
            producer.send(record, new Callback() {

                @Override
                public void onCompletion(final RecordMetadata metadata, final Exception exception) {
                    // A send failure means verification cannot proceed; abort the driver.
                    if (exception != null) {
                        exception.printStackTrace(System.err);
                        System.err.flush();
                        Exit.exit(1);
                    }
                }
            });
        }
    }
    final StringDeserializer stringDeserializer = new StringDeserializer();
    // Poll until every partition's marker has been seen; the deadline is extended
    // whenever any record arrives, so only true idleness triggers the timeout.
    long maxWaitTime = System.currentTimeMillis() + MAX_IDLE_TIME_MS;
    while (!partitions.isEmpty() && System.currentTimeMillis() < maxWaitTime) {
        final ConsumerRecords<byte[], byte[]> records = consumer.poll(100);
        if (records.isEmpty()) {
            System.out.println("No data received.");
            for (final TopicPartition tp : partitions) {
                System.out.println(tp + " at position " + consumer.position(tp));
            }
        }
        for (final ConsumerRecord<byte[], byte[]> record : records) {
            maxWaitTime = System.currentTimeMillis() + MAX_IDLE_TIME_MS;
            final String topic = record.topic();
            final TopicPartition tp = new TopicPartition(topic, record.partition());
            try {
                final String key = stringDeserializer.deserialize(topic, record.key());
                final String value = stringDeserializer.deserialize(topic, record.value());
                // Each record must be the marker, and each partition must deliver it exactly once
                // (partitions.remove(tp) both checks and retires the partition).
                if (!("key".equals(key) && "value".equals(value) && partitions.remove(tp))) {
                    throw new RuntimeException("Post transactions verification failed. Received unexpected verification record: " + "Expected record <'key','value'> from one of " + partitions + " but got" + " <" + key + "," + value + "> [" + record.topic() + ", " + record.partition() + "]");
                } else {
                    System.out.println("Verifying " + tp + " successful.");
                }
            } catch (final SerializationException e) {
                throw new RuntimeException("Post transactions verification failed. Received unexpected verification record: " + "Expected record <'key','value'> from one of " + partitions + " but got " + record, e);
            }
        }
    }
    if (!partitions.isEmpty()) {
        throw new RuntimeException("Could not read all verification records. Did not receive any new record within the last " + (MAX_IDLE_TIME_MS / 1000) + " sec.");
    }
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) SerializationException(org.apache.kafka.common.errors.SerializationException) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Properties(java.util.Properties) SerializationException(org.apache.kafka.common.errors.SerializationException) TimeoutException(org.apache.kafka.common.errors.TimeoutException) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) Callback(org.apache.kafka.clients.producer.Callback) TopicPartition(org.apache.kafka.common.TopicPartition) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord)

Aggregations

StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)152 Test (org.junit.Test)91 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)59 TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver)46 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)35 HashMap (java.util.HashMap)33 Properties (java.util.Properties)32 IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer)31 Windowed (org.apache.kafka.streams.kstream.Windowed)31 List (java.util.List)29 KeyValue (org.apache.kafka.streams.KeyValue)29 IntegrationTest (org.apache.kafka.test.IntegrationTest)27 ArrayList (java.util.ArrayList)26 LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer)25 Map (java.util.Map)20 KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer)20 IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer)17 Serdes (org.apache.kafka.common.serialization.Serdes)17 KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp)17 KStream (org.apache.kafka.streams.kstream.KStream)17