Example 36 with ByteArrayDeserializer

Use of org.apache.kafka.common.serialization.ByteArrayDeserializer in the project apache-kafka-on-k8s by banzaicloud.

From the class KafkaConsumerTest, method testOsDefaultSocketBufferSizes:

@Test
public void testOsDefaultSocketBufferSizes() throws Exception {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999");
    // Selectable.USE_DEFAULT_BUFFER_SIZE (-1) leaves SO_SNDBUF and SO_RCVBUF at the OS defaults
    config.put(ConsumerConfig.SEND_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE);
    config.put(ConsumerConfig.RECEIVE_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE);
    KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    consumer.close();
}
Also used: HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer), Test (org.junit.Test)
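Selectable.USE_DEFAULT_BUFFER_SIZE is the sentinel value -1; when SEND_BUFFER_CONFIG or RECEIVE_BUFFER_CONFIG is -1, the client skips setting the socket buffer sizes and the OS defaults apply. A minimal standalone sketch of the same configuration, using only the standard Kafka clients API (the bootstrap address is a placeholder; constructing the consumer validates the config without contacting a broker):

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

public class OsDefaultBuffersSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // -1 means "do not set the buffer size; use the OS default"
        props.put(ConsumerConfig.SEND_BUFFER_CONFIG, -1);
        props.put(ConsumerConfig.RECEIVE_BUFFER_CONFIG, -1);
        try (KafkaConsumer<byte[], byte[]> consumer =
                 new KafkaConsumer<>(props, new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
            // constructing the client is enough; nothing is subscribed or polled
        }
    }
}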

Example 37 with ByteArrayDeserializer

Use of org.apache.kafka.common.serialization.ByteArrayDeserializer in the project apache-kafka-on-k8s by banzaicloud.

From the class RegexSourceIntegrationTest, method testRegexMatchesTopicsAWhenDeleted:

@Test
public void testRegexMatchesTopicsAWhenDeleted() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-A", "TEST-TOPIC-B");
    final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-B");
    CLUSTER.createTopics("TEST-TOPIC-A", "TEST-TOPIC-B");
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-[A-Z]"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    final List<String> assignedTopics = new ArrayList<>();
    // wrap the Streams consumer so every rebalance records the currently assigned topics
    streams = new KafkaStreams(builder.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {

        @Override
        public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
            return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {

                @Override
                public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                    super.subscribe(topics, new TheConsumerRebalanceListener(assignedTopics, listener));
                }
            };
        }
    });
    streams.start();
    TestUtils.waitForCondition(new TestCondition() {

        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(expectedFirstAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);
    CLUSTER.deleteTopic("TEST-TOPIC-A");
    TestUtils.waitForCondition(new TestCondition() {

        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(expectedSecondAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);
}
Also used: Pattern (java.util.regex.Pattern), KafkaStreams (org.apache.kafka.streams.KafkaStreams), DefaultKafkaClientSupplier (org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier), ArrayList (java.util.ArrayList), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), ConsumerRebalanceListener (org.apache.kafka.clients.consumer.ConsumerRebalanceListener), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), TestCondition (org.apache.kafka.test.TestCondition), ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer), Map (java.util.Map), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
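The helper TheConsumerRebalanceListener is referenced but not shown in this excerpt. A plausible sketch of what it does, assuming it only records the names of the currently assigned topics before delegating to the wrapped listener (this reconstruction is a guess at the project's helper, not its actual source):

import java.util.Collection;
import java.util.List;
import java.util.TreeSet;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.common.TopicPartition;

// Hypothetical reconstruction: record assigned topic names, then delegate.
class TheConsumerRebalanceListener implements ConsumerRebalanceListener {
    private final List<String> assignedTopics;
    private final ConsumerRebalanceListener delegate;

    TheConsumerRebalanceListener(final List<String> assignedTopics, final ConsumerRebalanceListener delegate) {
        this.assignedTopics = assignedTopics;
        this.delegate = delegate;
    }

    @Override
    public void onPartitionsRevoked(final Collection<TopicPartition> partitions) {
        assignedTopics.clear(); // the next assignment replaces the old one
        delegate.onPartitionsRevoked(partitions);
    }

    @Override
    public void onPartitionsAssigned(final Collection<TopicPartition> partitions) {
        final TreeSet<String> topics = new TreeSet<>(); // deduplicated, sorted topic names
        for (final TopicPartition partition : partitions) {
            topics.add(partition.topic());
        }
        assignedTopics.addAll(topics);
        delegate.onPartitionsAssigned(partitions);
    }
}

Sorting matters here because the test compares the recorded list against Arrays.asList("TEST-TOPIC-A", "TEST-TOPIC-B") with equals, which is order-sensitive.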

Example 38 with ByteArrayDeserializer

Use of org.apache.kafka.common.serialization.ByteArrayDeserializer in the project apache-kafka-on-k8s by banzaicloud.

From the class RegexSourceIntegrationTest, method testRegexMatchesTopicsAWhenCreated:

@Test
public void testRegexMatchesTopicsAWhenCreated() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-1");
    final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-1", "TEST-TOPIC-2");
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);
    CLUSTER.createTopic("TEST-TOPIC-1");
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-\\d"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    final List<String> assignedTopics = new ArrayList<>();
    streams = new KafkaStreams(builder.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {

        @Override
        public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
            return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {

                @Override
                public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                    super.subscribe(topics, new TheConsumerRebalanceListener(assignedTopics, listener));
                }
            };
        }
    });
    streams.start();
    TestUtils.waitForCondition(new TestCondition() {

        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(expectedFirstAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);
    CLUSTER.createTopic("TEST-TOPIC-2");
    TestUtils.waitForCondition(new TestCondition() {

        @Override
        public boolean conditionMet() {
            return assignedTopics.equals(expectedSecondAssignment);
        }
    }, STREAM_TASKS_NOT_UPDATED);
}
Also used: Pattern (java.util.regex.Pattern), KafkaStreams (org.apache.kafka.streams.KafkaStreams), DefaultKafkaClientSupplier (org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier), ArrayList (java.util.ArrayList), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), ConsumerRebalanceListener (org.apache.kafka.clients.consumer.ConsumerRebalanceListener), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), TestCondition (org.apache.kafka.test.TestCondition), ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer), Map (java.util.Map), StreamsConfig (org.apache.kafka.streams.StreamsConfig), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
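The same pattern subscription works directly on a plain KafkaConsumer, outside Kafka Streams. A minimal sketch (bootstrap address and group id are placeholders); pattern subscription requires a group id, and topics created after the subscription are picked up on a later metadata refresh, which is exactly what the test above waits for:

import java.util.Collection;
import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

public class PatternSubscribeSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "pattern-sketch"); // required for subscribe()
        try (KafkaConsumer<byte[], byte[]> consumer =
                 new KafkaConsumer<>(props, new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
            consumer.subscribe(Pattern.compile("TEST-TOPIC-\\d"), new ConsumerRebalanceListener() {
                @Override
                public void onPartitionsRevoked(final Collection<TopicPartition> partitions) {
                }

                @Override
                public void onPartitionsAssigned(final Collection<TopicPartition> partitions) {
                    // fires again when newly created topics match the pattern
                    System.out.println("assigned: " + partitions);
                }
            });
            consumer.poll(1000); // polling drives group membership and metadata refresh
        }
    }
}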

Example 39 with ByteArrayDeserializer

Use of org.apache.kafka.common.serialization.ByteArrayDeserializer in the project apache-kafka-on-k8s by banzaicloud.

From the class RegexSourceIntegrationTest, method testMultipleConsumersCanReadFromPartitionedTopic:

@Test
public void testMultipleConsumersCanReadFromPartitionedTopic() throws Exception {
    KafkaStreams partitionedStreamsLeader = null;
    KafkaStreams partitionedStreamsFollower = null;
    try {
        final Serde<String> stringSerde = Serdes.String();
        final StreamsBuilder builderLeader = new StreamsBuilder();
        final StreamsBuilder builderFollower = new StreamsBuilder();
        final List<String> expectedAssignment = Arrays.asList(PARTITIONED_TOPIC_1, PARTITIONED_TOPIC_2);
        final KStream<String, String> partitionedStreamLeader = builderLeader.stream(Pattern.compile("partitioned-\\d"));
        final KStream<String, String> partitionedStreamFollower = builderFollower.stream(Pattern.compile("partitioned-\\d"));
        partitionedStreamLeader.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
        partitionedStreamFollower.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
        final List<String> leaderAssignment = new ArrayList<>();
        final List<String> followerAssignment = new ArrayList<>();
        // each instance records its own assignment via a wrapping rebalance listener
        partitionedStreamsLeader = new KafkaStreams(builderLeader.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {

            @Override
            public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
                return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {

                    @Override
                    public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                        super.subscribe(topics, new TheConsumerRebalanceListener(leaderAssignment, listener));
                    }
                };
            }
        });
        partitionedStreamsFollower = new KafkaStreams(builderFollower.build(), streamsConfiguration, new DefaultKafkaClientSupplier() {

            @Override
            public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
                return new KafkaConsumer<byte[], byte[]>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()) {

                    @Override
                    public void subscribe(final Pattern topics, final ConsumerRebalanceListener listener) {
                        super.subscribe(topics, new TheConsumerRebalanceListener(followerAssignment, listener));
                    }
                };
            }
        });
        partitionedStreamsLeader.start();
        partitionedStreamsFollower.start();
        TestUtils.waitForCondition(new TestCondition() {

            @Override
            public boolean conditionMet() {
                return followerAssignment.equals(expectedAssignment) && leaderAssignment.equals(expectedAssignment);
            }
        }, "topic assignment not completed");
    } finally {
        if (partitionedStreamsLeader != null) {
            partitionedStreamsLeader.close();
        }
        if (partitionedStreamsFollower != null) {
            partitionedStreamsFollower.close();
        }
    }
}
Also used: Pattern (java.util.regex.Pattern), KafkaStreams (org.apache.kafka.streams.KafkaStreams), DefaultKafkaClientSupplier (org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier), ArrayList (java.util.ArrayList), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), ConsumerRebalanceListener (org.apache.kafka.clients.consumer.ConsumerRebalanceListener), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), TestCondition (org.apache.kafka.test.TestCondition), ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer), Map (java.util.Map), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
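Both instances are built from the same streamsConfiguration and therefore share the same application.id, which Kafka Streams uses as the consumer group id; that is why the partitions of the two matched topics end up split between the "leader" and "follower" while both observe the same topic assignment. A sketch of the kind of configuration the test's streamsConfiguration presumably carries (the id and address are placeholders):

import java.util.Properties;
import org.apache.kafka.streams.StreamsConfig;

public class SharedAppIdConfigSketch {
    static Properties streamsConfiguration() {
        final Properties props = new Properties();
        // same application.id => same consumer group => partitions are balanced
        // across every running instance of the application
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "regex-source-integration-test");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        return props;
    }
}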

Example 40 with ByteArrayDeserializer

Use of org.apache.kafka.common.serialization.ByteArrayDeserializer in the project apache-kafka-on-k8s by banzaicloud.

From the class StreamsResetter, method maybeResetInputAndSeekToEndIntermediateTopicOffsets:

private int maybeResetInputAndSeekToEndIntermediateTopicOffsets(final Map consumerConfig, final boolean dryRun) throws Exception {
    final List<String> inputTopics = options.valuesOf(inputTopicsOption);
    final List<String> intermediateTopics = options.valuesOf(intermediateTopicsOption);
    int topicNotFound = EXIT_CODE_SUCCESS;
    final List<String> notFoundInputTopics = new ArrayList<>();
    final List<String> notFoundIntermediateTopics = new ArrayList<>();
    final String groupId = options.valueOf(applicationIdOption);
    if (inputTopics.isEmpty() && intermediateTopics.isEmpty()) {
        System.out.println("No input or intermediate topics specified. Skipping seek.");
        return EXIT_CODE_SUCCESS;
    }
    if (!inputTopics.isEmpty()) {
        System.out.println("Reset-offsets for input topics " + inputTopics);
    }
    if (!intermediateTopics.isEmpty()) {
        System.out.println("Seek-to-end for intermediate topics " + intermediateTopics);
    }
    final Set<String> topicsToSubscribe = new HashSet<>(inputTopics.size() + intermediateTopics.size());
    for (final String topic : inputTopics) {
        if (!allTopics.contains(topic)) {
            notFoundInputTopics.add(topic);
        } else {
            topicsToSubscribe.add(topic);
        }
    }
    for (final String topic : intermediateTopics) {
        if (!allTopics.contains(topic)) {
            notFoundIntermediateTopics.add(topic);
        } else {
            topicsToSubscribe.add(topic);
        }
    }
    if (!notFoundInputTopics.isEmpty()) {
        System.out.println("Following input topics are not found, skipping them");
        for (final String topic : notFoundInputTopics) {
            System.out.println("Topic: " + topic);
        }
        topicNotFound = EXIT_CODE_ERROR;
    }
    if (!notFoundIntermediateTopics.isEmpty()) {
        System.out.println("Following intermediate topics are not found, skipping them");
        for (final String topic : notFoundIntermediateTopics) {
            System.out.println("Topic:" + topic);
        }
        topicNotFound = EXIT_CODE_ERROR;
    }
    // nothing to subscribe to: return early, because the consumer cannot
    // poll with an empty subscription
    if (topicsToSubscribe.isEmpty()) {
        return topicNotFound;
    }
    final Properties config = new Properties();
    config.putAll(consumerConfig);
    config.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    config.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    try (final KafkaConsumer<byte[], byte[]> client = new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
        client.subscribe(topicsToSubscribe);
        // a single short poll joins the consumer group and triggers partition assignment
        client.poll(1);
        final Set<TopicPartition> partitions = client.assignment();
        final Set<TopicPartition> inputTopicPartitions = new HashSet<>();
        final Set<TopicPartition> intermediateTopicPartitions = new HashSet<>();
        for (final TopicPartition p : partitions) {
            final String topic = p.topic();
            if (isInputTopic(topic)) {
                inputTopicPartitions.add(p);
            } else if (isIntermediateTopic(topic)) {
                intermediateTopicPartitions.add(p);
            } else {
                System.err.println("Skipping invalid partition: " + p);
            }
        }
        maybeReset(groupId, client, inputTopicPartitions);
        maybeSeekToEnd(groupId, client, intermediateTopicPartitions);
        if (!dryRun) {
            // reading the position materializes the new offset of each partition,
            // so the commitSync() below persists the reset for the group
            for (final TopicPartition p : partitions) {
                client.position(p);
            }
            client.commitSync();
        }
    } catch (final Exception e) {
        System.err.println("ERROR: Resetting offsets failed.");
        throw e;
    }
    System.out.println("Done.");
    return topicNotFound;
}
Also used: ArrayList (java.util.ArrayList), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), Properties (java.util.Properties), DatatypeConfigurationException (javax.xml.datatype.DatatypeConfigurationException), OptionException (joptsimple.OptionException), ParseException (java.text.ParseException), IOException (java.io.IOException), TopicPartition (org.apache.kafka.common.TopicPartition), ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer), HashSet (java.util.HashSet)
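The helpers maybeReset and maybeSeekToEnd are not shown in this excerpt. In the default case (no target offset or timestamp requested), input topics are rewound to the beginning and intermediate topics are fast-forwarded to the end; a hedged sketch of that core behavior, using only standard KafkaConsumer calls (the tool's full option handling is omitted):

import java.util.Set;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

// Sketch of the default reset behavior, not the tool's actual helpers.
final class ResetSketch {
    // rewind input topics so the application reprocesses them from the start
    static void resetInputTopics(final KafkaConsumer<byte[], byte[]> client,
                                 final Set<TopicPartition> inputTopicPartitions) {
        client.seekToBeginning(inputTopicPartitions);
    }

    // skip everything already written to intermediate topics
    static void seekToEndIntermediateTopics(final KafkaConsumer<byte[], byte[]> client,
                                            final Set<TopicPartition> intermediateTopicPartitions) {
        client.seekToEnd(intermediateTopicPartitions);
    }
}

After these seeks, the position() loop and commitSync() in the method above persist the moved offsets for the application's group id, unless dryRun is set.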

Aggregations

ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer): 59 usages
TopicPartition (org.apache.kafka.common.TopicPartition): 24
ArrayList (java.util.ArrayList): 22
Test (org.junit.Test): 22
Test (org.junit.jupiter.api.Test): 22
List (java.util.List): 17
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 17
HashMap (java.util.HashMap): 16
ByteBuffer (java.nio.ByteBuffer): 14
LinkedHashMap (java.util.LinkedHashMap): 14
MemoryRecords (org.apache.kafka.common.record.MemoryRecords): 14
SimpleRecord (org.apache.kafka.common.record.SimpleRecord): 14
HashSet (java.util.HashSet): 10
Properties (java.util.Properties): 10
Metrics (org.apache.kafka.common.metrics.Metrics): 10
Arrays.asList (java.util.Arrays.asList): 9
Collections.emptyList (java.util.Collections.emptyList): 9
Collections.singletonList (java.util.Collections.singletonList): 9
Map (java.util.Map): 9
ConsumerRebalanceListener (org.apache.kafka.clients.consumer.ConsumerRebalanceListener): 7