
Example 1 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project apache-kafka-on-k8s by banzaicloud.

Class RestoreIntegrationTest, method createStateForRestoration:

private void createStateForRestoration() throws ExecutionException, InterruptedException {
    final Properties producerConfig = new Properties();
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    try (final KafkaProducer<Integer, Integer> producer = new KafkaProducer<>(producerConfig, new IntegerSerializer(), new IntegerSerializer())) {
        // write numberOfKeys integer key/value records to the input topic
        for (int i = 0; i < numberOfKeys; i++) {
            producer.send(new ProducerRecord<>(INPUT_STREAM, i, i));
        }
    }
    final Properties consumerConfig = new Properties();
    consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, applicationId);
    consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
    consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
    final Consumer<Integer, Integer> consumer = new KafkaConsumer<>(consumerConfig);
    final List<TopicPartition> partitions = Arrays.asList(new TopicPartition(INPUT_STREAM, 0), new TopicPartition(INPUT_STREAM, 1));
    consumer.assign(partitions);
    consumer.seekToEnd(partitions);
    // commit offsets for the application's consumer group at one past the end of each partition
    final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    for (final TopicPartition partition : partitions) {
        final long position = consumer.position(partition);
        offsets.put(partition, new OffsetAndMetadata(position + 1));
    }
    consumer.commitSync(offsets);
    consumer.close();
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) HashMap(java.util.HashMap) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) Properties(java.util.Properties) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata)
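
For reference, IntegerSerializer encodes an Integer as four big-endian bytes and IntegerDeserializer reverses the encoding, which is why the test above can produce raw integers with new IntegerSerializer() and later read them back through IntegerDeserializer.class. Below is a minimal, standalone sketch of that round trip; the class name IntegerSerdeRoundTrip is made up for illustration, and the only assumption is that kafka-clients is on the classpath.

import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;

public class IntegerSerdeRoundTrip {
    public static void main(final String[] args) {
        final IntegerSerializer serializer = new IntegerSerializer();
        final IntegerDeserializer deserializer = new IntegerDeserializer();
        // serialize: the Integer becomes four big-endian bytes; the topic name does not affect the encoding
        final byte[] bytes = serializer.serialize("any-topic", 42);
        System.out.println(bytes.length); // prints 4
        // deserialize: the four bytes come back as the original Integer
        System.out.println(deserializer.deserialize("any-topic", bytes)); // prints 42
    }
}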

Example 2 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project apache-kafka-on-k8s by banzaicloud.

Class GlobalStateTaskTest, method shouldNotThrowStreamsExceptionWhenValueDeserializationFails:

@Test
public void shouldNotThrowStreamsExceptionWhenValueDeserializationFails() throws Exception {
    final GlobalStateUpdateTask globalStateTask2 = new GlobalStateUpdateTask(topology, context, stateMgr, new LogAndContinueExceptionHandler(), logContext);
    // the value bytes come from a LongSerializer, so deserializing them as an Integer value fails
    final byte[] key = new IntegerSerializer().serialize(topic2, 1);
    final byte[] recordValue = new LongSerializer().serialize(topic2, 10L);
    // with the LogAndContinueExceptionHandler the failure is logged and swallowed, not rethrown
    maybeDeserialize(globalStateTask2, key, recordValue, false);
}
Also used : LongSerializer(org.apache.kafka.common.serialization.LongSerializer) LogAndContinueExceptionHandler(org.apache.kafka.streams.errors.LogAndContinueExceptionHandler) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Test(org.junit.Test)

Example 3 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project apache-kafka-on-k8s by banzaicloud.

Class GlobalStateTaskTest, method shouldThrowStreamsExceptionWhenValueDeserializationFails:

@Test
public void shouldThrowStreamsExceptionWhenValueDeserializationFails() throws Exception {
    // same Long-vs-Integer size mismatch as above, but here the failure is expected to surface as a StreamsException
    final byte[] key = new IntegerSerializer().serialize(topic2, 1);
    final byte[] recordValue = new LongSerializer().serialize(topic2, 10L);
    maybeDeserialize(globalStateTask, key, recordValue, true);
}
Also used : LongSerializer(org.apache.kafka.common.serialization.LongSerializer) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Test(org.junit.Test)
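
Both of these tests rely on the same width mismatch: LongSerializer emits eight bytes, while IntegerDeserializer accepts exactly four, so the value bytes cannot be deserialized as an Integer. The sketch below reproduces that failure mode on its own, outside the Streams test harness; the class name WrongWidthValue is hypothetical.

import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.LongSerializer;

public class WrongWidthValue {
    public static void main(final String[] args) {
        // eight bytes, as produced for the record value in the tests above
        final byte[] longBytes = new LongSerializer().serialize("some-topic", 10L);
        try {
            // IntegerDeserializer rejects any payload that is not exactly four bytes
            new IntegerDeserializer().deserialize("some-topic", longBytes);
        } catch (final SerializationException e) {
            System.out.println("deserialization failed as expected: " + e.getMessage());
        }
    }
}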

Example 4 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

Class KStreamMapTest, method testMap:

@Test
public void testMap() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topicName = "topic";
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final KStream<Integer, String> stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.map((key, value) -> KeyValue.pair(value, key)).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        // one input topic for the whole run; each record carries an explicit timestamp of 10 - key
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey, 10L - expectedKey);
        }
    }
    final KeyValueTimestamp[] expected = new KeyValueTimestamp[] { new KeyValueTimestamp<>("V0", 0, 10), new KeyValueTimestamp<>("V1", 1, 9), new KeyValueTimestamp<>("V2", 2, 8), new KeyValueTimestamp<>("V3", 3, 7) };
    assertEquals(4, supplier.theCapturedProcessor().processed().size());
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], supplier.theCapturedProcessor().processed().get(i));
    }
}
Also used : TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Assert.assertThrows(org.junit.Assert.assertThrows) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test) KStream(org.apache.kafka.streams.kstream.KStream) Instant(java.time.Instant) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Duration(java.time.Duration) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Is.is(org.hamcrest.core.Is.is) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestInputTopic(org.apache.kafka.streams.TestInputTopic) Record(org.apache.kafka.streams.processor.api.Record) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Assert.assertEquals(org.junit.Assert.assertEquals)

Example 5 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

Class KStreamMapValuesTest, method testFlatMapValues:

@Test
public void testFlatMapValues() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = { 1, 10, 100, 1000 };
    final KStream<Integer, String> stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.mapValues(CharSequence::length).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, Integer.toString(expectedKey), expectedKey / 2L);
        }
    }
    final KeyValueTimestamp[] expected = { new KeyValueTimestamp<>(1, 1, 0), new KeyValueTimestamp<>(10, 2, 5), new KeyValueTimestamp<>(100, 3, 50), new KeyValueTimestamp<>(1000, 4, 500) };
    assertArrayEquals(expected, supplier.theCapturedProcessor().processed().toArray());
}
Also used : TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
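
In both Streams tests, the IntegerSerializer handed to createInputTopic has to agree with the Consumed.with(Serdes.Integer(), Serdes.String()) declaration in the topology, because Serdes.Integer() simply wraps the same IntegerSerializer/IntegerDeserializer pair. A minimal sketch of that relationship follows; the class name SerdeVsSerializer is made up for illustration.

import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

public class SerdeVsSerializer {
    public static void main(final String[] args) {
        final Serde<Integer> serde = Serdes.Integer();
        // the serde's serializer is an IntegerSerializer, the same type the tests pass to createInputTopic
        System.out.println(serde.serializer() instanceof IntegerSerializer); // prints true
        final byte[] bytes = serde.serializer().serialize("topic", 7);
        System.out.println(serde.deserializer().deserialize("topic", bytes)); // prints 7
    }
}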

Aggregations

IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 86 uses
Test (org.junit.Test): 65 uses
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 64 uses
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 61 uses
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 53 uses
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 42 uses
Properties (java.util.Properties): 17 uses
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13 uses
HashSet (java.util.HashSet): 11 uses
Set (java.util.Set): 11 uses
LongSerializer (org.apache.kafka.common.serialization.LongSerializer): 8 uses
Serdes (org.apache.kafka.common.serialization.Serdes): 8 uses
KeyValue (org.apache.kafka.streams.KeyValue): 8 uses
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 8 uses
Consumed (org.apache.kafka.streams.kstream.Consumed): 8 uses
KStream (org.apache.kafka.streams.kstream.KStream): 8 uses
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 8 uses
Duration (java.time.Duration): 5 uses
Instant (java.time.Instant): 5 uses
ArrayList (java.util.ArrayList): 5 uses