
Example 56 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the project sda-dropwizard-commons by SDA-SE.

From the class ProducerRegistrationTest, method serializerShouldBeSetCorrectly:

@Test
public void serializerShouldBeSetCorrectly() {
    ProducerRegistration<Long, Integer> producerRegistration =
        ProducerRegistration.builder()
            .forTopic("TOPIC")
            .withDefaultProducer()
            .withKeySerializer(new LongSerializer())
            .withValueSerializer(new IntegerSerializer())
            .build();
    assertThat(producerRegistration).isNotNull();
    assertThat(producerRegistration.getTopic().getTopicName()).isEqualTo("TOPIC");
    assertThat(producerRegistration.getKeySerializer()).isInstanceOf(LongSerializer.class);
    assertThat(producerRegistration.getValueSerializer()).isInstanceOf(IntegerSerializer.class);
}
Also used: LongSerializer (org.apache.kafka.common.serialization.LongSerializer), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), Test (org.junit.Test)
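
The serializers do not have to be instantiated directly; Kafka's Serdes factory yields the same instances. A minimal sketch of an equivalent registration, reusing only the builder calls shown above and assuming org.apache.kafka.common.serialization.Serdes is imported:

    ProducerRegistration<Long, Integer> registration = ProducerRegistration.builder()
        .forTopic("TOPIC")
        .withDefaultProducer()
        // Serdes.Long().serializer() and Serdes.Integer().serializer() return LongSerializer
        // and IntegerSerializer instances, so the assertions above should still pass.
        .withKeySerializer(Serdes.Long().serializer())
        .withValueSerializer(Serdes.Integer().serializer())
        .build();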

Example 57 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the project kafka-junit by charithe.

From the class EphemeralKafkaClusterTest, method testStartAndStop:

@Test
public void testStartAndStop() throws Exception {
    try (KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(cluster.consumerConfig(false), new IntegerDeserializer(), new StringDeserializer());
        KafkaProducer<Integer, String> producer = new KafkaProducer<>(cluster.producerConfig(), new IntegerSerializer(), new StringSerializer())) {
        cluster.createTopics(TEST_TOPIC);
        producer.send(new ProducerRecord<>(TEST_TOPIC, "value"));
        producer.flush();
        consumer.subscribe(Collections.singleton(TEST_TOPIC));
        ConsumerRecords<Integer, String> poll = consumer.poll(TEN_SECONDS);
        assertThat(poll.count()).isEqualTo(1);
        assertThat(poll.iterator().next().value()).isEqualTo("value");
    }
}
Also used: IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer), KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
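
In the test above the record is produced without a key, so the IntegerSerializer is configured but only ever handles null. A minimal keyed variant, sketched under the assumption that it runs inside the same try-with-resources block with the same constants and AssertJ imports, plus org.apache.kafka.clients.consumer.ConsumerRecord:

    // Keyed send: the IntegerSerializer now serializes the key 42 and the
    // IntegerDeserializer reads it back on the consumer side.
    producer.send(new ProducerRecord<>(TEST_TOPIC, 42, "value"));
    producer.flush();
    consumer.subscribe(Collections.singleton(TEST_TOPIC));
    ConsumerRecords<Integer, String> poll = consumer.poll(TEN_SECONDS);
    ConsumerRecord<Integer, String> record = poll.iterator().next();
    assertThat(record.key()).isEqualTo(42);
    assertThat(record.value()).isEqualTo("value");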

Example 58 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the project kafka by apache.

From the class StoreQueryIntegrationTest, method shouldQuerySpecificStalePartitionStoresMultiStreamThreadsNamedTopology:

@Test
public void shouldQuerySpecificStalePartitionStoresMultiStreamThreadsNamedTopology() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final int numStreamThreads = 2;
    final Properties streamsConfiguration1 = streamsConfiguration();
    streamsConfiguration1.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final Properties streamsConfiguration2 = streamsConfiguration();
    streamsConfiguration2.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final String topologyA = "topology-A";
    final KafkaStreamsNamedTopologyWrapper kafkaStreams1 = createNamedTopologyKafkaStreams(streamsConfiguration1);
    final KafkaStreamsNamedTopologyWrapper kafkaStreams2 = createNamedTopologyKafkaStreams(streamsConfiguration2);
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    final NamedTopologyBuilder builder1A = kafkaStreams1.newNamedTopologyBuilder(topologyA, streamsConfiguration1);
    getStreamsBuilderWithTopology(builder1A, semaphore);
    final NamedTopologyBuilder builder2A = kafkaStreams2.newNamedTopologyBuilder(topologyA, streamsConfiguration2);
    getStreamsBuilderWithTopology(builder2A, semaphore);
    kafkaStreams1.start(builder1A.build());
    kafkaStreams2.start(builder2A.build());
    waitForApplicationState(kafkaStreamsList, State.RUNNING, Duration.ofSeconds(60));
    assertTrue(kafkaStreams1.metadataForLocalThreads().size() > 1);
    assertTrue(kafkaStreams2.metadataForLocalThreads().size() > 1);
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, new IntegerSerializer(), topologyA);
    // key belongs to this partition
    final int keyPartition = keyQueryMetadata.partition();
    // key does not belong to this partition
    final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
    final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
    // Assert that both active and standby are able to query for a key
    final NamedTopologyStoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param =
        NamedTopologyStoreQueryParameters
            .fromNamedTopologyAndStoreNameAndType(topologyA, TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyPartition);
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
        return store1.get(key) != null;
    }, "store1 cannot find results for key");
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
        return store2.get(key) != null;
    }, "store2 cannot find results for key");
    final NamedTopologyStoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam =
        NamedTopologyStoreQueryParameters
            .fromNamedTopologyAndStoreNameAndType(topologyA, TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyDontBelongPartition);
    final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
    final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
    // Assert that querying the partition the key does not belong to returns no value
    assertThat(store3.get(key), is(nullValue()));
    assertThat(store4.get(key), is(nullValue()));
}
Also used: KafkaStreams (org.apache.kafka.streams.KafkaStreams), Semaphore (java.util.concurrent.Semaphore), Matchers.containsString (org.hamcrest.Matchers.containsString), ReadOnlyKeyValueStore (org.apache.kafka.streams.state.ReadOnlyKeyValueStore), Properties (java.util.Properties), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), KeyQueryMetadata (org.apache.kafka.streams.KeyQueryMetadata), KafkaStreamsNamedTopologyWrapper (org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper), NamedTopologyBuilder (org.apache.kafka.streams.processor.internals.namedtopology.NamedTopologyBuilder), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
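
For comparison, outside the named-topology wrapper the same stale-partition lookup goes through the standard StoreQueryParameters API. A minimal sketch, assuming a plain running KafkaStreams instance called streams and the same TABLE_NAME, key, and keyPartition as above:

    // Query a specific, possibly stale, partition of the key-value store by name and type.
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> parameters =
        StoreQueryParameters
            .fromNameAndType(TABLE_NAME, QueryableStoreTypes.<Integer, Integer>keyValueStore())
            .enableStaleStores()
            .withPartition(keyPartition);
    final ReadOnlyKeyValueStore<Integer, Integer> store = streams.store(parameters);
    final Integer valueOrNull = store.get(key);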

Example 59 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the project kafka by apache.

From the class RestoreIntegrationTest, method createStateForRestoration:

private void createStateForRestoration(final String changelogTopic, final int startingOffset) {
    final Properties producerConfig = new Properties();
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    try (final KafkaProducer<Integer, Integer> producer = new KafkaProducer<>(producerConfig, new IntegerSerializer(), new IntegerSerializer())) {
        for (int i = 0; i < numberOfKeys; i++) {
            final int offset = startingOffset + i;
            producer.send(new ProducerRecord<>(changelogTopic, offset, offset));
        }
    }
}
Also used: AtomicInteger (java.util.concurrent.atomic.AtomicInteger), KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer), Properties (java.util.Properties), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), OffsetCheckpoint (org.apache.kafka.streams.state.internals.OffsetCheckpoint)
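
The serializers can also be supplied through the producer configuration instead of the constructor. A minimal sketch of the same producer with a placeholder broker address and topic name:

    final Properties producerConfig = new Properties();
    // Placeholder address; the test above uses CLUSTER.bootstrapServers() instead.
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    try (final KafkaProducer<Integer, Integer> producer = new KafkaProducer<>(producerConfig)) {
        // Both key and value are serialized as 4-byte big-endian integers.
        producer.send(new ProducerRecord<>("changelog-topic", 0, 0));
    }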

Example 60 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the project kafka by apache.

From the class KStreamSplitTest, method withDriver:

private void withDriver(final Consumer<TopologyTestDriver> test) {
    final int[] expectedKeys = new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 7 };
    final Topology topology = builder.build();
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
        test.accept(driver);
    }
}
Also used: TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), Topology (org.apache.kafka.streams.Topology), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StringSerializer (org.apache.kafka.common.serialization.StringSerializer)
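
The matching read side uses a TestOutputTopic with the corresponding deserializers. A minimal sketch, assuming the driver from above and a placeholder output-topic name, since the actual branch topics depend on the test passed to withDriver:

    // Read back what the topology wrote to one of its output topics.
    final TestOutputTopic<Integer, String> outputTopic =
        driver.createOutputTopic("output-topic", new IntegerDeserializer(), new StringDeserializer());
    final List<KeyValue<Integer, String>> records = outputTopic.readKeyValuesToList();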

Aggregations

IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 106
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 75
Test (org.junit.Test): 74
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 72
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 58
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 46
Properties (java.util.Properties): 22
IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer): 16
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13
HashSet (java.util.HashSet): 11
Set (java.util.Set): 11
KeyValue (org.apache.kafka.streams.KeyValue): 10
LongSerializer (org.apache.kafka.common.serialization.LongSerializer): 9
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 9
Serdes (org.apache.kafka.common.serialization.Serdes): 8
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 8
Consumed (org.apache.kafka.streams.kstream.Consumed): 8
KStream (org.apache.kafka.streams.kstream.KStream): 8
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 8
Duration (java.time.Duration): 6
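
Per the counts above, IntegerSerializer most often appears next to StringSerializer, TopologyTestDriver, and StreamsBuilder in unit tests. A minimal self-contained sketch of that pairing, with placeholder application id and topic names, a trivial pass-through topology, and an AssertJ-style assertion as in Examples 56 and 57:

import java.util.Properties;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;

public class IntegerSerializerUsageSketchTest {

    @Test
    public void pipesIntegerKeysThroughPassThroughTopology() {
        final StreamsBuilder builder = new StreamsBuilder();
        // Trivial pass-through topology; real tests attach DSL operators or processors here.
        builder.stream("input-topic").to("output-topic");

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "integer-serializer-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<Integer, String> input =
                driver.createInputTopic("input-topic", new IntegerSerializer(), new StringSerializer());
            final TestOutputTopic<Integer, String> output =
                driver.createOutputTopic("output-topic", new IntegerDeserializer(), new StringDeserializer());
            input.pipeInput(1, "V1");
            assertThat(output.readKeyValue()).isEqualTo(KeyValue.pair(1, "V1"));
        }
    }
}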