
Example 26 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

From class KStreamFilterTest, method testFilterNot:

@Test
public void testFilterNot() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 1, 2, 3, 4, 5, 6, 7 };
    final KStream<Integer, String> stream;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.filterNot(isMultipleOfThree).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        // Create the input topic once and pipe all seven records through it.
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
    }
    // filterNot drops keys 3 and 6 (the multiples of three), leaving 5 of the 7 records.
    assertEquals(5, supplier.theCapturedProcessor().processed().size());
}
Also used: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
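
The snippet references fixtures defined elsewhere in KStreamFilterTest: topicName, props, and the isMultipleOfThree predicate. A minimal sketch of what those fixtures could look like, assuming an integer-key predicate and the StreamsTestUtils config helper seen in the other examples; the actual test class may define them differently:

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.test.StreamsTestUtils;

public class KStreamFilterFixturesSketch {

    // Hypothetical fixture values; only the names are taken from the snippet above.
    final String topicName = "topic";
    final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.Integer(), Serdes.String());

    // Matches records whose integer key is a multiple of three; filterNot drops them,
    // which is why keys 3 and 6 are missing from the 5 processed records.
    final Predicate<Integer, String> isMultipleOfThree = (key, value) -> key % 3 == 0;
}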

Example 27 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

From class KStreamGlobalKTableJoinTest, method pushToStream:

private void pushToStream(final int messageCount, final String valuePrefix, final boolean includeForeignKey, final boolean includeNullKey) {
    // Record timestamps start at epoch 0 and auto-advance by 1 ms per piped record.
    final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(streamTopic, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ofMillis(1L));
    for (int i = 0; i < messageCount; i++) {
        String value = valuePrefix + expectedKeys[i];
        if (includeForeignKey) {
            value = value + ",FKey" + expectedKeys[i];
        }
        Integer key = expectedKeys[i];
        if (includeNullKey && i == 0) {
            key = null;
        }
        inputTopic.pipeInput(key, value);
    }
}
Also used: IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StringSerializer (org.apache.kafka.common.serialization.StringSerializer)
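
pushToStream reads fields of KStreamGlobalKTableJoinTest that are not part of the snippet (driver, streamTopic, expectedKeys) and is called from the individual join tests. A self-contained, hedged sketch of the same input pattern against a trivial pass-through topology; the topology, topic names, and config values are illustrative assumptions, not the test's actual setup:

import java.time.Duration;
import java.time.Instant;
import java.util.Properties;

import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;

public class TimestampedInputTopicSketch {

    public static void main(final String[] args) {
        // Hypothetical pass-through topology; the real test joins a KStream with a GlobalKTable.
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("stream-topic", Consumed.with(Serdes.Integer(), Serdes.String())).to("out-topic");

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "timestamped-input-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            // Same createInputTopic overload as pushToStream: timestamps start at epoch 0
            // and auto-advance by 1 ms per record.
            final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(
                "stream-topic", new IntegerSerializer(), new StringSerializer(),
                Instant.ofEpochMilli(0L), Duration.ofMillis(1L));

            // Mirrors pushToStream(3, "X", true, true): foreign key in the value,
            // null key on the first record.
            inputTopic.pipeInput(null, "X0,FKey0");
            inputTopic.pipeInput(1, "X1,FKey1");
            inputTopic.pipeInput(2, "X2,FKey2");
        }
    }
}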

Example 28 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

From class KStreamTransformIntegrationTest, method verifyResult:

private void verifyResult(final List<KeyValue<Integer, Integer>> expected) {
    final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.Integer(), Serdes.Integer());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, Integer> inputTopic = driver.createInputTopic(topic, new IntegerSerializer(), new IntegerSerializer());
        inputTopic.pipeKeyValueList(Arrays.asList(
            new KeyValue<>(1, 1), new KeyValue<>(2, 2), new KeyValue<>(3, 3),
            new KeyValue<>(2, 1), new KeyValue<>(2, 3), new KeyValue<>(1, 3)));
    }
    assertThat(results, equalTo(expected));
}
Also used: KeyValue (org.apache.kafka.streams.KeyValue), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), Properties (java.util.Properties), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer)
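
verifyResult depends on fields set up elsewhere in KStreamTransformIntegrationTest: builder, topic, and a results list that the topology under test appends to. A hedged sketch of one way those pieces could be wired, assuming a simple foreach sink that captures every record; the real tests attach transformers and processors instead:

import java.util.ArrayList;
import java.util.List;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;

public class KStreamTransformFixturesSketch {

    // Hypothetical fixtures; only the names match the snippet above.
    final String topic = "topic";
    final StreamsBuilder builder = new StreamsBuilder();
    final List<KeyValue<Integer, Integer>> results = new ArrayList<>();

    // One possible topology: pass records through and capture them so that
    // verifyResult can compare `results` against the `expected` list.
    void buildPassThroughTopology() {
        builder.stream(topic, Consumed.with(Serdes.Integer(), Serdes.Integer()))
               .foreach((key, value) -> results.add(new KeyValue<>(key, value)));
    }
}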

Example 29 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

From class StoreQueryIntegrationTest, method shouldQuerySpecificStalePartitionStoresMultiStreamThreads:

@Test
public void shouldQuerySpecificStalePartitionStoresMultiStreamThreads() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final int numStreamThreads = 2;
    final StreamsBuilder builder = new StreamsBuilder();
    getStreamsBuilderWithTopology(builder, semaphore);
    final Properties streamsConfiguration1 = streamsConfiguration();
    streamsConfiguration1.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final Properties streamsConfiguration2 = streamsConfiguration();
    streamsConfiguration2.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration1);
    final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration2);
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
    assertTrue(kafkaStreams1.metadataForLocalThreads().size() > 1);
    assertTrue(kafkaStreams2.metadataForLocalThreads().size() > 1);
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, new IntegerSerializer());
    // key belongs to this partition
    final int keyPartition = keyQueryMetadata.partition();
    // key does not belong to this partition
    final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
    final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
    // Assert that both active and standby are able to query for a key
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters.fromNameAndType(TABLE_NAME, queryableStoreType).enableStaleStores().withPartition(keyPartition);
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
        return store1.get(key) != null;
    }, "store1 cannot find results for key");
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
        return store2.get(key) != null;
    }, "store2 cannot find results for key");
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters.fromNameAndType(TABLE_NAME, queryableStoreType).enableStaleStores().withPartition(keyDontBelongPartition);
    final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
    final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
    // Assert that the key cannot be found on the partition it does not belong to, on either instance
    assertThat(store3.get(key), is(nullValue()));
    assertThat(store4.get(key), is(nullValue()));
}
Also used: KafkaStreams (org.apache.kafka.streams.KafkaStreams), Semaphore (java.util.concurrent.Semaphore), ReadOnlyKeyValueStore (org.apache.kafka.streams.state.ReadOnlyKeyValueStore), Properties (java.util.Properties), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), KeyQueryMetadata (org.apache.kafka.streams.KeyQueryMetadata), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
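
Here the IntegerSerializer is passed to queryMetadataForKey only so Kafka Streams can hash the key the same way the default partitioner does and report which partition, and therefore which instance, owns it. A hedged sketch of that lookup pattern on its own; the class and method names are invented, and unlike the test's getStore helper it does not retry while the store is still rebalancing:

import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;

public class StaleStoreQuerySketch {

    // Hypothetical helper: find the partition that owns `key`, then query that partition locally,
    // allowing stale reads so standby replicas can also answer.
    static Integer queryAllowingStaleReads(final KafkaStreams streams, final String storeName, final int key) {
        final KeyQueryMetadata metadata = streams.queryMetadataForKey(storeName, key, new IntegerSerializer());
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> params =
            StoreQueryParameters.fromNameAndType(storeName, QueryableStoreTypes.<Integer, Integer>keyValueStore())
                                .enableStaleStores()
                                .withPartition(metadata.partition());
        final ReadOnlyKeyValueStore<Integer, Integer> store = streams.store(params);
        return store.get(key);
    }
}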

Example 30 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

From class GlobalStateTaskTest, method shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler:

@Test
public void shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler() {
    final GlobalStateUpdateTask globalStateTask2 = new GlobalStateUpdateTask(logContext, topology, context, stateMgr, new LogAndContinueExceptionHandler());
    // The key bytes come from a LongSerializer, which the source's key deserializer cannot parse,
    // so key deserialization fails; with LogAndContinueExceptionHandler the record is skipped
    // instead of raising a StreamsException.
    final byte[] key = new LongSerializer().serialize(topic2, 1L);
    final byte[] recordValue = new IntegerSerializer().serialize(topic2, 10);
    maybeDeserialize(globalStateTask2, key, recordValue, false);
}
Also used: LongSerializer (org.apache.kafka.common.serialization.LongSerializer), LogAndContinueExceptionHandler (org.apache.kafka.streams.errors.LogAndContinueExceptionHandler), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), Test (org.junit.Test)
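
The maybeDeserialize helper is not included in the snippet. A hedged reconstruction of its likely shape, based on how the test calls it: wrap the raw bytes in a ConsumerRecord, feed it to the task, and check whether a StreamsException was expected. The partition and offset are placeholders, and topic2 is the same test-class field used in the snippet above:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.internals.GlobalStateUpdateTask;

import static org.junit.Assert.fail;

// Hypothetical reconstruction; the real test class defines its own version of this helper.
private void maybeDeserialize(final GlobalStateUpdateTask globalStateTask,
                              final byte[] key,
                              final byte[] recordValue,
                              final boolean failExpected) {
    final ConsumerRecord<byte[], byte[]> record = new ConsumerRecord<>(topic2, 1, 1, key, recordValue);
    try {
        globalStateTask.update(record);
        if (failExpected) {
            fail("Should have thrown a StreamsException for the bad key bytes");
        }
    } catch (final StreamsException e) {
        if (!failExpected) {
            fail("Should not have thrown a StreamsException");
        }
    }
}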

Aggregations

IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 86
Test (org.junit.Test): 65
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 64
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 61
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 53
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 42
Properties (java.util.Properties): 17
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13
HashSet (java.util.HashSet): 11
Set (java.util.Set): 11
LongSerializer (org.apache.kafka.common.serialization.LongSerializer): 8
Serdes (org.apache.kafka.common.serialization.Serdes): 8
KeyValue (org.apache.kafka.streams.KeyValue): 8
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 8
Consumed (org.apache.kafka.streams.kstream.Consumed): 8
KStream (org.apache.kafka.streams.kstream.KStream): 8
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 8
Duration (java.time.Duration): 5
Instant (java.time.Instant): 5
ArrayList (java.util.ArrayList): 5