Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamFilterTest, method testFilterNot.
@Test
public void testFilterNot() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] {1, 2, 3, 4, 5, 6, 7};
    final KStream<Integer, String> stream;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.filterNot(isMultipleOfThree).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        // Create the input topic once, then pipe every record through it.
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
    }
    // Keys 3 and 6 are multiples of three and get dropped, leaving 5 records.
    assertEquals(5, supplier.theCapturedProcessor().processed().size());
}
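The test references fixture members that the listing does not show. A minimal sketch of how topicName, isMultipleOfThree, and props might be declared in the test class (the concrete values here are assumptions, not copied from the source):

private final String topicName = "topic";
// filterNot drops records matching the predicate: of keys 1..7, the multiples
// of three (3 and 6) are removed, which is why the test expects 5 records.
private final Predicate<Integer, String> isMultipleOfThree = (key, value) -> key % 3 == 0;
private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.Integer(), Serdes.String());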
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamGlobalKTableJoinTest, method pushToStream.
private void pushToStream(final int messageCount, final String valuePrefix, final boolean includeForeignKey, final boolean includeNullKey) {
    final TestInputTopic<Integer, String> inputTopic =
        driver.createInputTopic(streamTopic, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ofMillis(1L));
    for (int i = 0; i < messageCount; i++) {
        String value = valuePrefix + expectedKeys[i];
        if (includeForeignKey) {
            value = value + ",FKey" + expectedKeys[i];
        }
        Integer key = expectedKeys[i];
        if (includeNullKey && i == 0) {
            key = null;
        }
        inputTopic.pipeInput(key, value);
    }
}
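pushToStream depends on fields of the enclosing test class. A hedged sketch of plausible declarations (the names match the snippet; the values are assumed):

private final String streamTopic = "streamTopic";
private final int[] expectedKeys = {0, 1, 2, 3}; // assumed key set
private TopologyTestDriver driver;               // assumed to be built from the join topology in setup

Because the input topic is created with a start timestamp of epoch 0 and an auto-advance of 1 ms, each piped record gets a timestamp one millisecond later than the previous one.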
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamTransformIntegrationTest, method verifyResult.
private void verifyResult(final List<KeyValue<Integer, Integer>> expected) {
    final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.Integer(), Serdes.Integer());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, Integer> inputTopic =
            driver.createInputTopic(topic, new IntegerSerializer(), new IntegerSerializer());
        inputTopic.pipeKeyValueList(Arrays.asList(
            new KeyValue<>(1, 1),
            new KeyValue<>(2, 2),
            new KeyValue<>(3, 3),
            new KeyValue<>(2, 1),
            new KeyValue<>(2, 3),
            new KeyValue<>(1, 3)));
    }
    assertThat(results, equalTo(expected));
}
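verifyResult likewise assumes builder, topic, and results fields shared across the tests. A sketch under those assumptions:

private final StreamsBuilder builder = new StreamsBuilder();
private final String topic = "topic";
private final List<KeyValue<Integer, Integer>> results = new ArrayList<>();

// Each test is assumed to terminate its topology by collecting output, e.g.:
//     stream.foreach((key, value) -> results.add(new KeyValue<>(key, value)));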
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class StoreQueryIntegrationTest, method shouldQuerySpecificStalePartitionStoresMultiStreamThreads.
@Test
public void shouldQuerySpecificStalePartitionStoresMultiStreamThreads() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final int numStreamThreads = 2;
    final StreamsBuilder builder = new StreamsBuilder();
    getStreamsBuilderWithTopology(builder, semaphore);
    final Properties streamsConfiguration1 = streamsConfiguration();
    streamsConfiguration1.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final Properties streamsConfiguration2 = streamsConfiguration();
    streamsConfiguration2.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration1);
    final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration2);
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
    assertTrue(kafkaStreams1.metadataForLocalThreads().size() > 1);
    assertTrue(kafkaStreams2.metadataForLocalThreads().size() > 1);
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, new IntegerSerializer());
    // the partition the key belongs to
    final int keyPartition = keyQueryMetadata.partition();
    // a partition the key does not belong to
    final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
    final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
    // Assert that both active and standby are able to query for the key on its own partition
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters
        .fromNameAndType(TABLE_NAME, queryableStoreType)
        .enableStaleStores()
        .withPartition(keyPartition);
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
        return store1.get(key) != null;
    }, "store1 cannot find results for key");
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
        return store2.get(key) != null;
    }, "store2 cannot find results for key");
    final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters
        .fromNameAndType(TABLE_NAME, queryableStoreType)
        .enableStaleStores()
        .withPartition(keyDontBelongPartition);
    final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
    final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
    // Assert that the key is not found when querying the partition it does not belong to
    assertThat(store3.get(key), is(nullValue()));
    assertThat(store4.get(key), is(nullValue()));
}
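produceValueRange is not shown in the listing. A hedged sketch of such a helper using a plain KafkaProducer; the cluster handle and the INPUT_TOPIC_NAME constant are assumptions for illustration:

private void produceValueRange(final int key, final int start, final int endExclusive) throws Exception {
    final Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers()); // assumed embedded-cluster handle
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    try (final Producer<Integer, Integer> producer = new KafkaProducer<>(producerProps)) {
        for (int i = start; i < endExclusive; i++) {
            // All records share one key, so they land in a single partition.
            producer.send(new ProducerRecord<>(INPUT_TOPIC_NAME, key, i));
        }
        producer.flush();
    }
}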
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class GlobalStateTaskTest, method shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler.
@Test
public void shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler() {
    final GlobalStateUpdateTask globalStateTask2 = new GlobalStateUpdateTask(logContext, topology, context, stateMgr, new LogAndContinueExceptionHandler());
    // The topology expects Integer keys; serializing the key as a Long forces a
    // deserialization failure, which LogAndContinueExceptionHandler skips instead
    // of surfacing as a StreamsException.
    final byte[] key = new LongSerializer().serialize(topic2, 1L);
    final byte[] recordValue = new IntegerSerializer().serialize(topic2, 10);
    maybeDeserialize(globalStateTask2, key, recordValue, false);
}
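maybeDeserialize is defined elsewhere in GlobalStateTaskTest. A minimal sketch, assuming it wraps the raw bytes in a consumer record, feeds it to the task, and checks whether a StreamsException escapes:

private void maybeDeserialize(final GlobalStateUpdateTask task,
                              final byte[] key,
                              final byte[] recordValue,
                              final boolean failExpected) {
    // Partition and offset are placeholder values for this sketch.
    final ConsumerRecord<byte[], byte[]> record = new ConsumerRecord<>(topic2, 1, 0L, key, recordValue);
    task.initialize();
    try {
        task.update(record);
        if (failExpected) {
            fail("Should have thrown a StreamsException");
        }
    } catch (final StreamsException e) {
        if (!failExpected) {
            fail("Should not have thrown a StreamsException");
        }
    }
}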