use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
the class QueryableStateIntegrationTest, method shouldBeAbleToQueryFilterState.
@Test
public void shouldBeAbleToQueryFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Long().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, Long>> batch1 = new HashSet<>(Arrays.asList(
        new KeyValue<>(keys[0], 1L),
        new KeyValue<>(keys[1], 1L),
        new KeyValue<>(keys[2], 3L),
        new KeyValue<>(keys[3], 5L),
        new KeyValue<>(keys[4], 2L)));
    // Only the "kafka" record passes the filter below.
    final Set<KeyValue<String, Long>> expectedBatch1 = new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));
    IntegrationTestUtils.produceKeyValuesSynchronously(
        streamOne,
        batch1,
        TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, LongSerializer.class, new Properties()),
        mockTime);
    final Predicate<String, Long> filterPredicate = new Predicate<String, Long>() {
        @Override
        public boolean test(final String key, final Long value) {
            return key.contains("kafka");
        }
    };
    final KTable<String, Long> t1 = builder.table(streamOne);
    // Materialize both the filtered table and its complement so each is queryable by name.
    final KTable<String, Long> t2 = t1.filter(filterPredicate, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryFilter"));
    t1.filterNot(filterPredicate, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryFilterNot"));
    t2.toStream().to(outputTopic);
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    kafkaStreams.start();
    waitUntilAtLeastNumRecordProcessed(outputTopic, 2);
    final ReadOnlyKeyValueStore<String, Long> myFilterStore =
        kafkaStreams.store("queryFilter", QueryableStoreTypes.<String, Long>keyValueStore());
    final ReadOnlyKeyValueStore<String, Long> myFilterNotStore =
        kafkaStreams.store("queryFilterNot", QueryableStoreTypes.<String, Long>keyValueStore());
    // Keys that passed the filter must be present in the "queryFilter" store...
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(myFilterStore.get(expectedEntry.key), expectedEntry.value);
    }
    // ...and every other key must be absent from it.
    for (final KeyValue<String, Long> batchEntry : batch1) {
        if (!expectedBatch1.contains(batchEntry)) {
            assertNull(myFilterStore.get(batchEntry.key));
        }
    }
    // The "queryFilterNot" store holds exactly the complementary set of keys.
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertNull(myFilterNotStore.get(expectedEntry.key));
    }
    for (final KeyValue<String, Long> batchEntry : batch1) {
        if (!expectedBatch1.contains(batchEntry)) {
            assertEquals(myFilterNotStore.get(batchEntry.key), batchEntry.value);
        }
    }
}
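Note that kafkaStreams.store(...) throws an InvalidStateStoreException if it is called while the instance is still rebalancing or restoring state; the waitUntilAtLeastNumRecordProcessed(...) call above sidesteps this in the test. Outside of tests, callers commonly wrap the lookup in a retry loop. A minimal sketch of that idiom (waitForStore is a hypothetical helper, not part of this class; it assumes org.apache.kafka.streams.errors.InvalidStateStoreException and org.apache.kafka.streams.state.QueryableStoreType are imported):

private static <T> T waitForStore(final KafkaStreams streams,
                                  final String storeName,
                                  final QueryableStoreType<T> storeType) throws InterruptedException {
    while (true) {
        try {
            // Succeeds once the store has been initialized and is queryable.
            return streams.store(storeName, storeType);
        } catch (final InvalidStateStoreException notReadyYet) {
            Thread.sleep(100); // store is migrating or restoring; retry shortly
        }
    }
}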
use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
the class QueryableStateIntegrationTest, method createCountStream.
/**
 * Creates a typical word-count topology.
 */
private KafkaStreams createCountStream(final String inputTopic,
                                       final String outputTopic,
                                       final String windowOutputTopic,
                                       final String storeName,
                                       final String windowStoreName,
                                       final Properties streamsConfiguration) {
    final StreamsBuilder builder = new StreamsBuilder();
    final Serde<String> stringSerde = Serdes.String();
    final KStream<String, String> textLines = builder.stream(inputTopic, Consumed.with(stringSerde, stringSerde));
    final KGroupedStream<String, String> groupedByWord = textLines
        .flatMapValues(new ValueMapper<String, Iterable<String>>() {
            @Override
            public Iterable<String> apply(final String value) {
                return Arrays.asList(value.split("\\W+"));
            }
        })
        .groupBy(MockMapper.<String, String>selectValueMapper());
    // Create a state store for the all-time word count.
    groupedByWord
        .count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as(storeName + "-" + inputTopic))
        .toStream()
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    // Create a windowed state store that holds the word count per window of size WINDOW_SIZE.
    groupedByWord
        .windowedBy(TimeWindows.of(WINDOW_SIZE))
        .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as(windowStoreName + "-" + inputTopic))
        .toStream(new KeyValueMapper<Windowed<String>, Long, String>() {
            @Override
            public String apply(final Windowed<String> key, final Long value) {
                return key.key();
            }
        })
        .to(windowOutputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    return new KafkaStreams(builder.build(), streamsConfiguration);
}
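Once a KafkaStreams instance built by createCountStream is running, the windowed store it registers can be queried interactively. A minimal sketch, assuming the store name chosen above and a running instance called kafkaStreams (the variable names here are illustrative, not from the original test):

final ReadOnlyWindowStore<String, Long> windowStore =
    kafkaStreams.store(windowStoreName + "-" + inputTopic, QueryableStoreTypes.<String, Long>windowStore());
// fetch(key, timeFrom, timeTo) returns one count per window the key appeared in;
// each entry pairs a window start timestamp with that window's count.
try (final WindowStoreIterator<Long> iterator = windowStore.fetch("kafka", 0L, System.currentTimeMillis())) {
    while (iterator.hasNext()) {
        final KeyValue<Long, Long> windowedCount = iterator.next();
        System.out.println("window starting at " + windowedCount.key + " -> " + windowedCount.value);
    }
}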
use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
the class QueryableStateIntegrationTest, method shouldBeAbleToQueryMapValuesState.
@Test
public void shouldBeAbleToQueryMapValuesState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(
        new KeyValue<>(keys[0], "1"),
        new KeyValue<>(keys[1], "1"),
        new KeyValue<>(keys[2], "3"),
        new KeyValue<>(keys[3], "5"),
        new KeyValue<>(keys[4], "2")));
    IntegrationTestUtils.produceKeyValuesSynchronously(
        streamOne,
        batch1,
        TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()),
        mockTime);
    final KTable<String, String> t1 = builder.table(streamOne);
    // Map the String values to Longs; the materialized store needs an explicit Long serde
    // because the default value serde was set to String above.
    final KTable<String, Long> t2 = t1.mapValues(new ValueMapper<String, Long>() {
        @Override
        public Long apply(final String value) {
            return Long.valueOf(value);
        }
    }, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t2.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    kafkaStreams.start();
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    final ReadOnlyKeyValueStore<String, Long> myMapStore =
        kafkaStreams.store("queryMapValues", QueryableStoreTypes.<String, Long>keyValueStore());
    // Every input record should be queryable from the store with its mapped Long value.
    for (final KeyValue<String, String> batchEntry : batch1) {
        assertEquals(myMapStore.get(batchEntry.key), Long.valueOf(batchEntry.value));
    }
}
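On Java 8 and later, the anonymous ValueMapper above collapses to a lambda; a sketch of the equivalent call, with everything else unchanged:

final KTable<String, Long> t2 = t1.mapValues(
    value -> Long.valueOf(value),
    Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));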
use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
the class RegexSourceIntegrationTest, method testNoMessagesSentExceptionFromOverlappingPatterns.
// TODO: should be updated to expected = TopologyBuilderException after KAFKA-3708
@Test(expected = AssertionError.class)
public void testNoMessagesSentExceptionFromOverlappingPatterns() throws Exception {
    final String fooMessage = "fooMessage";
    final String fMessage = "fMessage";
    final Serde<String> stringSerde = Serdes.String();
    final StreamsBuilder builder = new StreamsBuilder();
    // Overlapping patterns here; no messages should be sent, as a TopologyBuilderException
    // will be thrown when the processor topology is built.
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("foo.*"));
    final KStream<String, String> pattern2Stream = builder.stream(Pattern.compile("f.*"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    pattern2Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    streams = new KafkaStreams(builder.build(), streamsConfiguration);
    streams.start();
    final Properties producerConfig = TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(FA_TOPIC, Arrays.asList(fMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(FOO_TOPIC, Arrays.asList(fooMessage), producerConfig, mockTime);
    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    // This wait is expected to time out (throwing the AssertionError the test expects),
    // since the overlapping subscription prevents any records from reaching the output topic.
    IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, DEFAULT_OUTPUT_TOPIC, 2, 5000);
    fail("Should not get here");
}
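As an aside, the to(Serde, Serde, String) overload used here was deprecated in later Kafka Streams releases in favor of the Produced variant already seen in the earlier snippets; assuming a newer client, the same sink would be written as (a sketch, not the original code):

pattern1Stream.to(DEFAULT_OUTPUT_TOPIC, Produced.with(stringSerde, stringSerde));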
use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
the class RegexSourceIntegrationTest, method testShouldReadFromRegexAndNamedTopics.
@Test
public void testShouldReadFromRegexAndNamedTopics() throws Exception {
    final String topic1TestMessage = "topic-1 test";
    final String topic2TestMessage = "topic-2 test";
    final String topicATestMessage = "topic-A test";
    final String topicCTestMessage = "topic-C test";
    final String topicYTestMessage = "topic-Y test";
    final String topicZTestMessage = "topic-Z test";
    final Serde<String> stringSerde = Serdes.String();
    final StreamsBuilder builder = new StreamsBuilder();
    // Two regex subscriptions plus one subscription to a list of named topics,
    // all funneled into the same output topic.
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("topic-\\d"));
    final KStream<String, String> pattern2Stream = builder.stream(Pattern.compile("topic-[A-D]"));
    final KStream<String, String> namedTopicsStream = builder.stream(Arrays.asList(TOPIC_Y, TOPIC_Z));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    pattern2Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    namedTopicsStream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    streams = new KafkaStreams(builder.build(), streamsConfiguration);
    streams.start();
    final Properties producerConfig = TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_1, Arrays.asList(topic1TestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_2, Arrays.asList(topic2TestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_A, Arrays.asList(topicATestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_C, Arrays.asList(topicCTestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_Y, Arrays.asList(topicYTestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_Z, Arrays.asList(topicZTestMessage), producerConfig, mockTime);
    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    final List<String> expectedReceivedValues = Arrays.asList(topicATestMessage, topic1TestMessage, topic2TestMessage, topicCTestMessage, topicYTestMessage, topicZTestMessage);
    final List<KeyValue<String, String>> receivedKeyValues = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, DEFAULT_OUTPUT_TOPIC, 6);
    final List<String> actualValues = new ArrayList<>(6);
    for (final KeyValue<String, String> receivedKeyValue : receivedKeyValues) {
        actualValues.add(receivedKeyValue.value);
    }
    // Sort both lists so the comparison is independent of consumption order.
    Collections.sort(actualValues);
    Collections.sort(expectedReceivedValues);
    assertThat(actualValues, equalTo(expectedReceivedValues));
}
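The pattern subscriptions above fall back to the application's default serdes; when that is not desirable, the serdes can be pinned per source with Consumed, just as createCountStream does earlier for a named topic. A sketch for the regex case:

final KStream<String, String> pattern1Stream =
    builder.stream(Pattern.compile("topic-\\d"), Consumed.with(stringSerde, stringSerde));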