Use of org.apache.kafka.streams.kstream.Predicate in project kafka by apache.
From the class QueryableStateIntegrationTest, the method shouldBeAbleToQueryMapValuesAfterFilterState:
@Test
public void shouldBeAbleToQueryMapValuesAfterFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(
        new KeyValue<>(keys[0], "1"),
        new KeyValue<>(keys[1], "1"),
        new KeyValue<>(keys[2], "3"),
        new KeyValue<>(keys[3], "5"),
        new KeyValue<>(keys[4], "2")));
    final Set<KeyValue<String, Long>> expectedBatch1 = new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));
    IntegrationTestUtils.produceKeyValuesSynchronously(
        streamOne,
        batch1,
        TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()),
        mockTime);
    final Predicate<String, String> filterPredicate = (key, value) -> key.contains("kafka");
    final KTable<String, String> t1 = builder.table(streamOne);
    final KTable<String, String> t2 = t1.filter(filterPredicate, Materialized.as("queryFilter"));
    final KTable<String, Long> t3 = t2.mapValues(
        (ValueMapper<String, Long>) Long::valueOf,
        Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t3.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    startKafkaStreamsAndWaitForRunningState(kafkaStreams);
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    final ReadOnlyKeyValueStore<String, Long> myMapStore =
        IntegrationTestUtils.getStore("queryMapValues", kafkaStreams, keyValueStore());
    // Only the entry that passed the "kafka" filter should be queryable, with its mapped Long value.
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(expectedEntry.value, myMapStore.get(expectedEntry.key));
    }
    // All other keys were dropped by the filter and must not appear in the store.
    for (final KeyValue<String, String> batchEntry : batch1) {
        final KeyValue<String, Long> batchEntryMapValue = new KeyValue<>(batchEntry.key, Long.valueOf(batchEntry.value));
        if (!expectedBatch1.contains(batchEntryMapValue)) {
            assertNull(myMapStore.get(batchEntry.key));
        }
    }
}
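The IntegrationTestUtils.getStore helper used above wraps the public interactive-query API. A minimal sketch of the equivalent direct lookup, assuming Kafka Streams 2.5 or later where StoreQueryParameters is available (the store name "queryMapValues" comes from the Materialized name above; the helper method name is illustrative):

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;

// Sketch only: look up the materialized "queryMapValues" store via the public API.
static Long lookupMappedValue(final KafkaStreams kafkaStreams, final String key) {
    final ReadOnlyKeyValueStore<String, Long> store = kafkaStreams.store(
        StoreQueryParameters.fromNameAndType("queryMapValues", QueryableStoreTypes.<String, Long>keyValueStore()));
    return store.get(key); // e.g. "kafka" -> 2L once batch1 has been processed
}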
Use of org.apache.kafka.streams.kstream.Predicate in project kafka by apache.
From the class KTableFilterTest, the method testSendingOldValue:
@Test
public void testSendingOldValue() throws IOException {
    KStreamBuilder builder = new KStreamBuilder();
    String topic1 = "topic1";
    KTableImpl<String, Integer, Integer> table1 =
        (KTableImpl<String, Integer, Integer>) builder.table(stringSerde, intSerde, topic1, "anyStoreName");
    KTableImpl<String, Integer, Integer> table2 =
        (KTableImpl<String, Integer, Integer>) table1.filter(new Predicate<String, Integer>() {
            @Override
            public boolean test(String key, Integer value) {
                return (value % 2) == 0;
            }
        });
    table2.enableSendingOldValues();
    MockProcessorSupplier<String, Integer> proc1 = new MockProcessorSupplier<>();
    MockProcessorSupplier<String, Integer> proc2 = new MockProcessorSupplier<>();
    builder.addProcessor("proc1", proc1, table1.name);
    builder.addProcessor("proc2", proc2, table2.name);
    driver = new KStreamTestDriver(builder, stateDir, null, null);
    // The expected strings read as "key:(newValue<-oldValue)".
    driver.process(topic1, "A", 1);
    driver.process(topic1, "B", 1);
    driver.process(topic1, "C", 1);
    driver.flushState();
    proc1.checkAndClearProcessResult("A:(1<-null)", "B:(1<-null)", "C:(1<-null)");
    // Odd values never pass the filter, so the filtered table emits nothing yet.
    proc2.checkEmptyAndClearProcessResult();
    driver.process(topic1, "A", 2);
    driver.process(topic1, "B", 2);
    driver.flushState();
    proc1.checkAndClearProcessResult("A:(2<-1)", "B:(2<-1)");
    proc2.checkAndClearProcessResult("A:(2<-null)", "B:(2<-null)");
    driver.process(topic1, "A", 3);
    driver.flushState();
    proc1.checkAndClearProcessResult("A:(3<-2)");
    // A changes from a passing value (2) to a filtered one (3), so the filtered table emits a delete.
    proc2.checkAndClearProcessResult("A:(null<-2)");
    driver.process(topic1, "A", null);
    driver.process(topic1, "B", null);
    driver.flushState();
    proc1.checkAndClearProcessResult("A:(null<-3)", "B:(null<-2)");
    // Only B was still present downstream of the filter, so only B produces a delete.
    proc2.checkAndClearProcessResult("B:(null<-2)");
}
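For comparison, the same even-value filter can be written as a lambda on the post-1.0 StreamsBuilder API, since Predicate is a functional interface. A minimal sketch, assuming a String/Integer table on an illustrative topic name:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;

final StreamsBuilder builder = new StreamsBuilder();
final KTable<String, Integer> table1 =
    builder.table("topic1", Consumed.with(Serdes.String(), Serdes.Integer()));
// Predicate<String, Integer> as a lambda: keep only records with even values.
final KTable<String, Integer> table2 = table1.filter((key, value) -> value % 2 == 0);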
Use of org.apache.kafka.streams.kstream.Predicate in project kafka by apache.
From the class KTableFilterTest, the method testValueGetter:
@Test
public void testValueGetter() throws IOException {
    KStreamBuilder builder = new KStreamBuilder();
    String topic1 = "topic1";
    KTableImpl<String, Integer, Integer> table1 =
        (KTableImpl<String, Integer, Integer>) builder.table(stringSerde, intSerde, topic1, "anyStoreName");
    // table2 keeps even values; table3 (filterNot) keeps odd values.
    KTableImpl<String, Integer, Integer> table2 =
        (KTableImpl<String, Integer, Integer>) table1.filter(new Predicate<String, Integer>() {
            @Override
            public boolean test(String key, Integer value) {
                return (value % 2) == 0;
            }
        });
    KTableImpl<String, Integer, Integer> table3 =
        (KTableImpl<String, Integer, Integer>) table1.filterNot(new Predicate<String, Integer>() {
            @Override
            public boolean test(String key, Integer value) {
                return (value % 2) == 0;
            }
        });
    KTableValueGetterSupplier<String, Integer> getterSupplier2 = table2.valueGetterSupplier();
    KTableValueGetterSupplier<String, Integer> getterSupplier3 = table3.valueGetterSupplier();
    driver = new KStreamTestDriver(builder, stateDir, null, null);
    KTableValueGetter<String, Integer> getter2 = getterSupplier2.get();
    KTableValueGetter<String, Integer> getter3 = getterSupplier3.get();
    getter2.init(driver.context());
    getter3.init(driver.context());
    driver.process(topic1, "A", 1);
    driver.process(topic1, "B", 1);
    driver.process(topic1, "C", 1);
    assertNull(getter2.get("A"));
    assertNull(getter2.get("B"));
    assertNull(getter2.get("C"));
    assertEquals(1, (int) getter3.get("A"));
    assertEquals(1, (int) getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
    driver.process(topic1, "A", 2);
    driver.process(topic1, "B", 2);
    assertEquals(2, (int) getter2.get("A"));
    assertEquals(2, (int) getter2.get("B"));
    assertNull(getter2.get("C"));
    assertNull(getter3.get("A"));
    assertNull(getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
    driver.process(topic1, "A", 3);
    assertNull(getter2.get("A"));
    assertEquals(2, (int) getter2.get("B"));
    assertNull(getter2.get("C"));
    assertEquals(3, (int) getter3.get("A"));
    assertNull(getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
    // Tombstones remove A and B from both filtered views; C is untouched.
    driver.process(topic1, "A", null);
    driver.process(topic1, "B", null);
    assertNull(getter2.get("A"));
    assertNull(getter2.get("B"));
    assertNull(getter2.get("C"));
    assertNull(getter3.get("A"));
    assertNull(getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
}
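KStreamTestDriver used in these tests is an internal class that was later replaced by the public TopologyTestDriver. A rough sketch of driving a comparable even-value filter with it, assuming Kafka Streams 2.4 or later for TestInputTopic (topic names and configuration values are illustrative):

import java.util.Properties;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

final StreamsBuilder builder = new StreamsBuilder();
builder.table("topic1", Consumed.with(Serdes.String(), Serdes.Integer()))
    .filter((key, value) -> value % 2 == 0)
    .toStream()
    .to("even-values", Produced.with(Serdes.String(), Serdes.Integer()));

final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-filter-sketch");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
try (final TopologyTestDriver testDriver = new TopologyTestDriver(builder.build(), props)) {
    final TestInputTopic<String, Integer> input =
        testDriver.createInputTopic("topic1", new StringSerializer(), new IntegerSerializer());
    input.pipeInput("A", 1); // odd: filtered out
    input.pipeInput("A", 2); // even: forwarded downstream
}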
Use of org.apache.kafka.streams.kstream.Predicate in project microservices by pwillhan.
From the class GeoLocationStreams, the method init:
@PostConstruct
public void init() {
    Map<String, Object> props = new HashMap<>();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "geolocation-application");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.99.100:9092");
    props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, GeoLocationSerdes.class.getName());
    StreamsConfig config = new StreamsConfig(props);
    KStreamBuilder builder = new KStreamBuilder();
    // Forward only records whose coordinates fall inside the accepted latitude/longitude ranges.
    builder.stream("geolocationStreams").filter(new Predicate<Object, Object>() {
        @Override
        public boolean test(Object key, Object value) {
            GeoLocation geolocation = (GeoLocation) value;
            System.out.println("Stream received => " + value);
            return geolocation.getLatitude() >= -90 && geolocation.getLatitude() < 90
                && geolocation.getLongitude() >= -180 && geolocation.getLongitude() < 180;
        }
    }).to("geolocations");
    KafkaStreams streams = new KafkaStreams(builder, config);
    streams.start();
}
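KStreamBuilder and the KafkaStreams(builder, config) constructor used here come from the pre-1.0 API. A minimal sketch of the same coordinate-validation filter on the current StreamsBuilder API, under the assumptions that GeoLocationSerdes implements Serde<GeoLocation> with a no-arg constructor and that the topic names match the example above:

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "geolocation-application");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.99.100:9092");

final StreamsBuilder builder = new StreamsBuilder();
// Assumption: GeoLocationSerdes implements Serde<GeoLocation>.
final KStream<String, GeoLocation> locations =
    builder.stream("geolocationStreams", Consumed.with(Serdes.String(), new GeoLocationSerdes()));
// Predicate as a lambda, mirroring the range check in the anonymous class above.
locations.filter((key, geo) -> geo.getLatitude() >= -90 && geo.getLatitude() < 90
        && geo.getLongitude() >= -180 && geo.getLongitude() < 180)
    .to("geolocations", Produced.with(Serdes.String(), new GeoLocationSerdes()));

final KafkaStreams streams = new KafkaStreams(builder.build(), props);
streams.start();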
Use of org.apache.kafka.streams.kstream.Predicate in project apache-kafka-on-k8s by banzaicloud.
From the class QueryableStateIntegrationTest, the method shouldBeAbleToQueryMapValuesAfterFilterState:
@Test
public void shouldBeAbleToQueryMapValuesAfterFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(
        new KeyValue<>(keys[0], "1"),
        new KeyValue<>(keys[1], "1"),
        new KeyValue<>(keys[2], "3"),
        new KeyValue<>(keys[3], "5"),
        new KeyValue<>(keys[4], "2")));
    final Set<KeyValue<String, Long>> expectedBatch1 = new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));
    IntegrationTestUtils.produceKeyValuesSynchronously(
        streamOne,
        batch1,
        TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()),
        mockTime);
    final Predicate<String, String> filterPredicate = new Predicate<String, String>() {
        @Override
        public boolean test(final String key, final String value) {
            return key.contains("kafka");
        }
    };
    final KTable<String, String> t1 = builder.table(streamOne);
    final KTable<String, String> t2 = t1.filter(filterPredicate, Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("queryFilter"));
    final KTable<String, Long> t3 = t2.mapValues(new ValueMapper<String, Long>() {
        @Override
        public Long apply(final String value) {
            return Long.valueOf(value);
        }
    }, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t3.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    kafkaStreams.start();
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    final ReadOnlyKeyValueStore<String, Long> myMapStore =
        kafkaStreams.store("queryMapValues", QueryableStoreTypes.<String, Long>keyValueStore());
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(myMapStore.get(expectedEntry.key), expectedEntry.value);
    }
    for (final KeyValue<String, String> batchEntry : batch1) {
        final KeyValue<String, Long> batchEntryMapValue = new KeyValue<>(batchEntry.key, Long.valueOf(batchEntry.value));
        if (!expectedBatch1.contains(batchEntryMapValue)) {
            assertNull(myMapStore.get(batchEntry.key));
        }
    }
}