
Example 16 with Predicate

Use of org.apache.kafka.streams.kstream.Predicate in project kafka by apache.

From the class QueryableStateIntegrationTest, the method shouldBeAbleToQueryMapValuesAfterFilterState:

@Test
public void shouldBeAbleToQueryMapValuesAfterFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(new KeyValue<>(keys[0], "1"), new KeyValue<>(keys[1], "1"), new KeyValue<>(keys[2], "3"), new KeyValue<>(keys[3], "5"), new KeyValue<>(keys[4], "2")));
    final Set<KeyValue<String, Long>> expectedBatch1 = new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));
    IntegrationTestUtils.produceKeyValuesSynchronously(streamOne, batch1, TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()), mockTime);
    // Lambda implementation of org.apache.kafka.streams.kstream.Predicate: keep only keys containing "kafka".
    final Predicate<String, String> filterPredicate = (key, value) -> key.contains("kafka");
    final KTable<String, String> t1 = builder.table(streamOne);
    final KTable<String, String> t2 = t1.filter(filterPredicate, Materialized.as("queryFilter"));
    final KTable<String, Long> t3 = t2.mapValues((ValueMapper<String, Long>) Long::valueOf, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t3.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    startKafkaStreamsAndWaitForRunningState(kafkaStreams);
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    // Interactively query the materialized "queryMapValues" store once at least one record has been processed.
    final ReadOnlyKeyValueStore<String, Long> myMapStore = IntegrationTestUtils.getStore("queryMapValues", kafkaStreams, keyValueStore());
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(expectedEntry.value, myMapStore.get(expectedEntry.key));
    }
    // Every record filtered out by the predicate must be absent from the downstream store.
    for (final KeyValue<String, String> batchEntry : batch1) {
        final KeyValue<String, Long> batchEntryMapValue = new KeyValue<>(batchEntry.key, Long.valueOf(batchEntry.value));
        if (!expectedBatch1.contains(batchEntryMapValue)) {
            assertNull(myMapStore.get(batchEntry.key));
        }
    }
}
Also used : Arrays(java.util.Arrays) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) MockTime(kafka.utils.MockTime) Instant.ofEpochMilli(java.time.Instant.ofEpochMilli) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) IntegrationTestUtils.safeUniqueTestName(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Map(java.util.Map) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) StoreQueryParameters.fromNameAndType(org.apache.kafka.streams.StoreQueryParameters.fromNameAndType) AfterClass(org.junit.AfterClass) TestUtils(org.apache.kafka.test.TestUtils) StreamsTestUtils.startKafkaStreamsAndWaitForRunningState(org.apache.kafka.test.StreamsTestUtils.startKafkaStreamsAndWaitForRunningState) Utils.mkSet(org.apache.kafka.common.utils.Utils.mkSet) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) State(org.apache.kafka.streams.KafkaStreams.State) Category(org.junit.experimental.categories.Category) KafkaStreamsTest(org.apache.kafka.streams.KafkaStreamsTest) QueryableStoreTypes(org.apache.kafka.streams.state.QueryableStoreTypes) Predicate(org.apache.kafka.streams.kstream.Predicate) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Matchers.is(org.hamcrest.Matchers.is) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) KGroupedStream(org.apache.kafka.streams.kstream.KGroupedStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) TreeSet(java.util.TreeSet) UnknownStateStoreException(org.apache.kafka.streams.errors.UnknownStateStoreException) ArrayList(java.util.ArrayList) EmbeddedKafkaCluster(org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster) TestName(org.junit.rules.TestName) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) KTable(org.apache.kafka.streams.kstream.KTable) IntegrationTestUtils.waitForApplicationState(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.waitForApplicationState) Properties(java.util.Properties) Test(org.junit.Test) IOException(java.io.IOException) File(java.io.File) Assert.assertNull(org.junit.Assert.assertNull) KeyQueryMetadata(org.apache.kafka.streams.KeyQueryMetadata) StringReader(java.io.StringReader) TreeMap(java.util.TreeMap) IntegrationTestUtils.getRunningStreams(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.getRunningStreams) TimeWindows(org.apache.kafka.streams.kstream.TimeWindows) KafkaStreams(org.apache.kafka.streams.KafkaStreams) BufferedReader(java.io.BufferedReader) ReadOnlySessionStore(org.apache.kafka.streams.state.ReadOnlySessionStore) Assert.assertEquals(org.junit.Assert.assertEquals) QueryableStoreTypes.sessionStore(org.apache.kafka.streams.state.QueryableStoreTypes.sessionStore) QueryableStoreTypes.keyValueStore(org.apache.kafka.streams.state.QueryableStoreTypes.keyValueStore) Produced(org.apache.kafka.streams.kstream.Produced) LoggerFactory(org.slf4j.LoggerFactory) IsEqual.equalTo(org.hamcrest.core.IsEqual.equalTo) Serde(org.apache.kafka.common.serialization.Serde) After(org.junit.After) Serdes(org.apache.kafka.common.serialization.Serdes) MockMapper(org.apache.kafka.test.MockMapper) KeyValue(org.apache.kafka.streams.KeyValue) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) 
LongSerializer(org.apache.kafka.common.serialization.LongSerializer) Bytes(org.apache.kafka.common.utils.Bytes) Objects(java.util.Objects) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) List(java.util.List) Materialized(org.apache.kafka.streams.kstream.Materialized) Entry(java.util.Map.Entry) Duration.ofMillis(java.time.Duration.ofMillis) InvalidStateStoreException(org.apache.kafka.streams.errors.InvalidStateStoreException) StreamsConfig(org.apache.kafka.streams.StreamsConfig) ReadOnlyWindowStore(org.apache.kafka.streams.state.ReadOnlyWindowStore) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) IntegrationTestUtils.startApplicationAndWaitUntilRunning(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.startApplicationAndWaitUntilRunning) BeforeClass(org.junit.BeforeClass) Assert.assertThrows(org.junit.Assert.assertThrows) IntegrationTest(org.apache.kafka.test.IntegrationTest) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) KStream(org.apache.kafka.streams.kstream.KStream) Duration.ofSeconds(java.time.Duration.ofSeconds) NoRetryException(org.apache.kafka.test.NoRetryException) HashSet(java.util.HashSet) TestUtils.retryOnExceptionWithTimeout(org.apache.kafka.test.TestUtils.retryOnExceptionWithTimeout) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) PrintStream(java.io.PrintStream) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Logger(org.slf4j.Logger) Consumed(org.apache.kafka.streams.kstream.Consumed) Matchers(org.hamcrest.Matchers) TimeUnit(java.util.concurrent.TimeUnit) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) Rule(org.junit.Rule) LagInfo(org.apache.kafka.streams.LagInfo) WindowStoreIterator(org.apache.kafka.streams.state.WindowStoreIterator) FileReader(java.io.FileReader) Comparator(java.util.Comparator) Collections(java.util.Collections)
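
The newer kafka variant above already uses a lambda for the Predicate and queries the store through IntegrationTestUtils.getStore. The sketch below is a minimal, self-contained version of the same filter-then-mapValues chain that runs without a broker; the topic names input-topic and output-topic, the application id, and the use of TopologyTestDriver from kafka-streams-test-utils are assumptions for illustration, not part of the original test.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.apache.kafka.streams.state.KeyValueStore;

public class PredicateFilterSketch {

    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();

        // Lambda implementation of org.apache.kafka.streams.kstream.Predicate,
        // equivalent to the anonymous inner classes used by the older examples below.
        final Predicate<String, String> filterPredicate = (key, value) -> key.contains("kafka");

        final KTable<String, String> source = builder.table("input-topic");
        final KTable<String, Long> filtered = source
            .filter(filterPredicate, Materialized.as("queryFilter"))
            .mapValues((ValueMapper<String, Long>) Long::valueOf,
                Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues")
                    .withValueSerde(Serdes.Long()));
        filtered.toStream().to("output-topic", Produced.with(Serdes.String(), Serdes.Long()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "predicate-filter-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        // TopologyTestDriver drives the topology in-process, without a Kafka cluster.
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> input =
                driver.createInputTopic("input-topic", new StringSerializer(), new StringSerializer());
            input.pipeInput("hello", "1");
            input.pipeInput("kafka", "2");

            // Only the key containing "kafka" survives the predicate and reaches the store.
            final KeyValueStore<String, Long> store = driver.getKeyValueStore("queryMapValues");
            System.out.println(store.get("kafka")); // 2
            System.out.println(store.get("hello")); // null
        }
    }
}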

Example 17 with Predicate

Use of org.apache.kafka.streams.kstream.Predicate in project kafka by apache.

From the class KTableFilterTest, the method testSendingOldValue:

@Test
public void testSendingOldValue() throws IOException {
    KStreamBuilder builder = new KStreamBuilder();
    String topic1 = "topic1";
    KTableImpl<String, Integer, Integer> table1 = (KTableImpl<String, Integer, Integer>) builder.table(stringSerde, intSerde, topic1, "anyStoreName");
    KTableImpl<String, Integer, Integer> table2 = (KTableImpl<String, Integer, Integer>) table1.filter(new Predicate<String, Integer>() {

        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    });
    // Forward old values so downstream processors receive (new <- old) pairs.
    table2.enableSendingOldValues();
    MockProcessorSupplier<String, Integer> proc1 = new MockProcessorSupplier<>();
    MockProcessorSupplier<String, Integer> proc2 = new MockProcessorSupplier<>();
    builder.addProcessor("proc1", proc1, table1.name);
    builder.addProcessor("proc2", proc2, table2.name);
    driver = new KStreamTestDriver(builder, stateDir, null, null);
    driver.process(topic1, "A", 1);
    driver.process(topic1, "B", 1);
    driver.process(topic1, "C", 1);
    driver.flushState();
    // All initial values are odd, so the filtered table (proc2) forwards nothing yet.
    proc1.checkAndClearProcessResult("A:(1<-null)", "B:(1<-null)", "C:(1<-null)");
    proc2.checkEmptyAndClearProcessResult();
    driver.process(topic1, "A", 2);
    driver.process(topic1, "B", 2);
    driver.flushState();
    proc1.checkAndClearProcessResult("A:(2<-1)", "B:(2<-1)");
    proc2.checkAndClearProcessResult("A:(2<-null)", "B:(2<-null)");
    driver.process(topic1, "A", 3);
    driver.flushState();
    proc1.checkAndClearProcessResult("A:(3<-2)");
    proc2.checkAndClearProcessResult("A:(null<-2)");
    driver.process(topic1, "A", null);
    driver.process(topic1, "B", null);
    driver.flushState();
    proc1.checkAndClearProcessResult("A:(null<-3)", "B:(null<-2)");
    proc2.checkAndClearProcessResult("B:(null<-2)");
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KStreamTestDriver(org.apache.kafka.test.KStreamTestDriver) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) Predicate(org.apache.kafka.streams.kstream.Predicate) Test(org.junit.Test)
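
This example targets the pre-1.0 API: KStreamBuilder, the builder.table(keySerde, valueSerde, topic, storeName) overload, and KStreamTestDriver are gone in current releases, and enableSendingOldValues plus the direct use of KTableImpl rely on internal types rather than the public KTable interface. As a rough orientation only, here is a sketch of how the same even-value Predicate reads against the current public DSL; the output topic and store names are made up for illustration.

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.kstream.Produced;

public class EvenValueFilterSketch {

    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();

        // The same even-value predicate as the anonymous class above, written as a lambda.
        final Predicate<String, Integer> isEven = (key, value) -> value % 2 == 0;

        final KTable<String, Integer> table =
            builder.table("topic1", Consumed.with(Serdes.String(), Serdes.Integer()));

        // filter() drops non-matching rows; an update that stops matching is emitted
        // downstream as a tombstone (null value), which is what proc2 observes above
        // as "A:(null<-2)" once old values are being sent.
        final KTable<String, Integer> evens = table.filter(isEven, Materialized.as("even-store"));
        evens.toStream().to("evens-topic", Produced.with(Serdes.String(), Serdes.Integer()));

        // Print the wiring instead of running it; no broker is needed for this.
        System.out.println(builder.build().describe());
    }
}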

Example 18 with Predicate

Use of org.apache.kafka.streams.kstream.Predicate in project kafka by apache.

From the class KTableFilterTest, the method testValueGetter:

@Test
public void testValueGetter() throws IOException {
    KStreamBuilder builder = new KStreamBuilder();
    String topic1 = "topic1";
    KTableImpl<String, Integer, Integer> table1 = (KTableImpl<String, Integer, Integer>) builder.table(stringSerde, intSerde, topic1, "anyStoreName");
    KTableImpl<String, Integer, Integer> table2 = (KTableImpl<String, Integer, Integer>) table1.filter(new Predicate<String, Integer>() {

        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    });
    KTableImpl<String, Integer, Integer> table3 = (KTableImpl<String, Integer, Integer>) table1.filterNot(new Predicate<String, Integer>() {

        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    });
    KTableValueGetterSupplier<String, Integer> getterSupplier2 = table2.valueGetterSupplier();
    KTableValueGetterSupplier<String, Integer> getterSupplier3 = table3.valueGetterSupplier();
    driver = new KStreamTestDriver(builder, stateDir, null, null);
    KTableValueGetter<String, Integer> getter2 = getterSupplier2.get();
    KTableValueGetter<String, Integer> getter3 = getterSupplier3.get();
    getter2.init(driver.context());
    getter3.init(driver.context());
    driver.process(topic1, "A", 1);
    driver.process(topic1, "B", 1);
    driver.process(topic1, "C", 1);
    assertNull(getter2.get("A"));
    assertNull(getter2.get("B"));
    assertNull(getter2.get("C"));
    assertEquals(1, (int) getter3.get("A"));
    assertEquals(1, (int) getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
    driver.process(topic1, "A", 2);
    driver.process(topic1, "B", 2);
    assertEquals(2, (int) getter2.get("A"));
    assertEquals(2, (int) getter2.get("B"));
    assertNull(getter2.get("C"));
    assertNull(getter3.get("A"));
    assertNull(getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
    driver.process(topic1, "A", 3);
    assertNull(getter2.get("A"));
    assertEquals(2, (int) getter2.get("B"));
    assertNull(getter2.get("C"));
    assertEquals(3, (int) getter3.get("A"));
    assertNull(getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
    driver.process(topic1, "A", null);
    driver.process(topic1, "B", null);
    assertNull(getter2.get("A"));
    assertNull(getter2.get("B"));
    assertNull(getter2.get("C"));
    assertNull(getter3.get("A"));
    assertNull(getter3.get("B"));
    assertEquals(1, (int) getter3.get("C"));
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KStreamTestDriver(org.apache.kafka.test.KStreamTestDriver) Predicate(org.apache.kafka.streams.kstream.Predicate) Test(org.junit.Test)
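
The value getters used here are internal as well (KTableValueGetterSupplier lives in org.apache.kafka.streams.kstream.internals); with the current API the equivalent point lookups go through materialized stores. Below is a rough, self-contained sketch of the same filter versus filterNot split using lambdas and TopologyTestDriver; the store names and the driver-based lookup are assumptions for illustration, not part of the original test.

import java.util.Properties;

import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.state.KeyValueStore;

public class FilterVsFilterNotSketch {

    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        final Predicate<String, Integer> isEven = (key, value) -> value % 2 == 0;

        final KTable<String, Integer> table =
            builder.table("topic1", Consumed.with(Serdes.String(), Serdes.Integer()));

        // filter keeps rows matching the predicate, filterNot keeps the complement;
        // materializing both lets us inspect their contents like the value getters above.
        table.filter(isEven, Materialized.as("even-store"));
        table.filterNot(isEven, Materialized.as("odd-store"));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "filter-vs-filternot-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, Integer> input =
                driver.createInputTopic("topic1", new StringSerializer(), new IntegerSerializer());
            input.pipeInput("A", 1);
            input.pipeInput("A", 2);
            input.pipeInput("B", 3);

            final KeyValueStore<String, Integer> evens = driver.getKeyValueStore("even-store");
            final KeyValueStore<String, Integer> odds = driver.getKeyValueStore("odd-store");
            System.out.println("even A=" + evens.get("A") + ", odd A=" + odds.get("A")); // even A=2, odd A=null
            System.out.println("even B=" + evens.get("B") + ", odd B=" + odds.get("B")); // even B=null, odd B=3
        }
    }
}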

Example 19 with Predicate

Use of org.apache.kafka.streams.kstream.Predicate in project microservices by pwillhan.

From the class GeoLocationStreams, the method init:

@PostConstruct
public void init() {
    Map<String, Object> props = new HashMap<>();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "geolocation-application");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.99.100:9092");
    props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, GeoLocationSerdes.class.getName());
    StreamsConfig config = new StreamsConfig(props);
    KStreamBuilder builder = new KStreamBuilder();
    builder.stream("geolocationStreams").filter(new Predicate<Object, Object>() {

        @Override
        public boolean test(Object key, Object value) {
            GeoLocation geolocation = (GeoLocation) value;
            System.out.println("Stream received => " + value);
            return geolocation.getLatitude() >= -90 && geolocation.getLatitude() < 90 && geolocation.getLongitude() >= -180 && geolocation.getLongitude() < 180;
        }
    }).to("geolocations");
    KafkaStreams streams = new KafkaStreams(builder, config);
    streams.start();
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) HashMap(java.util.HashMap) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Predicate(org.apache.kafka.streams.kstream.Predicate) PostConstruct(javax.annotation.PostConstruct)
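
This example is written against a pre-1.0 Kafka Streams API: KStreamBuilder, StreamsConfig.KEY_SERDE_CLASS_CONFIG/VALUE_SERDE_CLASS_CONFIG, and the KafkaStreams(builder, config) constructor were all removed in later releases. The following is a hedged sketch of the same bounds-checking Predicate against the current API; it keeps the original topic names, serde class, and bootstrap address, assumes the project's GeoLocation and GeoLocationSerdes classes are on the classpath, and assumes String keys (the original uses raw Object).

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Predicate;

public class GeoLocationStreamsSketch {

    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "geolocation-application");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.99.100:9092");
        // The old KEY_SERDE_CLASS_CONFIG / VALUE_SERDE_CLASS_CONFIG settings became DEFAULT_* variants.
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, GeoLocationSerdes.class.getName());

        final StreamsBuilder builder = new StreamsBuilder();

        // Same coordinate bounds check as the anonymous Predicate above, written as a lambda.
        final Predicate<String, GeoLocation> hasValidCoordinates = (key, geo) ->
            geo.getLatitude() >= -90 && geo.getLatitude() < 90
                && geo.getLongitude() >= -180 && geo.getLongitude() < 180;

        final KStream<String, GeoLocation> locations = builder.stream("geolocationStreams");
        locations.filter(hasValidCoordinates).to("geolocations");

        final KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.start();
        // Close the Streams client cleanly on JVM shutdown.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}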

Example 20 with Predicate

Use of org.apache.kafka.streams.kstream.Predicate in project apache-kafka-on-k8s by banzaicloud.

From the class QueryableStateIntegrationTest, the method shouldBeAbleToQueryMapValuesAfterFilterState:

@Test
public void shouldBeAbleToQueryMapValuesAfterFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(new KeyValue<>(keys[0], "1"), new KeyValue<>(keys[1], "1"), new KeyValue<>(keys[2], "3"), new KeyValue<>(keys[3], "5"), new KeyValue<>(keys[4], "2")));
    final Set<KeyValue<String, Long>> expectedBatch1 = new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));
    IntegrationTestUtils.produceKeyValuesSynchronously(streamOne, batch1, TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()), mockTime);
    final Predicate<String, String> filterPredicate = new Predicate<String, String>() {

        @Override
        public boolean test(final String key, final String value) {
            return key.contains("kafka");
        }
    };
    final KTable<String, String> t1 = builder.table(streamOne);
    final KTable<String, String> t2 = t1.filter(filterPredicate, Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("queryFilter"));
    final KTable<String, Long> t3 = t2.mapValues(new ValueMapper<String, Long>() {

        @Override
        public Long apply(final String value) {
            return Long.valueOf(value);
        }
    }, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t3.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    kafkaStreams.start();
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    final ReadOnlyKeyValueStore<String, Long> myMapStore = kafkaStreams.store("queryMapValues", QueryableStoreTypes.<String, Long>keyValueStore());
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(expectedEntry.value, myMapStore.get(expectedEntry.key));
    }
    for (final KeyValue<String, String> batchEntry : batch1) {
        final KeyValue<String, Long> batchEntryMapValue = new KeyValue<>(batchEntry.key, Long.valueOf(batchEntry.value));
        if (!expectedBatch1.contains(batchEntryMapValue)) {
            assertNull(myMapStore.get(batchEntry.key));
        }
    }
}
Also used : KafkaStreams(org.apache.kafka.streams.KafkaStreams) KeyValue(org.apache.kafka.streams.KeyValue) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) Properties(java.util.Properties) Predicate(org.apache.kafka.streams.kstream.Predicate) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Bytes(org.apache.kafka.common.utils.Bytes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) HashSet(java.util.HashSet) KafkaStreamsTest(org.apache.kafka.streams.KafkaStreamsTest) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
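
Compared with Example 16, the substantive difference (besides the anonymous classes) is the store lookup: the two-argument kafkaStreams.store(name, type) call used here was later deprecated in favor of StoreQueryParameters (compare the StoreQueryParameters.fromNameAndType import in Example 16). A minimal sketch of the replacement call, assuming a running KafkaStreams instance and the store name from the test:

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;

public final class StoreLookupSketch {

    private StoreLookupSketch() {
    }

    // Replacement for the deprecated kafkaStreams.store("queryMapValues", keyValueStore()):
    // wrap the store name and store type in StoreQueryParameters before querying.
    public static ReadOnlyKeyValueStore<String, Long> queryMapValuesStore(final KafkaStreams kafkaStreams) {
        return kafkaStreams.store(
            StoreQueryParameters.fromNameAndType("queryMapValues", QueryableStoreTypes.<String, Long>keyValueStore()));
    }
}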

Aggregations

Predicate (org.apache.kafka.streams.kstream.Predicate): 32 usages
Test (org.junit.Test): 27 usages
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 22 usages
ValueMapper (org.apache.kafka.streams.kstream.ValueMapper): 8 usages
Properties (java.util.Properties): 7 usages
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 7 usages
KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder): 7 usages
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 6 usages
KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver): 6 usages
Bytes (org.apache.kafka.common.utils.Bytes): 5 usages
KeyValue (org.apache.kafka.streams.KeyValue): 5 usages
KStream (org.apache.kafka.streams.kstream.KStream): 5 usages
MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier): 5 usages
HashSet (java.util.HashSet): 4 usages
List (java.util.List): 4 usages
Serdes (org.apache.kafka.common.serialization.Serdes): 4 usages
KafkaStreamsTest (org.apache.kafka.streams.KafkaStreamsTest): 4 usages
Consumed (org.apache.kafka.streams.kstream.Consumed): 4 usages
IntegrationTest (org.apache.kafka.test.IntegrationTest): 4 usages
Assert.assertEquals (org.junit.Assert.assertEquals): 4 usages