Example 11 with Windowed

Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.

The class WindowedStreamPartitionerTest, method testCopartitioning:

@Test
public void testCopartitioning() {
    Random rand = new Random();
    DefaultPartitioner defaultPartitioner = new DefaultPartitioner();
    WindowedSerializer<Integer> windowedSerializer = new WindowedSerializer<>(intSerializer);
    WindowedStreamPartitioner<Integer, String> streamPartitioner = new WindowedStreamPartitioner<>(windowedSerializer);
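    // every windowed variant of a key should go to the same partition that the
    // default producer partitioner picks for the raw key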
    for (int k = 0; k < 10; k++) {
        Integer key = rand.nextInt();
        byte[] keyBytes = intSerializer.serialize(topicName, key);
        String value = key.toString();
        byte[] valueBytes = stringSerializer.serialize(topicName, value);
        Integer expected = defaultPartitioner.partition("topic", key, keyBytes, value, valueBytes, cluster);
        for (int w = 1; w < 10; w++) {
            TimeWindow window = new TimeWindow(10 * w, 20 * w);
            Windowed<Integer> windowedKey = new Windowed<>(key, window);
            Integer actual = streamPartitioner.partition(windowedKey, value, infos.size());
            assertEquals(expected, actual);
        }
    }
}
Also used: DefaultPartitioner (org.apache.kafka.clients.producer.internals.DefaultPartitioner), Windowed (org.apache.kafka.streams.kstream.Windowed), Random (java.util.Random), Test (org.junit.Test)
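
The test refers to fixtures (topicName, intSerializer, stringSerializer, infos, cluster) that are declared as fields of the test class rather than inside the method. A minimal sketch of what such a fixture might look like, assuming six partitions of one topic hosted on a dummy node; the field names mirror the snippet, while the partition count, cluster id, and constructor arguments (PartitionInfo, Node, and Cluster from org.apache.kafka.common, serializers from org.apache.kafka.common.serialization) are assumptions:

private final String topicName = "topic";
private final IntegerSerializer intSerializer = new IntegerSerializer();
private final StringSerializer stringSerializer = new StringSerializer();

// six partitions of the topic, all hosted on a dummy node (the partition count is an assumption)
private final List<PartitionInfo> infos = Arrays.asList(
        new PartitionInfo(topicName, 0, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo(topicName, 1, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo(topicName, 2, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo(topicName, 3, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo(topicName, 4, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo(topicName, 5, Node.noNode(), new Node[0], new Node[0]));

// cluster metadata handed to DefaultPartitioner.partition(...) in the test above
private final Cluster cluster = new Cluster("cluster", Collections.singletonList(Node.noNode()),
        infos, Collections.<String>emptySet(), Collections.<String>emptySet());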

Example 12 with Windowed

Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.

The class KStreamWindowAggregateTest, method testAggBasic:

@Test
public void testAggBasic() throws Exception {
    final File baseDir = Files.createTempDirectory("test").toFile();
    try {
        final KStreamBuilder builder = new KStreamBuilder();
        String topic1 = "topic1";
        KStream<String, String> stream1 = builder.stream(strSerde, strSerde, topic1);
        KTable<Windowed<String>, String> table2 = stream1.groupByKey(strSerde, strSerde)
                .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER,
                        TimeWindows.of(10).advanceBy(5), strSerde, "topic1-Canonized");
        MockProcessorSupplier<Windowed<String>, String> proc2 = new MockProcessorSupplier<>();
        table2.toStream().process(proc2);
        driver = new KStreamTestDriver(builder, baseDir);
        setRecordContext(0, topic1);
        driver.process(topic1, "A", "1");
        driver.flushState();
        setRecordContext(1, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(2, topic1);
        driver.process(topic1, "C", "3");
        driver.flushState();
        setRecordContext(3, topic1);
        driver.process(topic1, "D", "4");
        driver.flushState();
        setRecordContext(4, topic1);
        driver.process(topic1, "A", "1");
        driver.flushState();
        setRecordContext(5, topic1);
        driver.process(topic1, "A", "1");
        driver.flushState();
        setRecordContext(6, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(7, topic1);
        driver.process(topic1, "D", "4");
        driver.flushState();
        setRecordContext(8, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(9, topic1);
        driver.process(topic1, "C", "3");
        driver.flushState();
        setRecordContext(10, topic1);
        driver.process(topic1, "A", "1");
        driver.flushState();
        setRecordContext(11, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(12, topic1);
        driver.process(topic1, "D", "4");
        driver.flushState();
        setRecordContext(13, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(14, topic1);
        driver.process(topic1, "C", "3");
        driver.flushState();
        assertEquals(Utils.mkList(
                "[A@0]:0+1", "[B@0]:0+2", "[C@0]:0+3", "[D@0]:0+4",
                "[A@0]:0+1+1",
                "[A@0]:0+1+1+1", "[A@5]:0+1",
                "[B@0]:0+2+2", "[B@5]:0+2",
                "[D@0]:0+4+4", "[D@5]:0+4",
                "[B@0]:0+2+2+2", "[B@5]:0+2+2",
                "[C@0]:0+3+3", "[C@5]:0+3",
                "[A@5]:0+1+1", "[A@10]:0+1",
                "[B@5]:0+2+2+2", "[B@10]:0+2",
                "[D@5]:0+4+4", "[D@10]:0+4",
                "[B@5]:0+2+2+2+2", "[B@10]:0+2+2",
                "[C@5]:0+3+3", "[C@10]:0+3"), proc2.processed);
    } finally {
        Utils.delete(baseDir);
    }
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), Windowed (org.apache.kafka.streams.kstream.Windowed), KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver), MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier), File (java.io.File), Test (org.junit.Test)
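
Each expected entry reads [key@windowStart]:aggregate, so "[B@5]:0+2+2" is the running aggregate for key B in the window starting at time 5. Because TimeWindows.of(10).advanceBy(5) defines hopping windows of size 10 that advance every 5 time units, any record with timestamp 5 or later falls into two overlapping windows, which is why most inputs above update two aggregates. A small standalone sketch (not part of the test) of how the window assignment could be checked directly with the windowsFor method of the same API:

TimeWindows hopping = TimeWindows.of(10).advanceBy(5);

// a record with timestamp 7 overlaps the window starting at 0 and the one starting at 5
Map<Long, TimeWindow> windows = hopping.windowsFor(7);
System.out.println(windows.keySet()); // expected to contain the window start times 0 and 5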

Example 13 with Windowed

Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.

The class CachingSessionStoreTest, method shouldFetchCorrectlyAcrossSegments:

@Test
public void shouldFetchCorrectlyAcrossSegments() throws Exception {
    final Windowed<String> a1 = new Windowed<>("a", new SessionWindow(0, 0));
    final Windowed<String> a2 = new Windowed<>("a", new SessionWindow(Segments.MIN_SEGMENT_INTERVAL, Segments.MIN_SEGMENT_INTERVAL));
    final Windowed<String> a3 = new Windowed<>("a", new SessionWindow(Segments.MIN_SEGMENT_INTERVAL * 2, Segments.MIN_SEGMENT_INTERVAL * 2));
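    // the three sessions are spaced one full segment interval apart, so they land in different store segments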
    cachingStore.put(a1, 1L);
    cachingStore.put(a2, 2L);
    cachingStore.put(a3, 3L);
    cachingStore.flush();
    final KeyValueIterator<Windowed<String>, Long> results = cachingStore.findSessions("a", 0, Segments.MIN_SEGMENT_INTERVAL * 2);
    assertEquals(a1, results.next().key);
    assertEquals(a2, results.next().key);
    assertEquals(a3, results.next().key);
    assertFalse(results.hasNext());
}
Also used: Windowed (org.apache.kafka.streams.kstream.Windowed), SessionWindow (org.apache.kafka.streams.kstream.internals.SessionWindow), Test (org.junit.Test)

Example 14 with Windowed

Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.

The class CachingSessionStoreTest, method shouldClearNamespaceCacheOnClose:

@Test
public void shouldClearNamespaceCacheOnClose() throws Exception {
    final Windowed<String> a1 = new Windowed<>("a", new SessionWindow(0, 0));
    cachingStore.put(a1, 1L);
    assertEquals(1, cache.size());
    cachingStore.close();
    assertEquals(0, cache.size());
}
Also used: Windowed (org.apache.kafka.streams.kstream.Windowed), SessionWindow (org.apache.kafka.streams.kstream.internals.SessionWindow), Test (org.junit.Test)

Example 15 with Windowed

Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.

The class CachingSessionStoreTest, method addSingleSession:

private void addSingleSession(final String sessionId, final List<KeyValue<Windowed<String>, Long>> allSessions) {
    final int timestamp = allSessions.size() * 10;
    final Windowed<String> key = new Windowed<>(sessionId, new SessionWindow(timestamp, timestamp));
    final Long value = 1L;
    cachingStore.put(key, value);
    allSessions.add(KeyValue.pair(key, value));
}
Also used: Windowed (org.apache.kafka.streams.kstream.Windowed), SessionWindow (org.apache.kafka.streams.kstream.internals.SessionWindow)
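
addSingleSession is a private helper the other tests in CachingSessionStoreTest use to populate the store; each call creates a point session whose start and end timestamps grow with the number of sessions already added. A minimal usage sketch, assuming the surrounding test class; the test name, session id, and assertions here are illustrative, not taken from the original class:

@Test
public void shouldFindSessionsAddedViaHelper() throws Exception {
    final List<KeyValue<Windowed<String>, Long>> added = new ArrayList<>();
    // creates sessions at timestamps 0 and 10 for the same session id
    addSingleSession("a", added);
    addSingleSession("a", added);
    cachingStore.flush();
    // findSessions returns all sessions for the key whose windows fall inside the given time range
    final KeyValueIterator<Windowed<String>, Long> found = cachingStore.findSessions("a", 0, 1000);
    assertEquals(added.get(0).key, found.next().key);
    assertEquals(added.get(1).key, found.next().key);
    assertFalse(found.hasNext());
}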

Aggregations

Windowed (org.apache.kafka.streams.kstream.Windowed): 39
Test (org.junit.Test): 32
KeyValue (org.apache.kafka.streams.KeyValue): 18
SessionWindow (org.apache.kafka.streams.kstream.internals.SessionWindow): 18
HashMap (java.util.HashMap): 6
KeyValueMapper (org.apache.kafka.streams.kstream.KeyValueMapper): 6
KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver): 6
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 5
Bytes (org.apache.kafka.common.utils.Bytes): 5
KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder): 5
MockKeyValueMapper (org.apache.kafka.test.MockKeyValueMapper): 5
Properties (java.util.Properties): 4
Comparator (java.util.Comparator): 3
File (java.io.File): 2
CountDownLatch (java.util.concurrent.CountDownLatch): 2
LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer): 2
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 2
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 2
Reducer (org.apache.kafka.streams.kstream.Reducer): 2
ValueJoiner (org.apache.kafka.streams.kstream.ValueJoiner): 2