Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.
The class CompositeReadOnlySessionStoreTest, method shouldFindValueForKeyWhenMultiStores:
@Test
public void shouldFindValueForKeyWhenMultiStores() throws Exception {
    final ReadOnlySessionStoreStub<String, Long> secondUnderlying = new ReadOnlySessionStoreStub<>();
    stubProviderTwo.addStore(storeName, secondUnderlying);

    // each windowed key lives in a different underlying store
    final Windowed<String> keyOne = new Windowed<>("key-one", new SessionWindow(0, 0));
    final Windowed<String> keyTwo = new Windowed<>("key-two", new SessionWindow(0, 0));
    underlyingSessionStore.put(keyOne, 0L);
    secondUnderlying.put(keyTwo, 10L);

    // the composite store should locate both keys across the two providers
    final List<KeyValue<Windowed<String>, Long>> keyOneResults = toList(sessionStore.fetch("key-one"));
    final List<KeyValue<Windowed<String>, Long>> keyTwoResults = toList(sessionStore.fetch("key-two"));

    assertEquals(Collections.singletonList(KeyValue.pair(keyOne, 0L)), keyOneResults);
    assertEquals(Collections.singletonList(KeyValue.pair(keyTwo, 10L)), keyTwoResults);
}
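The toList helper above is a test utility that is not part of this excerpt. A minimal sketch of such a helper, assuming it simply drains the KeyValueIterator returned by fetch into a java.util.List and then closes it, might look like this:

private static <K, V> List<KeyValue<K, V>> toList(final KeyValueIterator<K, V> iterator) {
    // hypothetical helper: collect all entries, then release the iterator's underlying resources
    final List<KeyValue<K, V>> results = new ArrayList<>();
    try {
        while (iterator.hasNext()) {
            results.add(iterator.next());
        }
    } finally {
        iterator.close();
    }
    return results;
}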
Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.
The class CachingSessionStoreTest, method shouldPutFetchFromCache:
@Test
public void shouldPutFetchFromCache() throws Exception {
    cachingStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
    cachingStore.put(new Windowed<>("aa", new SessionWindow(0, 0)), 1L);
    cachingStore.put(new Windowed<>("b", new SessionWindow(0, 0)), 1L);

    final KeyValueIterator<Windowed<String>, Long> a = cachingStore.findSessions("a", 0, 0);
    final KeyValueIterator<Windowed<String>, Long> b = cachingStore.findSessions("b", 0, 0);

    assertEquals(KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L), a.next());
    assertEquals(KeyValue.pair(new Windowed<>("b", new SessionWindow(0, 0)), 1L), b.next());
    assertFalse(a.hasNext());
    assertFalse(b.hasNext());
    assertEquals(3, cache.size());
}
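The iterators returned by findSessions hold on to store resources until they are closed. Outside of a short test like this one, a caller would typically drain and close them explicitly; a minimal sketch using try-with-resources (KeyValueIterator extends Closeable), assuming the same cachingStore as above:

// fetch all sessions for key "a" whose windows intersect the time range [0, 0]
try (final KeyValueIterator<Windowed<String>, Long> sessions = cachingStore.findSessions("a", 0, 0)) {
    while (sessions.hasNext()) {
        final KeyValue<Windowed<String>, Long> entry = sessions.next();
        // entry.key.window() gives the session bounds, entry.value the aggregated value
        System.out.println(entry.key.key() + "@" + entry.key.window().start()
            + "-" + entry.key.window().end() + " = " + entry.value);
    }
}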
Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.
The class KStreamWindowAggregateTest, method testJoin:
@Test
public void testJoin() throws Exception {
    final File baseDir = Files.createTempDirectory("test").toFile();
    try {
        final KStreamBuilder builder = new KStreamBuilder();
        String topic1 = "topic1";
        String topic2 = "topic2";

        // windowed aggregation of topic1 into hopping windows of size 10, advancing by 5
        KStream<String, String> stream1 = builder.stream(strSerde, strSerde, topic1);
        KTable<Windowed<String>, String> table1 = stream1.groupByKey(strSerde, strSerde)
            .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, TimeWindows.of(10).advanceBy(5), strSerde, "topic1-Canonized");
        MockProcessorSupplier<Windowed<String>, String> proc1 = new MockProcessorSupplier<>();
        table1.toStream().process(proc1);

        // the same windowed aggregation of topic2
        KStream<String, String> stream2 = builder.stream(strSerde, strSerde, topic2);
        KTable<Windowed<String>, String> table2 = stream2.groupByKey(strSerde, strSerde)
            .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, TimeWindows.of(10).advanceBy(5), strSerde, "topic2-Canonized");
        MockProcessorSupplier<Windowed<String>, String> proc2 = new MockProcessorSupplier<>();
        table2.toStream().process(proc2);

        // join the two windowed tables on their Windowed<String> keys
        MockProcessorSupplier<Windowed<String>, String> proc3 = new MockProcessorSupplier<>();
        table1.join(table2, new ValueJoiner<String, String, String>() {

            @Override
            public String apply(String p1, String p2) {
                return p1 + "%" + p2;
            }
        }).toStream().process(proc3);

        driver = new KStreamTestDriver(builder, baseDir);

        // push five records into topic1 at timestamps 0..4; topic2 has no data yet, so no join output
        setRecordContext(0, topic1);
        driver.process(topic1, "A", "1");
        driver.flushState();
        setRecordContext(1, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(2, topic1);
        driver.process(topic1, "C", "3");
        driver.flushState();
        setRecordContext(3, topic1);
        driver.process(topic1, "D", "4");
        driver.flushState();
        setRecordContext(4, topic1);
        driver.process(topic1, "A", "1");
        driver.flushState();

        proc1.checkAndClearProcessResult("[A@0]:0+1", "[B@0]:0+2", "[C@0]:0+3", "[D@0]:0+4", "[A@0]:0+1+1");
        proc2.checkAndClearProcessResult();
        proc3.checkAndClearProcessResult();

        // push five more records into topic1 at timestamps 5..9; these also fall into the overlapping window starting at 5
        setRecordContext(5, topic1);
        driver.process(topic1, "A", "1");
        driver.flushState();
        setRecordContext(6, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(7, topic1);
        driver.process(topic1, "D", "4");
        driver.flushState();
        setRecordContext(8, topic1);
        driver.process(topic1, "B", "2");
        driver.flushState();
        setRecordContext(9, topic1);
        driver.process(topic1, "C", "3");
        driver.flushState();

        proc1.checkAndClearProcessResult("[A@0]:0+1+1+1", "[A@5]:0+1", "[B@0]:0+2+2", "[B@5]:0+2", "[D@0]:0+4+4", "[D@5]:0+4", "[B@0]:0+2+2+2", "[B@5]:0+2+2", "[C@0]:0+3+3", "[C@5]:0+3");
        proc2.checkAndClearProcessResult();
        proc3.checkAndClearProcessResult();

        // push five records into topic2 at timestamps 0..4; now both sides of the join produce output
        setRecordContext(0, topic1);
        driver.process(topic2, "A", "a");
        driver.flushState();
        setRecordContext(1, topic1);
        driver.process(topic2, "B", "b");
        driver.flushState();
        setRecordContext(2, topic1);
        driver.process(topic2, "C", "c");
        driver.flushState();
        setRecordContext(3, topic1);
        driver.process(topic2, "D", "d");
        driver.flushState();
        setRecordContext(4, topic1);
        driver.process(topic2, "A", "a");
        driver.flushState();

        proc1.checkAndClearProcessResult();
        proc2.checkAndClearProcessResult("[A@0]:0+a", "[B@0]:0+b", "[C@0]:0+c", "[D@0]:0+d", "[A@0]:0+a+a");
        proc3.checkAndClearProcessResult("[A@0]:0+1+1+1%0+a", "[B@0]:0+2+2+2%0+b", "[C@0]:0+3+3%0+c", "[D@0]:0+4+4%0+d", "[A@0]:0+1+1+1%0+a+a");

        // push five more records into topic2 at timestamps 5..9
        setRecordContext(5, topic1);
        driver.process(topic2, "A", "a");
        driver.flushState();
        setRecordContext(6, topic1);
        driver.process(topic2, "B", "b");
        driver.flushState();
        setRecordContext(7, topic1);
        driver.process(topic2, "D", "d");
        driver.flushState();
        setRecordContext(8, topic1);
        driver.process(topic2, "B", "b");
        driver.flushState();
        setRecordContext(9, topic1);
        driver.process(topic2, "C", "c");
        driver.flushState();

        proc1.checkAndClearProcessResult();
        proc2.checkAndClearProcessResult("[A@0]:0+a+a+a", "[A@5]:0+a", "[B@0]:0+b+b", "[B@5]:0+b", "[D@0]:0+d+d", "[D@5]:0+d", "[B@0]:0+b+b+b", "[B@5]:0+b+b", "[C@0]:0+c+c", "[C@5]:0+c");
        proc3.checkAndClearProcessResult("[A@0]:0+1+1+1%0+a+a+a", "[A@5]:0+1%0+a", "[B@0]:0+2+2+2%0+b+b", "[B@5]:0+2+2%0+b", "[D@0]:0+4+4%0+d+d", "[D@5]:0+4%0+d", "[B@0]:0+2+2+2%0+b+b+b", "[B@5]:0+2+2%0+b+b", "[C@0]:0+3+3%0+c+c", "[C@5]:0+3%0+c");
    } finally {
        Utils.delete(baseDir);
    }
}
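The expected strings such as "[A@0]:0+1+1" encode a Windowed<String> key followed by the aggregated value: the bracketed part appears to be the windowed key rendered as [key@windowStart], and the value is MockAggregator.TOSTRING_ADDER appending each record value onto the initializer's "0". Because TimeWindows.of(10).advanceBy(5) creates overlapping hopping windows, a record at timestamp 7, for example, updates both the window starting at 0 and the window starting at 5, which is why pairs such as "[B@0]" and "[B@5]" show up together. A small illustration, using the internal TimeWindow class only to show which start timestamps the '@' refers to:

// hopping windows of size 10 advancing by 5: a record at timestamp 7 falls into [0,10) and [5,15)
final Windowed<String> firstHop = new Windowed<>("B", new TimeWindow(0, 10));
final Windowed<String> secondHop = new Windowed<>("B", new TimeWindow(5, 15));
assertEquals(0, firstHop.window().start());   // the "0" in "[B@0]"
assertEquals(5, secondHop.window().start());  // the "5" in "[B@5]"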
Use of org.apache.kafka.streams.kstream.Windowed in project kafka by apache.
The class SessionKeySerde, method from:
public static <K> Windowed<K> from(final byte[] binaryKey, final Deserializer<K> keyDeserializer) {
    final K key = extractKey(binaryKey, keyDeserializer);
    final Window window = extractWindow(binaryKey);
    return new Windowed<>(key, window);
}
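A caller that reads session keys in their binary form, for example from a session store's changelog, could use from to recover the original key together with its session window. A minimal sketch, assuming binaryKey holds a session key written by this class and the original keys were serialized with the String serde:

// binaryKey is assumed to be a binary session key produced by this class;
// the deserializer must match the serde the keys were originally written with
final Windowed<String> windowedKey = SessionKeySerde.from(binaryKey, Serdes.String().deserializer());
final String originalKey = windowedKey.key();            // e.g. "key-one"
final long sessionStart = windowedKey.window().start();  // session start timestamp
final long sessionEnd = windowedKey.window().end();      // session end timestamp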