Use of org.apache.kafka.streams.KeyValue in project kafka by apache.
The class ReadOnlyWindowStoreStub, method fetchAll().
@Override
public KeyValueIterator<Windowed<K>, V> fetchAll(final Instant timeFrom, final Instant timeTo) {
    if (!open) {
        throw new InvalidStateStoreException("Store is not open");
    }
    final List<KeyValue<Windowed<K>, V>> results = new ArrayList<>();
    for (final long now : data.keySet()) {
        if (!(now >= timeFrom.toEpochMilli() && now <= timeTo.toEpochMilli())) {
            continue;
        }
        final NavigableMap<K, V> kvMap = data.get(now);
        if (kvMap != null) {
            for (final Entry<K, V> entry : kvMap.entrySet()) {
                results.add(new KeyValue<>(new Windowed<>(entry.getKey(), new TimeWindow(now, now + windowSize)), entry.getValue()));
            }
        }
    }
    final Iterator<KeyValue<Windowed<K>, V>> iterator = results.iterator();
    return new KeyValueIterator<Windowed<K>, V>() {
        @Override
        public void close() {
        }

        @Override
        public Windowed<K> peekNextKey() {
            throw new UnsupportedOperationException("peekNextKey() not supported in " + getClass().getName());
        }

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public KeyValue<Windowed<K>, V> next() {
            return iterator.next();
        }
    };
}
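Below is a minimal, hedged sketch of how a caller might consume the iterator that fetchAll returns, assuming a ReadOnlyWindowStore<String, Long> obtained elsewhere; the store handle, key/value types, and method name dumpWindows are illustrative, not part of the snippet above. KeyValueIterator extends Closeable, so try-with-resources is used to release it.

import java.time.Instant;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;

public final class FetchAllExample {
    // Prints every windowed entry whose window start falls between the two instants.
    // The store instance is assumed to come from elsewhere; types are illustrative.
    static void dumpWindows(final ReadOnlyWindowStore<String, Long> store, final Instant from, final Instant to) {
        // Close the iterator to release any underlying store resources.
        try (final KeyValueIterator<Windowed<String>, Long> iter = store.fetchAll(from, to)) {
            while (iter.hasNext()) {
                final KeyValue<Windowed<String>, Long> entry = iter.next();
                System.out.printf("key=%s window=[%d,%d) value=%d%n",
                    entry.key.key(), entry.key.window().start(), entry.key.window().end(), entry.value);
            }
        }
    }
}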
Use of org.apache.kafka.streams.KeyValue in project kafka by apache.
The class RocksDBStoreTest, method shouldPutAll().
@Test
public void shouldPutAll() {
    // Build three serialized key-value pairs: "1" -> "a", "2" -> "b", "3" -> "c".
    final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
    entries.add(new KeyValue<>(new Bytes(stringSerializer.serialize(null, "1")), stringSerializer.serialize(null, "a")));
    entries.add(new KeyValue<>(new Bytes(stringSerializer.serialize(null, "2")), stringSerializer.serialize(null, "b")));
    entries.add(new KeyValue<>(new Bytes(stringSerializer.serialize(null, "3")), stringSerializer.serialize(null, "c")));
    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    // Write the whole batch in one call, flush, then read each key back.
    rocksDBStore.putAll(entries);
    rocksDBStore.flush();
    assertEquals("a", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
    assertEquals("b", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
    assertEquals("c", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
}
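For reference, the same batch could be built with the KeyValue.pair factory and written through the generic KeyValueStore interface. The sketch below is an assumption-laden equivalent, not code from the test: any KeyValueStore<Bytes, byte[]> works, not only the test's internal RocksDBStore, and the class and method names are made up.

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.state.KeyValueStore;

public final class PutAllExample {
    // Writes two entries in a single batched call; the store parameter is an assumption.
    static void writeBatch(final KeyValueStore<Bytes, byte[]> store) {
        final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
        entries.add(KeyValue.pair(Bytes.wrap("1".getBytes(StandardCharsets.UTF_8)), "a".getBytes(StandardCharsets.UTF_8)));
        entries.add(KeyValue.pair(Bytes.wrap("2".getBytes(StandardCharsets.UTF_8)), "b".getBytes(StandardCharsets.UTF_8)));
        store.putAll(entries); // single batched write
        store.flush();         // force the batch to the underlying store, mirroring the test
    }
}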
Use of org.apache.kafka.streams.KeyValue in project kafka by apache.
The class RocksDBStoreTest, method shouldRestoreThenDeleteOnRestoreAll().
@Test
public void shouldRestoreThenDeleteOnRestoreAll() {
    final List<KeyValue<byte[], byte[]>> entries = getKeyValueEntries();
    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    context.restore(rocksDBStore.name(), entries);
    assertEquals("a", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
    assertEquals("b", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
    assertEquals("c", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
    entries.clear();
    entries.add(new KeyValue<>("2".getBytes(UTF_8), "b".getBytes(UTF_8)));
    entries.add(new KeyValue<>("3".getBytes(UTF_8), "c".getBytes(UTF_8)));
    // A null value is a changelog tombstone: restoring it deletes key "1" from the store.
    entries.add(new KeyValue<>("1".getBytes(UTF_8), null));
    context.restore(rocksDBStore.name(), entries);
    try (final KeyValueIterator<Bytes, byte[]> iterator = rocksDBStore.all()) {
        final Set<String> keys = new HashSet<>();
        while (iterator.hasNext()) {
            keys.add(stringDeserializer.deserialize(null, iterator.next().key.get()));
        }
        assertThat(keys, equalTo(Utils.mkSet("2", "3")));
    }
}
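The verification loop above, draining all() into a set of deserialized keys, is a reusable pattern. A standalone version is sketched below; the class and method names and the assumption that keys were written as UTF-8 strings are mine, not the test's.

import java.util.HashSet;
import java.util.Set;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;

public final class KeySetExample {
    // Collects every key currently in the store as a String (assumes UTF-8 string keys).
    static Set<String> allKeys(final KeyValueStore<Bytes, byte[]> store) {
        final StringDeserializer deserializer = new StringDeserializer();
        final Set<String> keys = new HashSet<>();
        // all() returns a Closeable iterator over the whole store; close it to free resources.
        try (final KeyValueIterator<Bytes, byte[]> iterator = store.all()) {
            while (iterator.hasNext()) {
                keys.add(deserializer.deserialize(null, iterator.next().key.get()));
            }
        }
        return keys;
    }
}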
Use of org.apache.kafka.streams.KeyValue in project kafka by apache.
The class RocksDBStoreTest, method shouldNotThrowWhenRestoringOnMissingHeaders().
@Test
public void shouldNotThrowWhenRestoringOnMissingHeaders() {
    final List<KeyValue<byte[], byte[]>> entries = getChangelogRecordsWithoutHeaders();
    final Properties props = StreamsTestUtils.getStreamsConfig();
    props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class);
    props.put(InternalConfig.IQ_CONSISTENCY_OFFSET_VECTOR_ENABLED, true);
    dir = TestUtils.tempDirectory();
    context = new InternalMockProcessorContext<>(dir, Serdes.String(), Serdes.String(), new StreamsConfig(props));
    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    context.restore(rocksDBStore.name(), entries);
    assertThat(rocksDBStore.getPosition(), is(Position.emptyPosition()));
}
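The assertion checks that the store's Position stays empty when the restored records carry no consistency headers. As a hedged illustration of what a non-empty Position looks like, the sketch below uses the public org.apache.kafka.streams.query.Position API; the topic name and offsets are made up.

import org.apache.kafka.streams.query.Position;

public final class PositionExample {
    public static void main(final String[] args) {
        // Start from an empty position, then record offsets per topic-partition (names are illustrative).
        final Position position = Position.emptyPosition()
            .withComponent("changelog-topic", 0, 42L)
            .withComponent("changelog-topic", 1, 7L);

        // merge() combines positions, keeping the later offset for each partition.
        final Position other = Position.emptyPosition().withComponent("changelog-topic", 0, 50L);
        final Position merged = position.merge(other);

        // Expected to print something like {0=50, 1=7}.
        System.out.println(merged.getPartitionPositions("changelog-topic"));
    }
}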
Use of org.apache.kafka.streams.KeyValue in project kafka by apache.
The class RocksDBStoreTest, method shouldRestoreAll().
@Test
public void shouldRestoreAll() {
    final List<KeyValue<byte[], byte[]>> entries = getKeyValueEntries();
    rocksDBStore.init((StateStoreContext) context, rocksDBStore);
    context.restore(rocksDBStore.name(), entries);
    assertEquals("a", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "1")))));
    assertEquals("b", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "2")))));
    assertEquals("c", stringDeserializer.deserialize(null, rocksDBStore.get(new Bytes(stringSerializer.serialize(null, "3")))));
}
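getKeyValueEntries() is a test helper not shown in this excerpt. A plausible stand-in, assumed here only so the snippet is self-contained, would produce the three UTF-8 changelog records the assertions expect:

import static java.nio.charset.StandardCharsets.UTF_8;

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.streams.KeyValue;

public final class RestoreFixtures {
    // Hypothetical equivalent of the test's getKeyValueEntries() helper:
    // three records matching the assertions "1" -> "a", "2" -> "b", "3" -> "c".
    static List<KeyValue<byte[], byte[]>> getKeyValueEntries() {
        final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>();
        entries.add(new KeyValue<>("1".getBytes(UTF_8), "a".getBytes(UTF_8)));
        entries.add(new KeyValue<>("2".getBytes(UTF_8), "b".getBytes(UTF_8)));
        entries.add(new KeyValue<>("3".getBytes(UTF_8), "c".getBytes(UTF_8)));
        return entries;
    }
}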