Example use of org.apache.kafka.test.MockProcessorContext in the Apache Kafka project,
from the method shouldNotBeLoggingEnabledStoreWhenLoggingNotEnabled of the class RocksDBKeyValueStoreSupplierTest.
@Test
public void shouldNotBeLoggingEnabledStoreWhenLoggingNotEnabled() throws Exception {
    // Create the store with both caching and logging disabled.
    store = createStore(false, false);
    // Captures every record the store would send to its changelog topic.
    // Fix: parameterized wildcard element type instead of the raw ProducerRecord.
    final List<ProducerRecord<?, ?>> logged = new ArrayList<>();
    final NoOpRecordCollector collector = new NoOpRecordCollector() {
        @Override
        public <K, V> void send(final String topic,
                                final K key,
                                final V value,
                                final Integer partition,
                                final Long timestamp,
                                final Serializer<K> keySerializer,
                                final Serializer<V> valueSerializer) {
            logged.add(new ProducerRecord<K, V>(topic, partition, timestamp, key, value));
        }
    };
    final MockProcessorContext context = new MockProcessorContext(TestUtils.tempDirectory(), Serdes.String(), Serdes.String(), collector, cache);
    context.setTime(1);
    store.init(context, store);
    store.put("a", "b");
    // With logging disabled, the put must not emit any changelog record.
    assertTrue(logged.isEmpty());
}
Example use of org.apache.kafka.test.MockProcessorContext in the Apache Kafka project,
from the method shouldPerformAllQueriesWithCachingDisabled of the class RocksDBKeyValueStoreTest.
@Test
public void shouldPerformAllQueriesWithCachingDisabled() throws Exception {
    final KeyValueStoreTestDriver<Integer, String> driver = KeyValueStoreTestDriver.create(Integer.class, String.class);
    final MockProcessorContext context = (MockProcessorContext) driver.context();
    // Caching and logging both disabled for this store.
    final KeyValueStore<Integer, String> store = createStore(context, Integer.class, String.class, false, false);
    context.setTime(1L);
    store.put(1, "hi");
    store.put(2, "goodbye");
    // Fix: KeyValueIterator extends Closeable and wraps a native RocksDB
    // iterator; the original leaked it. Close it via try-with-resources.
    try (final KeyValueIterator<Integer, String> range = store.all()) {
        assertEquals("hi", range.next().value);
        assertEquals("goodbye", range.next().value);
        assertFalse(range.hasNext());
    }
}
Example use of org.apache.kafka.test.MockProcessorContext in the Apache Kafka project,
from the method shouldCloseOpenIteratorsWhenStoreClosedAndThrowInvalidStateStoreOnHasNextAndNext of the class RocksDBKeyValueStoreTest.
@Test
public void shouldCloseOpenIteratorsWhenStoreClosedAndThrowInvalidStateStoreOnHasNextAndNext() throws Exception {
    final KeyValueStoreTestDriver<Integer, String> driver = KeyValueStoreTestDriver.create(Integer.class, String.class);
    final MockProcessorContext context = (MockProcessorContext) driver.context();
    context.setTime(1L);
    final KeyValueStore<Integer, String> store = createStore(context, Integer.class, String.class, false, false);
    store.put(1, "hi");
    store.put(2, "goodbye");
    final KeyValueIterator<Integer, String> iteratorOne = store.range(1, 5);
    final KeyValueIterator<Integer, String> iteratorTwo = store.range(1, 4);
    // Both iterators must be live before the store is closed.
    assertTrue(iteratorOne.hasNext());
    assertTrue(iteratorTwo.hasNext());
    store.close();
    // Closing the store must invalidate every open iterator.
    assertIteratorThrowsWhenStoreClosed(iteratorOne);
    assertIteratorThrowsWhenStoreClosed(iteratorTwo);
}

/**
 * Asserts that both {@code hasNext()} and {@code next()} on the given
 * iterator throw {@link InvalidStateStoreException}, i.e. that the iterator
 * was invalidated when its owning store was closed.
 */
private static void assertIteratorThrowsWhenStoreClosed(final KeyValueIterator<Integer, String> iterator) {
    try {
        iterator.hasNext();
        fail("should have thrown InvalidStateStoreException on closed store");
    } catch (final InvalidStateStoreException e) {
        // expected
    }
    try {
        iterator.next();
        fail("should have thrown InvalidStateStoreException on closed store");
    } catch (final InvalidStateStoreException e) {
        // expected
    }
}
Example use of org.apache.kafka.test.MockProcessorContext in the Apache Kafka project,
from the method setUp of the class MeteredSegmentedBytesStoreTest.
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
    final Metrics metrics = new Metrics();
    // Stub StreamsMetrics: sensor creation/removal is delegated to the
    // underlying Metrics registry, while latency/throughput recordings are
    // captured in the test's latencyRecorded/throughputRecorded collections.
    final StreamsMetrics streamsMetrics = new StreamsMetrics() {
        @Override
        public Map<MetricName, ? extends Metric> metrics() {
            return Collections.unmodifiableMap(metrics.metrics());
        }

        @Override
        public Sensor addSensor(final String name, final Sensor.RecordingLevel recordLevel) {
            return metrics.sensor(name);
        }

        @Override
        public Sensor addSensor(final String name, final Sensor.RecordingLevel recordLevel, final Sensor... parents) {
            return metrics.sensor(name);
        }

        @Override
        public Sensor addLatencyAndThroughputSensor(final String scopeName, final String entityName, final String operationName, final Sensor.RecordingLevel recordLevel, final String... tags) {
            return metrics.sensor(operationName);
        }

        @Override
        public Sensor addThroughputSensor(final String scopeName, final String entityName, final String operationName, final Sensor.RecordingLevel recordLevel, final String... tags) {
            return metrics.sensor(operationName);
        }

        @Override
        public void recordLatency(final Sensor sensor, final long startNs, final long endNs) {
            latencyRecorded.add(sensor.name());
        }

        @Override
        public void recordThroughput(final Sensor sensor, final long value) {
            throughputRecorded.add(sensor.name());
        }

        @Override
        public void removeSensor(final Sensor sensor) {
            metrics.removeSensor(sensor.name());
        }
    };
    // Context whose metrics() is overridden to hand out the stub above.
    final MockProcessorContext context =
        new MockProcessorContext(TestUtils.tempDirectory(), Serdes.String(), Serdes.Long(), new NoOpRecordCollector(), new ThreadCache("testCache", 0, streamsMetrics)) {
            @Override
            public StreamsMetrics metrics() {
                return streamsMetrics;
            }
        };
    store.init(context, store);
}
Example use of org.apache.kafka.test.MockProcessorContext in the Apache Kafka project,
from the method before of the class RocksDBSessionStoreTest.
@Before
public void before() {
    // Segmented bytes store backing the session store: three segments over a
    // 10-second retention period, keyed by the session key schema.
    final RocksDBSegmentedBytesStore bytesStore =
        new RocksDBSegmentedBytesStore("session-store", 10000L, 3, new SessionKeySchema());
    sessionStore = new RocksDBSessionStore<>(bytesStore, Serdes.String(), Serdes.Long());
    // Zero-capacity cache: every write flows straight through to RocksDB.
    final ThreadCache cache = new ThreadCache("testCache", 0, new MockStreamsMetrics(new Metrics()));
    final MockProcessorContext context =
        new MockProcessorContext(TestUtils.tempDirectory(), Serdes.String(), Serdes.Long(), new NoOpRecordCollector(), cache);
    sessionStore.init(context, sessionStore);
}
Aggregations