Use of org.apache.kafka.test.InternalMockProcessorContext in project apache-kafka-on-k8s by banzaicloud.
From the class AbstractTaskTest, method shouldDeleteAndRecreateStoreDirectoryOnReinitialize.
@Test
public void shouldDeleteAndRecreateStoreDirectoryOnReinitialize() throws IOException {
    final StreamsConfig streamsConfig = new StreamsConfig(new Properties() {
        {
            put(StreamsConfig.APPLICATION_ID_CONFIG, "app-id");
            put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
        }
    });
    final Consumer consumer = EasyMock.createNiceMock(Consumer.class);
    final StateStore store1 = EasyMock.createNiceMock(StateStore.class);
    final StateStore store2 = EasyMock.createNiceMock(StateStore.class);
    final StateStore store3 = EasyMock.createNiceMock(StateStore.class);
    final StateStore store4 = EasyMock.createNiceMock(StateStore.class);
    final String storeName1 = "storeName1";
    final String storeName2 = "storeName2";
    final String storeName3 = "storeName3";
    final String storeName4 = "storeName4";
    expect(store1.name()).andReturn(storeName1).anyTimes();
    EasyMock.replay(store1);
    expect(store2.name()).andReturn(storeName2).anyTimes();
    EasyMock.replay(store2);
    expect(store3.name()).andReturn(storeName3).anyTimes();
    EasyMock.replay(store3);
    expect(store4.name()).andReturn(storeName4).anyTimes();
    EasyMock.replay(store4);
    final StateDirectory stateDirectory = new StateDirectory(streamsConfig, new MockTime());
    final AbstractTask task = createTask(consumer, new HashMap<StateStore, String>() {
        {
            put(store1, storeTopicPartition1.topic());
            put(store2, storeTopicPartition2.topic());
            put(store3, storeTopicPartition3.topic());
            put(store4, storeTopicPartition4.topic());
        }
    }, stateDirectory);
    final String taskDir = stateDirectory.directoryForTask(task.id).getAbsolutePath();
    // store1 and store2 live under the "rocksdb" subdirectory; store3 and store4 sit directly under the task directory
    final File storeDirectory1 = new File(taskDir + File.separator + "rocksdb" + File.separator + storeName1);
    final File storeDirectory2 = new File(taskDir + File.separator + "rocksdb" + File.separator + storeName2);
    final File storeDirectory3 = new File(taskDir + File.separator + storeName3);
    final File storeDirectory4 = new File(taskDir + File.separator + storeName4);
    final File testFile1 = new File(storeDirectory1.getAbsolutePath() + File.separator + "testFile");
    final File testFile2 = new File(storeDirectory2.getAbsolutePath() + File.separator + "testFile");
    final File testFile3 = new File(storeDirectory3.getAbsolutePath() + File.separator + "testFile");
    final File testFile4 = new File(storeDirectory4.getAbsolutePath() + File.separator + "testFile");
    storeDirectory1.mkdirs();
    storeDirectory2.mkdirs();
    storeDirectory3.mkdirs();
    storeDirectory4.mkdirs();
    testFile1.createNewFile();
    assertTrue(testFile1.exists());
    testFile2.createNewFile();
    assertTrue(testFile2.exists());
    testFile3.createNewFile();
    assertTrue(testFile3.exists());
    testFile4.createNewFile();
    assertTrue(testFile4.exists());
    task.processorContext = new InternalMockProcessorContext(stateDirectory.directoryForTask(task.id), streamsConfig);
    task.stateMgr.register(store1, new MockRestoreCallback());
    task.stateMgr.register(store2, new MockRestoreCallback());
    task.stateMgr.register(store3, new MockRestoreCallback());
    task.stateMgr.register(store4, new MockRestoreCallback());
    // only reinitialize store1 and store3 -- store2 and store4 should be untouched
    task.reinitializeStateStoresForPartitions(Utils.mkSet(storeTopicPartition1, storeTopicPartition3));
    assertFalse(testFile1.exists());
    assertTrue(testFile2.exists());
    assertFalse(testFile3.exists());
    assertTrue(testFile4.exists());
}
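The test relies on a createTask(...) helper and on four storeTopicPartition fields that are declared elsewhere in AbstractTaskTest and not shown in this snippet. A minimal sketch of what those field declarations might look like, with illustrative topic names (the real test defines its own values):

// Hypothetical declarations -- topic names are illustrative, not taken from the original test.
private final TopicPartition storeTopicPartition1 = new TopicPartition("store-changelog-1", 0);
private final TopicPartition storeTopicPartition2 = new TopicPartition("store-changelog-2", 0);
private final TopicPartition storeTopicPartition3 = new TopicPartition("store-changelog-3", 0);
private final TopicPartition storeTopicPartition4 = new TopicPartition("store-changelog-4", 0);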
Use of org.apache.kafka.test.InternalMockProcessorContext in project apache-kafka-on-k8s by banzaicloud.
From the class CachingKeyValueStoreTest, method setUp.
@Before
public void setUp() {
    final String storeName = "store";
    underlyingStore = new InMemoryKeyValueStore<>(storeName, Serdes.Bytes(), Serdes.ByteArray());
    cacheFlushListener = new CacheFlushListenerStub<>();
    store = new CachingKeyValueStore<>(underlyingStore, Serdes.String(), Serdes.String());
    store.setFlushListener(cacheFlushListener, false);
    cache = new ThreadCache(new LogContext("testCache "), maxCacheSizeBytes, new MockStreamsMetrics(new Metrics()));
    context = new InternalMockProcessorContext(null, null, null, (RecordCollector) null, cache);
    topic = "topic";
    context.setRecordContext(new ProcessorRecordContext(10, 0, 0, topic));
    store.init(context, null);
}
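setUp() assigns several fields that are declared elsewhere in CachingKeyValueStoreTest. A minimal sketch of declarations that would let the snippet compile, assuming String keys and values and an illustrative cache size (the real test picks its own value):

// Hypothetical field declarations assumed by setUp() above; types follow the snippet, values are illustrative.
private KeyValueStore<Bytes, byte[]> underlyingStore;
private CacheFlushListenerStub<String, String> cacheFlushListener;
private CachingKeyValueStore<String, String> store;
private ThreadCache cache;
private InternalMockProcessorContext context;
private String topic;
private final long maxCacheSizeBytes = 150L; // illustrative; any small cache size works here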
Use of org.apache.kafka.test.InternalMockProcessorContext in project apache-kafka-on-k8s by banzaicloud.
From the class StateStoreTestUtils, method newKeyValueStore.
public static <K, V> KeyValueStore<K, V> newKeyValueStore(final String name, final String applicationId, final Class<K> keyType, final Class<V> valueType) {
    final InMemoryKeyValueStoreSupplier<K, V> supplier = new InMemoryKeyValueStoreSupplier<>(name, null, null, new MockTime(), false, Collections.<String, String>emptyMap());
    final StateStore stateStore = supplier.get();
    stateStore.init(
            new InternalMockProcessorContext(
                    StateSerdes.withBuiltinTypes(ProcessorStateManager.storeChangelogTopic(applicationId, name), keyType, valueType),
                    new NoOpRecordCollector()),
            stateStore);
    return (KeyValueStore<K, V>) stateStore;
}
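A hedged usage sketch of the helper above; the store name, application id, key/value types, and assertion are illustrative and not part of the original source:

// Hypothetical caller code showing how the helper might be used in a test.
final KeyValueStore<String, String> store =
        StateStoreTestUtils.newKeyValueStore("my-store", "app-id", String.class, String.class);
store.put("key", "value");
assertEquals("value", store.get("key"));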
Use of org.apache.kafka.test.InternalMockProcessorContext in project apache-kafka-on-k8s by banzaicloud.
From the class RocksDBWindowStoreSupplierTest, method shouldCreateLoggingEnabledStoreWhenWindowStoreLogged.
@Test
public void shouldCreateLoggingEnabledStoreWhenWindowStoreLogged() {
    store = createStore(true, false, 3);
    final List<ProducerRecord> logged = new ArrayList<>();
    final NoOpRecordCollector collector = new NoOpRecordCollector() {
        @Override
        public <K, V> void send(final String topic, K key, V value, Integer partition, Long timestamp, Serializer<K> keySerializer, Serializer<V> valueSerializer) {
            logged.add(new ProducerRecord<K, V>(topic, partition, timestamp, key, value));
        }
    };
    final InternalMockProcessorContext context = new InternalMockProcessorContext(TestUtils.tempDirectory(), Serdes.String(), Serdes.String(), collector, cache);
    context.setTime(1);
    store.init(context, store);
    store.put("a", "b");
    assertFalse(logged.isEmpty());
}
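This test and the non-logging variant that follows both call a createStore(logged, cached, numSegments) helper that is not shown in these snippets. A plausible sketch, assuming it wraps RocksDBWindowStoreSupplier, the supplier class under test; the constructor arguments and their order are illustrative and may differ between Kafka versions:

// Hypothetical helper -- store name, retention, and window size are illustrative values.
private WindowStore<String, String> createStore(final boolean logged, final boolean cached, final int numSegments) {
    return new RocksDBWindowStoreSupplier<>(
            "store",                                // store name
            30_000L,                                // retention period (ms)
            numSegments,                            // number of segments
            false,                                  // retainDuplicates
            Serdes.String(),                        // key serde
            Serdes.String(),                        // value serde
            10_000L,                                // window size (ms)
            logged,                                 // enable changelog logging
            Collections.<String, String>emptyMap(), // changelog topic config
            cached                                  // enable caching
    ).get();
}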
Use of org.apache.kafka.test.InternalMockProcessorContext in project apache-kafka-on-k8s by banzaicloud.
From the class RocksDBWindowStoreSupplierTest, method shouldNotBeLoggingEnabledStoreWhenLogginNotEnabled.
@Test
public void shouldNotBeLoggingEnabledStoreWhenLogginNotEnabled() {
    store = createStore(false, false, 3);
    final List<ProducerRecord> logged = new ArrayList<>();
    final NoOpRecordCollector collector = new NoOpRecordCollector() {
        @Override
        public <K, V> void send(final String topic, K key, V value, Integer partition, Long timestamp, Serializer<K> keySerializer, Serializer<V> valueSerializer) {
            logged.add(new ProducerRecord<K, V>(topic, partition, timestamp, key, value));
        }
    };
    final InternalMockProcessorContext context = new InternalMockProcessorContext(TestUtils.tempDirectory(), Serdes.String(), Serdes.String(), collector, cache);
    context.setTime(1);
    store.init(context, store);
    store.put("a", "b");
    assertTrue(logged.isEmpty());
}