Example usage of org.apache.kafka.test.InternalMockProcessorContext in the Apache Kafka project:
class ChangeLoggingTimestampedKeyValueBytesStoreTest, method shouldDelegateInit.
@Test
public void shouldDelegateInit() {
    final InternalMockProcessorContext context = mockContext();
    final KeyValueStore<Bytes, byte[]> wrapped = EasyMock.mock(InMemoryKeyValueStore.class);
    final StateStore store = new ChangeLoggingTimestampedKeyValueBytesStore(wrapped);

    // Expectation: the wrapper must forward init() to the wrapped store,
    // passing the outer store as the root so callbacks register correctly.
    wrapped.init((StateStoreContext) context, store);
    EasyMock.expectLastCall();
    EasyMock.replay(wrapped);

    store.init((StateStoreContext) context, store);

    // Verify the delegation actually happened.
    EasyMock.verify(wrapped);
}
Example usage of org.apache.kafka.test.InternalMockProcessorContext in the Apache Kafka project:
class ChangeLoggingTimestampedKeyValueBytesStoreTest, method before.
@Before
public void before() {
    // Fresh mock context per test; stream time starts at 0.
    final InternalMockProcessorContext processorContext = mockContext();
    processorContext.setTime(0);
    store.init((StateStoreContext) processorContext, store);
}
Example usage of org.apache.kafka.test.InternalMockProcessorContext in the Apache Kafka project:
class ChangeLoggingTimestampedKeyValueBytesStoreTest, method shouldDelegateDeprecatedInit.
@SuppressWarnings("deprecation")
@Test
public void shouldDelegateDeprecatedInit() {
    final InternalMockProcessorContext context = mockContext();
    final KeyValueStore<Bytes, byte[]> wrapped = EasyMock.mock(InMemoryKeyValueStore.class);
    final StateStore store = new ChangeLoggingTimestampedKeyValueBytesStore(wrapped);

    // Expectation: the deprecated ProcessorContext overload of init()
    // must also be forwarded to the wrapped store.
    wrapped.init((ProcessorContext) context, store);
    EasyMock.expectLastCall();
    EasyMock.replay(wrapped);

    store.init((ProcessorContext) context, store);

    // Verify the delegation actually happened.
    EasyMock.verify(wrapped);
}
Example usage of org.apache.kafka.test.InternalMockProcessorContext in the Apache Kafka project:
class CompositeReadOnlyKeyValueStoreTest, method newStoreInstance.
// Builds and initializes a fresh in-memory String/String store backed by a mock
// processor context whose record collector captures changelog writes.
private KeyValueStore<String, String> newStoreInstance() {
    final KeyValueStore<String, String> store =
        Stores.keyValueStoreBuilder(Stores.inMemoryKeyValueStore(storeName), Serdes.String(), Serdes.String())
            .build();

    // Serdes are keyed to the store's changelog topic name.
    final String changelogTopic = ProcessorStateManager.storeChangelogTopic("appId", storeName, null);
    @SuppressWarnings("rawtypes")
    final InternalMockProcessorContext context = new InternalMockProcessorContext<>(
        new StateSerdes<>(changelogTopic, Serdes.String(), Serdes.String()),
        new MockRecordCollector());
    context.setTime(1L);

    store.init((StateStoreContext) context, store);
    return store;
}
Example usage of org.apache.kafka.test.InternalMockProcessorContext in the Apache Kafka project:
class AbstractSessionBytesStoreTest, method shouldLogAndMeasureExpiredRecords.
@Test
public void shouldLogAndMeasureExpiredRecords() {
    final Properties props = StreamsTestUtils.getStreamsConfig();
    final SessionStore<String, Long> store = buildSessionStore(RETENTION_PERIOD, Serdes.String(), Serdes.Long());
    final InternalMockProcessorContext context =
        new InternalMockProcessorContext(TestUtils.tempDirectory(), new StreamsConfig(props), recordCollector);
    final Time time = new SystemTime();
    context.setTime(1L);
    context.setSystemTimeMs(time.milliseconds());
    store.init((StateStoreContext) context, store);

    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister()) {
        // Advance stream time with a record whose timestamp is large enough that
        // records at timestamp 0 fall outside retention. Note that rocksdb only
        // expires whole segments at a time (segment interval = 60,000 for this retention period).
        store.put(new Windowed<>("initial record", new SessionWindow(0, 2 * SEGMENT_INTERVAL)), 0L);
        // A record with timestamp 0 is now late and should be dropped.
        store.put(new Windowed<>("late record", new SessionWindow(0, 0)), 0L);
        store.put(new Windowed<>("another on-time record", new SessionWindow(0, 2 * SEGMENT_INTERVAL)), 0L);

        assertThat(appender.getMessages(), hasItem("Skipping record for expired segment."));
    }

    // The dropped record must be reflected in the task-level dropped-records metrics.
    final String threadId = Thread.currentThread().getName();
    final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
    final Metric dropTotal = metrics.get(new MetricName(
        "dropped-records-total",
        "stream-task-metrics",
        "",
        mkMap(mkEntry("thread-id", threadId), mkEntry("task-id", "0_0"))));
    final Metric dropRate = metrics.get(new MetricName(
        "dropped-records-rate",
        "stream-task-metrics",
        "",
        mkMap(mkEntry("thread-id", threadId), mkEntry("task-id", "0_0"))));
    assertEquals(1.0, dropTotal.metricValue());
    assertNotEquals(0.0, dropRate.metricValue());

    store.close();
}
Aggregations