Search in sources:

Example 1 with Event

Use of org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event in the Apache Kafka project.

From the class KStreamSessionWindowAggregateProcessorTest, method shouldLogAndMeterWhenSkippingNullKeyWithBuiltInMetrics.

// Verifies that a record with a null key is skipped by the session-window
// aggregator: a WARN is logged and the task-level dropped-records metric
// is bumped to 1 (built-in metrics version).
@Test
public void shouldLogAndMeterWhenSkippingNullKeyWithBuiltInMetrics() {
    setup(false);
    // timestamp=-1, offset=-2, partition=-3 — sentinel values echoed in the expected log line.
    final ProcessorRecordContext recordContext =
        new ProcessorRecordContext(-1, -2, -3, "topic", new RecordHeaders());
    context.setRecordContext(recordContext);
    try (final LogCaptureAppender appender =
             LogCaptureAppender.createAndRegister(KStreamSessionWindowAggregate.class)) {
        processor.process(new Record<>(null, "1", 0L));
        final List<String> warnings = appender.getEvents().stream()
            .filter(event -> event.getLevel().equals("WARN"))
            .map(Event::getMessage)
            .collect(Collectors.toList());
        assertThat(warnings, hasItem("Skipping record due to null key. topic=[topic] partition=[-3] offset=[-2]"));
    }
    assertEquals(
        1.0,
        getMetricByName(context.metrics().metrics(), "dropped-records-total", "stream-task-metrics").metricValue());
}
Also used : CoreMatchers.is(org.hamcrest.CoreMatchers.is) MockTime(org.apache.kafka.common.utils.MockTime) Arrays(java.util.Arrays) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) TaskMetrics(org.apache.kafka.streams.processor.internals.metrics.TaskMetrics) Stores(org.apache.kafka.streams.state.Stores) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) LogContext(org.apache.kafka.common.utils.LogContext) After(org.junit.After) MetricName(org.apache.kafka.common.MetricName) Serdes(org.apache.kafka.common.serialization.Serdes) StreamsMetricsImpl(org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl) Aggregator(org.apache.kafka.streams.kstream.Aggregator) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) Time(org.apache.kafka.common.utils.Time) TestUtils(org.apache.kafka.test.TestUtils) ThreadCache(org.apache.kafka.streams.state.internals.ThreadCache) KeyValue(org.apache.kafka.streams.KeyValue) Collectors(java.util.stream.Collectors) List(java.util.List) Metrics(org.apache.kafka.common.metrics.Metrics) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Assert.assertFalse(org.junit.Assert.assertFalse) Matchers.greaterThan(org.hamcrest.Matchers.greaterThan) Duration.ofMillis(java.time.Duration.ofMillis) StreamsConfig(org.apache.kafka.streams.StreamsConfig) SessionWindows(org.apache.kafka.streams.kstream.SessionWindows) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) ArrayList(java.util.ArrayList) Initializer(org.apache.kafka.streams.kstream.Initializer) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) StateStoreContext(org.apache.kafka.streams.processor.StateStoreContext) Windowed(org.apache.kafka.streams.kstream.Windowed) Record(org.apache.kafka.streams.processor.api.Record) Processor(org.apache.kafka.streams.processor.api.Processor) SessionStore(org.apache.kafka.streams.state.SessionStore) 
MockRecordCollector(org.apache.kafka.test.MockRecordCollector) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) StreamsTestUtils.getMetricByName(org.apache.kafka.test.StreamsTestUtils.getMetricByName) Before(org.junit.Before) InternalMockProcessorContext(org.apache.kafka.test.InternalMockProcessorContext) Merger(org.apache.kafka.streams.kstream.Merger) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) StoreBuilder(org.apache.kafka.streams.state.StoreBuilder) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Assert.assertEquals(org.junit.Assert.assertEquals) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) Test(org.junit.Test)

Example 2 with Event

Use of org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event in the Apache Kafka project.

From the class KStreamSlidingWindowAggregateTest, method shouldLogAndMeterWhenSkippingNullKey.

// Verifies that the sliding-window aggregator drops a null-key record and
// logs a WARN naming the topic/partition/offset of the skipped record.
@Test
public void shouldLogAndMeterWhenSkippingNullKey() {
    final String topic = "topic";
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
        .aggregate(
            MockInitializer.STRING_INIT,
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String()));
    props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, StreamsConfig.METRICS_LATEST);
    try (final LogCaptureAppender appender =
             LogCaptureAppender.createAndRegister(KStreamSlidingWindowAggregate.class);
         final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput(null, "1");
        final List<String> warnMessages = appender.getEvents().stream()
            .filter(event -> "WARN".equals(event.getLevel()))
            .map(Event::getMessage)
            .collect(Collectors.toList());
        assertThat(warnMessages, hasItem("Skipping record due to null key or value. topic=[topic] partition=[0] offset=[0]"));
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) InMemoryWindowStore(org.apache.kafka.streams.state.internals.InMemoryWindowStore) WindowStore(org.apache.kafka.streams.state.WindowStore) CoreMatchers.is(org.hamcrest.CoreMatchers.is) Arrays(java.util.Arrays) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) Stores(org.apache.kafka.streams.state.Stores) MockReducer(org.apache.kafka.test.MockReducer) Random(java.util.Random) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Arrays.asList(java.util.Arrays.asList) Duration(java.time.Duration) Map(java.util.Map) MetricName(org.apache.kafka.common.MetricName) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) InMemoryWindowBytesStoreSupplier(org.apache.kafka.streams.state.internals.InMemoryWindowBytesStoreSupplier) TestRecord(org.apache.kafka.streams.test.TestRecord) Parameterized(org.junit.runners.Parameterized) TimeWindowedDeserializer(org.apache.kafka.streams.kstream.TimeWindowedDeserializer) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) Collection(java.util.Collection) WindowBytesStoreSupplier(org.apache.kafka.streams.state.WindowBytesStoreSupplier) Collectors(java.util.stream.Collectors) Bytes(org.apache.kafka.common.utils.Bytes) List(java.util.List) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Materialized(org.apache.kafka.streams.kstream.Materialized) Duration.ofMillis(java.time.Duration.ofMillis) StreamsConfig(org.apache.kafka.streams.StreamsConfig) MockInitializer(org.apache.kafka.test.MockInitializer) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) 
InMemoryWindowStore(org.apache.kafka.streams.state.internals.InMemoryWindowStore) RunWith(org.junit.runner.RunWith) CoreMatchers.not(org.hamcrest.CoreMatchers.not) HashMap(java.util.HashMap) KStream(org.apache.kafka.streams.kstream.KStream) WindowStore(org.apache.kafka.streams.state.WindowStore) ArrayList(java.util.ArrayList) Windowed(org.apache.kafka.streams.kstream.Windowed) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) CoreMatchers.hasItems(org.hamcrest.CoreMatchers.hasItems) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) SlidingWindows(org.apache.kafka.streams.kstream.SlidingWindows) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) WindowStoreIterator(org.apache.kafka.streams.state.WindowStoreIterator) Matcher(org.hamcrest.Matcher) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Comparator(java.util.Comparator) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)

Example 3 with Event

Use of org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event in the Apache Kafka project.

From the class KTableSourceTest, method kTableShouldLogOnOutOfOrder.

// Verifies that a KTable source materialized as "store" logs a WARN when it
// receives an update whose timestamp is older than the stored one
// (out-of-order update: old timestamp 10, new timestamp 5).
@Test
public void kTableShouldLogOnOutOfOrder() {
    final String topic = "topic";
    final StreamsBuilder builder = new StreamsBuilder();
    builder.table(topic, stringConsumed, Materialized.as("store"));
    final String expectedWarning =
        "Detected out-of-order KTable update for store, old timestamp=[10] new timestamp=[5]. topic=[topic] partition=[1] offset=[0].";
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KTableSource.class);
         final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(
            topic, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        // Newer record first, then an older-timestamped update for the same key.
        inputTopic.pipeInput("key", "value", 10L);
        inputTopic.pipeInput("key", "value", 5L);
        assertThat(
            appender.getEvents().stream()
                .filter(event -> "WARN".equals(event.getLevel()))
                .map(Event::getMessage)
                .collect(Collectors.toList()),
            hasItem(expectedWarning));
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Arrays.asList(java.util.Arrays.asList) TopologyWrapper(org.apache.kafka.streams.TopologyWrapper) Duration(java.time.Duration) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TestRecord(org.apache.kafka.streams.test.TestRecord) StreamsTestUtils.getMetricByName(org.apache.kafka.test.StreamsTestUtils.getMetricByName) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriverWrapper(org.apache.kafka.streams.TopologyTestDriverWrapper) KTable(org.apache.kafka.streams.kstream.KTable) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Assert.assertNull(org.junit.Assert.assertNull) Ignore(org.junit.Ignore) Materialized(org.apache.kafka.streams.kstream.Materialized) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) TestInputTopic(org.apache.kafka.streams.TestInputTopic) 
InternalTopologyBuilder(org.apache.kafka.streams.processor.internals.InternalTopologyBuilder) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Topology(org.apache.kafka.streams.Topology) Assert.assertEquals(org.junit.Assert.assertEquals) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)

Example 4 with Event

Use of org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event in the Apache Kafka project.

From the class KTableKTableRightJoinTest, method shouldLogAndMeterSkippedRecordsDueToNullLeftKeyWithBuiltInMetricsVersionLatest.

// Verifies that the KTable-KTable right-join processor skips a record whose
// key is null and logs a WARN carrying the record's topic/partition/offset
// metadata (set to left/-1/-2 below).
@Test
public void shouldLogAndMeterSkippedRecordsDueToNullLeftKeyWithBuiltInMetricsVersionLatest() {
    final StreamsBuilder builder = new StreamsBuilder();
    @SuppressWarnings("unchecked")
    final Processor<String, Change<String>, String, Change<Object>> join =
        new KTableKTableRightJoin<>(
            (KTableImpl<String, String, String>) builder.table("left", Consumed.with(Serdes.String(), Serdes.String())),
            (KTableImpl<String, String, String>) builder.table("right", Consumed.with(Serdes.String(), Serdes.String())),
            null
        ).get();
    props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, StreamsConfig.METRICS_LATEST);
    final MockProcessorContext<String, Change<Object>> context = new MockProcessorContext<>(props);
    context.setRecordMetadata("left", -1, -2);
    join.init(context);
    final String expectedWarning = "Skipping record due to null key. topic=[left] partition=[-1] offset=[-2]";
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KTableKTableRightJoin.class)) {
        join.process(new Record<>(null, new Change<>("new", "old"), 0));
        assertThat(
            appender.getEvents().stream()
                .filter(event -> "WARN".equals(event.getLevel()))
                .map(Event::getMessage)
                .collect(Collectors.toList()),
            hasItem(expectedWarning));
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Test(org.junit.Test) MockProcessorContext(org.apache.kafka.streams.processor.api.MockProcessorContext) Collectors(java.util.stream.Collectors) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) Serdes(org.apache.kafka.common.serialization.Serdes) Record(org.apache.kafka.streams.processor.api.Record) Processor(org.apache.kafka.streams.processor.api.Processor) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) MockProcessorContext(org.apache.kafka.streams.processor.api.MockProcessorContext) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) Test(org.junit.Test)

Example 5 with Event

Use of org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event in the Apache Kafka project.

From the class KTableSourceTest, method kTableShouldLogAndMeterOnSkippedRecords.

// Verifies that a KTable source drops a record with a null key and logs a
// WARN identifying the topic/partition/offset of the skipped record.
@Test
public void kTableShouldLogAndMeterOnSkippedRecords() {
    final String topic = "topic";
    final StreamsBuilder builder = new StreamsBuilder();
    builder.table(topic, stringConsumed);
    final String expectedWarning =
        "Skipping record due to null key. topic=[topic] partition=[0] offset=[0]";
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KTableSource.class);
         final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(
            topic, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput(null, "value");
        assertThat(
            appender.getEvents().stream()
                .filter(event -> "WARN".equals(event.getLevel()))
                .map(Event::getMessage)
                .collect(Collectors.toList()),
            hasItem(expectedWarning));
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Arrays.asList(java.util.Arrays.asList) TopologyWrapper(org.apache.kafka.streams.TopologyWrapper) Duration(java.time.Duration) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TestRecord(org.apache.kafka.streams.test.TestRecord) StreamsTestUtils.getMetricByName(org.apache.kafka.test.StreamsTestUtils.getMetricByName) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriverWrapper(org.apache.kafka.streams.TopologyTestDriverWrapper) KTable(org.apache.kafka.streams.kstream.KTable) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Assert.assertNull(org.junit.Assert.assertNull) Ignore(org.junit.Ignore) Materialized(org.apache.kafka.streams.kstream.Materialized) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) TestInputTopic(org.apache.kafka.streams.TestInputTopic) 
InternalTopologyBuilder(org.apache.kafka.streams.processor.internals.InternalTopologyBuilder) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Topology(org.apache.kafka.streams.Topology) Assert.assertEquals(org.junit.Assert.assertEquals) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Event(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)

Aggregations

Collectors (java.util.stream.Collectors)5 Serdes (org.apache.kafka.common.serialization.Serdes)5 LogCaptureAppender (org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender)5 Event (org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event)5 StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils)5 CoreMatchers.hasItem (org.hamcrest.CoreMatchers.hasItem)5 MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat)5 Test (org.junit.Test)5 Properties (java.util.Properties)4 KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp)4 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)4 Consumed (org.apache.kafka.streams.kstream.Consumed)4 Assert.assertEquals (org.junit.Assert.assertEquals)4 Assert.assertTrue (org.junit.Assert.assertTrue)4 Duration (java.time.Duration)3 Arrays.asList (java.util.Arrays.asList)3 StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)3 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)3 StreamsConfig (org.apache.kafka.streams.StreamsConfig)3 TestInputTopic (org.apache.kafka.streams.TestInputTopic)3