Search in sources:

Example 1 with ConsumerRecordFactory

use of org.apache.kafka.streams.test.ConsumerRecordFactory in project kafkastreams-cep by fhussonnois.

From the class CEPStreamIntegrationTest, method testPatternGivenMultipleRecordKeys.

@Test
public void testPatternGivenMultipleRecordKeys() {
    // Build the CEP query topology: read (String, Integer) records from
    // INPUT_TOPIC_1, match SIMPLE_PATTERN per key, and write the matched
    // sequences to OUTPUT_TOPIC_1 as JSON.
    ComplexStreamsBuilder builder = new ComplexStreamsBuilder();
    CEPStream<String, Integer> stream = builder.stream(INPUT_TOPIC_1, Consumed.with(STRING_SERDE, Serdes.Integer()));
    KStream<String, Sequence<String, Integer>> sequences = stream.query(DEFAULT_TEST_QUERY, SIMPLE_PATTERN, Queried.with(STRING_SERDE, Serdes.Integer()));
    sequences.to(OUTPUT_TOPIC_1, Produced.with(STRING_SERDE, new JsonSequenceSerde<>()));
    Topology topology = builder.build();
    driver = new TopologyTestDriver(topology, streamsConfiguration);
    ConsumerRecordFactory<String, Integer> factory = new ConsumerRecordFactory<>(Serdes.String().serializer(), Serdes.Integer().serializer());
    // Interleave records for keys K1 and K2; each key should independently
    // complete the pattern exactly once.
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K1, 0));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K2, -10));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K2, 0));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K1, 3));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K2, 6));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K1, 1));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K1, 2));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K1, 6));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K2, 4));
    driver.pipeInput(factory.create(INPUT_TOPIC_1, K2, 4));
    // JSON values are de-serialized as double.
    // Drain ALL output records: the original version read exactly two records
    // (possibly null) into the list and then asserted size == 2, which was
    // vacuously true and also missed any unexpected extra output.
    List<ProducerRecord<String, Sequence<String, Double>>> results = new ArrayList<>();
    ProducerRecord<String, Sequence<String, Double>> output;
    while ((output = driver.readOutput(OUTPUT_TOPIC_1, STRING_SERDE.deserializer(), new JsonSequenceSerde.SequenceDeserializer<>())) != null) {
        results.add(output);
    }
    Assert.assertEquals(2, results.size());
    // First match: key K1 with stage values 0 -> {3, 1, 2} -> 6.
    final ProducerRecord<String, Sequence<String, Double>> kvOne = results.get(0);
    Assert.assertEquals(K1, kvOne.key());
    assertStagesNames(kvOne.value(), STAGE_1, STAGE_2, STAGE_3);
    assertStagesValue(kvOne.value(), STAGE_1, 0.0);
    assertStagesValue(kvOne.value(), STAGE_2, 3.0, 1.0, 2.0);
    assertStagesValue(kvOne.value(), STAGE_3, 6.0);
    // Second match: key K2 with stage values 0 -> {6, 4} -> 4.
    final ProducerRecord<String, Sequence<String, Double>> kvTwo = results.get(1);
    Assert.assertEquals(K2, kvTwo.key());
    assertStagesNames(kvTwo.value(), STAGE_1, STAGE_2, STAGE_3);
    assertStagesValue(kvTwo.value(), STAGE_1, 0.0);
    assertStagesValue(kvTwo.value(), STAGE_2, 6.0, 4.0);
    assertStagesValue(kvTwo.value(), STAGE_3, 4.0);
}
Also used : ArrayList(java.util.ArrayList) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Sequence(com.github.fhuss.kafka.streams.cep.core.Sequence) Topology(org.apache.kafka.streams.Topology) ConsumerRecordFactory(org.apache.kafka.streams.test.ConsumerRecordFactory) JsonSequenceSerde(com.github.fhuss.kafka.streams.cep.serdes.JsonSequenceSerde) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Test(org.junit.Test)

Example 2 with ConsumerRecordFactory

use of org.apache.kafka.streams.test.ConsumerRecordFactory in project kafkastreams-cep by fhussonnois.

From the class CEPProcessorTest, method shouldMeterOnSkippedRecordsWithNullValue.

@Test
public void shouldMeterOnSkippedRecordsWithNullValue() {
    // A record carrying a null value must not reach the CEP processor;
    // instead it is counted in the "skipped-records-total" stream metric.
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final KStream<String, String> input = streamsBuilder.stream(TOPIC_TEST_1);
    streamsBuilder.addStateStore(nfaStateStore);
    streamsBuilder.addStateStore(eventBufferStore);
    streamsBuilder.addStateStore(aggregateStateStores);
    // Attach the processor together with the three state stores it requires.
    input.process(() -> new CEPProcessor<>(TEST_QUERY, pattern), nfaStateStore.name(), eventBufferStore.name(), aggregateStateStores.name());
    final ConsumerRecordFactory<String, String> factory = new ConsumerRecordFactory<>(new StringSerializer(), new StringSerializer());
    try (TopologyTestDriver driver = new TopologyTestDriver(streamsBuilder.build(), this.props)) {
        driver.pipeInput(factory.create(TOPIC_TEST_1, "A", (String) null));
        Assert.assertEquals(1.0D, StreamsTestUtils.getMetricByName(driver.metrics(), "skipped-records-total", "stream-metrics").metricValue());
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ConsumerRecordFactory(org.apache.kafka.streams.test.ConsumerRecordFactory) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)

Example 3 with ConsumerRecordFactory

use of org.apache.kafka.streams.test.ConsumerRecordFactory in project apache-kafka-on-k8s by banzaicloud.

From the class TopologyTestDriverTest, method shouldThrowForUnknownTopic.

@Test
public void shouldThrowForUnknownTopic() {
    // Piping a record into a topic that no source node subscribes to must
    // raise an IllegalArgumentException naming the unknown topic.
    final String unknownTopic = "unknownTopic";
    // Reuse the variable instead of duplicating the literal, so the factory
    // topic and the expected message cannot drift apart.
    final ConsumerRecordFactory<byte[], byte[]> consumerRecordFactory = new ConsumerRecordFactory<>(unknownTopic, new ByteArraySerializer(), new ByteArraySerializer());
    testDriver = new TopologyTestDriver(new Topology(), config);
    try {
        testDriver.pipeInput(consumerRecordFactory.create((byte[]) null));
        // Fixed typo in the failure message ("throw" -> "thrown").
        fail("Should have thrown IllegalArgumentException");
    } catch (final IllegalArgumentException exception) {
        assertEquals("Unknown topic: " + unknownTopic, exception.getMessage());
    }
}
Also used : ConsumerRecordFactory(org.apache.kafka.streams.test.ConsumerRecordFactory) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer) Test(org.junit.Test)

Example 4 with ConsumerRecordFactory

use of org.apache.kafka.streams.test.ConsumerRecordFactory in project apache-kafka-on-k8s by banzaicloud.

From the class TopologyTestDriverTest, method shouldUseSourceSpecificDeserializers.

@Test
public void shouldUseSourceSpecificDeserializers() {
    // Two sources with different key/value deserializers feed one processor;
    // the sink serializers dispatch on the runtime type so both record shapes
    // can be written to the same topic and verified independently.
    final Topology topology = new Topology();
    final String sourceName1 = "source-1";
    final String sourceName2 = "source-2";
    final String processor = "processor";
    topology.addSource(sourceName1, Serdes.Long().deserializer(), Serdes.String().deserializer(), SOURCE_TOPIC_1);
    topology.addSource(sourceName2, Serdes.Integer().deserializer(), Serdes.Double().deserializer(), SOURCE_TOPIC_2);
    topology.addProcessor(processor, new MockProcessorSupplier(), sourceName1, sourceName2);
    // Parameterized the anonymous serializers (was raw Serializer / raw Map),
    // eliminating unchecked-conversion warnings without changing behavior.
    topology.addSink("sink", SINK_TOPIC_1, new Serializer<Object>() {

        @Override
        public byte[] serialize(final String topic, final Object data) {
            // Keys arrive as Long (source-1) or Integer (source-2).
            if (data instanceof Long) {
                return Serdes.Long().serializer().serialize(topic, (Long) data);
            }
            return Serdes.Integer().serializer().serialize(topic, (Integer) data);
        }

        @Override
        public void close() {
        }

        @Override
        public void configure(final Map<String, ?> configs, final boolean isKey) {
        }
    }, new Serializer<Object>() {

        @Override
        public byte[] serialize(final String topic, final Object data) {
            // Values arrive as String (source-1) or Double (source-2).
            if (data instanceof String) {
                return Serdes.String().serializer().serialize(topic, (String) data);
            }
            return Serdes.Double().serializer().serialize(topic, (Double) data);
        }

        @Override
        public void close() {
        }

        @Override
        public void configure(final Map<String, ?> configs, final boolean isKey) {
        }
    }, processor);
    testDriver = new TopologyTestDriver(topology, config);
    final ConsumerRecordFactory<Long, String> source1Factory = new ConsumerRecordFactory<>(SOURCE_TOPIC_1, Serdes.Long().serializer(), Serdes.String().serializer());
    final ConsumerRecordFactory<Integer, Double> source2Factory = new ConsumerRecordFactory<>(SOURCE_TOPIC_2, Serdes.Integer().serializer(), Serdes.Double().serializer());
    final Long source1Key = 42L;
    final String source1Value = "anyString";
    final Integer source2Key = 73;
    final Double source2Value = 3.14;
    final ConsumerRecord<byte[], byte[]> consumerRecord1 = source1Factory.create(source1Key, source1Value);
    final ConsumerRecord<byte[], byte[]> consumerRecord2 = source2Factory.create(source2Key, source2Value);
    // Each record must round-trip through its own source's deserializers.
    testDriver.pipeInput(consumerRecord1);
    OutputVerifier.compareKeyValue(testDriver.readOutput(SINK_TOPIC_1, Serdes.Long().deserializer(), Serdes.String().deserializer()), source1Key, source1Value);
    testDriver.pipeInput(consumerRecord2);
    OutputVerifier.compareKeyValue(testDriver.readOutput(SINK_TOPIC_1, Serdes.Integer().deserializer(), Serdes.Double().deserializer()), source2Key, source2Value);
}
Also used : ConsumerRecordFactory(org.apache.kafka.streams.test.ConsumerRecordFactory) Map(java.util.Map) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) LongSerializer(org.apache.kafka.common.serialization.LongSerializer) Serializer(org.apache.kafka.common.serialization.Serializer) Test(org.junit.Test)

Example 5 with ConsumerRecordFactory

use of org.apache.kafka.streams.test.ConsumerRecordFactory in project kafkastreams-cep by fhussonnois.

From the class CEPProcessorTest, method shouldForwardMatchingSequencesToDownStreamsProcessors.

@Test
public void shouldForwardMatchingSequencesToDownStreamsProcessors() {
    // The processor consumes from two topics; one matching record piped into
    // each topic should produce exactly two forwards to downstream processors.
    final StreamsBuilder topologyBuilder = new StreamsBuilder();
    final KStream<String, String> input = topologyBuilder.stream(Arrays.asList(TOPIC_TEST_1, TOPIC_TEST_2));
    topologyBuilder.addStateStore(nfaStateStore);
    topologyBuilder.addStateStore(eventBufferStore);
    topologyBuilder.addStateStore(aggregateStateStores);
    // Capturing processor records every forward() call for later inspection.
    final CapturedCEProcessor<String, String> processor = new CapturedCEProcessor<>(TEST_QUERY, pattern);
    input.process(() -> processor, nfaStateStore.name(), eventBufferStore.name(), aggregateStateStores.name());
    final ConsumerRecordFactory<String, String> factory = new ConsumerRecordFactory<>(new StringSerializer(), new StringSerializer());
    try (TopologyTestDriver driver = new TopologyTestDriver(topologyBuilder.build(), this.props)) {
        for (final String topic : Arrays.asList(TOPIC_TEST_1, TOPIC_TEST_2)) {
            driver.pipeInput(factory.create(topic, TEST_RECORD_KEY, TEST_RECORD_VALUE));
        }
    }
    final List<CapturedProcessorContext.CapturedForward> capturedForward = processor.context.capturedForward;
    Assert.assertEquals(2, capturedForward.size());
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ConsumerRecordFactory(org.apache.kafka.streams.test.ConsumerRecordFactory) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)

Aggregations

ConsumerRecordFactory (org.apache.kafka.streams.test.ConsumerRecordFactory)6 Test (org.junit.Test)6 TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver)4 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)3 Sequence (com.github.fhuss.kafka.streams.cep.core.Sequence)2 JsonSequenceSerde (com.github.fhuss.kafka.streams.cep.serdes.JsonSequenceSerde)2 ArrayList (java.util.ArrayList)2 ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord)2 ByteArraySerializer (org.apache.kafka.common.serialization.ByteArraySerializer)2 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)2 Topology (org.apache.kafka.streams.Topology)2 Map (java.util.Map)1 LongSerializer (org.apache.kafka.common.serialization.LongSerializer)1 Serializer (org.apache.kafka.common.serialization.Serializer)1