Search in sources :

Example 36 with OneInputStreamOperatorTestHarness

use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in project flink by apache.

In class StreamMapTest, method testMap:

// Verifies that StreamMap applies the user MapFunction to each element while
// preserving element timestamps and forwarding watermarks unchanged.
// (Here the test Map function turns an Integer n into the string "+" + (n + 1).)
@Test
public void testMap() throws Exception {
    StreamMap<Integer, String> operator = new StreamMap<>(new Map());
    OneInputStreamOperatorTestHarness<Integer, String> testHarness = new OneInputStreamOperatorTestHarness<>(operator);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // Two elements, then a watermark, then a third element: the watermark must be
    // emitted in order between the mapped records, not reordered or dropped.
    testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
    testHarness.processElement(new StreamRecord<>(2, initialTime + 2));
    testHarness.processWatermark(new Watermark(initialTime + 2));
    testHarness.processElement(new StreamRecord<>(3, initialTime + 3));
    expectedOutput.add(new StreamRecord<>("+2", initialTime + 1));
    expectedOutput.add(new StreamRecord<>("+3", initialTime + 2));
    expectedOutput.add(new Watermark(initialTime + 2));
    expectedOutput.add(new StreamRecord<>("+4", initialTime + 3));
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    // Release operator/harness resources; the original leaked the open harness
    // (the sibling ProcessOperatorTest example closes its harness).
    testHarness.close();
}
Also used : OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)

Example 37 with OneInputStreamOperatorTestHarness

use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in project flink by apache.

In class ProcessOperatorTest, method testTimestampAndWatermarkQuerying:

// Checks that a ProcessFunction can query both the current event-time watermark and
// the element timestamp: each input n is emitted as "<n>TIME:<watermark> TS:<timestamp>".
@Test
public void testTimestampAndWatermarkQuerying() throws Exception {
    ProcessOperator<Integer, String> operator = new ProcessOperator<>(new QueryingProcessFunction(TimeDomain.EVENT_TIME));
    OneInputStreamOperatorTestHarness<Integer, String> harness = new OneInputStreamOperatorTestHarness<>(operator);
    harness.setup();
    harness.open();
    // Expected output: each watermark is forwarded, and each record reflects the
    // watermark that was current when it was processed.
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    expected.add(new Watermark(17L));
    expected.add(new StreamRecord<>("5TIME:17 TS:12", 12L));
    expected.add(new Watermark(42L));
    expected.add(new StreamRecord<>("6TIME:42 TS:13", 13L));
    // Interleave watermarks and elements so each element sees a different watermark.
    harness.processWatermark(new Watermark(17));
    harness.processElement(new StreamRecord<>(5, 12L));
    harness.processWatermark(new Watermark(42));
    harness.processElement(new StreamRecord<>(6, 13L));
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expected, harness.getOutput());
    harness.close();
}
Also used : OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)

Example 38 with OneInputStreamOperatorTestHarness

use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in project flink by apache.

In class KafkaProducerTest, method testPropagateExceptions:

// Verifies FlinkKafkaProducer09's two async-failure modes: by default a send failure
// is propagated to the caller on the next processElement(); with
// setLogFailuresOnly(true) the same failure is only logged and processing continues.
// NOTE(review): relies on PowerMock's whenNew(), so the test class presumably carries
// @PrepareForTest for the producer class — confirm in the enclosing file.
@Test
@SuppressWarnings("unchecked")
public void testPropagateExceptions() {
    try {
        // mock kafka producer
        KafkaProducer<?, ?> kafkaProducerMock = mock(KafkaProducer.class);
        // partition setup
        when(kafkaProducerMock.partitionsFor(anyString())).thenReturn(// returning an unmodifiable list to mimic KafkaProducer#partitionsFor() behaviour
        Collections.singletonList(new PartitionInfo("mock_topic", 42, null, null, null)));
        // failure when trying to send an element: invoke the send() callback
        // immediately with an error, simulating an asynchronous broker failure
        when(kafkaProducerMock.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(new Answer<Future<RecordMetadata>>() {

            @Override
            public Future<RecordMetadata> answer(InvocationOnMock invocation) throws Throwable {
                Callback callback = (Callback) invocation.getArguments()[1];
                callback.onCompletion(null, new Exception("Test error"));
                return null;
            }
        });
        // make sure the FlinkKafkaProducer instantiates our mock producer
        whenNew(KafkaProducer.class).withAnyArguments().thenReturn(kafkaProducerMock);
        // (1) producer that propagates errors
        FlinkKafkaProducer09<String> producerPropagating = new FlinkKafkaProducer09<>("mock_topic", new SimpleStringSchema(), FakeStandardProducerConfig.get(), null);
        OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink(producerPropagating));
        testHarness.open();
        try {
            // first element registers the async failure; the second element's
            // processElement() is expected to surface it as an exception
            testHarness.processElement(new StreamRecord<>("value"));
            testHarness.processElement(new StreamRecord<>("value"));
            fail("This should fail with an exception");
        } catch (Exception e) {
            // the original "Test error" must be preserved as the cause
            assertNotNull(e.getCause());
            assertNotNull(e.getCause().getMessage());
            assertTrue(e.getCause().getMessage().contains("Test error"));
        }
        // (2) producer that only logs errors
        FlinkKafkaProducer09<String> producerLogging = new FlinkKafkaProducer09<>("mock_topic", new SimpleStringSchema(), FakeStandardProducerConfig.get(), null);
        producerLogging.setLogFailuresOnly(true);
        testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink(producerLogging));
        testHarness.open();
        // with log-failures-only, the same failing sends must NOT throw
        testHarness.processElement(new StreamRecord<>("value"));
        testHarness.processElement(new StreamRecord<>("value"));
        testHarness.close();
    } catch (Exception e) {
        // any unexpected exception fails the test with its message
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used : StreamSink(org.apache.flink.streaming.api.operators.StreamSink) Mockito.anyString(org.mockito.Mockito.anyString) OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness) Callback(org.apache.kafka.clients.producer.Callback) InvocationOnMock(org.mockito.invocation.InvocationOnMock) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Future(java.util.concurrent.Future) SimpleStringSchema(org.apache.flink.streaming.util.serialization.SimpleStringSchema) PartitionInfo(org.apache.kafka.common.PartitionInfo) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)

Aggregations

OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness)38 Test (org.junit.Test)36 Watermark (org.apache.flink.streaming.api.watermark.Watermark)10 ArrayList (java.util.ArrayList)9 ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue)8 ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)7 ActionRequest (org.elasticsearch.action.ActionRequest)7 StreamStateHandle (org.apache.flink.runtime.state.StreamStateHandle)6 NoOpFailureHandler (org.apache.flink.streaming.connectors.elasticsearch.util.NoOpFailureHandler)6 StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord)6 OperatorStateHandles (org.apache.flink.streaming.runtime.tasks.OperatorStateHandles)6 KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness)6 CheckedThread (org.apache.flink.core.testutils.CheckedThread)5 ContinuousFileReaderOperator (org.apache.flink.streaming.api.functions.source.ContinuousFileReaderOperator)5 TimestampedFileInputSplit (org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit)5 PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest)5 Tuple2 (org.apache.flink.api.java.tuple.Tuple2)4 FileInputSplit (org.apache.flink.core.fs.FileInputSplit)4 Path (org.apache.flink.core.fs.Path)4 Callback (org.apache.kafka.clients.producer.Callback)3