Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in project flink by apache.
The class StreamMapTest, method testMap.
@Test
public void testMap() throws Exception {
    StreamMap<Integer, String> operator = new StreamMap<Integer, String>(new Map());
    OneInputStreamOperatorTestHarness<Integer, String> testHarness =
            new OneInputStreamOperatorTestHarness<Integer, String>(operator);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

    testHarness.open();

    // push elements and a watermark through the map operator
    testHarness.processElement(new StreamRecord<Integer>(1, initialTime + 1));
    testHarness.processElement(new StreamRecord<Integer>(2, initialTime + 2));
    testHarness.processWatermark(new Watermark(initialTime + 2));
    testHarness.processElement(new StreamRecord<Integer>(3, initialTime + 3));

    // mapped records keep their timestamps; the watermark is forwarded unchanged
    expectedOutput.add(new StreamRecord<String>("+2", initialTime + 1));
    expectedOutput.add(new StreamRecord<String>("+3", initialTime + 2));
    expectedOutput.add(new Watermark(initialTime + 2));
    expectedOutput.add(new StreamRecord<String>("+4", initialTime + 3));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
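The Map function passed to StreamMap above is an inner class of the test and is not shown on this page. Below is a minimal sketch that is consistent with the expected records (input 1 yields "+2", so each value is incremented and prefixed with a plus sign); the exact original implementation may differ.

// sketch of the test's Map inner class; uses org.apache.flink.api.common.functions.MapFunction
private static class Map implements MapFunction<Integer, String> {

    private static final long serialVersionUID = 1L;

    @Override
    public String map(Integer value) throws Exception {
        // increment the value and prefix it with "+", matching "+2", "+3", "+4" above
        return "+" + (value + 1);
    }
}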
Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in project flink by apache.
The class ProcessOperatorTest, method testTimestampAndWatermarkQuerying.
@Test
public void testTimestampAndWatermarkQuerying() throws Exception {
    ProcessOperator<Integer, String> operator =
            new ProcessOperator<>(new QueryingProcessFunction(TimeDomain.EVENT_TIME));
    OneInputStreamOperatorTestHarness<Integer, String> testHarness =
            new OneInputStreamOperatorTestHarness<>(operator);

    testHarness.setup();
    testHarness.open();

    // alternate watermarks and elements so each element sees a different current watermark
    testHarness.processWatermark(new Watermark(17));
    testHarness.processElement(new StreamRecord<>(5, 12L));
    testHarness.processWatermark(new Watermark(42));
    testHarness.processElement(new StreamRecord<>(6, 13L));

    // each output record reports the current watermark and the element's timestamp
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new Watermark(17L));
    expectedOutput.add(new StreamRecord<>("5TIME:17 TS:12", 12L));
    expectedOutput.add(new Watermark(42L));
    expectedOutput.add(new StreamRecord<>("6TIME:42 TS:13", 13L));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.close();
}
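QueryingProcessFunction is likewise defined inside the test class and not shown here. The sketch below reproduces the output format value + "TIME:" + current watermark + " TS:" + timestamp asserted above; the constructor taking a TimeDomain and the processing-time branch are assumptions based on how the function is instantiated.

// sketch of the querying function; uses org.apache.flink.streaming.api.TimeDomain,
// org.apache.flink.streaming.api.functions.ProcessFunction and org.apache.flink.util.Collector
private static class QueryingProcessFunction extends ProcessFunction<Integer, String> {

    private static final long serialVersionUID = 1L;

    private final TimeDomain expectedTimeDomain;

    QueryingProcessFunction(TimeDomain timeDomain) {
        this.expectedTimeDomain = timeDomain;
    }

    @Override
    public void processElement(Integer value, Context ctx, Collector<String> out) throws Exception {
        // report the current watermark (event time) or processing time together with the
        // element timestamp, matching "5TIME:17 TS:12" and "6TIME:42 TS:13" above
        long time = expectedTimeDomain == TimeDomain.EVENT_TIME
                ? ctx.timerService().currentWatermark()
                : ctx.timerService().currentProcessingTime();
        out.collect(value + "TIME:" + time + " TS:" + ctx.timestamp());
    }
}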
Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in project flink by apache.
The class KafkaProducerTest, method testPropagateExceptions.
@Test
@SuppressWarnings("unchecked")
public void testPropagateExceptions() {
    try {
        // mock the Kafka producer
        KafkaProducer<?, ?> kafkaProducerMock = mock(KafkaProducer.class);

        // partition setup: return an unmodifiable list to mimic KafkaProducer#partitionsFor() behaviour
        when(kafkaProducerMock.partitionsFor(anyString())).thenReturn(
                Collections.singletonList(new PartitionInfo("mock_topic", 42, null, null, null)));

        // fail when trying to send an element
        when(kafkaProducerMock.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(new Answer<Future<RecordMetadata>>() {

            @Override
            public Future<RecordMetadata> answer(InvocationOnMock invocation) throws Throwable {
                Callback callback = (Callback) invocation.getArguments()[1];
                callback.onCompletion(null, new Exception("Test error"));
                return null;
            }
        });

        // make sure the FlinkKafkaProducer instantiates our mock producer
        whenNew(KafkaProducer.class).withAnyArguments().thenReturn(kafkaProducerMock);

        // (1) producer that propagates errors
        FlinkKafkaProducer09<String> producerPropagating = new FlinkKafkaProducer09<>(
                "mock_topic", new SimpleStringSchema(), FakeStandardProducerConfig.get(), null);

        OneInputStreamOperatorTestHarness<String, Object> testHarness =
                new OneInputStreamOperatorTestHarness<>(new StreamSink(producerPropagating));
        testHarness.open();

        try {
            testHarness.processElement(new StreamRecord<>("value"));
            testHarness.processElement(new StreamRecord<>("value"));
            fail("This should fail with an exception");
        } catch (Exception e) {
            assertNotNull(e.getCause());
            assertNotNull(e.getCause().getMessage());
            assertTrue(e.getCause().getMessage().contains("Test error"));
        }

        // (2) producer that only logs errors
        FlinkKafkaProducer09<String> producerLogging = new FlinkKafkaProducer09<>(
                "mock_topic", new SimpleStringSchema(), FakeStandardProducerConfig.get(), null);
        producerLogging.setLogFailuresOnly(true);

        testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink(producerLogging));
        testHarness.open();

        testHarness.processElement(new StreamRecord<>("value"));
        testHarness.processElement(new StreamRecord<>("value"));

        testHarness.close();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
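Note that whenNew(...) only intercepts the KafkaProducer constructor when the test runs under PowerMock and the class that actually calls new KafkaProducer(...) is prepared for instrumentation. The class-level setup below is a sketch of what this test presumably relies on; the prepared class is an assumption and may differ in the actual test.

import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

// run with PowerMock so that whenNew(KafkaProducer.class) takes effect;
// the prepared class (the one instantiating KafkaProducer) is assumed here
@RunWith(PowerMockRunner.class)
@PrepareForTest(FlinkKafkaProducerBase.class)
public class KafkaProducerTest {
    // testPropagateExceptions() as shown above
}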