Use of org.apache.flink.streaming.api.operators.StreamSink in project flink by apache.
Class DataStreamSink, method forSinkFunction:
static <T> DataStreamSink<T> forSinkFunction(
        DataStream<T> inputStream, SinkFunction<T> sinkFunction) {
    StreamSink<T> sinkOperator = new StreamSink<>(sinkFunction);
    final StreamExecutionEnvironment executionEnvironment = inputStream.getExecutionEnvironment();
    PhysicalTransformation<T> transformation =
            new LegacySinkTransformation<>(
                    inputStream.getTransformation(),
                    "Unnamed",
                    sinkOperator,
                    executionEnvironment.getParallelism());
    executionEnvironment.addOperator(transformation);
    return new DataStreamSink<>(transformation);
}
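For context, this static factory is what user code typically reaches by calling DataStream#addSink: the sink function is wrapped in a StreamSink operator and registered as a sink transformation. A minimal, illustrative sketch of that user-facing call path follows; it assumes a recent Flink version and uses PrintSinkFunction purely as an example sink (none of the names below come from the snippet above).

// Minimal sketch (not from the Flink sources above): attaching a SinkFunction,
// which delegates to DataStreamSink.forSinkFunction and thus to StreamSink.
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;

public class SinkUsageSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.fromElements("a", "b", "c")
                // addSink wraps the function in a StreamSink operator and
                // registers the resulting sink transformation with the environment
                .addSink(new PrintSinkFunction<>());

        env.execute("sink-usage-sketch");
    }
}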
Use of org.apache.flink.streaming.api.operators.StreamSink in project flink by apache.
Class KafkaProducerTest, method testPropagateExceptions:
@Test
@SuppressWarnings("unchecked")
public void testPropagateExceptions() {
    try {
        // mock kafka producer
        KafkaProducer<?, ?> kafkaProducerMock = mock(KafkaProducer.class);

        // partition setup; return an unmodifiable list to mimic KafkaProducer#partitionsFor() behaviour
        when(kafkaProducerMock.partitionsFor(anyString())).thenReturn(
                Collections.singletonList(new PartitionInfo("mock_topic", 42, null, null, null)));

        // failure when trying to send an element
        when(kafkaProducerMock.send(any(ProducerRecord.class), any(Callback.class)))
                .thenAnswer(new Answer<Future<RecordMetadata>>() {
                    @Override
                    public Future<RecordMetadata> answer(InvocationOnMock invocation) throws Throwable {
                        Callback callback = (Callback) invocation.getArguments()[1];
                        callback.onCompletion(null, new Exception("Test error"));
                        return null;
                    }
                });

        // make sure the FlinkKafkaProducer instantiates our mock producer
        whenNew(KafkaProducer.class).withAnyArguments().thenReturn(kafkaProducerMock);

        // (1) producer that propagates errors
        FlinkKafkaProducer09<String> producerPropagating = new FlinkKafkaProducer09<>(
                "mock_topic", new SimpleStringSchema(), FakeStandardProducerConfig.get(), null);

        OneInputStreamOperatorTestHarness<String, Object> testHarness =
                new OneInputStreamOperatorTestHarness<>(new StreamSink(producerPropagating));
        testHarness.open();

        try {
            testHarness.processElement(new StreamRecord<>("value"));
            testHarness.processElement(new StreamRecord<>("value"));
            fail("This should fail with an exception");
        } catch (Exception e) {
            assertNotNull(e.getCause());
            assertNotNull(e.getCause().getMessage());
            assertTrue(e.getCause().getMessage().contains("Test error"));
        }

        // (2) producer that only logs errors
        FlinkKafkaProducer09<String> producerLogging = new FlinkKafkaProducer09<>(
                "mock_topic", new SimpleStringSchema(), FakeStandardProducerConfig.get(), null);
        producerLogging.setLogFailuresOnly(true);

        testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink(producerLogging));
        testHarness.open();
        testHarness.processElement(new StreamRecord<>("value"));
        testHarness.processElement(new StreamRecord<>("value"));
        testHarness.close();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
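The same pattern, wrapping a SinkFunction in StreamSink and driving it through OneInputStreamOperatorTestHarness, works for testing any sink, not just the Kafka producer. A minimal sketch follows, assuming the flink-streaming-java test utilities are on the classpath; CollectingSink is a hypothetical helper introduced only for illustration.

// Minimal sketch (not part of the test above): exercising an arbitrary SinkFunction
// through StreamSink and the one-input operator test harness.
import java.util.ArrayList;
import java.util.List;

import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.operators.StreamSink;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;

public class SinkHarnessSketch {

    /** Hypothetical sink that simply collects elements in memory. */
    static class CollectingSink implements SinkFunction<String> {
        static final List<String> ELEMENTS = new ArrayList<>();

        @Override
        public void invoke(String value) {
            ELEMENTS.add(value);
        }
    }

    public static void main(String[] args) throws Exception {
        // wrap the sink function in a StreamSink operator, as the Kafka test above does
        OneInputStreamOperatorTestHarness<String, Object> harness =
                new OneInputStreamOperatorTestHarness<>(new StreamSink<>(new CollectingSink()));

        harness.open();
        harness.processElement(new StreamRecord<>("value"));
        harness.close();

        System.out.println(CollectingSink.ELEMENTS); // expected: [value]
    }
}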