Use of org.apache.flink.test.streaming.runtime.util.TestListResultSink in the Apache Flink project: class SideOutputITCase, method testProcessFunctionSideOutputWithWrongTag.
/**
 * Test ProcessFunction side outputs with wrong {@code OutputTag}.
 */
@Test
public void testProcessFunctionSideOutputWithWrongTag() throws Exception {
    final OutputTag<String> sideOutputTag1 = new OutputTag<String>("side") {};
    final OutputTag<String> sideOutputTag2 = new OutputTag<String>("other-side") {};

    TestListResultSink<String> sideOutputResultSink = new TestListResultSink<>();

    StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
    see.setParallelism(3);

    DataStream<Integer> dataStream = see.fromCollection(elements);

    dataStream.process(new ProcessFunction<Integer, Integer>() {
        private static final long serialVersionUID = 1L;

        @Override
        public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
            out.collect(value);
            ctx.output(sideOutputTag2, "sideout-" + String.valueOf(value));
        }
    }).getSideOutput(sideOutputTag1).addSink(sideOutputResultSink);

    see.execute();

    assertEquals(Arrays.asList(), sideOutputResultSink.getSortedResult());
}
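The `elements` collection passed to `fromCollection` is a shared fixture of `SideOutputITCase` that this excerpt does not show; judging from the keyed side-output test further down, which expects the values 1 through 5, it presumably looks roughly like the hypothetical reconstruction below. Note also that each `OutputTag` is created as an anonymous subclass (the trailing `{}`) so that Flink can extract the element type, and that the empty result asserted above follows directly from the tag handling: the function only emits to `sideOutputTag2`, so requesting the side output for `sideOutputTag1` yields no elements.

// Hypothetical reconstruction of the shared test fixture; the actual field in
// SideOutputITCase is not part of this excerpt.
private static final List<Integer> elements = Arrays.asList(1, 2, 3, 4, 5);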
Use of org.apache.flink.test.streaming.runtime.util.TestListResultSink in the Apache Flink project: class CoStreamITCase, method test.
@Test
public void test() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    TestListResultSink<String> resultSink = new TestListResultSink<String>();

    DataStream<Integer> src = env.fromElements(1, 3, 5);

    DataStream<Integer> filter1 = src.filter(new FilterFunction<Integer>() {
        @Override
        public boolean filter(Integer value) throws Exception {
            return true;
        }
    }).keyBy(new KeySelector<Integer, Integer>() {
        @Override
        public Integer getKey(Integer value) throws Exception {
            return value;
        }
    });

    DataStream<Tuple2<Integer, Integer>> filter2 = src.map(new MapFunction<Integer, Tuple2<Integer, Integer>>() {
        @Override
        public Tuple2<Integer, Integer> map(Integer value) throws Exception {
            return new Tuple2<>(value, value + 1);
        }
    }).rebalance().filter(new FilterFunction<Tuple2<Integer, Integer>>() {
        @Override
        public boolean filter(Tuple2<Integer, Integer> value) throws Exception {
            return true;
        }
    }).disableChaining().keyBy(new KeySelector<Tuple2<Integer, Integer>, Integer>() {
        @Override
        public Integer getKey(Tuple2<Integer, Integer> value) throws Exception {
            return value.f0;
        }
    });

    DataStream<String> connected = filter1.connect(filter2).flatMap(new CoFlatMapFunction<Integer, Tuple2<Integer, Integer>, String>() {
        @Override
        public void flatMap1(Integer value, Collector<String> out) throws Exception {
            out.collect(value.toString());
        }

        @Override
        public void flatMap2(Tuple2<Integer, Integer> value, Collector<String> out) throws Exception {
            out.collect(value.toString());
        }
    });

    connected.addSink(resultSink);
    env.execute();

    List<String> expected = Arrays.asList("(1,2)", "(3,4)", "(5,6)", "1", "3", "5");
    List<String> result = resultSink.getResult();
    Collections.sort(result);
    assertEquals(expected, result);
}
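The pattern exercised here is `connect` followed by a `CoFlatMapFunction`, which gives a single operator two independently typed inputs; the test sorts the output before asserting because the interleaving of the two inputs is not deterministic. A stripped-down sketch of the same idea using a `CoMapFunction` (exactly one output element per input element), omitting the keying and chaining details of the test above; stream contents and variable names are illustrative:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

DataStream<Integer> numbers = env.fromElements(1, 3, 5);
DataStream<String> words = env.fromElements("a", "b", "c");

// connect() pairs the two streams; map1/map2 each handle one of the two inputs.
DataStream<String> merged = numbers.connect(words).map(new CoMapFunction<Integer, String, String>() {
    @Override
    public String map1(Integer value) {
        return "int:" + value;
    }

    @Override
    public String map2(String value) {
        return "str:" + value;
    }
});

merged.print();
env.execute();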
Use of org.apache.flink.test.streaming.runtime.util.TestListResultSink in the Apache Flink project: class OutputSplitterITCase, method testOnMergedDataStream.
@SuppressWarnings("unchecked")
@Test
public void testOnMergedDataStream() throws Exception {
    TestListResultSink<Integer> splitterResultSink1 = new TestListResultSink<Integer>();
    TestListResultSink<Integer> splitterResultSink2 = new TestListResultSink<Integer>();

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);
    env.setBufferTimeout(1);

    DataStream<Integer> d1 = env.fromElements(0, 2, 4, 6, 8);
    DataStream<Integer> d2 = env.fromElements(1, 3, 5, 7, 9);

    d1 = d1.union(d2);

    d1.split(new OutputSelector<Integer>() {
        private static final long serialVersionUID = 8354166915727490130L;

        @Override
        public Iterable<String> select(Integer value) {
            List<String> s = new ArrayList<String>();
            if (value > 4) {
                s.add(">");
            } else {
                s.add("<");
            }
            return s;
        }
    }).select(">").addSink(splitterResultSink1);

    d1.split(new OutputSelector<Integer>() {
        private static final long serialVersionUID = -6822487543355994807L;

        @Override
        public Iterable<String> select(Integer value) {
            List<String> s = new ArrayList<String>();
            if (value % 3 == 0) {
                s.add("yes");
            } else {
                s.add("no");
            }
            return s;
        }
    }).select("yes").addSink(splitterResultSink2);

    env.execute();

    expectedSplitterResult.clear();
    expectedSplitterResult.addAll(Arrays.asList(5, 6, 7, 8, 9));
    assertEquals(expectedSplitterResult, splitterResultSink1.getSortedResult());

    expectedSplitterResult.clear();
    expectedSplitterResult.addAll(Arrays.asList(0, 3, 6, 9));
    assertEquals(expectedSplitterResult, splitterResultSink2.getSortedResult());
}
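`DataStream#split`/`select`, used in both OutputSplitterITCase tests, was deprecated in later Flink releases and has since been removed in favor of side outputs, the mechanism shown in the SideOutputITCase examples. A rough sketch of how the `>`/`<` routing on the merged stream could be rewritten with a `ProcessFunction` and `OutputTag`s; tag names and variable names are illustrative:

final OutputTag<Integer> greaterTag = new OutputTag<Integer>("greater-than-4") {};
final OutputTag<Integer> restTag = new OutputTag<Integer>("rest") {};

SingleOutputStreamOperator<Integer> routed = d1.process(new ProcessFunction<Integer, Integer>() {
    @Override
    public void processElement(Integer value, Context ctx, Collector<Integer> out) {
        // Route each element to exactly one side output instead of split()/select().
        if (value > 4) {
            ctx.output(greaterTag, value);
        } else {
            ctx.output(restTag, value);
        }
    }
});

routed.getSideOutput(greaterTag).addSink(splitterResultSink1);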
Use of org.apache.flink.test.streaming.runtime.util.TestListResultSink in the Apache Flink project: class OutputSplitterITCase, method testOnSingleDataStream.
@Test
public void testOnSingleDataStream() throws Exception {
    TestListResultSink<Integer> splitterResultSink1 = new TestListResultSink<Integer>();
    TestListResultSink<Integer> splitterResultSink2 = new TestListResultSink<Integer>();

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);
    env.setBufferTimeout(1);

    DataStream<Integer> ds = env.fromElements(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

    ds.split(new OutputSelector<Integer>() {
        private static final long serialVersionUID = 2524335410904414121L;

        @Override
        public Iterable<String> select(Integer value) {
            List<String> s = new ArrayList<String>();
            if (value % 2 == 0) {
                s.add("even");
            } else {
                s.add("odd");
            }
            return s;
        }
    }).select("even").addSink(splitterResultSink1);

    ds.split(new OutputSelector<Integer>() {
        private static final long serialVersionUID = -511693919586034092L;

        @Override
        public Iterable<String> select(Integer value) {
            List<String> s = new ArrayList<String>();
            if (value % 4 == 0) {
                s.add("yes");
            } else {
                s.add("no");
            }
            return s;
        }
    }).select("yes").addSink(splitterResultSink2);

    env.execute();

    expectedSplitterResult.clear();
    expectedSplitterResult.addAll(Arrays.asList(0, 2, 4, 6, 8));
    assertEquals(expectedSplitterResult, splitterResultSink1.getSortedResult());

    expectedSplitterResult.clear();
    expectedSplitterResult.addAll(Arrays.asList(0, 4, 8));
    assertEquals(expectedSplitterResult, splitterResultSink2.getSortedResult());
}
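When every consumer of a split selects exactly one name, as both branches do here, the split can also be replaced by one `filter` per branch; each predicate is then evaluated once per branch rather than once per element, which is negligible at test scale. A sketch of the even / multiple-of-four routing above expressed that way:

// Equivalent to select("even") in the test above.
ds.filter(value -> value % 2 == 0).addSink(splitterResultSink1);

// Equivalent to select("yes"), i.e. multiples of four.
ds.filter(value -> value % 4 == 0).addSink(splitterResultSink2);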
Use of org.apache.flink.test.streaming.runtime.util.TestListResultSink in the Apache Flink project: class SideOutputITCase, method testKeyedProcessFunctionSideOutput.
/**
 * Test keyed ProcessFunction side output.
 */
@Test
public void testKeyedProcessFunctionSideOutput() throws Exception {
    final OutputTag<String> sideOutputTag = new OutputTag<String>("side") {};

    TestListResultSink<String> sideOutputResultSink = new TestListResultSink<>();
    TestListResultSink<Integer> resultSink = new TestListResultSink<>();

    StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
    see.setParallelism(3);

    DataStream<Integer> dataStream = see.fromCollection(elements);

    SingleOutputStreamOperator<Integer> passThroughtStream = dataStream.keyBy(new KeySelector<Integer, Integer>() {
        private static final long serialVersionUID = 1L;

        @Override
        public Integer getKey(Integer value) throws Exception {
            return value;
        }
    }).process(new ProcessFunction<Integer, Integer>() {
        private static final long serialVersionUID = 1L;

        @Override
        public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
            out.collect(value);
            ctx.output(sideOutputTag, "sideout-" + String.valueOf(value));
        }
    });

    passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink);
    passThroughtStream.addSink(resultSink);

    see.execute();

    assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink.getSortedResult());
    assertEquals(Arrays.asList(1, 2, 3, 4, 5), resultSink.getSortedResult());
}
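TestListResultSink itself lives in the Flink test utilities package (org.apache.flink.test.streaming.runtime.util) and, as the calls above suggest, behaves like a sink that gathers every element it receives so the test can read the collected values back via getResult() or getSortedResult(). For projects that cannot depend on the Flink test jar, a simplified stand-in might look like the following; this is a hypothetical sketch under that usage assumption, not the actual Flink implementation:

import org.apache.flink.streaming.api.functions.sink.SinkFunction;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical, simplified stand-in for TestListResultSink; NOT the actual Flink class.
public class CollectingSink<T extends Comparable<T>> implements SinkFunction<T> {

    private static final long serialVersionUID = 1L;

    // Shared registry so that all parallel subtasks of one sink instance write into the same
    // list, while different sink instances keep separate lists. This only works when the job
    // runs in the same JVM as the test, e.g. on a MiniCluster.
    private static final Map<Integer, List<Object>> LISTS = new ConcurrentHashMap<>();
    private static final AtomicInteger NEXT_ID = new AtomicInteger();

    private final int id = NEXT_ID.getAndIncrement();

    private List<Object> list() {
        return LISTS.computeIfAbsent(id, k -> Collections.synchronizedList(new ArrayList<>()));
    }

    @Override
    public void invoke(T value, Context context) {
        list().add(value);
    }

    @SuppressWarnings("unchecked")
    public List<T> getResult() {
        List<Object> list = list();
        synchronized (list) {
            return new ArrayList<>((List<T>) (List<?>) list);
        }
    }

    public List<T> getSortedResult() {
        List<T> sorted = getResult();
        Collections.sort(sorted);
        return sorted;
    }
}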