Use of org.apache.flink.streaming.api.environment.StreamExecutionEnvironment in project flink by apache.
From the class SideOutputITCase, method testProcessFunctionSideOutputWithWrongTag. The test reads a side output with an OutputTag that no operator ever writes to and expects the result to be empty:
/**
 * Test ProcessFunction side outputs with wrong {@code OutputTag}.
 */
@Test
public void testProcessFunctionSideOutputWithWrongTag() throws Exception {
    final OutputTag<String> sideOutputTag1 = new OutputTag<String>("side") {};
    final OutputTag<String> sideOutputTag2 = new OutputTag<String>("other-side") {};

    TestListResultSink<String> sideOutputResultSink = new TestListResultSink<>();

    StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
    see.setParallelism(3);

    DataStream<Integer> dataStream = see.fromCollection(elements);

    dataStream
        .process(new ProcessFunction<Integer, Integer>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
                out.collect(value);
                // Side-output records are emitted under sideOutputTag2 only.
                ctx.output(sideOutputTag2, "sideout-" + String.valueOf(value));
            }
        })
        // Query the other tag, which nothing writes to.
        .getSideOutput(sideOutputTag1)
        .addSink(sideOutputResultSink);

    see.execute();

    // No records were emitted under sideOutputTag1, so the sink must stay empty.
    assertEquals(Arrays.asList(), sideOutputResultSink.getSortedResult());
}
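For contrast, here is a minimal sketch of the matching-tag case, where the same OutputTag instance is passed to both ctx.output and getSideOutput. All names and values below are illustrative and not taken from the test:

// Sketch: side output that is actually reachable because the tags match.
final OutputTag<String> oddTag = new OutputTag<String>("odd") {};

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

SingleOutputStreamOperator<Integer> evens = env
    .fromElements(1, 2, 3, 4, 5)
    .process(new ProcessFunction<Integer, Integer>() {
        @Override
        public void processElement(Integer value, Context ctx, Collector<Integer> out) {
            if (value % 2 == 0) {
                out.collect(value);                  // even values stay on the main output
            } else {
                ctx.output(oddTag, "odd-" + value);  // odd values go to the side output
            }
        }
    });

// Reachable only because the same tag instance is used for writing and reading.
evens.getSideOutput(oddTag).print();
evens.print();

env.execute("side-output sketch");

The anonymous-subclass syntax (new OutputTag<String>("odd") {}) is what lets Flink capture the side output's element type despite erasure.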
Use of org.apache.flink.streaming.api.environment.StreamExecutionEnvironment in project flink by apache.
From the class StreamTaskTimerITCase, method testTwoInputOperatorWithoutChaining. The test runs a two-input pipeline with a custom, non-chained timer operator and treats a nested RuntimeException("TEST SUCCESS") as the success signal:
@Test
public void testTwoInputOperatorWithoutChaining() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());
    source.connect(source).transform(
        "Custom Operator",
        BasicTypeInfo.STRING_TYPE_INFO,
        new TwoInputTimerOperator(ChainingStrategy.NEVER));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        // The operator signals success by failing the job with a
        // RuntimeException("TEST SUCCESS") wrapped in a TimerException.
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
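The TwoInputTimerOperator used above is a low-level custom operator that is not shown here. At the DataStream API level, the same idea of registering a timer and reacting to it can be sketched roughly as follows; the socket source, the one-second delay, and all names are illustrative assumptions, not part of the test:

// Sketch: registering a processing-time timer on a keyed stream.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

env.socketTextStream("localhost", 9999)   // hypothetical input; any DataStream<String> works
    .keyBy(value -> value)                // timers require a keyed stream
    .process(new ProcessFunction<String, String>() {
        @Override
        public void processElement(String value, Context ctx, Collector<String> out) {
            // Ask for a callback one second from now; onTimer fires when it expires.
            ctx.timerService().registerProcessingTimeTimer(
                ctx.timerService().currentProcessingTime() + 1000L);
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) {
            out.collect("timer fired at " + timestamp);
        }
    })
    .print();

env.execute("timer sketch");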
Use of org.apache.flink.streaming.api.environment.StreamExecutionEnvironment in project flink by apache.
From the class TextOutputFormatITCase, method testProgram. A streaming WordCount whose result is written as text to resultPath:
@Override
protected void testProgram() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<String> text = env.fromElements(WordCountData.TEXT);

    DataStream<Tuple2<String, Integer>> counts = text
        .flatMap(new Tokenizer())
        .keyBy(0)
        .sum(1);

    counts.writeAsText(resultPath);
    env.execute("WriteAsTextTest");
}
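The Tokenizer referenced above is not part of this snippet. In Flink's WordCount examples it is usually a FlatMapFunction that splits each line into words and emits (word, 1) pairs; a sketch under that assumption:

// Sketch of a WordCount-style tokenizer (assumed shape of the Tokenizer referenced above).
public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {

    @Override
    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
        // Normalize and split each line into words, emitting (word, 1) per token.
        for (String token : value.toLowerCase().split("\\W+")) {
            if (token.length() > 0) {
                out.collect(new Tuple2<>(token, 1));
            }
        }
    }
}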
Use of org.apache.flink.streaming.api.environment.StreamExecutionEnvironment in project flink by apache.
From the class ChainedRuntimeContextITCase, method test. Verifies that a source and a map function chained into the same task still receive distinct RuntimeContext instances:
@Test
public void test() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    env.addSource(new TestSource())
        .map(new TestMap())
        .addSink(new DiscardingSink<Integer>());

    env.execute();

    assertNotEquals(srcContext, mapContext);
}
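TestSource and TestMap are not shown in this snippet; presumably each records the RuntimeContext it receives so that srcContext and mapContext can be compared. A sketch of how a map function might capture its context, with the class and field names being illustrative rather than the actual test helpers:

// Sketch: a RichMapFunction that records its RuntimeContext in open().
public static class ContextCapturingMap extends RichMapFunction<Integer, Integer> {

    static volatile RuntimeContext capturedContext;

    @Override
    public void open(Configuration parameters) {
        // Remember the context this chained operator instance was given.
        capturedContext = getRuntimeContext();
    }

    @Override
    public Integer map(Integer value) {
        return value;
    }
}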
Use of org.apache.flink.streaming.api.environment.StreamExecutionEnvironment in project flink by apache.
From the class CoGroupJoinITCase, method testSelfJoin. Joins a finite event-time stream with itself over tumbling 3 ms windows and checks the full cross product within each window and key:
@Test
public void testSelfJoin() throws Exception {
    testResults = new ArrayList<>();

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);

    DataStream<Tuple3<String, String, Integer>> source1 = env
        .addSource(new SourceFunction<Tuple3<String, String, Integer>>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void run(SourceContext<Tuple3<String, String, Integer>> ctx) throws Exception {
                ctx.collect(Tuple3.of("a", "x", 0));
                ctx.collect(Tuple3.of("a", "y", 1));
                ctx.collect(Tuple3.of("a", "z", 2));

                ctx.collect(Tuple3.of("b", "u", 3));
                ctx.collect(Tuple3.of("b", "w", 5));

                ctx.collect(Tuple3.of("a", "i", 6));
                ctx.collect(Tuple3.of("a", "j", 7));
                ctx.collect(Tuple3.of("a", "k", 8));

                // source is finite, so it will have an implicit MAX watermark when it finishes
            }

            @Override
            public void cancel() {
            }
        })
        .assignTimestampsAndWatermarks(new Tuple3TimestampExtractor());

    source1.join(source1)
        .where(new Tuple3KeyExtractor())
        .equalTo(new Tuple3KeyExtractor())
        .window(TumblingEventTimeWindows.of(Time.of(3, TimeUnit.MILLISECONDS)))
        .apply(new JoinFunction<Tuple3<String, String, Integer>, Tuple3<String, String, Integer>, String>() {
            @Override
            public String join(Tuple3<String, String, Integer> first, Tuple3<String, String, Integer> second) throws Exception {
                return first + ":" + second;
            }
        })
        .addSink(new SinkFunction<String>() {
            @Override
            public void invoke(String value) throws Exception {
                testResults.add(value);
            }
        });

    env.execute("Self-Join Test");

    List<String> expectedResult = Arrays.asList(
        "(a,x,0):(a,x,0)", "(a,x,0):(a,y,1)", "(a,x,0):(a,z,2)",
        "(a,y,1):(a,x,0)", "(a,y,1):(a,y,1)", "(a,y,1):(a,z,2)",
        "(a,z,2):(a,x,0)", "(a,z,2):(a,y,1)", "(a,z,2):(a,z,2)",
        "(b,u,3):(b,u,3)", "(b,u,3):(b,w,5)",
        "(b,w,5):(b,u,3)", "(b,w,5):(b,w,5)",
        "(a,i,6):(a,i,6)", "(a,i,6):(a,j,7)", "(a,i,6):(a,k,8)",
        "(a,j,7):(a,i,6)", "(a,j,7):(a,j,7)", "(a,j,7):(a,k,8)",
        "(a,k,8):(a,i,6)", "(a,k,8):(a,j,7)", "(a,k,8):(a,k,8)");

    Collections.sort(expectedResult);
    Collections.sort(testResults);

    Assert.assertEquals(expectedResult, testResults);
}
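Tuple3KeyExtractor and Tuple3TimestampExtractor are not included in this snippet. Assuming the join keys on the first tuple field and the Integer field carries the (ascending) event timestamp, they could be sketched as:

// Assumed shape of the key extractor: key on the first tuple field.
private static class Tuple3KeyExtractor implements KeySelector<Tuple3<String, String, Integer>, String> {
    @Override
    public String getKey(Tuple3<String, String, Integer> value) {
        return value.f0;
    }
}

// Assumed shape of the timestamp extractor: use the Integer field as the event-time timestamp.
private static class Tuple3TimestampExtractor extends AscendingTimestampExtractor<Tuple3<String, String, Integer>> {
    @Override
    public long extractAscendingTimestamp(Tuple3<String, String, Integer> element) {
        return element.f2;
    }
}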