Use of org.apache.flink.util.Collector in project flink by apache.
The class InternalWindowFunctionTest, method testInternalIterableWindowFunction.
@SuppressWarnings("unchecked")
@Test
public void testInternalIterableWindowFunction() throws Exception {
    WindowFunctionMock mock = mock(WindowFunctionMock.class);
    InternalIterableWindowFunction<Long, String, Long, TimeWindow> windowFunction =
            new InternalIterableWindowFunction<>(mock);

    // check setOutputType
    TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO;
    ExecutionConfig execConf = new ExecutionConfig();
    execConf.setParallelism(42);
    StreamingFunctionUtils.setOutputType(windowFunction, stringType, execConf);
    verify(mock).setOutputType(stringType, execConf);

    // check open
    Configuration config = new Configuration();
    windowFunction.open(config);
    verify(mock).open(config);

    // check setRuntimeContext
    RuntimeContext rCtx = mock(RuntimeContext.class);
    windowFunction.setRuntimeContext(rCtx);
    verify(mock).setRuntimeContext(rCtx);

    // check apply
    TimeWindow w = mock(TimeWindow.class);
    Iterable<Long> i = (Iterable<Long>) mock(Iterable.class);
    Collector<String> c = (Collector<String>) mock(Collector.class);
    InternalWindowFunction.InternalWindowContext ctx =
            mock(InternalWindowFunction.InternalWindowContext.class);
    windowFunction.process(42L, w, ctx, i, c);
    verify(mock).apply(eq(42L), eq(w), eq(i), eq(c));

    // check close
    windowFunction.close();
    verify(mock).close();
}
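The WindowFunctionMock class being mocked above is not part of this excerpt. The calls the test verifies (setOutputType, open, setRuntimeContext, close) imply a rich window function that also implements OutputTypeConfigurable; the following is a minimal sketch along those lines, not necessarily the exact definition in InternalWindowFunctionTest:

    // Sketch of the mock target; the method bodies are irrelevant because Mockito replaces them.
    public static class WindowFunctionMock
            extends RichWindowFunction<Long, String, Long, TimeWindow>
            implements OutputTypeConfigurable<String> {

        private static final long serialVersionUID = 1L;

        @Override
        public void setOutputType(TypeInformation<String> outTypeInfo, ExecutionConfig executionConfig) {}

        @Override
        public void apply(Long key, TimeWindow window, Iterable<Long> input, Collector<String> out)
                throws Exception {}
    }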
Use of org.apache.flink.util.Collector in project flink by apache.
The class InternalWindowFunctionTest, method testInternalAggregateProcessAllWindowFunction.
@SuppressWarnings("unchecked")
@Test
public void testInternalAggregateProcessAllWindowFunction() throws Exception {
    AggregateProcessAllWindowFunctionMock mock = mock(AggregateProcessAllWindowFunctionMock.class);
    InternalAggregateProcessAllWindowFunction<Long, Set<Long>, Map<Long, Long>, String, TimeWindow> windowFunction =
            new InternalAggregateProcessAllWindowFunction<>(new AggregateFunction<Long, Set<Long>, Map<Long, Long>>() {

                private static final long serialVersionUID = 1L;

                @Override
                public Set<Long> createAccumulator() {
                    return new HashSet<>();
                }

                @Override
                public Set<Long> add(Long value, Set<Long> accumulator) {
                    accumulator.add(value);
                    return accumulator;
                }

                @Override
                public Map<Long, Long> getResult(Set<Long> accumulator) {
                    Map<Long, Long> result = new HashMap<>();
                    for (Long in : accumulator) {
                        result.put(in, in);
                    }
                    return result;
                }

                @Override
                public Set<Long> merge(Set<Long> a, Set<Long> b) {
                    a.addAll(b);
                    return a;
                }
            }, mock);

    // check setOutputType
    TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO;
    ExecutionConfig execConf = new ExecutionConfig();
    execConf.setParallelism(42);
    StreamingFunctionUtils.setOutputType(windowFunction, stringType, execConf);
    verify(mock).setOutputType(stringType, execConf);

    // check open
    Configuration config = new Configuration();
    windowFunction.open(config);
    verify(mock).open(config);

    // check setRuntimeContext
    RuntimeContext rCtx = mock(RuntimeContext.class);
    windowFunction.setRuntimeContext(rCtx);
    verify(mock).setRuntimeContext(rCtx);

    // check apply
    TimeWindow w = mock(TimeWindow.class);
    Collector<String> c = (Collector<String>) mock(Collector.class);
    List<Long> args = new LinkedList<>();
    args.add(23L);
    args.add(24L);
    InternalWindowFunction.InternalWindowContext ctx =
            mock(InternalWindowFunction.InternalWindowContext.class);
    windowFunction.process(((byte) 0), w, ctx, args, c);
    verify(mock).process(
            (AggregateProcessAllWindowFunctionMock.Context) anyObject(),
            (Iterable) argThat(containsInAnyOrder(allOf(
                    hasEntry(is(23L), is(23L)),
                    hasEntry(is(24L), is(24L))))),
            eq(c));

    // check close
    windowFunction.close();
    verify(mock).close();
}
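Note what the final verification asserts: the wrapper runs 23L and 24L through the AggregateFunction's accumulator, so the wrapped ProcessAllWindowFunction receives a singleton Iterable holding getResult(accumulator), i.e. one Map with the entries 23→23 and 24→24 matched by hasEntry above. The AggregateProcessAllWindowFunctionMock class itself is not shown in this excerpt; a minimal sketch, assuming it is a ProcessAllWindowFunction over the aggregate's result type that also implements OutputTypeConfigurable (which the setOutputType verification implies):

    // Sketch of the mock target over the aggregate result type Map<Long, Long>.
    public static class AggregateProcessAllWindowFunctionMock
            extends ProcessAllWindowFunction<Map<Long, Long>, String, TimeWindow>
            implements OutputTypeConfigurable<String> {

        private static final long serialVersionUID = 1L;

        @Override
        public void setOutputType(TypeInformation<String> outTypeInfo, ExecutionConfig executionConfig) {}

        @Override
        public void process(Context context, Iterable<Map<Long, Long>> input, Collector<String> out)
                throws Exception {}
    }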
Use of org.apache.flink.util.Collector in project flink by apache.
The class InternalWindowFunctionTest, method testInternalSingleValueWindowFunction.
@SuppressWarnings("unchecked")
@Test
public void testInternalSingleValueWindowFunction() throws Exception {
    WindowFunctionMock mock = mock(WindowFunctionMock.class);
    InternalSingleValueWindowFunction<Long, String, Long, TimeWindow> windowFunction =
            new InternalSingleValueWindowFunction<>(mock);

    // check setOutputType
    TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO;
    ExecutionConfig execConf = new ExecutionConfig();
    execConf.setParallelism(42);
    StreamingFunctionUtils.setOutputType(windowFunction, stringType, execConf);
    verify(mock).setOutputType(stringType, execConf);

    // check open
    Configuration config = new Configuration();
    windowFunction.open(config);
    verify(mock).open(config);

    // check setRuntimeContext
    RuntimeContext rCtx = mock(RuntimeContext.class);
    windowFunction.setRuntimeContext(rCtx);
    verify(mock).setRuntimeContext(rCtx);

    // check apply
    TimeWindow w = mock(TimeWindow.class);
    Collector<String> c = (Collector<String>) mock(Collector.class);
    InternalWindowFunction.InternalWindowContext ctx =
            mock(InternalWindowFunction.InternalWindowContext.class);
    windowFunction.process(42L, w, ctx, 23L, c);
    verify(mock).apply(eq(42L), eq(w), (Iterable<Long>) argThat(IsIterableContainingInOrder.contains(23L)), eq(c));

    // check close
    windowFunction.close();
    verify(mock).close();
}
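The verification shows what InternalSingleValueWindowFunction exists for: it adapts the single-record process(...) call to the Iterable-based WindowFunction.apply(...), which is why the matcher expects an Iterable containing exactly 23L. Conceptually the forwarding looks like the sketch below, a simplification rather than Flink's exact code:

    // Sketch: wrap the single input value in a singleton Iterable before delegating.
    public void process(Long key, TimeWindow window, InternalWindowContext context,
            Long input, Collector<String> out) throws Exception {
        wrappedFunction.apply(key, window, Collections.singletonList(input), out);
    }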
Use of org.apache.flink.util.Collector in project flink by apache.
The class StreamingJobGraphGeneratorTest, method testResourcesForIteration.
/**
 * Verifies that the resources are merged correctly for chained operators (covers middle
 * chaining and iteration cases) when generating the job graph.
 */
@Test
public void testResourcesForIteration() throws Exception {
    ResourceSpec resource1 = ResourceSpec.newBuilder(0.1, 100).build();
    ResourceSpec resource2 = ResourceSpec.newBuilder(0.2, 200).build();
    ResourceSpec resource3 = ResourceSpec.newBuilder(0.3, 300).build();
    ResourceSpec resource4 = ResourceSpec.newBuilder(0.4, 400).build();
    ResourceSpec resource5 = ResourceSpec.newBuilder(0.5, 500).build();

    Method opMethod = getSetResourcesMethodAndSetAccessible(SingleOutputStreamOperator.class);
    Method sinkMethod = getSetResourcesMethodAndSetAccessible(DataStreamSink.class);

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<Integer> source = env.addSource(new ParallelSourceFunction<Integer>() {
        @Override
        public void run(SourceContext<Integer> ctx) throws Exception {
        }

        @Override
        public void cancel() {
        }
    }).name("test_source");
    opMethod.invoke(source, resource1);

    IterativeStream<Integer> iteration = source.iterate(3000);
    opMethod.invoke(iteration, resource2);

    DataStream<Integer> flatMap = iteration.flatMap(new FlatMapFunction<Integer, Integer>() {
        @Override
        public void flatMap(Integer value, Collector<Integer> out) throws Exception {
            out.collect(value);
        }
    }).name("test_flatMap");
    opMethod.invoke(flatMap, resource3);

    // CHAIN(flatMap -> Filter)
    DataStream<Integer> increment = flatMap.filter(new FilterFunction<Integer>() {
        @Override
        public boolean filter(Integer value) throws Exception {
            return false;
        }
    }).name("test_filter");
    opMethod.invoke(increment, resource4);

    DataStreamSink<Integer> sink = iteration.closeWith(increment).addSink(new SinkFunction<Integer>() {
        @Override
        public void invoke(Integer value) throws Exception {
        }
    }).disableChaining().name("test_sink");
    sinkMethod.invoke(sink, resource5);

    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
    for (JobVertex jobVertex : jobGraph.getVertices()) {
        if (jobVertex.getName().contains("test_source")) {
            assertTrue(jobVertex.getMinResources().equals(resource1));
        } else if (jobVertex.getName().contains("Iteration_Source")) {
            assertTrue(jobVertex.getPreferredResources().equals(resource2));
        } else if (jobVertex.getName().contains("test_flatMap")) {
            assertTrue(jobVertex.getMinResources().equals(resource3.merge(resource4)));
        } else if (jobVertex.getName().contains("Iteration_Tail")) {
            assertTrue(jobVertex.getPreferredResources().equals(ResourceSpec.DEFAULT));
        } else if (jobVertex.getName().contains("test_sink")) {
            assertTrue(jobVertex.getMinResources().equals(resource5));
        }
    }
}
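The getSetResourcesMethodAndSetAccessible helper is defined elsewhere in the test class. A sketch consistent with how it is invoked above, assuming setResources(ResourceSpec) is a non-public method on the given stream-API class (hence the reflection and the setAccessible call):

    // Hypothetical helper: look up the non-public setResources(ResourceSpec) method and open it up.
    private static Method getSetResourcesMethodAndSetAccessible(final Class<?> clazz)
            throws NoSuchMethodException {
        final Method setResourcesMethod = clazz.getDeclaredMethod("setResources", ResourceSpec.class);
        setResourcesMethod.setAccessible(true);
        return setResourcesMethod;
    }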
Use of org.apache.flink.util.Collector in project flink by apache.
The class DataStreamTest, method testFailedTranslationOnKeyed.
/**
* Tests that with a {@link KeyedStream} we have to provide a {@link
* KeyedBroadcastProcessFunction}.
*/
@Test
public void testFailedTranslationOnKeyed() {
    final MapStateDescriptor<Long, String> descriptor =
            new MapStateDescriptor<>("broadcast", BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    final DataStream<Long> srcOne = env.generateSequence(0L, 5L)
            .assignTimestampsAndWatermarks(new CustomWmEmitter<Long>() {
                @Override
                public long extractTimestamp(Long element, long previousElementTimestamp) {
                    return element;
                }
            })
            .keyBy((KeySelector<Long, Long>) value -> value);

    final DataStream<String> srcTwo = env.fromElements("Test:0", "Test:1", "Test:2", "Test:3", "Test:4", "Test:5")
            .assignTimestampsAndWatermarks(new CustomWmEmitter<String>() {
                @Override
                public long extractTimestamp(String element, long previousElementTimestamp) {
                    return Long.parseLong(element.split(":")[1]);
                }
            });

    BroadcastStream<String> broadcast = srcTwo.broadcast(descriptor);
    BroadcastConnectedStream<Long, String> bcStream = srcOne.connect(broadcast);

    expectedException.expect(IllegalArgumentException.class);
    bcStream.process(new BroadcastProcessFunction<Long, String, String>() {
        @Override
        public void processBroadcastElement(String value, Context ctx, Collector<String> out) throws Exception {
            // do nothing
        }

        @Override
        public void processElement(Long value, ReadOnlyContext ctx, Collector<String> out) throws Exception {
            // do nothing
        }
    });
}
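CustomWmEmitter is a test-local base class not shown in this excerpt. A minimal sketch, assuming it is a punctuated watermark assigner that emits a watermark for every extracted timestamp and leaves extractTimestamp to each anonymous subclass above:

    // Sketch of the assumed base class: punctuated watermarks derived from element timestamps.
    private abstract static class CustomWmEmitter<T> implements AssignerWithPunctuatedWatermarks<T> {

        @Nullable
        @Override
        public Watermark checkAndGetNextWatermark(T lastElement, long extractedTimestamp) {
            // Emit a watermark tracking each element's extracted timestamp.
            return new Watermark(extractedTimestamp);
        }
    }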