Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.
From the class InternalWindowFunctionTest, the method testInternalSingleValueAllWindowFunction:
@SuppressWarnings("unchecked")
@Test
public void testInternalSingleValueAllWindowFunction() throws Exception {
    AllWindowFunctionMock mock = mock(AllWindowFunctionMock.class);
    InternalSingleValueAllWindowFunction<Long, String, TimeWindow> windowFunction =
            new InternalSingleValueAllWindowFunction<>(mock);

    // check setOutputType
    TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO;
    ExecutionConfig execConf = new ExecutionConfig();
    execConf.setParallelism(42);
    StreamingFunctionUtils.setOutputType(windowFunction, stringType, execConf);
    verify(mock).setOutputType(stringType, execConf);

    // check open
    Configuration config = new Configuration();
    windowFunction.open(config);
    verify(mock).open(config);

    // check setRuntimeContext
    RuntimeContext rCtx = mock(RuntimeContext.class);
    windowFunction.setRuntimeContext(rCtx);
    verify(mock).setRuntimeContext(rCtx);

    // check apply
    TimeWindow w = mock(TimeWindow.class);
    Collector<String> c = (Collector<String>) mock(Collector.class);
    windowFunction.apply(((byte) 0), w, 23L, c);
    verify(mock).apply(eq(w), (Iterable<Long>) argThat(IsIterableContainingInOrder.contains(23L)), eq(c));

    // check close
    windowFunction.close();
    verify(mock).close();
}
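The assertions above check that the internal wrapper forwards each lifecycle call, including setOutputType with its ExecutionConfig argument, to the wrapped user function. A minimal sketch of that delegation, assuming the wrapper keeps the user function in a field named wrappedFunction and that the function implements OutputTypeConfigurable (both assumptions for illustration, not the actual Flink source):

    // Sketch only: the wrapper delegates setOutputType to the wrapped function.
    // "wrappedFunction" is an assumed field name used here for illustration.
    @Override
    public void setOutputType(TypeInformation<String> outTypeInfo, ExecutionConfig executionConfig) {
        wrappedFunction.setOutputType(outTypeInfo, executionConfig);
    }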
Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.
From the class InternalWindowFunctionTest, the method testInternalSingleValueProcessAllWindowFunction:
@SuppressWarnings("unchecked")
@Test
public void testInternalSingleValueProcessAllWindowFunction() throws Exception {
    ProcessAllWindowFunctionMock mock = mock(ProcessAllWindowFunctionMock.class);
    InternalSingleValueProcessAllWindowFunction<Long, String, TimeWindow> windowFunction =
            new InternalSingleValueProcessAllWindowFunction<>(mock);

    // check setOutputType
    TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO;
    ExecutionConfig execConf = new ExecutionConfig();
    execConf.setParallelism(42);
    StreamingFunctionUtils.setOutputType(windowFunction, stringType, execConf);
    verify(mock).setOutputType(stringType, execConf);

    // check open
    Configuration config = new Configuration();
    windowFunction.open(config);
    verify(mock).open(config);

    // check setRuntimeContext
    RuntimeContext rCtx = mock(RuntimeContext.class);
    windowFunction.setRuntimeContext(rCtx);
    verify(mock).setRuntimeContext(rCtx);

    // check apply
    TimeWindow w = mock(TimeWindow.class);
    Collector<String> c = (Collector<String>) mock(Collector.class);
    windowFunction.apply(((byte) 0), w, 23L, c);
    verify(mock).process(
            (ProcessAllWindowFunctionMock.Context) anyObject(),
            (Iterable<Long>) argThat(IsIterableContainingInOrder.contains(23L)),
            eq(c));

    // check close
    windowFunction.close();
    verify(mock).close();
}
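Both tests configure the ExecutionConfig only so that a distinct, recognizable instance is passed through and can be verified on the mock; setParallelism(42) has no further significance here. For reference, a few other commonly used ExecutionConfig setters (the values below are arbitrary examples, not part of the test):

    ExecutionConfig execConf = new ExecutionConfig();
    execConf.setParallelism(42);             // default parallelism for operators
    execConf.setAutoWatermarkInterval(200);  // periodic watermark interval in milliseconds
    execConf.enableObjectReuse();            // allow the runtime to reuse record objects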
Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.
From the class StreamSourceOperatorTest, the method setupSourceOperator:
@SuppressWarnings("unchecked")
private static <T> void setupSourceOperator(
        StreamSource<T, ?> operator,
        TimeCharacteristic timeChar,
        long watermarkInterval,
        long latencyMarkInterval,
        final ProcessingTimeService timeProvider) {

    ExecutionConfig executionConfig = new ExecutionConfig();
    executionConfig.setAutoWatermarkInterval(watermarkInterval);
    executionConfig.setLatencyTrackingInterval(latencyMarkInterval);

    StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setStateBackend(new MemoryStateBackend());
    cfg.setTimeCharacteristic(timeChar);

    Environment env = new DummyEnvironment("MockTwoInputTask", 1, 0);

    StreamStatusMaintainer streamStatusMaintainer = mock(StreamStatusMaintainer.class);
    when(streamStatusMaintainer.getStreamStatus()).thenReturn(StreamStatus.ACTIVE);

    StreamTask<?, ?> mockTask = mock(StreamTask.class);
    when(mockTask.getName()).thenReturn("Mock Task");
    when(mockTask.getCheckpointLock()).thenReturn(new Object());
    when(mockTask.getConfiguration()).thenReturn(cfg);
    when(mockTask.getEnvironment()).thenReturn(env);
    when(mockTask.getExecutionConfig()).thenReturn(executionConfig);
    when(mockTask.getAccumulatorMap()).thenReturn(Collections.<String, Accumulator<?, ?>>emptyMap());
    when(mockTask.getStreamStatusMaintainer()).thenReturn(streamStatusMaintainer);

    doAnswer(new Answer<ProcessingTimeService>() {
        @Override
        public ProcessingTimeService answer(InvocationOnMock invocation) throws Throwable {
            if (timeProvider == null) {
                throw new RuntimeException("The time provider is null.");
            }
            return timeProvider;
        }
    }).when(mockTask).getProcessingTimeService();

    operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
}
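The helper wires the ExecutionConfig (watermark and latency-tracking intervals) into a mocked StreamTask, so the source operator under test picks it up through getExecutionConfig(). A hypothetical call site, with the operator and time provider supplied by the enclosing test, might look like this:

    // Hypothetical usage; "sourceOperator" and "testTimeProvider" are placeholders
    // for objects created by the individual test case.
    setupSourceOperator(sourceOperator, TimeCharacteristic.EventTime, 100, 0, testTimeProvider);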
Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.
From the class EvictingWindowOperatorTest, the method testCountTrigger:
@Test
@SuppressWarnings("unchecked")
public void testCountTrigger() throws Exception {
    final int WINDOW_SIZE = 4;
    final int WINDOW_SLIDE = 2;

    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");

    @SuppressWarnings({ "unchecked", "rawtypes" })
    TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
            (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(
                    inputType.createSerializer(new ExecutionConfig()));

    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
            new ListStateDescriptor<>("window-contents", streamRecordSerializer);

    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator =
            new EvictingWindowOperator<>(
                    GlobalWindows.create(),
                    new GlobalWindow.Serializer(),
                    new TupleKeySelector(),
                    BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                    stateDesc,
                    new InternalIterableWindowFunction<>(
                            new ReduceIterableWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>(new SumReducer())),
                    CountTrigger.of(WINDOW_SLIDE),
                    CountEvictor.of(WINDOW_SIZE),
                    0,
                    null);

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    testHarness.open();

    // The global window actually ignores these timestamps...
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

    testHarness.close();
}
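In this test the ExecutionConfig appears only as the argument to TypeInformation#createSerializer(...), once for the record serializer and once for the key serializer. A fresh config is sufficient here; registrations on the config (for example custom Kryo serializers) would only influence serializer creation for generic types. The pattern in isolation, reusing the inputType declared above:

    // Serializer creation is parameterized by the ExecutionConfig.
    ExecutionConfig execConfig = new ExecutionConfig();
    TypeSerializer<String> keySerializer = BasicTypeInfo.STRING_TYPE_INFO.createSerializer(execConfig);
    TypeSerializer<Tuple2<String, Integer>> recordSerializer = inputType.createSerializer(execConfig);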
Use of org.apache.flink.api.common.ExecutionConfig in project flink by apache.
From the class EvictingWindowOperatorTest, the method testCountTriggerWithApply:
@Test
@SuppressWarnings("unchecked")
public void testCountTriggerWithApply() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);

    final int WINDOW_SIZE = 4;
    final int WINDOW_SLIDE = 2;

    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");

    @SuppressWarnings({ "unchecked", "rawtypes" })
    TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
            (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(
                    inputType.createSerializer(new ExecutionConfig()));

    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
            new ListStateDescriptor<>("window-contents", streamRecordSerializer);

    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator =
            new EvictingWindowOperator<>(
                    GlobalWindows.create(),
                    new GlobalWindow.Serializer(),
                    new TupleKeySelector(),
                    BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                    stateDesc,
                    new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
                    CountTrigger.of(WINDOW_SLIDE),
                    CountEvictor.of(WINDOW_SIZE),
                    0,
                    null);

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    testHarness.open();

    // The global window actually ignores these timestamps...
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

    testHarness.close();

    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}