
Example 56 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From class EventTimeTriggerTest, method testMergingWindows.

@Test
public void testMergingWindows() throws Exception {
    TriggerTestHarness<Object, TimeWindow> testHarness = new TriggerTestHarness<>(EventTimeTrigger.create(), new TimeWindow.Serializer());
    assertTrue(EventTimeTrigger.create().canMerge());
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));
    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(2, testHarness.numEventTimeTimers());
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
    testHarness.mergeWindows(new TimeWindow(0, 4), Lists.newArrayList(new TimeWindow(0, 2), new TimeWindow(2, 4)));
    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(1, testHarness.numEventTimeTimers());
    assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 4)));
    assertEquals(TriggerResult.FIRE, testHarness.advanceWatermark(4, new TimeWindow(0, 4)));
    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(0, testHarness.numEventTimeTimers());
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) Test(org.junit.Test)
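
For context, the canMerge()/onMerge() path exercised by this harness is reached from the DataStream API whenever a merging window assigner such as session windows is used. The following is a minimal, hypothetical sketch (input data, key, and gap size are made up) of a user pipeline that ends up running EventTimeTrigger on merging windows:

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.EventTimeTrigger;

public class SessionWindowTriggerSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // elements at event times 1s, 2s and 7s; with a 2s session gap this forms two sessions
        DataStream<Tuple2<String, Integer>> events = env
            .fromElements(Tuple2.of("a", 1), Tuple2.of("a", 2), Tuple2.of("a", 7))
            .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<Tuple2<String, Integer>>() {
                @Override
                public long extractAscendingTimestamp(Tuple2<String, Integer> element) {
                    return element.f1 * 1000L; // use the int field as event time in seconds
                }
            });

        events
            .keyBy(0)                                                  // key by the String field
            .window(EventTimeSessionWindows.withGap(Time.seconds(2)))  // merging window assigner
            .trigger(EventTimeTrigger.create())                        // default trigger for event-time windows
            .sum(1)
            .print();

        env.execute("session window trigger sketch");
    }
}

Spelling out trigger(EventTimeTrigger.create()) is redundant here, since it is already the default trigger of event-time window assigners; it is included only to tie the sketch back to the test above.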

Example 57 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From class EvictingWindowOperatorTest, method testTimeEvictorEvictAfter.

/**
	 * Tests TimeEvictor evictAfter behavior.
	 */
@Test
public void testTimeEvictorEvictAfter() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int TRIGGER_COUNT = 2;
    final boolean EVICT_AFTER = true;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(inputType.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(TRIGGER_COUNT), TimeEvictor.of(Time.seconds(2), EVICT_AFTER), 0, null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 4000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2001));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1001));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1002));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 5), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) GlobalWindow(org.apache.flink.streaming.api.windowing.windows.GlobalWindow) Test(org.junit.Test)
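
The test above constructs EvictingWindowOperator directly; in application code the same CountTrigger plus TimeEvictor(evictAfter = true) combination is normally reached through the DataStream API. A minimal, hypothetical sketch (all data and names are made up; records without timestamps are not evicted by the TimeEvictor, so a real job would typically assign event timestamps first):

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.evictors.TimeEvictor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.CountTrigger;

public class EvictAfterSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<Tuple2<String, Integer>> input =
            env.fromElements(Tuple2.of("key1", 1), Tuple2.of("key1", 1), Tuple2.of("key2", 1));

        input
            .keyBy(0)                                        // key by the String field
            .window(GlobalWindows.create())                  // GlobalWindows never fires on its own
            .trigger(CountTrigger.of(2))                     // fire once two elements arrived for a key
            .evictor(TimeEvictor.of(Time.seconds(2), true))  // evictAfter = true: evict only after the window function ran
            .sum(1)
            .print();

        env.execute("evict-after sketch");
    }
}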

Example 58 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From class SideOutputITCase, method testWatermarkForwarding.

/**
	 * Verify that watermarks are forwarded to all side outputs.
	 */
@Test
public void testWatermarkForwarding() throws Exception {
    final OutputTag<String> sideOutputTag1 = new OutputTag<String>("side") {
    };
    final OutputTag<String> sideOutputTag2 = new OutputTag<String>("other-side") {
    };
    TestListResultSink<String> sideOutputResultSink1 = new TestListResultSink<>();
    TestListResultSink<String> sideOutputResultSink2 = new TestListResultSink<>();
    TestListResultSink<String> resultSink = new TestListResultSink<>();
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(3);
    DataStream<Integer> dataStream = env.addSource(new SourceFunction<Integer>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void run(SourceContext<Integer> ctx) throws Exception {
            ctx.collectWithTimestamp(1, 0);
            ctx.emitWatermark(new Watermark(0));
            ctx.collectWithTimestamp(2, 1);
            ctx.collectWithTimestamp(5, 2);
            ctx.emitWatermark(new Watermark(2));
            ctx.collectWithTimestamp(3, 3);
            ctx.collectWithTimestamp(4, 4);
        }

        @Override
        public void cancel() {
        }
    });
    SingleOutputStreamOperator<Integer> passThroughtStream = dataStream.process(new ProcessFunction<Integer, Integer>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
            out.collect(value);
            ctx.output(sideOutputTag1, "sideout-" + String.valueOf(value));
        }
    });
    class WatermarkReifier extends AbstractStreamOperator<String> implements OneInputStreamOperator<String, String> {

        private static final long serialVersionUID = 1L;

        @Override
        public void processElement(StreamRecord<String> element) throws Exception {
            output.collect(new StreamRecord<>("E:" + element.getValue()));
        }

        @Override
        public void processWatermark(Watermark mark) throws Exception {
            super.processWatermark(mark);
            output.collect(new StreamRecord<>("WM:" + mark.getTimestamp()));
        }
    }
    passThroughtStream.getSideOutput(sideOutputTag1).transform("ReifyWatermarks", BasicTypeInfo.STRING_TYPE_INFO, new WatermarkReifier()).addSink(sideOutputResultSink1);
    passThroughtStream.getSideOutput(sideOutputTag2).transform("ReifyWatermarks", BasicTypeInfo.STRING_TYPE_INFO, new WatermarkReifier()).addSink(sideOutputResultSink2);
    passThroughtStream.map(new MapFunction<Integer, String>() {

        private static final long serialVersionUID = 1L;

        @Override
        public String map(Integer value) throws Exception {
            return value.toString();
        }
    }).transform("ReifyWatermarks", BasicTypeInfo.STRING_TYPE_INFO, new WatermarkReifier()).addSink(resultSink);
    env.execute();
    assertEquals(Arrays.asList("E:sideout-1", "E:sideout-2", "E:sideout-3", "E:sideout-4", "E:sideout-5", "WM:0", "WM:2", "WM:" + Long.MAX_VALUE), sideOutputResultSink1.getSortedResult());
    assertEquals(Arrays.asList("E:sideout-1", "E:sideout-2", "E:sideout-3", "E:sideout-4", "E:sideout-5", "WM:0", "WM:2", "WM:" + Long.MAX_VALUE), sideOutputResultSink1.getSortedResult());
    assertEquals(Arrays.asList("E:1", "E:2", "E:3", "E:4", "E:5", "WM:0", "WM:2", "WM:" + Long.MAX_VALUE), resultSink.getSortedResult());
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) AbstractStreamOperator(org.apache.flink.streaming.api.operators.AbstractStreamOperator) ExpectedException(org.junit.rules.ExpectedException) TestListResultSink(org.apache.flink.test.streaming.runtime.util.TestListResultSink) OneInputStreamOperator(org.apache.flink.streaming.api.operators.OneInputStreamOperator) OutputTag(org.apache.flink.util.OutputTag) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
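
The test consumes its side outputs through getSideOutput after reifying watermarks; the sketch below (hypothetical user code with made-up tag names and data) shows the basic side-output pattern it builds on: an OutputTag declared as an anonymous subclass so that its element type is captured, ctx.output(...) inside a ProcessFunction, and getSideOutput(...) to obtain the tagged stream. As the test verifies, watermarks are forwarded to that side-output stream as well.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

public class SideOutputSketch {
    public static void main(String[] args) throws Exception {
        // anonymous subclass on purpose, so the String type information is preserved
        final OutputTag<String> rejected = new OutputTag<String>("rejected") {};

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        SingleOutputStreamOperator<Integer> mainStream = env
            .fromElements(1, 2, 3, 4, 5)
            .process(new ProcessFunction<Integer, Integer>() {
                @Override
                public void processElement(Integer value, Context ctx, Collector<Integer> out) {
                    if (value % 2 == 0) {
                        out.collect(value);                    // main output
                    } else {
                        ctx.output(rejected, "odd: " + value); // side output
                    }
                }
            });

        DataStream<String> rejectedStream = mainStream.getSideOutput(rejected);

        mainStream.print();
        rejectedStream.print();

        env.execute("side output sketch");
    }
}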

Example 59 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From class AllWindowedStream, method aggregate.

/**
	 * Applies the given window function to each window. The window function is called for each
	 * evaluation of the window for each key individually. The output of the window function is
	 * interpreted as a regular non-windowed stream.
	 *
	 * <p>Arriving data is incrementally aggregated using the given aggregate function. This means
	 * that the window function typically has only a single value to process when called.
	 *
	 * @param aggregateFunction The aggregation function that is used for incremental aggregation.
	 * @param windowFunction The process window function.
	 * @param accumulatorType Type information for the internal accumulator type of the aggregation function
	 * @param aggregateResultType Type information for the result type of the aggregate function
	 * @param resultType Type information for the result type of the window function
	 *
	 * @return The data stream that is the result of applying the window function to the window.
	 *
	 * @param <ACC> The type of the AggregateFunction's accumulator
	 * @param <V> The type of AggregateFunction's result, and the WindowFunction's input
	 * @param <R> The type of the elements in the resulting stream, equal to the
	 *            WindowFunction's result type
	 */
@PublicEvolving
public <ACC, V, R> SingleOutputStreamOperator<R> aggregate(AggregateFunction<T, ACC, V> aggregateFunction, ProcessAllWindowFunction<V, R, W> windowFunction, TypeInformation<ACC> accumulatorType, TypeInformation<V> aggregateResultType, TypeInformation<R> resultType) {
    checkNotNull(aggregateFunction, "aggregateFunction");
    checkNotNull(windowFunction, "windowFunction");
    checkNotNull(accumulatorType, "accumulatorType");
    checkNotNull(aggregateResultType, "aggregateResultType");
    checkNotNull(resultType, "resultType");
    if (aggregateFunction instanceof RichFunction) {
        throw new UnsupportedOperationException("This aggregate function cannot be a RichFunction.");
    }
    //clean the closures
    windowFunction = input.getExecutionEnvironment().clean(windowFunction);
    aggregateFunction = input.getExecutionEnvironment().clean(aggregateFunction);
    final String callLocation = Utils.getCallLocationName();
    final String udfName = "AllWindowedStream." + callLocation;
    final String opName;
    final KeySelector<T, Byte> keySel = input.getKeySelector();
    OneInputStreamOperator<T, R> operator;
    if (evictor != null) {
        @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<T>> streamRecordSerializer = (TypeSerializer<StreamRecord<T>>) new StreamElementSerializer(input.getType().createSerializer(getExecutionEnvironment().getConfig()));
        ListStateDescriptor<StreamRecord<T>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + evictor + ", " + udfName + ")";
        operator = new EvictingWindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalAggregateProcessAllWindowFunction<>(aggregateFunction, windowFunction), trigger, evictor, allowedLateness, lateDataOutputTag);
    } else {
        AggregatingStateDescriptor<T, ACC, V> stateDesc = new AggregatingStateDescriptor<>("window-contents", aggregateFunction, accumulatorType.createSerializer(getExecutionEnvironment().getConfig()));
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + udfName + ")";
        operator = new WindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalSingleValueProcessAllWindowFunction<>(windowFunction), trigger, allowedLateness, lateDataOutputTag);
    }
    return input.transform(opName, resultType, operator).forceNonParallel();
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) RichFunction(org.apache.flink.api.common.functions.RichFunction) AggregatingStateDescriptor(org.apache.flink.api.common.state.AggregatingStateDescriptor) ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) InternalSingleValueProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueProcessAllWindowFunction) InternalAggregateProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalAggregateProcessAllWindowFunction) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
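
The aggregate(...) method above is the internal variant that takes explicit type information; applications usually reach it through a convenience overload of AllWindowedStream.aggregate that extracts the type information itself. A minimal, hypothetical sketch assuming that overload and an AggregateFunction interface whose add(...) returns the accumulator (older releases used a void, mutating add); all data and names are made up:

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class AggregateAllSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        env.fromElements(1L, 2L, 3L, 7L)
            .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<Long>() {
                @Override
                public long extractAscendingTimestamp(Long element) {
                    return element * 1000L; // treat the value as event time in seconds
                }
            })
            .windowAll(TumblingEventTimeWindows.of(Time.seconds(5)))
            .aggregate(
                // incremental pre-aggregation: count the elements of the window
                new AggregateFunction<Long, Long, Long>() {
                    @Override public Long createAccumulator() { return 0L; }
                    @Override public Long add(Long value, Long acc) { return acc + 1; }
                    @Override public Long getResult(Long acc) { return acc; }
                    @Override public Long merge(Long a, Long b) { return a + b; }
                },
                // the window function sees only the single pre-aggregated value
                new ProcessAllWindowFunction<Long, String, TimeWindow>() {
                    @Override
                    public void process(Context ctx, Iterable<Long> counts, Collector<String> out) {
                        out.collect(ctx.window() + " contains " + counts.iterator().next() + " elements");
                    }
                })
            .print();

        env.execute("aggregate sketch");
    }
}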

Example 60 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

From class AllWindowedStream, method fold.

/**
	 * Applies the given window function to each window. The window function is called for each
	 * evaluation of the window for each key individually. The output of the window function is
	 * interpreted as a regular non-windowed stream.
	 *
	 * <p>
	 * Arriving data is incrementally aggregated using the given fold function.
	 *
	 * @param initialValue The initial value of the fold.
	 * @param foldFunction The fold function that is used for incremental aggregation.
	 * @param function The process window function.
	 * @param foldAccumulatorType Type information for the result type of the fold function
	 * @param resultType Type information for the result type of the window function
	 * @return The data stream that is the result of applying the window function to the window.
	 */
@PublicEvolving
public <ACC, R> SingleOutputStreamOperator<R> fold(ACC initialValue, FoldFunction<T, ACC> foldFunction, ProcessAllWindowFunction<ACC, R, W> function, TypeInformation<ACC> foldAccumulatorType, TypeInformation<R> resultType) {
    if (foldFunction instanceof RichFunction) {
        throw new UnsupportedOperationException("FoldFunction of fold can not be a RichFunction.");
    }
    if (windowAssigner instanceof MergingWindowAssigner) {
        throw new UnsupportedOperationException("Fold cannot be used with a merging WindowAssigner.");
    }
    //clean the closures
    function = input.getExecutionEnvironment().clean(function);
    foldFunction = input.getExecutionEnvironment().clean(foldFunction);
    String callLocation = Utils.getCallLocationName();
    String udfName = "AllWindowedStream." + callLocation;
    String opName;
    KeySelector<T, Byte> keySel = input.getKeySelector();
    OneInputStreamOperator<T, R> operator;
    if (evictor != null) {
        @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<T>> streamRecordSerializer = (TypeSerializer<StreamRecord<T>>) new StreamElementSerializer(input.getType().createSerializer(getExecutionEnvironment().getConfig()));
        ListStateDescriptor<StreamRecord<T>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + evictor + ", " + udfName + ")";
        operator = new EvictingWindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalIterableProcessAllWindowFunction<>(new FoldApplyProcessAllWindowFunction<>(initialValue, foldFunction, function, foldAccumulatorType)), trigger, evictor, allowedLateness, lateDataOutputTag);
    } else {
        FoldingStateDescriptor<T, ACC> stateDesc = new FoldingStateDescriptor<>("window-contents", initialValue, foldFunction, foldAccumulatorType.createSerializer(getExecutionEnvironment().getConfig()));
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + udfName + ")";
        operator = new WindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalSingleValueProcessAllWindowFunction<>(function), trigger, allowedLateness, lateDataOutputTag);
    }
    return input.transform(opName, resultType, operator).forceNonParallel();
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) RichFunction(org.apache.flink.api.common.functions.RichFunction) ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) InternalSingleValueProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueProcessAllWindowFunction) FoldingStateDescriptor(org.apache.flink.api.common.state.FoldingStateDescriptor) MergingWindowAssigner(org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) InternalIterableProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalIterableProcessAllWindowFunction) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
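
For completeness, a minimal, hypothetical usage sketch that calls exactly the fold(...) signature documented above, passing explicit type information for the fold accumulator and the result (all data and names are made up; fold-based window aggregation was later deprecated in favor of aggregate()):

import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class FoldAllSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        env.fromElements(1, 2, 3, 7)
            .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<Integer>() {
                @Override
                public long extractAscendingTimestamp(Integer element) {
                    return element * 1000L; // the value doubles as event time in seconds
                }
            })
            .windowAll(TumblingEventTimeWindows.of(Time.seconds(5)))
            .fold(
                "",                                    // initial fold value
                new FoldFunction<Integer, String>() {  // incrementally concatenate the window's elements
                    @Override
                    public String fold(String acc, Integer value) {
                        return acc.isEmpty() ? String.valueOf(value) : acc + "," + value;
                    }
                },
                new ProcessAllWindowFunction<String, String, TimeWindow>() {
                    @Override
                    public void process(Context ctx, Iterable<String> folded, Collector<String> out) {
                        out.collect(ctx.window() + " -> " + folded.iterator().next());
                    }
                },
                BasicTypeInfo.STRING_TYPE_INFO,        // fold accumulator type
                BasicTypeInfo.STRING_TYPE_INFO)        // window function result type
            .print();

        env.execute("fold sketch");
    }
}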

Aggregations

StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 76
Test (org.junit.Test): 50
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 27
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 27
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer): 27
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 21
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 20
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 19
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 19
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 17
RichFunction (org.apache.flink.api.common.functions.RichFunction): 16
ArrayList (java.util.ArrayList): 14
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 14
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 14
Map (java.util.Map): 11
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor): 11
Event (org.apache.flink.cep.Event): 11
HashMap (java.util.HashMap): 10
PublicEvolving (org.apache.flink.annotation.PublicEvolving): 9
MergingWindowAssigner (org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner): 9