Example 21 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class EmitterTest method testEmitterWithExceptions.

/**
 * Tests that the emitter handles exceptions occurring in the {@link AsyncCollector} correctly.
 */
@Test
public void testEmitterWithExceptions() throws Exception {
    Object lock = new Object();
    List<StreamElement> list = new ArrayList<>();
    Output<StreamRecord<Integer>> output = new CollectorOutput<>(list);
    List<StreamElement> expected = Arrays.asList(new StreamRecord<>(1, 0L), new Watermark(3L));
    OperatorActions operatorActions = mock(OperatorActions.class);
    final int capacity = 3;
    StreamElementQueue queue = new OrderedStreamElementQueue(capacity, executor, operatorActions);
    final Emitter<Integer> emitter = new Emitter<>(lock, output, queue, operatorActions);
    final Thread emitterThread = new Thread(emitter);
    emitterThread.start();
    final Exception testException = new Exception("Test exception");
    try {
        StreamRecordQueueEntry<Integer> record1 = new StreamRecordQueueEntry<>(new StreamRecord<>(1, 0L));
        StreamRecordQueueEntry<Integer> record2 = new StreamRecordQueueEntry<>(new StreamRecord<>(2, 1L));
        WatermarkQueueEntry watermark1 = new WatermarkQueueEntry(new Watermark(3L));
        queue.put(record1);
        queue.put(record2);
        queue.put(watermark1);
        record2.collect(testException);
        record1.collect(Arrays.asList(1));
        synchronized (lock) {
            while (!queue.isEmpty()) {
                lock.wait();
            }
        }
        Assert.assertEquals(expected, list);
        ArgumentCaptor<Throwable> argumentCaptor = ArgumentCaptor.forClass(Throwable.class);
        verify(operatorActions).failOperator(argumentCaptor.capture());
        Throwable failureCause = argumentCaptor.getValue();
        Assert.assertNotNull(failureCause.getCause());
        Assert.assertTrue(failureCause.getCause() instanceof ExecutionException);
        Assert.assertNotNull(failureCause.getCause().getCause());
        Assert.assertEquals(testException, failureCause.getCause().getCause());
    } finally {
        emitter.stop();
        emitterThread.interrupt();
    }
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) ArrayList(java.util.ArrayList) StreamElement(org.apache.flink.streaming.runtime.streamrecord.StreamElement) ExecutionException(java.util.concurrent.ExecutionException) StreamElementQueue(org.apache.flink.streaming.api.operators.async.queue.StreamElementQueue) OrderedStreamElementQueue(org.apache.flink.streaming.api.operators.async.queue.OrderedStreamElementQueue) CollectorOutput(org.apache.flink.streaming.util.CollectorOutput) Watermark(org.apache.flink.streaming.api.watermark.Watermark) StreamRecordQueueEntry(org.apache.flink.streaming.api.operators.async.queue.StreamRecordQueueEntry) WatermarkQueueEntry(org.apache.flink.streaming.api.operators.async.queue.WatermarkQueueEntry) Test(org.junit.Test)
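
The queue entries above wrap plain stream elements; for orientation, here is a short sketch of the StreamRecord and Watermark accessors whose values the Emitter ultimately forwards to the output.

// StreamRecord pairs a value with an optional event-time timestamp.
StreamRecord<Integer> record = new StreamRecord<>(1, 0L);
Integer value = record.getValue();            // 1
long timestamp = record.getTimestamp();       // 0L
boolean hasTimestamp = record.hasTimestamp(); // true

// Watermark is a StreamElement that carries only a timestamp.
Watermark watermark = new Watermark(3L);
long watermarkTime = watermark.getTimestamp(); // 3L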

Example 22 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class StreamElementQueueTest method testPut.

@Test
public void testPut() throws InterruptedException {
    OperatorActions operatorActions = mock(OperatorActions.class);
    StreamElementQueue queue = createStreamElementQueue(2, operatorActions);
    final Watermark watermark = new Watermark(0L);
    final StreamRecord<Integer> streamRecord = new StreamRecord<>(42, 1L);
    final Watermark nextWatermark = new Watermark(2L);
    final WatermarkQueueEntry watermarkQueueEntry = new WatermarkQueueEntry(watermark);
    final StreamRecordQueueEntry<Integer> streamRecordQueueEntry = new StreamRecordQueueEntry<>(streamRecord);
    queue.put(watermarkQueueEntry);
    queue.put(streamRecordQueueEntry);
    Assert.assertEquals(2, queue.size());
    Assert.assertFalse(queue.tryPut(new WatermarkQueueEntry(nextWatermark)));
    Collection<StreamElementQueueEntry<?>> actualValues = queue.values();
    List<StreamElementQueueEntry<?>> expectedValues = Arrays.asList(watermarkQueueEntry, streamRecordQueueEntry);
    Assert.assertEquals(expectedValues, actualValues);
    verify(operatorActions, never()).failOperator(any(Exception.class));
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) OperatorActions(org.apache.flink.streaming.api.operators.async.OperatorActions) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)

Example 23 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class StreamSourceOperatorTest method setupSourceOperator.

@SuppressWarnings("unchecked")
private static <T> void setupSourceOperator(StreamSource<T, ?> operator, TimeCharacteristic timeChar, long watermarkInterval, long latencyMarkInterval, final ProcessingTimeService timeProvider) {
    ExecutionConfig executionConfig = new ExecutionConfig();
    executionConfig.setAutoWatermarkInterval(watermarkInterval);
    executionConfig.setLatencyTrackingInterval(latencyMarkInterval);
    StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setStateBackend(new MemoryStateBackend());
    cfg.setTimeCharacteristic(timeChar);
    Environment env = new DummyEnvironment("MockTwoInputTask", 1, 0);
    StreamStatusMaintainer streamStatusMaintainer = mock(StreamStatusMaintainer.class);
    when(streamStatusMaintainer.getStreamStatus()).thenReturn(StreamStatus.ACTIVE);
    StreamTask<?, ?> mockTask = mock(StreamTask.class);
    when(mockTask.getName()).thenReturn("Mock Task");
    when(mockTask.getCheckpointLock()).thenReturn(new Object());
    when(mockTask.getConfiguration()).thenReturn(cfg);
    when(mockTask.getEnvironment()).thenReturn(env);
    when(mockTask.getExecutionConfig()).thenReturn(executionConfig);
    when(mockTask.getAccumulatorMap()).thenReturn(Collections.<String, Accumulator<?, ?>>emptyMap());
    when(mockTask.getStreamStatusMaintainer()).thenReturn(streamStatusMaintainer);
    doAnswer(new Answer<ProcessingTimeService>() {

        @Override
        public ProcessingTimeService answer(InvocationOnMock invocation) throws Throwable {
            if (timeProvider == null) {
                throw new RuntimeException("The time provider is null.");
            }
            return timeProvider;
        }
    }).when(mockTask).getProcessingTimeService();
    operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) Configuration(org.apache.flink.configuration.Configuration) MemoryStateBackend(org.apache.flink.runtime.state.memory.MemoryStateBackend) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) StreamStatusMaintainer(org.apache.flink.streaming.runtime.streamstatus.StreamStatusMaintainer) DummyEnvironment(org.apache.flink.runtime.operators.testutils.DummyEnvironment) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) ProcessingTimeService(org.apache.flink.streaming.runtime.tasks.ProcessingTimeService) TestProcessingTimeService(org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Environment(org.apache.flink.runtime.execution.Environment)
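
If a test needs to inspect what the source actually emits rather than just wiring the operator up, the mocked Output can be swapped for the CollectorOutput used in Example 21. A hedged sketch of that variant, reusing the method's generic type T and the mockTask/cfg set up above:

// Collect emitted elements instead of discarding them in a Mockito mock.
// Assumes CollectorOutput from org.apache.flink.streaming.util, as in Example 21.
List<StreamElement> emittedElements = new ArrayList<>();
Output<StreamRecord<T>> collectingOutput = new CollectorOutput<>(emittedElements);
operator.setup(mockTask, cfg, collectingOutput);
// After the source runs, emittedElements holds its StreamRecords and Watermarks in emission order.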

Example 24 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class EvictingWindowOperatorTest method testCountTrigger.

@Test
@SuppressWarnings("unchecked")
public void testCountTrigger() throws Exception {
    final int WINDOW_SIZE = 4;
    final int WINDOW_SLIDE = 2;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(inputType.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new ReduceIterableWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>(new SumReducer())), CountTrigger.of(WINDOW_SLIDE), CountEvictor.of(WINDOW_SIZE), 0, null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // The global window actually ignores these timestamps...
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) ReduceIterableWindowFunction(org.apache.flink.streaming.api.functions.windowing.ReduceIterableWindowFunction) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) GlobalWindow(org.apache.flink.streaming.api.windowing.windows.GlobalWindow) Test(org.junit.Test)
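
The expected records above carry Long.MAX_VALUE as their timestamp because GlobalWindows places every element into the single GlobalWindow, and the window operator stamps its output with the window's maxTimestamp(). A one-line check of that constant (JUnit assumed, as in the test itself):

// GlobalWindow spans the whole stream, so its maxTimestamp() is Long.MAX_VALUE,
// which is the timestamp attached to the records the operator emits.
Assert.assertEquals(Long.MAX_VALUE, GlobalWindow.get().maxTimestamp());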

Example 25 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class EvictingWindowOperatorTest method testCountTriggerWithApply.

@Test
@SuppressWarnings("unchecked")
public void testCountTriggerWithApply() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int WINDOW_SIZE = 4;
    final int WINDOW_SLIDE = 2;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(inputType.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(WINDOW_SLIDE), CountEvictor.of(WINDOW_SIZE), 0, null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // The global window actually ignores these timestamps...
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) GlobalWindow(org.apache.flink.streaming.api.windowing.windows.GlobalWindow) Test(org.junit.Test)

Aggregations

StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 76
Test (org.junit.Test): 50
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 27
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 27
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer): 27
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 21
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 20
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 19
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 19
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 17
RichFunction (org.apache.flink.api.common.functions.RichFunction): 16
ArrayList (java.util.ArrayList): 14
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 14
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 14
Map (java.util.Map): 11
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor): 11
Event (org.apache.flink.cep.Event): 11
HashMap (java.util.HashMap): 10
PublicEvolving (org.apache.flink.annotation.PublicEvolving): 9
MergingWindowAssigner (org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner): 9
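
Taken together, the most frequent combination in this list is a StreamRecord driven through an operator test harness inside a JUnit test. Below is a minimal sketch in that style; StreamMap, the one-argument OneInputStreamOperatorTestHarness constructor, and TestHarnessUtil.assertOutputEquals are assumptions drawn from the flink-streaming-java test utilities, not taken from the examples above.

@Test
public void testStreamMapRoundTrip() throws Exception {
    // Hypothetical test name. StreamMap wraps a plain MapFunction as a one-input operator.
    StreamMap<Integer, Integer> doubler = new StreamMap<>(new MapFunction<Integer, Integer>() {

        @Override
        public Integer map(Integer value) {
            return value * 2;
        }
    });
    OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(doubler);
    testHarness.open();
    // Feed one element; StreamMap keeps the input timestamp on the output record.
    testHarness.processElement(new StreamRecord<>(21, 42L));
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new StreamRecord<>(42, 42L));
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.close();
}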