Search in sources :

Example 1 with StatefulSequenceSource

Use of org.apache.flink.streaming.api.functions.source.StatefulSequenceSource in the Apache Flink project.

Source: class StreamExecutionEnvironmentTest, method testSources.

@Test
public void testSources() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Anonymous no-op source: used only to verify that addSource() wires this
    // exact function instance into the resulting DataStreamSource.
    final SourceFunction<Integer> noOpSource = new SourceFunction<Integer>() {

        private static final long serialVersionUID = 1L;

        @Override
        public void run(SourceContext<Integer> ctx) throws Exception {
            // intentionally empty: emits nothing
        }

        @Override
        public void cancel() {
            // nothing to cancel
        }
    };

    final DataStreamSource<Integer> fromAddSource = env.addSource(noOpSource);
    fromAddSource.addSink(new DiscardingSink<Integer>());
    assertEquals(noOpSource, getFunctionFromDataSource(fromAddSource));

    // generateSequence() is expected to be backed by a StatefulSequenceSource.
    final DataStreamSource<Long> fromSequence = env.generateSequence(0, 2);
    assertTrue(getFunctionFromDataSource(fromSequence) instanceof StatefulSequenceSource);

    // Both fromElements() and fromCollection() are expected to be backed by a
    // FromElementsFunction.
    final DataStreamSource<Long> fromElements = env.fromElements(0L, 1L, 2L);
    assertTrue(getFunctionFromDataSource(fromElements) instanceof FromElementsFunction);

    final List<Long> collection = Arrays.asList(0L, 1L, 2L);
    final DataStreamSource<Long> fromCollection = env.fromCollection(collection);
    assertTrue(getFunctionFromDataSource(fromCollection) instanceof FromElementsFunction);
}
Also used : SourceFunction(org.apache.flink.streaming.api.functions.source.SourceFunction) FromElementsFunction(org.apache.flink.streaming.api.functions.source.FromElementsFunction) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) StatefulSequenceSource(org.apache.flink.streaming.api.functions.source.StatefulSequenceSource) Test(org.junit.Test)

Example 2 with StatefulSequenceSource

Use of org.apache.flink.streaming.api.functions.source.StatefulSequenceSource in the Apache Flink project.

Source: class StatefulSequenceSourceTest, method testCheckpointRestore.

@Test
public void testCheckpointRestore() throws Exception {
    // Scenario: two parallel source subtasks emit the range [initElement, maxElement],
    // are paused mid-stream, snapshotted, the state is repartitioned down to
    // parallelism 1, and a third source restored from that state must emit the
    // remaining elements — with no duplicates and no gaps overall.
    final int initElement = 0;
    final int maxElement = 100;
    // NOTE(review): "Parallelsim" is a typo for "Parallelism"; left unchanged here.
    final int maxParallelsim = 2;
    // Every value in [initElement, maxElement] must appear exactly once in total.
    final Set<Long> expectedOutput = new HashSet<>();
    for (long i = initElement; i <= maxElement; i++) {
        expectedOutput.add(i);
    }
    // Per-source-id ("1", "2", "3") lists of emitted elements; concurrent because
    // three runner threads write into it.
    final ConcurrentHashMap<String, List<Long>> outputCollector = new ConcurrentHashMap<>();
    // latchToTriggerN / latchToWaitN: presumably BlockingSourceContext triggers the
    // former once source N has emitted its configured element count, then blocks on
    // the latter until the test releases it — confirm against BlockingSourceContext.
    final OneShotLatch latchToTrigger1 = new OneShotLatch();
    final OneShotLatch latchToWait1 = new OneShotLatch();
    final OneShotLatch latchToTrigger2 = new OneShotLatch();
    final OneShotLatch latchToWait2 = new OneShotLatch();
    // Subtask 0 of 2.
    final StatefulSequenceSource source1 = new StatefulSequenceSource(initElement, maxElement);
    StreamSource<Long, StatefulSequenceSource> src1 = new StreamSource<>(source1);
    final AbstractStreamOperatorTestHarness<Long> testHarness1 = new AbstractStreamOperatorTestHarness<>(src1, maxParallelsim, 2, 0);
    testHarness1.open();
    // Subtask 1 of 2.
    final StatefulSequenceSource source2 = new StatefulSequenceSource(initElement, maxElement);
    StreamSource<Long, StatefulSequenceSource> src2 = new StreamSource<>(source2);
    final AbstractStreamOperatorTestHarness<Long> testHarness2 = new AbstractStreamOperatorTestHarness<>(src2, maxParallelsim, 2, 1);
    testHarness2.open();
    // Slots for exceptions thrown on the runner threads.
    // NOTE(review): these captured errors are never asserted on later — a failure
    // inside a runner thread is only printed, not propagated to the test result.
    final Throwable[] error = new Throwable[3];
    // run the source asynchronously
    Thread runner1 = new Thread() {

        @Override
        public void run() {
            try {
                source1.run(new BlockingSourceContext<>("1", latchToTrigger1, latchToWait1, outputCollector, 21));
            } catch (Throwable t) {
                t.printStackTrace();
                error[0] = t;
            }
        }
    };
    // run the source asynchronously
    Thread runner2 = new Thread() {

        @Override
        public void run() {
            try {
                source2.run(new BlockingSourceContext<>("2", latchToTrigger2, latchToWait2, outputCollector, 32));
            } catch (Throwable t) {
                t.printStackTrace();
                error[1] = t;
            }
        }
    };
    runner1.start();
    runner2.start();
    // Wait until both sources have paused mid-stream before taking the snapshot.
    if (!latchToTrigger1.isTriggered()) {
        latchToTrigger1.await();
    }
    if (!latchToTrigger2.isTriggered()) {
        latchToTrigger2.await();
    }
    // Combine the two subtask snapshots into one operator state...
    OperatorSubtaskState snapshot = AbstractStreamOperatorTestHarness.repackageState(testHarness1.snapshot(0L, 0L), testHarness2.snapshot(0L, 0L));
    // ...and restore it into a single new subtask (parallelism 2 -> 1).
    final StatefulSequenceSource source3 = new StatefulSequenceSource(initElement, maxElement);
    StreamSource<Long, StatefulSequenceSource> src3 = new StreamSource<>(source3);
    final OperatorSubtaskState initState = AbstractStreamOperatorTestHarness.repartitionOperatorState(snapshot, maxParallelsim, 2, 1, 0);
    final AbstractStreamOperatorTestHarness<Long> testHarness3 = new AbstractStreamOperatorTestHarness<>(src3, maxParallelsim, 1, 0);
    testHarness3.setup();
    testHarness3.initializeState(initState);
    testHarness3.open();
    final OneShotLatch latchToTrigger3 = new OneShotLatch();
    final OneShotLatch latchToWait3 = new OneShotLatch();
    // Source 3 is never paused: its wait latch is released up front so runner3
    // can run to completion below.
    latchToWait3.trigger();
    // run the source asynchronously
    Thread runner3 = new Thread() {

        @Override
        public void run() {
            try {
                source3.run(new BlockingSourceContext<>("3", latchToTrigger3, latchToWait3, outputCollector, 3));
            } catch (Throwable t) {
                t.printStackTrace();
                error[2] = t;
            }
        }
    };
    runner3.start();
    runner3.join();
    // we have 3 tasks.
    Assert.assertEquals(3, outputCollector.size());
    // test for at-most-once: no element may appear in more than one task's output
    Set<Long> dedupRes = new HashSet<>(Math.abs(maxElement - initElement) + 1);
    for (Map.Entry<String, List<Long>> elementsPerTask : outputCollector.entrySet()) {
        String key = elementsPerTask.getKey();
        List<Long> elements = outputCollector.get(key);
        // this tests the correctness of the latches in the test
        Assert.assertTrue(elements.size() > 0);
        for (Long elem : elements) {
            if (!dedupRes.add(elem)) {
                Assert.fail("Duplicate entry: " + elem);
            }
            if (!expectedOutput.contains(elem)) {
                Assert.fail("Unexpected element: " + elem);
            }
        }
    }
    // test for exactly-once: together with the duplicate check above, observing
    // the full expected count means every element was emitted exactly once
    Assert.assertEquals(Math.abs(initElement - maxElement) + 1, dedupRes.size());
    // Release sources 1 and 2 so their threads can terminate.
    latchToWait1.trigger();
    latchToWait2.trigger();
    // wait for everybody to finish.
    runner1.join();
    runner2.join();
}
Also used : StreamSource(org.apache.flink.streaming.api.operators.StreamSource) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) AbstractStreamOperatorTestHarness(org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness) OneShotLatch(org.apache.flink.core.testutils.OneShotLatch) ArrayList(java.util.ArrayList) List(java.util.List) StatefulSequenceSource(org.apache.flink.streaming.api.functions.source.StatefulSequenceSource) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Map(java.util.Map) HashSet(java.util.HashSet) Test(org.junit.Test)

Aggregations

StatefulSequenceSource (org.apache.flink.streaming.api.functions.source.StatefulSequenceSource)2 Test (org.junit.Test)2 ArrayList (java.util.ArrayList)1 HashSet (java.util.HashSet)1 List (java.util.List)1 Map (java.util.Map)1 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)1 OneShotLatch (org.apache.flink.core.testutils.OneShotLatch)1 OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState)1 StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)1 FromElementsFunction (org.apache.flink.streaming.api.functions.source.FromElementsFunction)1 SourceFunction (org.apache.flink.streaming.api.functions.source.SourceFunction)1 StreamSource (org.apache.flink.streaming.api.operators.StreamSource)1 AbstractStreamOperatorTestHarness (org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness)1