Example 6 with OneInputStreamOperatorTestHarness

Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in the Apache Flink project.

From the class AccumulatingAlignedProcessingTimeWindowOperatorTest, method checkpointRestoreWithPendingWindowSliding().

@Test
public void checkpointRestoreWithPendingWindowSliding() {
    try {
        final int factor = 4;
        final int windowSlide = 50;
        final int windowSize = factor * windowSlide;
        // sliding window of 200 ms that slides every 50 ms
        AccumulatingProcessingTimeWindowOperator<Integer, Integer, Integer> op = new AccumulatingProcessingTimeWindowOperator<>(validatingIdentityFunction, identitySelector, IntSerializer.INSTANCE, IntSerializer.INSTANCE, windowSize, windowSlide);
        OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(op);
        testHarness.setProcessingTime(0);
        testHarness.setup();
        testHarness.open();
        // inject some elements
        final int numElements = 1000;
        final int numElementsFirst = 700;
        for (int i = 0; i < numElementsFirst; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
        // draw a snapshot
        List<Integer> resultAtSnapshot = extractFromStreamRecords(testHarness.getOutput());
        int beforeSnapShot = testHarness.getOutput().size();
        StreamStateHandle state = testHarness.snapshotLegacy(1L, System.currentTimeMillis());
        int afterSnapShot = testHarness.getOutput().size();
        assertEquals("operator performed computation during snapshot", beforeSnapShot, afterSnapShot);
        assertTrue(resultAtSnapshot.size() <= factor * numElementsFirst);
        // inject the remaining elements - these should not influence the snapshot
        for (int i = numElementsFirst; i < numElements; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
        testHarness.close();
        // re-create the operator and restore the state
        op = new AccumulatingProcessingTimeWindowOperator<>(validatingIdentityFunction, identitySelector, IntSerializer.INSTANCE, IntSerializer.INSTANCE, windowSize, windowSlide);
        testHarness = new OneInputStreamOperatorTestHarness<>(op);
        testHarness.setup();
        testHarness.restore(state);
        testHarness.open();
        // inject the remaining elements again
        for (int i = numElementsFirst; i < numElements; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
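        // advance processing time in slide-sized (50 ms) steps so that every
        // sliding window pending at the snapshot fires and emits its elements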
        testHarness.setProcessingTime(50);
        testHarness.setProcessingTime(100);
        testHarness.setProcessingTime(150);
        testHarness.setProcessingTime(200);
        testHarness.setProcessingTime(250);
        testHarness.setProcessingTime(300);
        testHarness.setProcessingTime(350);
        // get and verify the result
        List<Integer> finalResult = new ArrayList<>(resultAtSnapshot);
        List<Integer> finalPartialResult = extractFromStreamRecords(testHarness.getOutput());
        finalResult.addAll(finalPartialResult);
        assertEquals(factor * numElements, finalResult.size());
        Collections.sort(finalResult);
        for (int i = 0; i < factor * numElements; i++) {
            assertEquals(i / factor, finalResult.get(i).intValue());
        }
        testHarness.close();
        op.dispose();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: ArrayList (java.util.ArrayList), KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness), OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness), StreamStateHandle (org.apache.flink.runtime.state.StreamStateHandle), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
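
The AccumulatingAlignedProcessingTimeWindowOperatorTest examples in this section lean on helpers defined elsewhere in the test class. The sketch below reconstructs them from how they are called; the bodies and exact signatures are assumptions, not the actual Flink source.

// Sketch (assumed): keys each integer by itself, so key == value inside the window function.
private final KeySelector<Integer, Integer> identitySelector =
        new KeySelector<Integer, Integer>() {
            @Override
            public Integer getKey(Integer value) {
                return value;
            }
        };

// Sketch (assumed): identity window function that validates the key/value invariant
// and forwards every element unchanged.
private final WindowFunction<Integer, Integer, Integer, TimeWindow> validatingIdentityFunction =
        new WindowFunction<Integer, Integer, Integer, TimeWindow>() {
            @Override
            public void apply(Integer key, TimeWindow window, Iterable<Integer> values, Collector<Integer> out) {
                for (Integer val : values) {
                    assertEquals(key, val);
                    out.collect(val);
                }
            }
        };

// Sketch (assumed): unwraps the value of every StreamRecord in the harness output,
// ignoring anything else (e.g. watermarks).
@SuppressWarnings("unchecked")
private static <T> List<T> extractFromStreamRecords(Iterable<?> input) {
    List<T> result = new ArrayList<>();
    for (Object in : input) {
        if (in instanceof StreamRecord) {
            result.add(((StreamRecord<T>) in).getValue());
        }
    }
    return result;
}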

Example 7 with OneInputStreamOperatorTestHarness

Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in the Apache Flink project.

From the class AccumulatingAlignedProcessingTimeWindowOperatorTest, method checkpointRestoreWithPendingWindowTumbling().

@Test
public void checkpointRestoreWithPendingWindowTumbling() {
    try {
        final int windowSize = 200;
        // tumbling window that triggers every 200 milliseconds
        AccumulatingProcessingTimeWindowOperator<Integer, Integer, Integer> op = new AccumulatingProcessingTimeWindowOperator<>(validatingIdentityFunction, identitySelector, IntSerializer.INSTANCE, IntSerializer.INSTANCE, windowSize, windowSize);
        OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(op);
        testHarness.setup();
        testHarness.open();
        testHarness.setProcessingTime(0);
        // inject some elements
        final int numElementsFirst = 700;
        final int numElements = 1000;
        for (int i = 0; i < numElementsFirst; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
        // draw a snapshot; the operator is closed and disposed below
        int beforeSnapShot = testHarness.getOutput().size();
        StreamStateHandle state = testHarness.snapshotLegacy(1L, System.currentTimeMillis());
        List<Integer> resultAtSnapshot = extractFromStreamRecords(testHarness.getOutput());
        int afterSnapShot = testHarness.getOutput().size();
        assertEquals("operator performed computation during snapshot", beforeSnapShot, afterSnapShot);
        assertTrue(afterSnapShot <= numElementsFirst);
        // inject some more elements after the snapshot; they must not show up in the restored state
        for (int i = 0; i < 300; i++) {
            testHarness.processElement(new StreamRecord<>(i + numElementsFirst));
        }
        testHarness.close();
        op.dispose();
        // re-create the operator and restore the state
        op = new AccumulatingProcessingTimeWindowOperator<>(validatingIdentityFunction, identitySelector, IntSerializer.INSTANCE, IntSerializer.INSTANCE, windowSize, windowSize);
        testHarness = new OneInputStreamOperatorTestHarness<>(op);
        testHarness.setup();
        testHarness.restore(state);
        testHarness.open();
        // inject some more elements
        for (int i = numElementsFirst; i < numElements; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
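        // advance processing time past the 200 ms window end so the pending tumbling window fires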
        testHarness.setProcessingTime(400);
        // get and verify the result
        List<Integer> finalResult = new ArrayList<>();
        finalResult.addAll(resultAtSnapshot);
        List<Integer> finalPartialResult = extractFromStreamRecords(testHarness.getOutput());
        finalResult.addAll(finalPartialResult);
        assertEquals(numElements, finalResult.size());
        Collections.sort(finalResult);
        for (int i = 0; i < numElements; i++) {
            assertEquals(i, finalResult.get(i).intValue());
        }
        testHarness.close();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: ArrayList (java.util.ArrayList), KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness), OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness), StreamStateHandle (org.apache.flink.runtime.state.StreamStateHandle), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
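
Stripped of assertions, both checkpoint/restore tests above follow the same harness lifecycle. The skeleton below distills it using only calls that appear in the examples (the legacy snapshotLegacy/restore pair of this Flink version); op and freshOp stand in for two separately constructed instances of the operator under test.

// 1) run the original operator and take a snapshot
OneInputStreamOperatorTestHarness<Integer, Integer> harness =
        new OneInputStreamOperatorTestHarness<>(op);
harness.setup();
harness.open();
harness.processElement(new StreamRecord<>(42));
StreamStateHandle state = harness.snapshotLegacy(1L, System.currentTimeMillis());
harness.close();

// 2) restore the snapshot into a freshly built operator instance
harness = new OneInputStreamOperatorTestHarness<>(freshOp);
harness.setup();
harness.restore(state);
harness.open();

// 3) advance processing time so the windows pending in the snapshot fire
harness.setProcessingTime(400);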

Example 8 with OneInputStreamOperatorTestHarness

Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in the Apache Flink project.

From the class AccumulatingAlignedProcessingTimeWindowOperatorTest, method checkpointRestoreWithPendingWindowSlidingWithProcessFunction().

@Test
public void checkpointRestoreWithPendingWindowSlidingWithProcessFunction() {
    try {
        final int factor = 4;
        final int windowSlide = 50;
        final int windowSize = factor * windowSlide;
        // sliding window of 200 ms that slides every 50 ms
        AccumulatingProcessingTimeWindowOperator<Integer, Integer, Integer> op = new AccumulatingProcessingTimeWindowOperator<>(validatingIdentityProcessFunction, identitySelector, IntSerializer.INSTANCE, IntSerializer.INSTANCE, windowSize, windowSlide);
        OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(op);
        testHarness.setProcessingTime(0);
        testHarness.setup();
        testHarness.open();
        // inject some elements
        final int numElements = 1000;
        final int numElementsFirst = 700;
        for (int i = 0; i < numElementsFirst; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
        // draw a snapshot
        List<Integer> resultAtSnapshot = extractFromStreamRecords(testHarness.getOutput());
        int beforeSnapShot = testHarness.getOutput().size();
        StreamStateHandle state = testHarness.snapshotLegacy(1L, System.currentTimeMillis());
        int afterSnapShot = testHarness.getOutput().size();
        assertEquals("operator performed computation during snapshot", beforeSnapShot, afterSnapShot);
        assertTrue(resultAtSnapshot.size() <= factor * numElementsFirst);
        // inject the remaining elements - these should not influence the snapshot
        for (int i = numElementsFirst; i < numElements; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
        testHarness.close();
        // re-create the operator and restore the state
        op = new AccumulatingProcessingTimeWindowOperator<>(validatingIdentityProcessFunction, identitySelector, IntSerializer.INSTANCE, IntSerializer.INSTANCE, windowSize, windowSlide);
        testHarness = new OneInputStreamOperatorTestHarness<>(op);
        testHarness.setup();
        testHarness.restore(state);
        testHarness.open();
        // inject the remaining elements again
        for (int i = numElementsFirst; i < numElements; i++) {
            testHarness.processElement(new StreamRecord<>(i));
        }
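        // advance processing time in slide-sized (50 ms) steps so that every
        // sliding window pending at the snapshot fires and emits its elements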
        testHarness.setProcessingTime(50);
        testHarness.setProcessingTime(100);
        testHarness.setProcessingTime(150);
        testHarness.setProcessingTime(200);
        testHarness.setProcessingTime(250);
        testHarness.setProcessingTime(300);
        testHarness.setProcessingTime(350);
        // get and verify the result
        List<Integer> finalResult = new ArrayList<>(resultAtSnapshot);
        List<Integer> finalPartialResult = extractFromStreamRecords(testHarness.getOutput());
        finalResult.addAll(finalPartialResult);
        assertEquals(factor * numElements, finalResult.size());
        Collections.sort(finalResult);
        for (int i = 0; i < factor * numElements; i++) {
            assertEquals(i / factor, finalResult.get(i).intValue());
        }
        testHarness.close();
        op.dispose();
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: ArrayList (java.util.ArrayList), KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness), OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness), StreamStateHandle (org.apache.flink.runtime.state.StreamStateHandle), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
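
The only difference from Example 6 is that the operator wraps validatingIdentityProcessFunction rather than validatingIdentityFunction. By analogy with the window-function sketch above, it plausibly looks like the following; the exact ProcessWindowFunction signature is an assumption.

// Sketch (assumed): the same validate-and-forward logic, expressed as a ProcessWindowFunction.
private final ProcessWindowFunction<Integer, Integer, Integer, TimeWindow> validatingIdentityProcessFunction =
        new ProcessWindowFunction<Integer, Integer, Integer, TimeWindow>() {
            @Override
            public void process(Integer key, Context context, Iterable<Integer> values, Collector<Integer> out) {
                for (Integer val : values) {
                    assertEquals(key, val);
                    out.collect(val);
                }
            }
        };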

Example 9 with OneInputStreamOperatorTestHarness

Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in the Apache Flink project.

From the class CassandraTupleWriteAheadSinkTest, method testAckLoopExitOnException().

@Test(timeout = 20000)
public void testAckLoopExitOnException() throws Exception {
    final AtomicReference<Runnable> runnableFuture = new AtomicReference<>();
    final ClusterBuilder clusterBuilder = new ClusterBuilder() {

        private static final long serialVersionUID = 4624400760492936756L;

        @Override
        protected Cluster buildCluster(Cluster.Builder builder) {
            try {
                BoundStatement boundStatement = mock(BoundStatement.class);
                when(boundStatement.setDefaultTimestamp(any(long.class))).thenReturn(boundStatement);
                PreparedStatement preparedStatement = mock(PreparedStatement.class);
                when(preparedStatement.bind(Matchers.anyVararg())).thenReturn(boundStatement);
                ResultSetFuture future = mock(ResultSetFuture.class);
                when(future.get()).thenThrow(new RuntimeException("Expected exception."));
                doAnswer(new Answer<Void>() {

                    @Override
                    public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
                        synchronized (runnableFuture) {
                            runnableFuture.set((Runnable) invocationOnMock.getArguments()[0]);
                            runnableFuture.notifyAll();
                        }
                        return null;
                    }
                }).when(future).addListener(any(Runnable.class), any(Executor.class));
                Session session = mock(Session.class);
                when(session.prepare(anyString())).thenReturn(preparedStatement);
                when(session.executeAsync(any(BoundStatement.class))).thenReturn(future);
                Cluster cluster = mock(Cluster.class);
                when(cluster.connect()).thenReturn(session);
                return cluster;
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    };
    // Our asynchronous executor thread
    new Thread(new Runnable() {

        @Override
        public void run() {
            synchronized (runnableFuture) {
                while (runnableFuture.get() == null) {
                    try {
                        runnableFuture.wait();
                    } catch (InterruptedException e) {
                        // ignore interrupts
                    }
                }
            }
            runnableFuture.get().run();
        }
    }).start();
    CheckpointCommitter cc = mock(CheckpointCommitter.class);
    final CassandraTupleWriteAheadSink<Tuple0> sink = new CassandraTupleWriteAheadSink<>("abc", TupleTypeInfo.of(Tuple0.class).createSerializer(new ExecutionConfig()), clusterBuilder, cc);
    OneInputStreamOperatorTestHarness<Tuple0, Tuple0> harness = new OneInputStreamOperatorTestHarness<>(sink);
    harness.getEnvironment().getTaskConfiguration().setBoolean("checkpointing", true);
    harness.setup();
    sink.open();
    // we should leave the loop and return false since we've seen an exception
    assertFalse(sink.sendValues(Collections.singleton(new Tuple0()), 0L));
    sink.close();
}
Also used: ResultSetFuture (com.datastax.driver.core.ResultSetFuture), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), CheckpointCommitter (org.apache.flink.streaming.runtime.operators.CheckpointCommitter), Executor (java.util.concurrent.Executor), Cluster (com.datastax.driver.core.Cluster), AtomicReference (java.util.concurrent.atomic.AtomicReference), PreparedStatement (com.datastax.driver.core.PreparedStatement), OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness), Tuple0 (org.apache.flink.api.java.tuple.Tuple0), InvocationOnMock (org.mockito.invocation.InvocationOnMock), BoundStatement (com.datastax.driver.core.BoundStatement), Session (com.datastax.driver.core.Session), Test (org.junit.Test)
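
The heart of this test is the mocked ResultSetFuture: rather than completing, it hands its completion callback to the test, which runs it on a helper thread to mimic the driver delivering the (failing) result asynchronously. Isolated from the rest of the setup, the capture pattern looks like this (names are local to the sketch):

// Capture the Runnable the sink registers on the future; running it later
// simulates the driver's asynchronous completion of the failing request.
final AtomicReference<Runnable> callback = new AtomicReference<>();
ResultSetFuture future = mock(ResultSetFuture.class);
when(future.get()).thenThrow(new RuntimeException("Expected exception."));
doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
        synchronized (callback) {
            callback.set((Runnable) invocation.getArguments()[0]);
            callback.notifyAll();
        }
        return null;
    }
}).when(future).addListener(any(Runnable.class), any(Executor.class));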

Example 10 with OneInputStreamOperatorTestHarness

Use of org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness in the Apache Flink project.

From the class ElasticsearchSinkBaseTest, method testBulkFailureRethrownOnOnCheckpointAfterFlush().

/**
 * Tests that a bulk failure in the listener callbacks, caused by the flush from an
 * immediately following checkpoint, is rethrown. The timeout guards against the test
 * hanging if the logic is broken.
 */
@Test(timeout = 5000)
public void testBulkFailureRethrownOnOnCheckpointAfterFlush() throws Throwable {
    final DummyElasticsearchSink<String> sink = new DummyElasticsearchSink<>(new HashMap<String, String>(), new SimpleSinkFunction<String>(), new NoOpFailureHandler());
    final OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));
    testHarness.open();
    // set up the next bulk request and let it succeed
    sink.setMockItemFailuresListForNextBulkItemResponses(Collections.singletonList((Exception) null));
    testHarness.processElement(new StreamRecord<>("msg-1"));
    verify(sink.getMockBulkProcessor(), times(1)).add(any(ActionRequest.class));
    // manually execute the next bulk request
    sink.manualBulkRequestWithAllPendingRequests();
    // setup the requests to be flushed in the snapshot
    testHarness.processElement(new StreamRecord<>("msg-2"));
    testHarness.processElement(new StreamRecord<>("msg-3"));
    verify(sink.getMockBulkProcessor(), times(3)).add(any(ActionRequest.class));
    CheckedThread snapshotThread = new CheckedThread() {

        @Override
        public void go() throws Exception {
            testHarness.snapshot(1L, 1000L);
        }
    };
    snapshotThread.start();
    // wait until the snapshot thread blocks, just before the snapshot triggers the flush
    while (snapshotThread.getState() != Thread.State.WAITING) {
        Thread.sleep(10);
    }
    // for the snapshot-triggered flush, we let the bulk request fail completely
    sink.setFailNextBulkRequestCompletely(new Exception("artificial failure for bulk request"));
    // let the snapshot-triggered flush continue (bulk request should fail completely)
    sink.continueFlush();
    try {
        snapshotThread.sync();
    } catch (Exception e) {
        // the snapshot should have failed with the bulk request failure
        Assert.assertTrue(e.getCause().getCause().getMessage().contains("artificial failure for bulk request"));
        // test succeeded
        return;
    }
    Assert.fail("the snapshot should have failed with the bulk request failure");
}
Also used: NoOpFailureHandler (org.apache.flink.streaming.connectors.elasticsearch.util.NoOpFailureHandler), OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness), CheckedThread (org.apache.flink.core.testutils.CheckedThread), ActionRequest (org.elasticsearch.action.ActionRequest), Test (org.junit.Test)
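
The snapshot must run on its own thread because it blocks until the in-flight bulk request is flushed. Flink's CheckedThread (from org.apache.flink.core.testutils, used above) makes such a thread safe to assert on: go() runs on the spawned thread, and sync() joins it and rethrows whatever go() threw. A minimal sketch of the pattern:

CheckedThread worker = new CheckedThread() {
    @Override
    public void go() throws Exception {
        // the blocking call under test, e.g. testHarness.snapshot(1L, 1000L)
    }
};
worker.start();
// ... unblock the operation from the test's main thread ...
worker.sync(); // joins and rethrows any exception raised in go()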

Aggregations

OneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness): 38 uses
Test (org.junit.Test): 36 uses
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 10 uses
ArrayList (java.util.ArrayList): 9 uses
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 8 uses
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 7 uses
ActionRequest (org.elasticsearch.action.ActionRequest): 7 uses
StreamStateHandle (org.apache.flink.runtime.state.StreamStateHandle): 6 uses
NoOpFailureHandler (org.apache.flink.streaming.connectors.elasticsearch.util.NoOpFailureHandler): 6 uses
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 6 uses
OperatorStateHandles (org.apache.flink.streaming.runtime.tasks.OperatorStateHandles): 6 uses
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 6 uses
CheckedThread (org.apache.flink.core.testutils.CheckedThread): 5 uses
ContinuousFileReaderOperator (org.apache.flink.streaming.api.functions.source.ContinuousFileReaderOperator): 5 uses
TimestampedFileInputSplit (org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit): 5 uses
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 5 uses
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 4 uses
FileInputSplit (org.apache.flink.core.fs.FileInputSplit): 4 uses
Path (org.apache.flink.core.fs.Path): 4 uses
Callback (org.apache.kafka.clients.producer.Callback): 3 uses