Example 16 with TimeoutException

use of java.util.concurrent.TimeoutException in project flink by apache.

the class AsyncWaitOperator method processElement.

@Override
public void processElement(StreamRecord<IN> element) throws Exception {
    final StreamRecordQueueEntry<OUT> streamRecordBufferEntry = new StreamRecordQueueEntry<>(element);
    if (timeout > 0L) {
        // register a timeout for this StreamRecordQueueEntry
        long timeoutTimestamp = timeout + getProcessingTimeService().getCurrentProcessingTime();
        final ScheduledFuture<?> timerFuture = getProcessingTimeService().registerTimer(timeoutTimestamp, new ProcessingTimeCallback() {

            @Override
            public void onProcessingTime(long timestamp) throws Exception {
                streamRecordBufferEntry.collect(new TimeoutException("Async function call has timed out."));
            }
        });
        // Cancel the timer once we've completed the stream record buffer entry. This will remove
        // the registered trigger task.
        streamRecordBufferEntry.onComplete(new AcceptFunction<StreamElementQueueEntry<Collection<OUT>>>() {

            @Override
            public void accept(StreamElementQueueEntry<Collection<OUT>> value) {
                timerFuture.cancel(true);
            }
        }, executor);
    }
    addAsyncBufferEntry(streamRecordBufferEntry);
    userFunction.asyncInvoke(element.getValue(), streamRecordBufferEntry);
}
Also used : StreamElementQueueEntry(org.apache.flink.streaming.api.operators.async.queue.StreamElementQueueEntry) TimeoutException(java.util.concurrent.TimeoutException) ProcessingTimeCallback(org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback) Collection(java.util.Collection) StreamRecordQueueEntry(org.apache.flink.streaming.api.operators.async.queue.StreamRecordQueueEntry)
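
The pattern above is: schedule a processing-time timer that completes the pending entry exceptionally with a TimeoutException, and cancel that timer as soon as the entry completes on its own. As a rough, stand-alone illustration only (not Flink code), the same lifecycle can be sketched with plain JDK types, using a ScheduledExecutorService as a stand-in for the processing-time service and a CompletableFuture as a stand-in for the StreamRecordQueueEntry; the class and method names below are made up for the example.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class TimeoutRegistrationSketch {

    private static final ScheduledExecutorService TIMER =
            Executors.newSingleThreadScheduledExecutor();

    /**
     * Completes the pending result with a TimeoutException if it is still open when
     * the timer fires, and cancels the timer as soon as the result completes first.
     */
    static <T> CompletableFuture<T> withTimeout(CompletableFuture<T> result, long timeoutMillis) {
        final ScheduledFuture<?> timer = TIMER.schedule(
                () -> result.completeExceptionally(new TimeoutException("Async function call has timed out.")),
                timeoutMillis,
                TimeUnit.MILLISECONDS);
        // mirror of streamRecordBufferEntry.onComplete(...): drop the trigger task once done
        result.whenComplete((value, error) -> timer.cancel(true));
        return result;
    }

    public static void main(String[] args) throws Exception {
        CompletableFuture<String> pending = new CompletableFuture<>();
        withTimeout(pending, 100L);
        try {
            pending.get();
        } catch (Exception e) {
            // get() wraps the timeout in an ExecutionException; the cause is the TimeoutException
            System.out.println("Completed with: " + e.getCause());
        } finally {
            TIMER.shutdownNow();
        }
    }
}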

Example 17 with TimeoutException

use of java.util.concurrent.TimeoutException in project flink by apache.

the class AsyncWaitOperatorTest method testAsyncTimeout.

@Test
public void testAsyncTimeout() throws Exception {
    final long timeout = 10L;
    final AsyncWaitOperator<Integer, Integer> operator = new AsyncWaitOperator<>(new LazyAsyncFunction(), timeout, 2, AsyncDataStream.OutputMode.ORDERED);
    final Environment mockEnvironment = mock(Environment.class);
    final Configuration taskConfiguration = new Configuration();
    final ExecutionConfig executionConfig = new ExecutionConfig();
    final TaskMetricGroup metricGroup = new UnregisteredTaskMetricsGroup();
    final TaskManagerRuntimeInfo taskManagerRuntimeInfo = new TestingTaskManagerRuntimeInfo();
    final TaskInfo taskInfo = new TaskInfo("foobarTask", 1, 0, 1, 1);
    when(mockEnvironment.getTaskConfiguration()).thenReturn(taskConfiguration);
    when(mockEnvironment.getExecutionConfig()).thenReturn(executionConfig);
    when(mockEnvironment.getMetricGroup()).thenReturn(metricGroup);
    when(mockEnvironment.getTaskManagerInfo()).thenReturn(taskManagerRuntimeInfo);
    when(mockEnvironment.getTaskInfo()).thenReturn(taskInfo);
    when(mockEnvironment.getUserClassLoader()).thenReturn(AsyncWaitOperatorTest.class.getClassLoader());
    final OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(operator, IntSerializer.INSTANCE, mockEnvironment);
    final long initialTime = 0L;
    final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.setProcessingTime(initialTime);
    synchronized (testHarness.getCheckpointLock()) {
        testHarness.processElement(new StreamRecord<>(1, initialTime));
        testHarness.setProcessingTime(initialTime + 5L);
        testHarness.processElement(new StreamRecord<>(2, initialTime + 5L));
    }
    // trigger the timeout of the first stream record
    testHarness.setProcessingTime(initialTime + timeout + 1L);
    // allow the second async stream record to be processed
    LazyAsyncFunction.countDown();
    // wait until all async collectors in the buffer have been emitted out.
    synchronized (testHarness.getCheckpointLock()) {
        testHarness.close();
    }
    expectedOutput.add(new StreamRecord<>(2, initialTime + 5L));
    TestHarnessUtil.assertOutputEquals("Output with watermark was not correct.", expectedOutput, testHarness.getOutput());
    ArgumentCaptor<Throwable> argumentCaptor = ArgumentCaptor.forClass(Throwable.class);
    verify(mockEnvironment).failExternally(argumentCaptor.capture());
    Throwable failureCause = argumentCaptor.getValue();
    Assert.assertNotNull(failureCause.getCause());
    Assert.assertTrue(failureCause.getCause() instanceof ExecutionException);
    Assert.assertNotNull(failureCause.getCause().getCause());
    Assert.assertTrue(failureCause.getCause().getCause() instanceof TimeoutException);
}
Also used : UnregisteredTaskMetricsGroup(org.apache.flink.runtime.operators.testutils.UnregisteredTaskMetricsGroup) Configuration(org.apache.flink.configuration.Configuration) TestingTaskManagerRuntimeInfo(org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo) TaskManagerRuntimeInfo(org.apache.flink.runtime.taskmanager.TaskManagerRuntimeInfo) TaskMetricGroup(org.apache.flink.runtime.metrics.groups.TaskMetricGroup) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness) TaskInfo(org.apache.flink.api.common.TaskInfo) Environment(org.apache.flink.runtime.execution.Environment) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) StreamMockEnvironment(org.apache.flink.streaming.runtime.tasks.StreamMockEnvironment) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) ExecutionException(java.util.concurrent.ExecutionException) TimeoutException(java.util.concurrent.TimeoutException) Test(org.junit.Test)
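
The test asserts the exact nesting of the reported failure: an outer wrapper, then an ExecutionException, then the TimeoutException. A less position-sensitive way to make the same check is to walk the cause chain for the expected type. The sketch below is not part of the Flink test suite; the class and method names are made up for the illustration.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import org.junit.Assert;
import org.junit.Test;

public class CauseChainAssertionSketch {

    /** Returns true if any exception in the cause chain is of the expected type. */
    private static boolean containsCause(Throwable root, Class<? extends Throwable> expected) {
        for (Throwable current = root; current != null; current = current.getCause()) {
            if (expected.isInstance(current)) {
                return true;
            }
        }
        return false;
    }

    @Test
    public void timeoutIsSomewhereInTheCauseChain() {
        // same nesting as in the test above: wrapper -> ExecutionException -> TimeoutException
        Throwable reported = new Exception(
                new ExecutionException(new TimeoutException("Async function call has timed out.")));

        Assert.assertTrue(containsCause(reported, TimeoutException.class));
        Assert.assertFalse(containsCause(reported, IllegalStateException.class));
    }
}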

Example 18 with TimeoutException

use of java.util.concurrent.TimeoutException in project flink by apache.

the class HttpTestClient method sendRequest.

/**
	 * Sends a request to the server.
	 *
	 * <pre>
	 * HttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/overview");
	 * request.headers().set(HttpHeaders.Names.HOST, host);
	 * request.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.CLOSE);
	 *
	 * sendRequest(request);
	 * </pre>
	 *
	 * @param request The {@link HttpRequest} to send to the server
	 * @param timeout Timeout for establishing the connection
	 */
public void sendRequest(HttpRequest request, FiniteDuration timeout) throws InterruptedException, TimeoutException {
    LOG.debug("Writing {}.", request);
    // Make the connection attempt.
    ChannelFuture connect = bootstrap.connect(host, port);
    Channel channel;
    if (connect.await(timeout.toMillis(), TimeUnit.MILLISECONDS)) {
        channel = connect.channel();
    } else {
        throw new TimeoutException("Connection failed");
    }
    channel.writeAndFlush(request);
}
Also used : ChannelFuture(io.netty.channel.ChannelFuture) NioSocketChannel(io.netty.channel.socket.nio.NioSocketChannel) SocketChannel(io.netty.channel.socket.SocketChannel) Channel(io.netty.channel.Channel) TimeoutException(java.util.concurrent.TimeoutException)
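
The shape of sendRequest is: a bounded await that reports success as a boolean, which the caller converts into a TimeoutException when the deadline is missed. A minimal stand-alone sketch of that same shape, using a CountDownLatch instead of a Netty ChannelFuture (the names below are illustrative, not Flink or Netty APIs):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class AwaitOrThrowSketch {

    /**
     * Waits for the latch at most timeoutMillis and converts a missed deadline into a
     * TimeoutException, just as sendRequest does for ChannelFuture#await.
     */
    static void awaitOrThrow(CountDownLatch done, long timeoutMillis)
            throws InterruptedException, TimeoutException {
        if (!done.await(timeoutMillis, TimeUnit.MILLISECONDS)) {
            throw new TimeoutException("Connection failed");
        }
    }

    public static void main(String[] args) throws Exception {
        CountDownLatch neverCompleted = new CountDownLatch(1);
        try {
            awaitOrThrow(neverCompleted, 100L);
        } catch (TimeoutException e) {
            System.out.println("Timed out as expected: " + e.getMessage());
        }
    }
}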

Example 19 with TimeoutException

use of java.util.concurrent.TimeoutException in project flink by apache.

the class RescalingITCase method testSavepointRescalingPartitionedOperatorState.

/**
	 * Tests rescaling of partitioned operator state. More specifically, we test the mechanism with {@link ListCheckpointed},
	 * as it subsumes {@link org.apache.flink.streaming.api.checkpoint.CheckpointedFunction}.
	 */
public void testSavepointRescalingPartitionedOperatorState(boolean scaleOut, OperatorCheckpointMethod checkpointMethod) throws Exception {
    final int parallelism = scaleOut ? numSlots : numSlots / 2;
    final int parallelism2 = scaleOut ? numSlots / 2 : numSlots;
    final int maxParallelism = 13;
    FiniteDuration timeout = new FiniteDuration(3, TimeUnit.MINUTES);
    Deadline deadline = timeout.fromNow();
    JobID jobID = null;
    ActorGateway jobManager = null;
    int counterSize = Math.max(parallelism, parallelism2);
    if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION || checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {
        PartitionedStateSource.CHECK_CORRECT_SNAPSHOT = new int[counterSize];
        PartitionedStateSource.CHECK_CORRECT_RESTORE = new int[counterSize];
    } else {
        PartitionedStateSourceListCheckpointed.CHECK_CORRECT_SNAPSHOT = new int[counterSize];
        PartitionedStateSourceListCheckpointed.CHECK_CORRECT_RESTORE = new int[counterSize];
    }
    try {
        jobManager = cluster.getLeaderGateway(deadline.timeLeft());
        JobGraph jobGraph = createJobGraphWithOperatorState(parallelism, maxParallelism, checkpointMethod);
        jobID = jobGraph.getJobID();
        cluster.submitJobDetached(jobGraph);
        Object savepointResponse = null;
        // wait until the operator is started
        StateSourceBase.workStartedLatch.await();
        while (deadline.hasTimeLeft()) {
            Future<Object> savepointPathFuture = jobManager.ask(new JobManagerMessages.TriggerSavepoint(jobID, Option.<String>empty()), deadline.timeLeft());
            FiniteDuration waitingTime = new FiniteDuration(10, TimeUnit.SECONDS);
            savepointResponse = Await.result(savepointPathFuture, waitingTime);
            if (savepointResponse instanceof JobManagerMessages.TriggerSavepointSuccess) {
                break;
            }
            System.out.println(savepointResponse);
        }
        assertTrue(savepointResponse instanceof JobManagerMessages.TriggerSavepointSuccess);
        final String savepointPath = ((JobManagerMessages.TriggerSavepointSuccess) savepointResponse).savepointPath();
        Future<Object> jobRemovedFuture = jobManager.ask(new TestingJobManagerMessages.NotifyWhenJobRemoved(jobID), deadline.timeLeft());
        Future<Object> cancellationResponseFuture = jobManager.ask(new JobManagerMessages.CancelJob(jobID), deadline.timeLeft());
        Object cancellationResponse = Await.result(cancellationResponseFuture, deadline.timeLeft());
        assertTrue(cancellationResponse instanceof JobManagerMessages.CancellationSuccess);
        Await.ready(jobRemovedFuture, deadline.timeLeft());
        // job successfully removed
        jobID = null;
        JobGraph scaledJobGraph = createJobGraphWithOperatorState(parallelism2, maxParallelism, checkpointMethod);
        scaledJobGraph.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(savepointPath));
        jobID = scaledJobGraph.getJobID();
        cluster.submitJobAndWait(scaledJobGraph, false);
        int sumExp = 0;
        int sumAct = 0;
        if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION) {
            for (int c : PartitionedStateSource.CHECK_CORRECT_SNAPSHOT) {
                sumExp += c;
            }
            for (int c : PartitionedStateSource.CHECK_CORRECT_RESTORE) {
                sumAct += c;
            }
        } else if (checkpointMethod == OperatorCheckpointMethod.CHECKPOINTED_FUNCTION_BROADCAST) {
            for (int c : PartitionedStateSource.CHECK_CORRECT_SNAPSHOT) {
                sumExp += c;
            }
            for (int c : PartitionedStateSource.CHECK_CORRECT_RESTORE) {
                sumAct += c;
            }
            sumExp *= parallelism2;
        } else {
            for (int c : PartitionedStateSourceListCheckpointed.CHECK_CORRECT_SNAPSHOT) {
                sumExp += c;
            }
            for (int c : PartitionedStateSourceListCheckpointed.CHECK_CORRECT_RESTORE) {
                sumAct += c;
            }
        }
        assertEquals(sumExp, sumAct);
        jobID = null;
    } finally {
        // clear any leftovers from a possibly failed job
        if (jobID != null && jobManager != null) {
            Future<Object> jobRemovedFuture = jobManager.ask(new TestingJobManagerMessages.NotifyWhenJobRemoved(jobID), timeout);
            try {
                Await.ready(jobRemovedFuture, timeout);
            } catch (TimeoutException | InterruptedException ie) {
                fail("Failed while cleaning up the cluster.");
            }
        }
    }
}
Also used : Deadline(scala.concurrent.duration.Deadline) JobManagerMessages(org.apache.flink.runtime.messages.JobManagerMessages) TestingJobManagerMessages(org.apache.flink.runtime.testingUtils.TestingJobManagerMessages) FiniteDuration(scala.concurrent.duration.FiniteDuration) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) ActorGateway(org.apache.flink.runtime.instance.ActorGateway) JobID(org.apache.flink.api.common.JobID) TimeoutException(java.util.concurrent.TimeoutException)
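
The timeout handling to notice here is the deadline-driven polling loop: each savepoint attempt is given the remaining time budget, and the test gives up once the overall deadline passes. As a plain-Java stand-in for Scala's Deadline and Await (all names below are made up for the illustration), the loop looks roughly like this:

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;

public class DeadlinePollingSketch {

    /**
     * Retries an attempt until it yields a non-null result or the absolute deadline
     * (in nanoTime units) passes, at which point a TimeoutException is thrown.
     */
    static <T> T retryUntilDeadline(Supplier<T> attempt, long deadlineNanos)
            throws InterruptedException, TimeoutException {
        while (System.nanoTime() < deadlineNanos) {
            T result = attempt.get();
            if (result != null) {
                return result;
            }
            Thread.sleep(50L); // brief backoff between attempts
        }
        throw new TimeoutException("Operation did not succeed before the deadline.");
    }

    public static void main(String[] args) throws Exception {
        long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(1);
        try {
            // an attempt that never succeeds, so the loop ends with a TimeoutException
            retryUntilDeadline(() -> null, deadline);
        } catch (TimeoutException e) {
            System.out.println("Gave up: " + e.getMessage());
        }
    }
}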

Example 20 with TimeoutException

use of java.util.concurrent.TimeoutException in project flink by apache.

the class SlotPoolRpcTest method testSlotAllocationNoResourceManager.

// ------------------------------------------------------------------------
//  tests
// ------------------------------------------------------------------------
@Test
public void testSlotAllocationNoResourceManager() throws Exception {
    final JobID jid = new JobID();
    final SlotPool pool = new SlotPool(
            rpcService,
            jid,
            SystemClock.getInstance(),
            Time.days(1),
            Time.days(1),
            // this is the timeout for the request tested here
            Time.milliseconds(100));
    pool.start(UUID.randomUUID());
    Future<SimpleSlot> future = pool.allocateSlot(mock(ScheduledUnit.class), DEFAULT_TESTING_PROFILE, null);
    try {
        future.get(4, TimeUnit.SECONDS);
        fail("We expected a ExecutionException.");
    } catch (ExecutionException e) {
        assertEquals(NoResourceAvailableException.class, e.getCause().getClass());
    } catch (TimeoutException e) {
        fail("future timed out rather than being failed");
    } catch (Exception e) {
        fail("wrong exception: " + e);
    }
}
Also used : ScheduledUnit (org.apache.flink.runtime.jobmanager.scheduler.ScheduledUnit) ExecutionException (java.util.concurrent.ExecutionException) NoResourceAvailableException (org.apache.flink.runtime.jobmanager.scheduler.NoResourceAvailableException) JobID (org.apache.flink.api.common.JobID) TimeoutException (java.util.concurrent.TimeoutException) Test (org.junit.Test)
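
The test leans on the difference between the two ways Future.get(timeout, unit) can fail: an ExecutionException means the future itself was failed (here with a NoResourceAvailableException as the cause), while a TimeoutException means get() gave up waiting on a still-pending future. A minimal, self-contained sketch of that distinction using CompletableFuture (not Flink code; the failure cause is just a placeholder):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class GetWithTimeoutSketch {

    public static void main(String[] args) throws InterruptedException {
        // a future that has already been failed: get() reports an ExecutionException
        CompletableFuture<String> failed = new CompletableFuture<>();
        failed.completeExceptionally(new IllegalStateException("no resource manager available"));

        try {
            failed.get(1, TimeUnit.SECONDS);
        } catch (ExecutionException e) {
            System.out.println("Failed with cause: " + e.getCause());
        } catch (TimeoutException e) {
            System.out.println("Unexpected timeout");
        }

        // a future that is still pending when the wait runs out: get() reports a TimeoutException
        CompletableFuture<String> pending = new CompletableFuture<>();
        try {
            pending.get(100, TimeUnit.MILLISECONDS);
        } catch (ExecutionException e) {
            System.out.println("Unexpected failure");
        } catch (TimeoutException e) {
            System.out.println("Timed out while the future was still pending");
        }
    }
}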

Aggregations

TimeoutException (java.util.concurrent.TimeoutException) 717
ExecutionException (java.util.concurrent.ExecutionException) 229
IOException (java.io.IOException) 167
Test (org.junit.Test) 131
CountDownLatch (java.util.concurrent.CountDownLatch) 73
ArrayList (java.util.ArrayList) 67
ExecutorService (java.util.concurrent.ExecutorService) 62
Future (java.util.concurrent.Future) 45
CancellationException (java.util.concurrent.CancellationException) 44
Test (org.testng.annotations.Test) 44
File (java.io.File) 34
List (java.util.List) 34
Map (java.util.Map) 32
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean) 32
HashMap (java.util.HashMap) 26
TimeUnit (java.util.concurrent.TimeUnit) 26
AtomicReference (java.util.concurrent.atomic.AtomicReference) 23
RejectedExecutionException (java.util.concurrent.RejectedExecutionException) 21
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 21
URI (java.net.URI) 20