
Example 56 with CompletionException

Use of java.util.concurrent.CompletionException in project flink by apache.

Class SchedulerBase, method triggerCheckpoint.

@Override
public CompletableFuture<String> triggerCheckpoint() {
    mainThreadExecutor.assertRunningInMainThread();
    final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator();
    final JobID jobID = jobGraph.getJobID();
    if (checkpointCoordinator == null) {
        throw new IllegalStateException(String.format("Job %s is not a streaming job.", jobID));
    }
    log.info("Triggering a manual checkpoint for job {}.", jobID);
    return checkpointCoordinator.triggerCheckpoint(false).thenApply(CompletedCheckpoint::getExternalPointer).handleAsync((path, throwable) -> {
        if (throwable != null) {
            throw new CompletionException(throwable);
        }
        return path;
    }, mainThreadExecutor);
}
Also used: CheckpointCoordinator (org.apache.flink.runtime.checkpoint.CheckpointCoordinator), CompletionException (java.util.concurrent.CompletionException), JobID (org.apache.flink.api.common.JobID)
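
A minimal, self-contained sketch of the pattern above (not Flink code; the class name and error message are made up): the handleAsync callback rethrows the incoming throwable wrapped in a CompletionException, so the returned future completes exceptionally and blocking callers recover the original error as the cause.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;

public class RethrowSketch {

    public static void main(String[] args) throws InterruptedException {
        // Stands in for the future returned by triggerCheckpoint(...).thenApply(...).
        CompletableFuture<String> upstream = new CompletableFuture<>();

        CompletableFuture<String> result = upstream.handleAsync((path, throwable) -> {
            if (throwable != null) {
                // CompletionException is unchecked, so it can be thrown from the BiFunction,
                // and CompletableFuture does not wrap it a second time.
                throw new CompletionException(throwable);
            }
            return path;
        });

        upstream.completeExceptionally(new IllegalStateException("checkpoint failed"));

        try {
            result.get();
        } catch (ExecutionException e) {
            // Prints the original error: java.lang.IllegalStateException: checkpoint failed
            System.out.println(e.getCause());
        }
    }
}

Rethrowing inside the callback, rather than swallowing the error and returning null, keeps the failure visible to whoever is waiting on the returned CompletableFuture<String>.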

Example 57 with CompletionException

Use of java.util.concurrent.CompletionException in project flink by apache.

Class SchedulerBase, method triggerSavepoint.

@Override
public CompletableFuture<String> triggerSavepoint(final String targetDirectory, final boolean cancelJob, final SavepointFormatType formatType) {
    mainThreadExecutor.assertRunningInMainThread();
    final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator();
    StopWithSavepointTerminationManager.checkSavepointActionPreconditions(checkpointCoordinator, targetDirectory, getJobId(), log);
    log.info("Triggering {}savepoint for job {}.", cancelJob ? "cancel-with-" : "", jobGraph.getJobID());
    if (cancelJob) {
        stopCheckpointScheduler();
    }
    return checkpointCoordinator.triggerSavepoint(targetDirectory, formatType).thenApply(CompletedCheckpoint::getExternalPointer).handleAsync((path, throwable) -> {
        if (throwable != null) {
            if (cancelJob) {
                startCheckpointScheduler();
            }
            throw new CompletionException(throwable);
        } else if (cancelJob) {
            log.info("Savepoint stored in {}. Now cancelling {}.", path, jobGraph.getJobID());
            cancel();
        }
        return path;
    }, mainThreadExecutor);
}
Also used: CheckpointCoordinator (org.apache.flink.runtime.checkpoint.CheckpointCoordinator), CompletionException (java.util.concurrent.CompletionException)

Example 58 with CompletionException

Use of java.util.concurrent.CompletionException in project flink by apache.

Class StateWithExecutionGraph, method triggerCheckpoint.

CompletableFuture<String> triggerCheckpoint() {
    final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator();
    final JobID jobID = executionGraph.getJobID();
    if (checkpointCoordinator == null) {
        throw new IllegalStateException(String.format("Job %s is not a streaming job.", jobID));
    }
    logger.info("Triggering a checkpoint for job {}.", jobID);
    return checkpointCoordinator.triggerCheckpoint(false).thenApply(CompletedCheckpoint::getExternalPointer).handleAsync((path, throwable) -> {
        if (throwable != null) {
            throw new CompletionException(throwable);
        }
        return path;
    }, context.getMainThreadExecutor());
}
Also used: CheckpointCoordinator (org.apache.flink.runtime.checkpoint.CheckpointCoordinator), CompletionException (java.util.concurrent.CompletionException), JobID (org.apache.flink.api.common.JobID)

Example 59 with CompletionException

Use of java.util.concurrent.CompletionException in project flink by apache.

Class ParameterToolTest, method testConcurrentExecutionConfigSerialization.

/**
 * Tests that we can concurrently serialize and access the ParameterTool. See FLINK-7943.
 */
@Test
public void testConcurrentExecutionConfigSerialization() throws ExecutionException, InterruptedException {
    final int numInputs = 10;
    Collection<String> input = new ArrayList<>(numInputs);
    for (int i = 0; i < numInputs; i++) {
        input.add("--" + UUID.randomUUID());
        input.add(UUID.randomUUID().toString());
    }
    final String[] args = input.toArray(new String[0]);
    final ParameterTool parameterTool = (ParameterTool) createParameterToolFromArgs(args);
    final int numThreads = 5;
    final int numSerializations = 100;
    final Collection<CompletableFuture<Void>> futures = new ArrayList<>(numSerializations);
    final ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
    try {
        for (int i = 0; i < numSerializations; i++) {
            futures.add(CompletableFuture.runAsync(() -> {
                try {
                    serializeDeserialize(parameterTool);
                } catch (Exception e) {
                    throw new CompletionException(e);
                }
            }, executorService));
        }
        for (CompletableFuture<Void> future : futures) {
            future.get();
        }
    } finally {
        executorService.shutdownNow();
        executorService.awaitTermination(1000L, TimeUnit.MILLISECONDS);
    }
}
Also used: ArrayList (java.util.ArrayList), IOException (java.io.IOException), CompletionException (java.util.concurrent.CompletionException), ExecutionException (java.util.concurrent.ExecutionException), CompletableFuture (java.util.concurrent.CompletableFuture), ExecutorService (java.util.concurrent.ExecutorService), Test (org.junit.Test)
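
The test above wraps checked exceptions for a different reason: the lambda passed to CompletableFuture.runAsync is a Runnable and cannot throw checked exceptions. A rough sketch of that pattern, with a hypothetical mightFail() helper standing in for serializeDeserialize(parameterTool):

import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class RunAsyncSketch {

    public static void main(String[] args) throws InterruptedException {
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        try {
            CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
                try {
                    mightFail();
                } catch (Exception e) {
                    // Re-throw as an unchecked CompletionException so the future fails.
                    throw new CompletionException(e);
                }
            }, executorService);

            future.get();
        } catch (ExecutionException e) {
            // The wrapped checked exception is available as the cause.
            System.out.println("Task failed: " + e.getCause());
        } finally {
            executorService.shutdownNow();
            executorService.awaitTermination(1000L, TimeUnit.MILLISECONDS);
        }
    }

    // Hypothetical stand-in for the serialization round trip in the test.
    private static void mightFail() throws IOException {
        throw new IOException("simulated serialization failure");
    }
}

This is also why the test only needs to call future.get() on each future: any exception thrown inside a worker surfaces there as an ExecutionException and fails the test.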

Example 60 with CompletionException

Use of java.util.concurrent.CompletionException in project flink by apache.

Class KubernetesStateHandleStore, method releaseAndTryRemoveAll.

/**
 * Remove all the state handle keys in the ConfigMap and discard the states.
 *
 * @throws Exception when removing the keys or discarding the state failed
 */
@Override
public void releaseAndTryRemoveAll() throws Exception {
    final List<RetrievableStateHandle<T>> validStateHandles = new ArrayList<>();
    kubeClient.checkAndUpdateConfigMap(configMapName, c -> {
        if (isValidOperation(c)) {
            final Map<String, String> updateData = new HashMap<>(c.getData());
            c.getData().entrySet().stream().filter(entry -> configMapKeyFilter.test(entry.getKey())).forEach(entry -> {
                try {
                    validStateHandles.add(deserializeObject(entry.getValue()));
                    updateData.remove(entry.getKey());
                } catch (IOException e) {
                    LOG.warn("ConfigMap {} contained corrupted data. Ignoring the key {}.", configMapName, entry.getKey());
                }
            });
            c.getData().clear();
            c.getData().putAll(updateData);
            return Optional.of(c);
        }
        return Optional.empty();
    }).whenComplete((succeed, ignore) -> {
        if (succeed) {
            Exception exception = null;
            for (RetrievableStateHandle<T> stateHandle : validStateHandles) {
                try {
                    stateHandle.discardState();
                } catch (Exception e) {
                    exception = ExceptionUtils.firstOrSuppressed(e, exception);
                }
            }
            if (exception != null) {
                throw new CompletionException(new KubernetesException("Could not properly remove all state handles.", exception));
            }
        }
    }).get();
}
Also used: Tuple2 (org.apache.flink.api.java.tuple.Tuple2), LoggerFactory (org.slf4j.LoggerFactory), StateHandleStoreUtils.deserialize (org.apache.flink.runtime.util.StateHandleStoreUtils.deserialize), ExceptionUtils (org.apache.flink.util.ExceptionUtils), HashMap (java.util.HashMap), AtomicReference (java.util.concurrent.atomic.AtomicReference), StringResourceVersion (org.apache.flink.runtime.persistence.StringResourceVersion), KubernetesConfigMap (org.apache.flink.kubernetes.kubeclient.resources.KubernetesConfigMap), ArrayList (java.util.ArrayList), Map (java.util.Map), StateHandleStore (org.apache.flink.runtime.persistence.StateHandleStore), Preconditions.checkNotNull (org.apache.flink.util.Preconditions.checkNotNull), Nullable (javax.annotation.Nullable), RetrievableStateHandle (org.apache.flink.runtime.state.RetrievableStateHandle), Logger (org.slf4j.Logger), Predicate (java.util.function.Predicate), Collection (java.util.Collection), IOException (java.io.IOException), CompletionException (java.util.concurrent.CompletionException), Collectors (java.util.stream.Collectors), Serializable (java.io.Serializable), Base64 (java.util.Base64), List (java.util.List), StateHandleStoreUtils.serializeOrDiscard (org.apache.flink.runtime.util.StateHandleStoreUtils.serializeOrDiscard), KubernetesException (org.apache.flink.kubernetes.kubeclient.resources.KubernetesException), Optional (java.util.Optional), PossibleInconsistentStateException (org.apache.flink.runtime.persistence.PossibleInconsistentStateException), RetrievableStateStorageHelper (org.apache.flink.runtime.persistence.RetrievableStateStorageHelper), KubernetesLeaderElector (org.apache.flink.kubernetes.kubeclient.resources.KubernetesLeaderElector), Collections (java.util.Collections), FlinkKubeClient (org.apache.flink.kubernetes.kubeclient.FlinkKubeClient)
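
Here the CompletionException is thrown from a whenComplete callback and immediately surfaced by the trailing get(), turning an asynchronous cleanup failure into an exception on the calling thread. A rough sketch of just that mechanism, with the Flink-specific types replaced by plain JDK ones:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;

public class WhenCompleteSketch {

    public static void main(String[] args) throws InterruptedException {
        try {
            CompletableFuture.completedFuture(Boolean.TRUE).whenComplete((succeeded, ignored) -> {
                if (succeeded) {
                    // Stand-in for "discarding the state handles failed partway through".
                    throw new CompletionException(
                            new IllegalStateException("Could not properly remove all state handles."));
                }
            }).get();
        } catch (ExecutionException e) {
            // The wrapped domain exception is the cause, ready to be rethrown by the caller.
            System.out.println(e.getCause());
        }
    }
}

Because releaseAndTryRemoveAll() is declared to throw Exception, the ExecutionException raised by get() simply propagates to the caller as a normal synchronous failure.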

Aggregations

CompletionException (java.util.concurrent.CompletionException): 199
Test (org.junit.Test): 80
CompletableFuture (java.util.concurrent.CompletableFuture): 62
List (java.util.List): 52
ArrayList (java.util.ArrayList): 51
IOException (java.io.IOException): 45
Map (java.util.Map): 39
Collection (java.util.Collection): 31
ExecutionException (java.util.concurrent.ExecutionException): 31
HashMap (java.util.HashMap): 30
Collections (java.util.Collections): 24
TimeUnit (java.util.concurrent.TimeUnit): 22
Collectors (java.util.stream.Collectors): 22
FlinkException (org.apache.flink.util.FlinkException): 22
Before (org.junit.Before): 21
Duration (java.time.Duration): 19
Arrays (java.util.Arrays): 19
BeforeClass (org.junit.BeforeClass): 19
ExecutorService (java.util.concurrent.ExecutorService): 18
Nonnull (javax.annotation.Nonnull): 17