Example 6 with NodeOperation

Use of io.crate.operation.NodeOperation in project crate by crate.

The class ExecutionPhasesTask, method setupContext.

private void setupContext(Map<String, Collection<NodeOperation>> operationByServer, List<ExecutionPhase> handlerPhases, List<BatchConsumer> handlerConsumers) throws Throwable {
    assert handlerPhases.size() == handlerConsumers.size() : "handlerPhases size must match handlerConsumers size";
    String localNodeId = clusterService.localNode().getId();
    Collection<NodeOperation> localNodeOperations = operationByServer.remove(localNodeId);
    if (localNodeOperations == null) {
        localNodeOperations = Collections.emptyList();
    }
    // + 1 for localJobContext which is always created
    InitializationTracker initializationTracker = new InitializationTracker(operationByServer.size() + 1);
    List<Tuple<ExecutionPhase, BatchConsumer>> handlerPhaseAndReceiver = createHandlerPhaseAndReceivers(handlerPhases, handlerConsumers, initializationTracker);
    JobExecutionContext.Builder builder = jobContextService.newBuilder(jobId(), localNodeId, operationByServer.keySet());
    List<CompletableFuture<Bucket>> directResponseFutures = contextPreparer.prepareOnHandler(localNodeOperations, builder, handlerPhaseAndReceiver, new SharedShardContexts(indicesService));
    JobExecutionContext localJobContext = jobContextService.createContext(builder);
    List<PageBucketReceiver> pageBucketReceivers = getHandlerBucketReceivers(localJobContext, handlerPhaseAndReceiver);
    int bucketIdx = 0;
    /*
     * If you touch anything here make sure the following tests pass with > 1k iterations:
     *
     * Seed: 112E1807417E925A - testInvalidPatternSyntax
     * Seed: Any              - testRegularSelectWithFewAvailableThreadsShouldNeverGetStuck
     * Seed: CC456FF5004F35D3 - testFailureOfJoinDownstream
     */
    if (!localNodeOperations.isEmpty() && !directResponseFutures.isEmpty()) {
        CompletableFutures.allAsList(directResponseFutures).whenComplete(new SetBucketCallback(pageBucketReceivers, bucketIdx, initializationTracker));
        bucketIdx++;
        try {
            // initializationTracker for localNodeOperations is triggered via SetBucketCallback
            localJobContext.start();
        } catch (Throwable t) {
            accountFailureForRemoteOperations(operationByServer, initializationTracker, handlerPhaseAndReceiver, t);
            return;
        }
    } else {
        try {
            localJobContext.start();
            initializationTracker.jobInitialized();
        } catch (Throwable t) {
            initializationTracker.jobInitializationFailed(t);
            accountFailureForRemoteOperations(operationByServer, initializationTracker, handlerPhaseAndReceiver, t);
            return;
        }
    }
    sendJobRequests(localNodeId, operationByServer, pageBucketReceivers, handlerPhaseAndReceiver, bucketIdx, initializationTracker);
}
Also used: NodeOperation (io.crate.operation.NodeOperation), CompletableFuture (java.util.concurrent.CompletableFuture), SharedShardContexts (io.crate.action.job.SharedShardContexts), Tuple (org.elasticsearch.common.collect.Tuple)
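The bookkeeping to note in setupContext is the InitializationTracker: it is created with operationByServer.size() + 1, one slot per remote server plus one for the local job context, and every success or failure path above reports back to it exactly once. The stand-alone sketch below illustrates that counting scheme; SimpleInitializationTracker and its future() method are hypothetical names used for illustration only, not crate's actual InitializationTracker API.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical, simplified stand-in for crate's InitializationTracker, shown only to
// illustrate the "operationByServer.size() + 1" sizing used in setupContext.
final class SimpleInitializationTracker {

    private final AtomicInteger pending;
    private final CompletableFuture<Void> allInitialized = new CompletableFuture<>();

    SimpleInitializationTracker(int expectedInitializations) {
        // e.g. operationByServer.size() + 1: one per remote server, plus the local job context
        this.pending = new AtomicInteger(expectedInitializations);
    }

    // Called once per job context that started successfully.
    void jobInitialized() {
        if (pending.decrementAndGet() == 0) {
            allInitialized.complete(null);
        }
    }

    // Called when a job context failed to start; fails the combined future immediately.
    void jobInitializationFailed(Throwable t) {
        allInitialized.completeExceptionally(t);
    }

    CompletableFuture<Void> future() {
        return allInitialized;
    }
}

Mirroring the example above, such a tracker would be sized as new SimpleInitializationTracker(operationByServer.size() + 1), ticked once for the local context and, presumably, once per remote server as its job request is acknowledged or fails.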

Example 7 with NodeOperation

Use of io.crate.operation.NodeOperation in project crate by crate.

The class ExecutionPhasesTask, method executeBulk.

@Override
public List<CompletableFuture<Long>> executeBulk() {
    FluentIterable<NodeOperation> nodeOperations = FluentIterable.from(nodeOperationTrees).transformAndConcat(new Function<NodeOperationTree, Iterable<? extends NodeOperation>>() {

        @Nullable
        @Override
        public Iterable<? extends NodeOperation> apply(NodeOperationTree input) {
            return input.nodeOperations();
        }
    });
    Map<String, Collection<NodeOperation>> operationByServer = NodeOperationGrouper.groupByServer(nodeOperations);
    List<ExecutionPhase> handlerPhases = new ArrayList<>(nodeOperationTrees.size());
    List<BatchConsumer> handlerConsumers = new ArrayList<>(nodeOperationTrees.size());
    List<CompletableFuture<Long>> results = new ArrayList<>(nodeOperationTrees.size());
    for (NodeOperationTree nodeOperationTree : nodeOperationTrees) {
        CollectingBatchConsumer<?, Long> consumer = new CollectingBatchConsumer<>(Collectors.collectingAndThen(Collectors.summingLong(r -> ((long) r.get(0))), sum -> sum));
        handlerConsumers.add(consumer);
        results.add(consumer.resultFuture());
        handlerPhases.add(nodeOperationTree.leaf());
    }
    try {
        setupContext(operationByServer, handlerPhases, handlerConsumers);
    } catch (Throwable throwable) {
        return Collections.singletonList(CompletableFutures.failedFuture(throwable));
    }
    return results;
}
Also used: java.util (java.util), SharedShardContexts (io.crate.action.job.SharedShardContexts), ExecutionPhase (io.crate.planner.node.ExecutionPhase), CompletableFuture (java.util.concurrent.CompletableFuture), TransportKillJobsNodeAction (io.crate.executor.transport.kill.TransportKillJobsNodeAction), ContextPreparer (io.crate.action.job.ContextPreparer), FluentIterable (com.google.common.collect.FluentIterable), BatchConsumer (io.crate.data.BatchConsumer), ClusterService (org.elasticsearch.cluster.ClusterService), IndicesService (org.elasticsearch.indices.IndicesService), io.crate.jobs (io.crate.jobs), ESLogger (org.elasticsearch.common.logging.ESLogger), Nullable (javax.annotation.Nullable), NodeOperation (io.crate.operation.NodeOperation), Loggers (org.elasticsearch.common.logging.Loggers), TransportJobAction (io.crate.action.job.TransportJobAction), Bucket (io.crate.data.Bucket), Function (com.google.common.base.Function), CompletableFutures (io.crate.concurrent.CompletableFutures), Collectors (java.util.stream.Collectors), JobRequest (io.crate.action.job.JobRequest), NodeOperationGrouper (io.crate.planner.node.NodeOperationGrouper), Row (io.crate.data.Row), CollectingBatchConsumer (io.crate.data.CollectingBatchConsumer), ExecutionPhases (io.crate.planner.node.ExecutionPhases), NodeOperationTree (io.crate.operation.NodeOperationTree), Tuple (org.elasticsearch.common.collect.Tuple), JobTask (io.crate.executor.JobTask)
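executeBulk builds one CollectingBatchConsumer per NodeOperationTree, each summing the first column of its result rows into a Long row count, and returns the matching list of CompletableFuture<Long>, one future per operation tree (i.e. per bulk statement). A caller that wants a single combined row count could join those futures roughly as sketched below; BulkResultJoiner and sumRowCounts are hypothetical names for illustration, not part of crate's API.

import java.util.List;
import java.util.concurrent.CompletableFuture;

// Hypothetical consumer of the futures returned by executeBulk(): wait for all
// per-statement row counts, then sum them into a single total.
final class BulkResultJoiner {

    static CompletableFuture<Long> sumRowCounts(List<CompletableFuture<Long>> results) {
        return CompletableFuture
            .allOf(results.toArray(new CompletableFuture[0]))
            .thenApply(ignored -> results.stream()
                .mapToLong(CompletableFuture::join) // safe: allOf has already completed them
                .sum());
    }
}

Note that executeBulk reports a setup failure by returning a single failed future, so a caller combining the results this way would see that failure surface through the combined future as well.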

Aggregations

NodeOperation (io.crate.operation.NodeOperation): 7
SharedShardContexts (io.crate.action.job.SharedShardContexts): 3
ExecutionPhase (io.crate.planner.node.ExecutionPhase): 3
CompletableFuture (java.util.concurrent.CompletableFuture): 3
ImmutableList (com.google.common.collect.ImmutableList): 2
ContextPreparer (io.crate.action.job.ContextPreparer): 2
Routing (io.crate.metadata.Routing): 2
MergePhase (io.crate.planner.node.dql.MergePhase): 2
RoutedCollectPhase (io.crate.planner.node.dql.RoutedCollectPhase): 2
Tuple (org.elasticsearch.common.collect.Tuple): 2
IndicesService (org.elasticsearch.indices.IndicesService): 2
Function (com.google.common.base.Function): 1
FluentIterable (com.google.common.collect.FluentIterable): 1
JobRequest (io.crate.action.job.JobRequest): 1
TransportJobAction (io.crate.action.job.TransportJobAction): 1
CompletableFutures (io.crate.concurrent.CompletableFutures): 1
BatchConsumer (io.crate.data.BatchConsumer): 1
Bucket (io.crate.data.Bucket): 1
CollectingBatchConsumer (io.crate.data.CollectingBatchConsumer): 1
Row (io.crate.data.Row): 1