Search in sources:

Example 1 with LValueConstraintExpression

Use of org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression in project asterixdb by apache.

Class FeedOperations, method combineIntakeCollectJobs:

private static JobSpecification combineIntakeCollectJobs(MetadataProvider metadataProvider, Feed feed, JobSpecification intakeJob, List<JobSpecification> jobsList, List<FeedConnection> feedConnections, String[] intakeLocations) throws AlgebricksException, HyracksDataException {
    JobSpecification jobSpec = new JobSpecification(intakeJob.getFrameSize());
    // copy ingestor
    FeedIntakeOperatorDescriptor firstOp = (FeedIntakeOperatorDescriptor) intakeJob.getOperatorMap().get(new OperatorDescriptorId(0));
    FeedIntakeOperatorDescriptor ingestionOp;
    if (firstOp.getAdaptorFactory() == null) {
        ingestionOp = new FeedIntakeOperatorDescriptor(jobSpec, feed, firstOp.getAdaptorLibraryName(), firstOp.getAdaptorFactoryClassName(), firstOp.getAdapterOutputType(), firstOp.getPolicyAccessor(), firstOp.getOutputRecordDescriptors()[0]);
    } else {
        ingestionOp = new FeedIntakeOperatorDescriptor(jobSpec, feed, firstOp.getAdaptorFactory(), firstOp.getAdapterOutputType(), firstOp.getPolicyAccessor(), firstOp.getOutputRecordDescriptors()[0]);
    }
    // create replicator
    ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(jobSpec, ingestionOp.getOutputRecordDescriptors()[0], jobsList.size());
    jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), ingestionOp, 0, replicateOp, 0);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, ingestionOp, intakeLocations);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, replicateOp, intakeLocations);
    // Loop over the jobs to copy operators and connections
    Map<OperatorDescriptorId, OperatorDescriptorId> operatorIdMapping = new HashMap<>();
    Map<ConnectorDescriptorId, ConnectorDescriptorId> connectorIdMapping = new HashMap<>();
    Map<OperatorDescriptorId, List<LocationConstraint>> operatorLocations = new HashMap<>();
    Map<OperatorDescriptorId, Integer> operatorCounts = new HashMap<>();
    List<JobId> jobIds = new ArrayList<>();
    FeedMetaOperatorDescriptor metaOp;
    for (int iter1 = 0; iter1 < jobsList.size(); iter1++) {
        FeedConnection curFeedConnection = feedConnections.get(iter1);
        JobSpecification subJob = jobsList.get(iter1);
        operatorIdMapping.clear();
        Map<OperatorDescriptorId, IOperatorDescriptor> operatorsMap = subJob.getOperatorMap();
        String datasetName = curFeedConnection.getDatasetName();
        FeedConnectionId feedConnectionId = new FeedConnectionId(ingestionOp.getEntityId(), datasetName);
        FeedPolicyEntity feedPolicyEntity = FeedMetadataUtil.validateIfPolicyExists(curFeedConnection.getDataverseName(), curFeedConnection.getPolicyName(), metadataProvider.getMetadataTxnContext());
        for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operatorsMap.entrySet()) {
            IOperatorDescriptor opDesc = entry.getValue();
            OperatorDescriptorId oldId = opDesc.getOperatorId();
            OperatorDescriptorId opId = null;
            if (opDesc instanceof LSMTreeInsertDeleteOperatorDescriptor && ((LSMTreeInsertDeleteOperatorDescriptor) opDesc).isPrimary()) {
                metaOp = new FeedMetaOperatorDescriptor(jobSpec, feedConnectionId, opDesc, feedPolicyEntity.getProperties(), FeedRuntimeType.STORE);
                opId = metaOp.getOperatorId();
                opDesc.setOperatorId(opId);
            } else {
                if (opDesc instanceof AlgebricksMetaOperatorDescriptor) {
                    AlgebricksMetaOperatorDescriptor algOp = (AlgebricksMetaOperatorDescriptor) opDesc;
                    IPushRuntimeFactory[] runtimeFactories = algOp.getPipeline().getRuntimeFactories();
                    // Tweak AssignOp to work with messages
                    if (runtimeFactories[0] instanceof AssignRuntimeFactory && runtimeFactories.length > 1) {
                        IConnectorDescriptor connectorDesc = subJob.getOperatorInputMap().get(opDesc.getOperatorId()).get(0);
                        // anything on the network interface needs to be message compatible
                        if (connectorDesc instanceof MToNPartitioningConnectorDescriptor) {
                            metaOp = new FeedMetaOperatorDescriptor(jobSpec, feedConnectionId, opDesc, feedPolicyEntity.getProperties(), FeedRuntimeType.COMPUTE);
                            opId = metaOp.getOperatorId();
                            opDesc.setOperatorId(opId);
                        }
                    }
                }
                if (opId == null) {
                    opId = jobSpec.createOperatorDescriptorId(opDesc);
                }
            }
            operatorIdMapping.put(oldId, opId);
        }
        // copy connectors
        connectorIdMapping.clear();
        for (Entry<ConnectorDescriptorId, IConnectorDescriptor> entry : subJob.getConnectorMap().entrySet()) {
            IConnectorDescriptor connDesc = entry.getValue();
            ConnectorDescriptorId newConnId;
            if (connDesc instanceof MToNPartitioningConnectorDescriptor) {
                MToNPartitioningConnectorDescriptor m2nConn = (MToNPartitioningConnectorDescriptor) connDesc;
                connDesc = new MToNPartitioningWithMessageConnectorDescriptor(jobSpec, m2nConn.getTuplePartitionComputerFactory());
                newConnId = connDesc.getConnectorId();
            } else {
                newConnId = jobSpec.createConnectorDescriptor(connDesc);
            }
            connectorIdMapping.put(entry.getKey(), newConnId);
        }
        // make connections between operators
        for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : subJob.getConnectorOperatorMap().entrySet()) {
            ConnectorDescriptorId newId = connectorIdMapping.get(entry.getKey());
            IConnectorDescriptor connDesc = jobSpec.getConnectorMap().get(newId);
            Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft();
            Pair<IOperatorDescriptor, Integer> rightOp = entry.getValue().getRight();
            IOperatorDescriptor leftOpDesc = jobSpec.getOperatorMap().get(leftOp.getLeft().getOperatorId());
            IOperatorDescriptor rightOpDesc = jobSpec.getOperatorMap().get(rightOp.getLeft().getOperatorId());
            if (leftOp.getLeft() instanceof FeedCollectOperatorDescriptor) {
                jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), replicateOp, iter1, leftOpDesc, leftOp.getRight());
            }
            jobSpec.connect(connDesc, leftOpDesc, leftOp.getRight(), rightOpDesc, rightOp.getRight());
        }
        // prepare for setting partition constraints
        operatorLocations.clear();
        operatorCounts.clear();
        for (Constraint constraint : subJob.getUserConstraints()) {
            LValueConstraintExpression lexpr = constraint.getLValue();
            ConstraintExpression cexpr = constraint.getRValue();
            OperatorDescriptorId opId;
            switch(lexpr.getTag()) {
                case PARTITION_COUNT:
                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
                    operatorCounts.put(operatorIdMapping.get(opId), (int) ((ConstantExpression) cexpr).getValue());
                    break;
                case PARTITION_LOCATION:
                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
                    IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(operatorIdMapping.get(opId));
                    List<LocationConstraint> locations = operatorLocations.get(opDesc.getOperatorId());
                    if (locations == null) {
                        locations = new ArrayList<>();
                        operatorLocations.put(opDesc.getOperatorId(), locations);
                    }
                    String location = (String) ((ConstantExpression) cexpr).getValue();
                    LocationConstraint lc = new LocationConstraint(location, ((PartitionLocationExpression) lexpr).getPartition());
                    locations.add(lc);
                    break;
                default:
                    break;
            }
        }
        // set absolute location constraints
        for (Entry<OperatorDescriptorId, List<LocationConstraint>> entry : operatorLocations.entrySet()) {
            IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(entry.getKey());
            // sort by partition so the locations array below can be indexed by partition order
            Collections.sort(entry.getValue(), (o1, o2) -> Integer.compare(o1.partition, o2.partition));
            String[] locations = new String[entry.getValue().size()];
            for (int j = 0; j < locations.length; ++j) {
                locations[j] = entry.getValue().get(j).location;
            }
            PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, opDesc, locations);
        }
        // set count constraints
        for (Entry<OperatorDescriptorId, Integer> entry : operatorCounts.entrySet()) {
            IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(entry.getKey());
            if (!operatorLocations.containsKey(entry.getKey())) {
                PartitionConstraintHelper.addPartitionCountConstraint(jobSpec, opDesc, entry.getValue());
            }
        }
        // roots
        for (OperatorDescriptorId root : subJob.getRoots()) {
            jobSpec.addRoot(jobSpec.getOperatorMap().get(operatorIdMapping.get(root)));
        }
        jobIds.add(((JobEventListenerFactory) subJob.getJobletEventListenerFactory()).getJobId());
    }
    // jobEventListenerFactory
    jobSpec.setJobletEventListenerFactory(new MultiTransactionJobletEventListenerFactory(jobIds, true));
    // useConnectorSchedulingPolicy
    jobSpec.setUseConnectorPolicyForScheduling(jobsList.get(0).isUseConnectorPolicyForScheduling());
    // connectorAssignmentPolicy
    jobSpec.setConnectorPolicyAssignmentPolicy(jobsList.get(0).getConnectorPolicyAssignmentPolicy());
    return jobSpec;
}
Also used: HashMap (java.util.HashMap), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint), AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint), Constraint (org.apache.hyracks.api.constraints.Constraint), LocationConstraint (org.apache.asterix.metadata.feeds.LocationConstraint), ConstantExpression (org.apache.hyracks.api.constraints.expressions.ConstantExpression), ConnectorDescriptorId (org.apache.hyracks.api.dataflow.ConnectorDescriptorId), ArrayList (java.util.ArrayList), OneToOneConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor), List (java.util.List), AlgebricksMetaOperatorDescriptor (org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor), LValueConstraintExpression (org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression), ReplicateOperatorDescriptor (org.apache.hyracks.dataflow.std.misc.ReplicateOperatorDescriptor), Map (java.util.Map), FeedCollectOperatorDescriptor (org.apache.asterix.external.operators.FeedCollectOperatorDescriptor), FeedPolicyEntity (org.apache.asterix.metadata.entities.FeedPolicyEntity), JobSpecification (org.apache.hyracks.api.job.JobSpecification), FeedIntakeOperatorDescriptor (org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor), JobId (org.apache.asterix.common.transactions.JobId), Pair (org.apache.commons.lang3.tuple.Pair), IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor), MToNPartitioningWithMessageConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningWithMessageConnectorDescriptor), OperatorDescriptorId (org.apache.hyracks.api.dataflow.OperatorDescriptorId), ConstraintExpression (org.apache.hyracks.api.constraints.expressions.ConstraintExpression), FeedConnection (org.apache.asterix.metadata.entities.FeedConnection), MultiTransactionJobletEventListenerFactory (org.apache.asterix.runtime.job.listener.MultiTransactionJobletEventListenerFactory), MToNPartitioningConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor), AssignRuntimeFactory (org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory), IPushRuntimeFactory (org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory), FeedMetaOperatorDescriptor (org.apache.asterix.external.operators.FeedMetaOperatorDescriptor), IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor), FeedConnectionId (org.apache.asterix.external.feed.management.FeedConnectionId), LSMTreeInsertDeleteOperatorDescriptor (org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor)
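The loop over subJob.getUserConstraints() above is the consumer-side idiom for LValueConstraintExpression: read getLValue() and getRValue(), dispatch on the l-value's tag, and unwrap the ConstantExpression r-value. A minimal sketch of that idiom in isolation; ConstraintInspector is a hypothetical name, and the ExpressionTag qualifier is assumed from the switch above:

import java.util.HashMap;
import java.util.Map;

import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
import org.apache.hyracks.api.constraints.expressions.ConstraintExpression;
import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
import org.apache.hyracks.api.job.JobSpecification;

public final class ConstraintInspector {

    // Collect the declared partition count per operator from a job's user constraints.
    public static Map<OperatorDescriptorId, Integer> partitionCounts(JobSpecification spec) {
        Map<OperatorDescriptorId, Integer> counts = new HashMap<>();
        for (Constraint constraint : spec.getUserConstraints()) {
            LValueConstraintExpression lexpr = constraint.getLValue();
            ConstraintExpression cexpr = constraint.getRValue();
            // only PARTITION_COUNT l-values carry a per-operator degree of parallelism
            if (lexpr.getTag() == ConstraintExpression.ExpressionTag.PARTITION_COUNT) {
                OperatorDescriptorId opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
                counts.put(opId, (int) ((ConstantExpression) cexpr).getValue());
            }
        }
        return counts;
    }
}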

Example 2 with LValueConstraintExpression

Use of org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression in project asterixdb by apache.

Class JobExecutor, method assignLocation:

private String assignLocation(ActivityClusterGraph acg, Map<TaskId, LValueConstraintExpression> locationMap, TaskId tid, TaskAttempt taskAttempt) throws HyracksException {
    ActivityId aid = tid.getActivityId();
    ActivityCluster ac = acg.getActivityMap().get(aid);
    Set<ActivityId> blockers = ac.getBlocked2BlockerMap().get(aid);
    String nodeId = null;
    if (blockers != null) {
        for (ActivityId blocker : blockers) {
            nodeId = findTaskLocation(new TaskId(blocker, tid.getPartition()));
            if (nodeId != null) {
                break;
            }
        }
    }
    INodeManager nodeManager = ccs.getNodeManager();
    Collection<String> liveNodes = nodeManager.getAllNodeIds();
    if (nodeId == null) {
        LValueConstraintExpression pLocationExpr = locationMap.get(tid);
        Object location = solver.getValue(pLocationExpr);
        if (location == null) {
            // no binding from the solver: pick any live node at random
            nodeId = liveNodes.toArray(new String[liveNodes.size()])[random.nextInt(liveNodes.size())];
        } else if (location instanceof String) {
            nodeId = (String) location;
        } else if (location instanceof String[]) {
            for (String choice : (String[]) location) {
                if (liveNodes.contains(choice)) {
                    nodeId = choice;
                    break;
                }
            }
            if (nodeId == null) {
                throw new HyracksException("No satisfiable location found for " + taskAttempt.getTaskAttemptId());
            }
        } else {
            throw new HyracksException("Unknown type of value for " + pLocationExpr + ": " + location + "(" + location.getClass() + ")");
        }
    }
    if (nodeId == null) {
        throw new HyracksException("No satisfiable location found for " + taskAttempt.getTaskAttemptId());
    }
    if (!liveNodes.contains(nodeId)) {
        throw new HyracksException("Node " + nodeId + " not live");
    }
    return nodeId;
}
Also used: INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager), LValueConstraintExpression (org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression), TaskId (org.apache.hyracks.api.dataflow.TaskId), ActivityId (org.apache.hyracks.api.dataflow.ActivityId), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), ActivityCluster (org.apache.hyracks.api.job.ActivityCluster)
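The part of assignLocation worth isolating is the three-way dispatch on the solver's value, which may be null (unbound), a single String, or a String[] of alternatives. A minimal sketch under those assumptions; LocationChooser is a hypothetical name, not Hyracks API:

import java.util.Collection;
import java.util.Random;

import org.apache.hyracks.api.exceptions.HyracksException;

final class LocationChooser {

    // Normalize a solved location value (null, String, or String[]) into a live node id.
    static String choose(Object location, Collection<String> liveNodes, Random random) throws HyracksException {
        if (location == null) {
            // unconstrained: pick any live node at random
            String[] nodes = liveNodes.toArray(new String[liveNodes.size()]);
            if (nodes.length == 0) {
                throw new HyracksException("No live nodes available");
            }
            return nodes[random.nextInt(nodes.length)];
        } else if (location instanceof String) {
            return (String) location;
        } else if (location instanceof String[]) {
            // first alternative that is currently live wins
            for (String choice : (String[]) location) {
                if (liveNodes.contains(choice)) {
                    return choice;
                }
            }
            throw new HyracksException("No live node among the constrained alternatives");
        }
        throw new HyracksException("Unknown type of location value: " + location.getClass());
    }
}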

Example 3 with LValueConstraintExpression

Use of org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression in project asterixdb by apache.

Class PartitionConstraintSolver, method solveLValue:

private Solution solveLValue(LValueConstraintExpression lv, Set<LValueConstraintExpression> inProcess) {
    if (inProcess.contains(lv)) {
        return new Solution(null, Solution.Status.CYCLE);
    }
    Set<ConstraintExpression> rValues = constraints.get(lv);
    if (rValues == null) {
        // unbound l-value: return before marking it in-process, so later lookups
        // report NOT_BOUND rather than a spurious CYCLE
        return new Solution(null, Solution.Status.NOT_BOUND);
    }
    Solution result = null;
    inProcess.add(lv);
    for (ConstraintExpression ce : rValues) {
        Solution solution = solve(ce, inProcess);
        if (solution != null && solution.status == Solution.Status.FOUND) {
            result = solution;
            break;
        }
    }
    if (result != null) {
        rValues.clear();
        rValues.add(new ConstantExpression(result.value));
    }
    inProcess.remove(lv);
    return result;
}
Also used: ConstraintExpression (org.apache.hyracks.api.constraints.expressions.ConstraintExpression), LValueConstraintExpression (org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression), ConstantExpression (org.apache.hyracks.api.constraints.expressions.ConstantExpression)
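Two details carry this method: the inProcess set turns a possibly cyclic constraint graph into a terminating search, and a FOUND result is memoized by collapsing the r-value set to a single ConstantExpression, so later solves are constant time. The same shape stripped of Hyracks types; every name below is hypothetical, and rules may call resolve() on other keys through a captured reference:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;

final class CycleSafeResolver<K, V> {

    private final Map<K, Function<K, V>> rules; // per-key computation; may recurse into resolve()
    private final Map<K, V> memo = new HashMap<>();
    private final Set<K> inProcess = new HashSet<>();

    CycleSafeResolver(Map<K, Function<K, V>> rules) {
        this.rules = rules;
    }

    V resolve(K key) {
        if (inProcess.contains(key)) {
            return null; // cycle: report "no value" instead of recursing forever
        }
        V cached = memo.get(key);
        if (cached != null) {
            return cached;
        }
        inProcess.add(key);
        try {
            Function<K, V> rule = rules.get(key);
            V value = (rule == null) ? null : rule.apply(key); // null rule plays the NOT_BOUND role
            if (value != null) {
                memo.put(key, value); // analogue of collapsing rValues to a ConstantExpression
            }
            return value;
        } finally {
            inProcess.remove(key);
        }
    }
}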

Example 4 with LValueConstraintExpression

Use of org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression in project asterixdb by apache.

Class ActivityClusterPlanner, method computePartitionCounts:

private Map<ActivityId, ActivityPartitionDetails> computePartitionCounts(ActivityCluster ac) throws HyracksException {
    PartitionConstraintSolver solver = executor.getSolver();
    Set<LValueConstraintExpression> lValues = new HashSet<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        lValues.add(new PartitionCountExpression(anId.getOperatorDescriptorId()));
    }
    solver.solve(lValues);
    Map<OperatorDescriptorId, Integer> nPartMap = new HashMap<>();
    for (LValueConstraintExpression lv : lValues) {
        Object value = solver.getValue(lv);
        if (value == null) {
            throw new HyracksException("No value found for " + lv);
        }
        if (!(value instanceof Number)) {
            throw new HyracksException("Unexpected type of value bound to " + lv + ": " + value.getClass() + "(" + value + ")");
        }
        int nParts = ((Number) value).intValue();
        if (nParts <= 0) {
            throw new HyracksException("Unsatisfiable number of partitions for " + lv + ": " + nParts);
        }
        nPartMap.put(((PartitionCountExpression) lv).getOperatorDescriptorId(), nParts);
    }
    Map<ActivityId, ActivityPartitionDetails> activityPartsMap = new HashMap<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        int nParts = nPartMap.get(anId.getOperatorDescriptorId());
        int[] nInputPartitions = null;
        List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(anId);
        if (inputs != null) {
            nInputPartitions = new int[inputs.size()];
            for (int i = 0; i < nInputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = inputs.get(i).getConnectorId();
                ActivityId aid = ac.getProducerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nInputPartitions[i] = nPartInt;
            }
        }
        int[] nOutputPartitions = null;
        List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(anId);
        if (outputs != null) {
            nOutputPartitions = new int[outputs.size()];
            for (int i = 0; i < nOutputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = outputs.get(i).getConnectorId();
                ActivityId aid = ac.getConsumerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nOutputPartitions[i] = nPartInt;
            }
        }
        ActivityPartitionDetails apd = new ActivityPartitionDetails(nParts, nInputPartitions, nOutputPartitions);
        activityPartsMap.put(anId, apd);
    }
    return activityPartsMap;
}
Also used: IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor), OperatorDescriptorId (org.apache.hyracks.api.dataflow.OperatorDescriptorId), HashMap (java.util.HashMap), ActivityId (org.apache.hyracks.api.dataflow.ActivityId), ConnectorDescriptorId (org.apache.hyracks.api.dataflow.ConnectorDescriptorId), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), LValueConstraintExpression (org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression), PartitionCountExpression (org.apache.hyracks.api.constraints.expressions.PartitionCountExpression), HashSet (java.util.HashSet)
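The PartitionCountExpression l-values solved here have to be bound somewhere; in the first example that binding is registered through PartitionConstraintHelper. A sketch of what such a registration plausibly reduces to, assuming JobSpecification.addUserConstraint as the entry point; the helper class name below is ours, not AsterixDB's:

import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.job.JobSpecification;

final class CountConstraints {

    // Bind a PARTITION_COUNT l-value for an operator to a constant r-value on the job spec.
    static void add(JobSpecification spec, IOperatorDescriptor op, int count) {
        spec.addUserConstraint(new Constraint(
                new PartitionCountExpression(op.getOperatorId()), new ConstantExpression(count)));
    }
}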

Example 5 with LValueConstraintExpression

Use of org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression in project asterixdb by apache.

Class JobExecutor, method assignTaskLocations:

private void assignTaskLocations(TaskCluster tc, Map<String, List<TaskAttemptDescriptor>> taskAttemptMap) throws HyracksException {
    ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
    Task[] tasks = tc.getTasks();
    List<TaskClusterAttempt> tcAttempts = tc.getAttempts();
    int attempts = tcAttempts.size();
    TaskClusterAttempt tcAttempt = new TaskClusterAttempt(tc, attempts);
    Map<TaskId, TaskAttempt> taskAttempts = new HashMap<>();
    Map<TaskId, LValueConstraintExpression> locationMap = new HashMap<>();
    for (int i = 0; i < tasks.length; ++i) {
        Task ts = tasks[i];
        TaskId tid = ts.getTaskId();
        TaskAttempt taskAttempt = new TaskAttempt(tcAttempt, new TaskAttemptId(new TaskId(tid.getActivityId(), tid.getPartition()), attempts), ts);
        taskAttempt.setStatus(TaskAttempt.TaskStatus.INITIALIZED, null);
        locationMap.put(tid, new PartitionLocationExpression(tid.getActivityId().getOperatorDescriptorId(), tid.getPartition()));
        taskAttempts.put(tid, taskAttempt);
    }
    tcAttempt.setTaskAttempts(taskAttempts);
    solver.solve(locationMap.values());
    for (int i = 0; i < tasks.length; ++i) {
        Task ts = tasks[i];
        TaskId tid = ts.getTaskId();
        TaskAttempt taskAttempt = taskAttempts.get(tid);
        String nodeId = assignLocation(acg, locationMap, tid, taskAttempt);
        taskAttempt.setNodeId(nodeId);
        taskAttempt.setStatus(TaskAttempt.TaskStatus.RUNNING, null);
        taskAttempt.setStartTime(System.currentTimeMillis());
        List<TaskAttemptDescriptor> tads = taskAttemptMap.get(nodeId);
        if (tads == null) {
            tads = new ArrayList<>();
            taskAttemptMap.put(nodeId, tads);
        }
        OperatorDescriptorId opId = tid.getActivityId().getOperatorDescriptorId();
        jobRun.registerOperatorLocation(opId, tid.getPartition(), nodeId);
        ActivityPartitionDetails apd = ts.getActivityPlan().getActivityPartitionDetails();
        TaskAttemptDescriptor tad = new TaskAttemptDescriptor(taskAttempt.getTaskAttemptId(), apd.getPartitionCount(), apd.getInputPartitionCounts(), apd.getOutputPartitionCounts());
        tads.add(tad);
    }
    tcAttempt.initializePendingTaskCounter();
    tcAttempts.add(tcAttempt);
    /*
     * To reduce master/slave message traffic, for each TaskAttemptDescriptor we set the
     * NetworkAddress[][] partitionLocations, where each row corresponds to an incoming
     * connector descriptor and each column to an input channel of that connector.
     */
    INodeManager nodeManager = ccs.getNodeManager();
    for (Map.Entry<String, List<TaskAttemptDescriptor>> e : taskAttemptMap.entrySet()) {
        List<TaskAttemptDescriptor> tads = e.getValue();
        for (TaskAttemptDescriptor tad : tads) {
            TaskAttemptId taid = tad.getTaskAttemptId();
            int attempt = taid.getAttempt();
            TaskId tid = taid.getTaskId();
            ActivityId aid = tid.getActivityId();
            List<IConnectorDescriptor> inConnectors = acg.getActivityInputs(aid);
            int[] inPartitionCounts = tad.getInputPartitionCounts();
            if (inPartitionCounts == null) {
                continue;
            }
            NetworkAddress[][] partitionLocations = new NetworkAddress[inPartitionCounts.length][];
            for (int i = 0; i < inPartitionCounts.length; ++i) {
                ConnectorDescriptorId cdId = inConnectors.get(i).getConnectorId();
                IConnectorPolicy policy = jobRun.getConnectorPolicyMap().get(cdId);
                /*
                 * Carry sender location information into the task unless this is a
                 * re-attempt and the send side is materialized blocking.
                 */
                if (attempt > 0 && policy.materializeOnSendSide() && policy.consumerWaitsForProducerToFinish()) {
                    continue;
                }
                ActivityId producerAid = acg.getProducerActivity(cdId);
                partitionLocations[i] = new NetworkAddress[inPartitionCounts[i]];
                for (int j = 0; j < inPartitionCounts[i]; ++j) {
                    TaskId producerTaskId = new TaskId(producerAid, j);
                    String nodeId = findTaskLocation(producerTaskId);
                    partitionLocations[i][j] = nodeManager.getNodeControllerState(nodeId).getDataPort();
                }
            }
            tad.setInputPartitionLocations(partitionLocations);
        }
    }
    tcAttempt.setStatus(TaskClusterAttempt.TaskClusterStatus.RUNNING);
    tcAttempt.setStartTime(System.currentTimeMillis());
    inProgressTaskClusters.add(tc);
}
Also used: INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager), Task (org.apache.hyracks.control.cc.job.Task), TaskId (org.apache.hyracks.api.dataflow.TaskId), TaskClusterAttempt (org.apache.hyracks.control.cc.job.TaskClusterAttempt), HashMap (java.util.HashMap), ActivityId (org.apache.hyracks.api.dataflow.ActivityId), ConnectorDescriptorId (org.apache.hyracks.api.dataflow.ConnectorDescriptorId), NetworkAddress (org.apache.hyracks.api.comm.NetworkAddress), ArrayList (java.util.ArrayList), List (java.util.List), TaskAttempt (org.apache.hyracks.control.cc.job.TaskAttempt), PartitionLocationExpression (org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression), IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor), OperatorDescriptorId (org.apache.hyracks.api.dataflow.OperatorDescriptorId), TaskAttemptId (org.apache.hyracks.api.dataflow.TaskAttemptId), IConnectorPolicy (org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy), Constraint (org.apache.hyracks.api.constraints.Constraint), LValueConstraintExpression (org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression), TaskAttemptDescriptor (org.apache.hyracks.control.common.job.TaskAttemptDescriptor), ActivityClusterGraph (org.apache.hyracks.api.job.ActivityClusterGraph), Map (java.util.Map)
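assignTaskLocations builds one PartitionLocationExpression per task partition and hands the whole collection to the solver at once. The producer-side mirror image, an absolute location constraint, is one such l-value per partition bound to a constant node id plus a matching count. A sketch of what PartitionConstraintHelper.addAbsoluteLocationConstraint plausibly amounts to, assumed rather than quoted from Hyracks; the wrapper class name is ours:

import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
import org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.job.JobSpecification;

final class LocationConstraints {

    // Pin partition p of an operator to nodeIds[p], and fix the partition count to match.
    static void addAbsolute(JobSpecification spec, IOperatorDescriptor op, String... nodeIds) {
        for (int p = 0; p < nodeIds.length; p++) {
            spec.addUserConstraint(new Constraint(
                    new PartitionLocationExpression(op.getOperatorId(), p),
                    new ConstantExpression(nodeIds[p])));
        }
        spec.addUserConstraint(new Constraint(
                new PartitionCountExpression(op.getOperatorId()), new ConstantExpression(nodeIds.length)));
    }
}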

Aggregations

LValueConstraintExpression (org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression): 5
HashMap (java.util.HashMap): 3
ActivityId (org.apache.hyracks.api.dataflow.ActivityId): 3
ConnectorDescriptorId (org.apache.hyracks.api.dataflow.ConnectorDescriptorId): 3
IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor): 3
OperatorDescriptorId (org.apache.hyracks.api.dataflow.OperatorDescriptorId): 3
ArrayList (java.util.ArrayList): 2
List (java.util.List): 2
Map (java.util.Map): 2
Constraint (org.apache.hyracks.api.constraints.Constraint): 2
ConstantExpression (org.apache.hyracks.api.constraints.expressions.ConstantExpression): 2
ConstraintExpression (org.apache.hyracks.api.constraints.expressions.ConstraintExpression): 2
TaskId (org.apache.hyracks.api.dataflow.TaskId): 2
HyracksException (org.apache.hyracks.api.exceptions.HyracksException): 2
INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager): 2
HashSet (java.util.HashSet): 1
LSMTreeInsertDeleteOperatorDescriptor (org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor): 1
JobId (org.apache.asterix.common.transactions.JobId): 1
FeedConnectionId (org.apache.asterix.external.feed.management.FeedConnectionId): 1
FeedCollectOperatorDescriptor (org.apache.asterix.external.operators.FeedCollectOperatorDescriptor): 1