Search in sources :

Example 6 with ConnectorDescriptorId

use of org.apache.hyracks.api.dataflow.ConnectorDescriptorId in project asterixdb by apache.

In the class JobBuilder, the method setPartitionConstraintsTopdown:

private void setPartitionConstraintsTopdown(OperatorDescriptorId opId, Map<IConnectorDescriptor, TargetConstraint> tgtConstraints, IOperatorDescriptor parentOp) {
    // Pushes partition-count constraints from this operator down to its input (source)
    // operators, then recurses into each source.
    List<IConnectorDescriptor> inputs = jobSpec.getOperatorInputMap().get(opId);
    if (inputs == null) {
        return;
    }
    IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(opId);
    for (IConnectorDescriptor connector : inputs) {
        ConnectorDescriptorId connId = connector.getConnectorId();
        org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> endpoints = jobSpec.getConnectorOperatorMap().get(connId);
        IOperatorDescriptor source = endpoints.getLeft().getLeft();
        TargetConstraint target = tgtConstraints.get(connector);
        // SAME_COUNT: a source with no constraint yet inherits this operator's
        // partition constraint, so both sides run with the same partition count.
        if (target == TargetConstraint.SAME_COUNT && partitionConstraintMap.get(source) == null) {
            AlgebricksPartitionConstraint inherited = partitionConstraintMap.get(opDesc);
            if (inherited != null) {
                partitionConstraintMap.put(source, inherited);
                AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, source, inherited);
            }
        }
        // Descend into the source after handling this edge.
        setPartitionConstraintsTopdown(source.getOperatorId(), tgtConstraints, opDesc);
    }
}
Also used : IConnectorDescriptor(org.apache.hyracks.api.dataflow.IConnectorDescriptor) ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) IOperatorDescriptor(org.apache.hyracks.api.dataflow.IOperatorDescriptor) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) Pair(org.apache.hyracks.algebricks.common.utils.Pair)

Example 7 with ConnectorDescriptorId

use of org.apache.hyracks.api.dataflow.ConnectorDescriptorId in project asterixdb by apache.

In the class JobExecutor, the method findDoomedTaskClusters:

private boolean findDoomedTaskClusters(TaskCluster tc, Set<TaskCluster> doomedTaskClusters) {
    // Already classified as doomed on an earlier visit.
    if (doomedTaskClusters.contains(tc)) {
        return true;
    }
    TaskClusterAttempt lastAttempt = findLastTaskClusterAttempt(tc);
    if (lastAttempt != null) {
        switch (lastAttempt.getStatus()) {
            case ABORTED:
            case FAILED:
            case COMPLETED:
                // A cluster whose last attempt reached a terminal state is never doomed.
                return false;
            default:
                break;
        }
    }
    Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicyMap = jobRun.getConnectorPolicyMap();
    PartitionMatchMaker matchMaker = jobRun.getPartitionMatchMaker();
    boolean anyDoomed = false;
    // A cluster is doomed if any dependency cluster is doomed...
    for (TaskCluster dependency : tc.getDependencyTaskClusters()) {
        anyDoomed |= findDoomedTaskClusters(dependency, doomedTaskClusters);
    }
    // ...or if a required partition is unavailable and its producing cluster is doomed.
    for (PartitionId pid : tc.getRequiredPartitions()) {
        IConnectorPolicy policy = connectorPolicyMap.get(pid.getConnectorDescriptorId());
        PartitionState available = matchMaker.getMaximumAvailableState(pid);
        boolean unavailable = available == null
                || (policy.consumerWaitsForProducerToFinish() && available != PartitionState.COMMITTED);
        if (unavailable) {
            anyDoomed |= findDoomedTaskClusters(partitionProducingTaskClusterMap.get(pid), doomedTaskClusters);
        }
    }
    if (anyDoomed) {
        doomedTaskClusters.add(tc);
    }
    return anyDoomed;
}
Also used : TaskClusterAttempt(org.apache.hyracks.control.cc.job.TaskClusterAttempt) IConnectorPolicy(org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy) ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) TaskCluster(org.apache.hyracks.control.cc.job.TaskCluster) PartitionState(org.apache.hyracks.control.common.job.PartitionState) PartitionMatchMaker(org.apache.hyracks.control.cc.partitions.PartitionMatchMaker) PartitionId(org.apache.hyracks.api.partitions.PartitionId)

Example 8 with ConnectorDescriptorId

use of org.apache.hyracks.api.dataflow.ConnectorDescriptorId in project asterixdb by apache.

In the class ActivityClusterPlanner, the method assignConnectorPolicy:

private void assignConnectorPolicy(ActivityCluster ac, Map<ActivityId, ActivityPlan> taskMap) {
    // Computes a connector policy for every output connector in the activity cluster,
    // based on producer/consumer task counts and per-producer fan-out, then installs
    // all policies into the job run in one shot.
    Map<ConnectorDescriptorId, IConnectorPolicy> policies = new HashMap<>();
    BitSet targets = new BitSet();
    for (ActivityId producerActivity : ac.getActivityMap().keySet()) {
        Task[] producerTasks = taskMap.get(producerActivity).getTasks();
        int producerCount = producerTasks.length;
        List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(producerActivity);
        if (outputs == null) {
            continue;
        }
        for (IConnectorDescriptor connector : outputs) {
            ConnectorDescriptorId connId = connector.getConnectorId();
            ActivityId consumerActivity = ac.getConsumerActivity(connId);
            Task[] consumerTasks = taskMap.get(consumerActivity).getTasks();
            int consumerCount = consumerTasks.length;
            int[] fanouts = new int[producerCount];
            boolean broadcast = connector.allProducersToAllConsumers();
            for (int producer = 0; producer < producerCount; ++producer) {
                if (broadcast) {
                    // Every producer feeds every consumer.
                    fanouts[producer] = consumerCount;
                } else {
                    // Ask the connector which consumer partitions this producer targets.
                    connector.indicateTargetPartitions(producerCount, consumerCount, producer, targets);
                    fanouts[producer] = targets.cardinality();
                }
            }
            policies.put(connId, assignConnectorPolicy(ac, connector, producerCount, consumerCount, fanouts));
        }
    }
    executor.getJobRun().getConnectorPolicyMap().putAll(policies);
}
Also used : IConnectorDescriptor(org.apache.hyracks.api.dataflow.IConnectorDescriptor) Task(org.apache.hyracks.control.cc.job.Task) HashMap(java.util.HashMap) IConnectorPolicy(org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) BitSet(java.util.BitSet)

Example 9 with ConnectorDescriptorId

use of org.apache.hyracks.api.dataflow.ConnectorDescriptorId in project asterixdb by apache.

In the class JobBuilder, the method setPartitionConstraintsBottomup:

private void setPartitionConstraintsBottomup(OperatorDescriptorId opId, Map<IConnectorDescriptor, TargetConstraint> tgtConstraints, IOperatorDescriptor parentOp, boolean finalPass) {
    // Derives this operator's partition constraint from its inputs (visiting sources
    // first), falling back to defaults on the final pass.
    List<IConnectorDescriptor> inputs = jobSpec.getOperatorInputMap().get(opId);
    IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(opId);
    AlgebricksPartitionConstraint derived = null;
    if (inputs != null) {
        for (IConnectorDescriptor connector : inputs) {
            ConnectorDescriptorId connId = connector.getConnectorId();
            org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> endpoints = jobSpec.getConnectorOperatorMap().get(connId);
            IOperatorDescriptor source = endpoints.getLeft().getLeft();
            // Visit sources before consulting their constraints below.
            setPartitionConstraintsBottomup(source.getOperatorId(), tgtConstraints, opDesc, finalPass);
            TargetConstraint target = tgtConstraints.get(connector);
            if (target == TargetConstraint.ONE) {
                derived = countOneLocation;
            } else if (target == TargetConstraint.SAME_COUNT) {
                derived = partitionConstraintMap.get(source);
            }
        }
    }
    // Never overwrite a constraint that is already set.
    if (partitionConstraintMap.get(opDesc) != null) {
        return;
    }
    if (finalPass && derived == null) {
        // Default: source operators (no inputs) run on one partition; everything
        // else runs cluster-wide.
        derived = (inputs == null || inputs.isEmpty()) ? countOneLocation : clusterLocations;
    }
    if (derived != null) {
        partitionConstraintMap.put(opDesc, derived);
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, opDesc, derived);
    }
}
Also used : IConnectorDescriptor(org.apache.hyracks.api.dataflow.IConnectorDescriptor) ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) IOperatorDescriptor(org.apache.hyracks.api.dataflow.IOperatorDescriptor) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) Pair(org.apache.hyracks.algebricks.common.utils.Pair)

Example 10 with ConnectorDescriptorId

use of org.apache.hyracks.api.dataflow.ConnectorDescriptorId in project asterixdb by apache.

In the class NetworkManager, the method readInitialMessage:

private static PartitionId readInitialMessage(ByteBuffer buffer) {
    // Decodes a partition handshake message. Wire layout (read order matters):
    // jobId (long), connector id (int), sender index (int), receiver index (int).
    final long jobIdValue = buffer.getLong();
    final int connectorIdValue = buffer.getInt();
    final int senderIndex = buffer.getInt();
    final int receiverIndex = buffer.getInt();
    return new PartitionId(new JobId(jobIdValue), new ConnectorDescriptorId(connectorIdValue), senderIndex, receiverIndex);
}
Also used : ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) PartitionId(org.apache.hyracks.api.partitions.PartitionId) JobId(org.apache.hyracks.api.job.JobId)

Aggregations

ConnectorDescriptorId (org.apache.hyracks.api.dataflow.ConnectorDescriptorId)18 HashMap (java.util.HashMap)10 IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor)9 List (java.util.List)7 ActivityId (org.apache.hyracks.api.dataflow.ActivityId)7 ArrayList (java.util.ArrayList)6 Map (java.util.Map)6 Pair (org.apache.commons.lang3.tuple.Pair)6 IConnectorPolicy (org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy)6 PartitionId (org.apache.hyracks.api.partitions.PartitionId)5 IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor)4 OperatorDescriptorId (org.apache.hyracks.api.dataflow.OperatorDescriptorId)4 TaskId (org.apache.hyracks.api.dataflow.TaskId)4 ActivityClusterGraph (org.apache.hyracks.api.job.ActivityClusterGraph)4 Task (org.apache.hyracks.control.cc.job.Task)4 BitSet (java.util.BitSet)3 AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint)3 Constraint (org.apache.hyracks.api.constraints.Constraint)3 LValueConstraintExpression (org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression)3 JobId (org.apache.hyracks.api.job.JobId)3