
Example 36 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class DeploymentUtils method loadClass.

/**
     * Load a class from its class name
     *
     * @param className
     *            the fully qualified name of the class to load
     * @param deploymentId
     *            the deployment whose class loader should be used, or {@code null} to use the default loader
     * @param serviceCtx
     *            the service context providing the job serializer/deserializer container
     * @return the loaded class
     * @throws HyracksException
     *             if the class cannot be found or loaded
     */
public static Class<?> loadClass(String className, DeploymentId deploymentId, IServiceContext serviceCtx) throws HyracksException {
    try {
        IJobSerializerDeserializerContainer jobSerDeContainer = serviceCtx.getJobSerializerDeserializerContainer();
        IJobSerializerDeserializer jobSerDe = deploymentId == null ? null : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
        return jobSerDe == null ? JavaSerializationUtils.loadClass(className) : jobSerDe.loadClass(className);
    } catch (ClassNotFoundException | IOException e) {
        throw new HyracksException(e);
    }
}
Also used : HyracksException(org.apache.hyracks.api.exceptions.HyracksException) IOException(java.io.IOException) IJobSerializerDeserializerContainer(org.apache.hyracks.api.job.IJobSerializerDeserializerContainer) IJobSerializerDeserializer(org.apache.hyracks.api.job.IJobSerializerDeserializer)
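A minimal caller sketch for the method above, not taken from the source: the import paths for DeploymentUtils and IServiceContext are assumed, and the instantiate helper is illustrative only.

import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.deployment.DeploymentId;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.control.common.deployment.DeploymentUtils;

// Resolves a class shipped with a deployment; a null deploymentId falls back
// to the default class loader, exactly as loadClass above does.
static Object instantiate(String className, DeploymentId deploymentId, IServiceContext serviceCtx)
        throws HyracksException, ReflectiveOperationException {
    Class<?> clazz = DeploymentUtils.loadClass(className, deploymentId, serviceCtx);
    return clazz.getDeclaredConstructor().newInstance();
}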

Example 37 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class NodeManager method addNode.

@Override
public void addNode(String nodeId, NodeControllerState ncState) throws HyracksException {
    if (nodeId == null || ncState == null) {
        throw HyracksException.create(ErrorCode.INVALID_INPUT_PARAMETER);
    }
    // Updates the node registry.
    if (nodeRegistry.containsKey(nodeId)) {
        LOGGER.warning("Node with name " + nodeId + " has already registered; re-registering");
    }
    nodeRegistry.put(nodeId, ncState);
    // Updates the IP address to node names map.
    try {
        InetAddress ipAddress = getIpAddress(ncState);
        Set<String> nodes = ipAddressNodeNameMap.computeIfAbsent(ipAddress, k -> new HashSet<>());
        nodes.add(nodeId);
    } catch (HyracksException e) {
        // If anything fails, we ignore the node.
        nodeRegistry.remove(nodeId);
        throw e;
    }
    // Updates the cluster capacity.
    resourceManager.update(nodeId, ncState.getCapacity());
}
Also used : HyracksException(org.apache.hyracks.api.exceptions.HyracksException) InetAddress(java.net.InetAddress)
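A hedged caller sketch (not from the source); nodeManager and LOGGER are assumed to exist on the cluster controller side, and the handling shown is illustrative.

try {
    nodeManager.addNode(nodeId, ncState);
} catch (HyracksException e) {
    // Raised with ErrorCode.INVALID_INPUT_PARAMETER when nodeId or ncState is null;
    // a failure while resolving the node's IP address also unwinds the registration.
    LOGGER.log(Level.SEVERE, "Failed to register node " + nodeId, e);
}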

Example 38 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class FIFOJobQueue method pull.

@Override
public List<JobRun> pull() {
    List<JobRun> jobRuns = new ArrayList<>();
    Iterator<JobRun> runIterator = jobListMap.values().iterator();
    while (runIterator.hasNext()) {
        JobRun run = runIterator.next();
        JobSpecification job = run.getJobSpecification();
        // The cluster's capacity can change over time, so re-check whether this job
        // can be admitted now, should stay queued, or must be rejected.
        try {
            IJobCapacityController.JobSubmissionStatus status = jobCapacityController.allocate(job);
            // Checks if the job can be executed immediately.
            if (status == IJobCapacityController.JobSubmissionStatus.EXECUTE) {
                jobRuns.add(run);
                // Removes the selected job.
                runIterator.remove();
            }
        } catch (HyracksException exception) {
            // The required capacity exceeds maximum capacity.
            List<Exception> exceptions = new ArrayList<>();
            exceptions.add(exception);
            // Removes the job from the queue.
            runIterator.remove();
            try {
                // Fails the job.
                jobManager.prepareComplete(run, JobStatus.FAILURE_BEFORE_EXECUTION, exceptions);
            } catch (HyracksException e) {
                LOGGER.log(Level.SEVERE, e.getMessage(), e);
            }
        }
    }
    return jobRuns;
}
Also used : IJobCapacityController(org.apache.hyracks.api.job.resource.IJobCapacityController) ArrayList(java.util.ArrayList) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) ArrayList(java.util.ArrayList) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) JobRun(org.apache.hyracks.control.cc.job.JobRun)
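An illustrative scheduling loop, not taken from the source, showing how a caller might drain the queue; jobQueue and startJobRun are hypothetical names.

// Admit every job the capacity controller lets through right now; jobs whose
// capacity check threw have already been failed inside pull().
for (JobRun run : jobQueue.pull()) {
    startJobRun(run); // hypothetical helper that launches the admitted run
}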

Example 39 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class ActivityClusterPlanner method computePartitionCounts.

private Map<ActivityId, ActivityPartitionDetails> computePartitionCounts(ActivityCluster ac) throws HyracksException {
    PartitionConstraintSolver solver = executor.getSolver();
    Set<LValueConstraintExpression> lValues = new HashSet<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        lValues.add(new PartitionCountExpression(anId.getOperatorDescriptorId()));
    }
    solver.solve(lValues);
    Map<OperatorDescriptorId, Integer> nPartMap = new HashMap<>();
    for (LValueConstraintExpression lv : lValues) {
        Object value = solver.getValue(lv);
        if (value == null) {
            throw new HyracksException("No value found for " + lv);
        }
        if (!(value instanceof Number)) {
            throw new HyracksException("Unexpected type of value bound to " + lv + ": " + value.getClass() + "(" + value + ")");
        }
        int nParts = ((Number) value).intValue();
        if (nParts <= 0) {
            throw new HyracksException("Unsatisfiable number of partitions for " + lv + ": " + nParts);
        }
        nPartMap.put(((PartitionCountExpression) lv).getOperatorDescriptorId(), nParts);
    }
    Map<ActivityId, ActivityPartitionDetails> activityPartsMap = new HashMap<>();
    for (ActivityId anId : ac.getActivityMap().keySet()) {
        int nParts = nPartMap.get(anId.getOperatorDescriptorId());
        int[] nInputPartitions = null;
        List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(anId);
        if (inputs != null) {
            nInputPartitions = new int[inputs.size()];
            for (int i = 0; i < nInputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = inputs.get(i).getConnectorId();
                ActivityId aid = ac.getProducerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nInputPartitions[i] = nPartInt;
            }
        }
        int[] nOutputPartitions = null;
        List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(anId);
        if (outputs != null) {
            nOutputPartitions = new int[outputs.size()];
            for (int i = 0; i < nOutputPartitions.length; ++i) {
                ConnectorDescriptorId cdId = outputs.get(i).getConnectorId();
                ActivityId aid = ac.getConsumerActivity(cdId);
                Integer nPartInt = nPartMap.get(aid.getOperatorDescriptorId());
                nOutputPartitions[i] = nPartInt;
            }
        }
        ActivityPartitionDetails apd = new ActivityPartitionDetails(nParts, nInputPartitions, nOutputPartitions);
        activityPartsMap.put(anId, apd);
    }
    return activityPartsMap;
}
Also used : IConnectorDescriptor(org.apache.hyracks.api.dataflow.IConnectorDescriptor) OperatorDescriptorId(org.apache.hyracks.api.dataflow.OperatorDescriptorId) HashMap(java.util.HashMap) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) LValueConstraintExpression(org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression) PartitionCountExpression(org.apache.hyracks.api.constraints.expressions.PartitionCountExpression) HashSet(java.util.HashSet)
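The solver can only bind a partition count that the job declared up front. A minimal sketch of supplying that constraint when building a job, assuming a JobSpecification named spec and an operator descriptor scanOp (both hypothetical here):

import org.apache.hyracks.api.constraints.PartitionConstraintHelper;

// Declare that scanOp runs with exactly 4 partitions, so the
// PartitionCountExpression above resolves to a positive number.
PartitionConstraintHelper.addPartitionCountConstraint(spec, scanOp, 4);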

Example 40 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class JobExecutor method startTasks.

private void startTasks(Map<String, List<TaskAttemptDescriptor>> taskAttemptMap) throws HyracksException {
    final DeploymentId deploymentId = jobRun.getDeploymentId();
    final JobId jobId = jobRun.getJobId();
    final ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
    final Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = new HashMap<>(jobRun.getConnectorPolicyMap());
    INodeManager nodeManager = ccs.getNodeManager();
    try {
        byte[] acgBytes = predistributed ? null : JavaSerializationUtils.serialize(acg);
        for (Map.Entry<String, List<TaskAttemptDescriptor>> entry : taskAttemptMap.entrySet()) {
            String nodeId = entry.getKey();
            final List<TaskAttemptDescriptor> taskDescriptors = entry.getValue();
            final NodeControllerState node = nodeManager.getNodeControllerState(nodeId);
            if (node != null) {
                node.getActiveJobIds().add(jobRun.getJobId());
                boolean changed = jobRun.getParticipatingNodeIds().add(nodeId);
                if (LOGGER.isLoggable(Level.FINE)) {
                    LOGGER.fine("Starting: " + taskDescriptors + " at " + entry.getKey());
                }
                byte[] jagBytes = changed ? acgBytes : null;
                node.getNodeController().startTasks(deploymentId, jobId, jagBytes, taskDescriptors, connectorPolicies, jobRun.getFlags());
            }
        }
    } catch (Exception e) {
        throw new HyracksException(e);
    }
}
Also used : INodeManager(org.apache.hyracks.control.cc.cluster.INodeManager) DeploymentId(org.apache.hyracks.api.deployment.DeploymentId) HashMap(java.util.HashMap) IConnectorPolicy(org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy) ConnectorDescriptorId(org.apache.hyracks.api.dataflow.ConnectorDescriptorId) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) TaskAttemptDescriptor(org.apache.hyracks.control.common.job.TaskAttemptDescriptor) ActivityClusterGraph(org.apache.hyracks.api.job.ActivityClusterGraph) ArrayList(java.util.ArrayList) List(java.util.List) NodeControllerState(org.apache.hyracks.control.cc.NodeControllerState) Map(java.util.Map) JobId(org.apache.hyracks.api.job.JobId)
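A restated sketch of the dispatch decision above, clarifying the Set.add idiom; nothing here is new behavior, the names simply mirror the method.

// Set.add returns true only when nodeId was not already present, so "changed"
// marks a node joining this job for the first time; only such nodes receive the
// serialized activity cluster graph, all others get null and reuse their copy.
Set<String> participants = jobRun.getParticipatingNodeIds();
boolean firstTimeOnThisJob = participants.add(nodeId);
byte[] jagBytes = firstTimeOnThisJob ? acgBytes : null;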

Aggregations

HyracksException (org.apache.hyracks.api.exceptions.HyracksException): 48
IOException (java.io.IOException): 10
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 8
JobId (org.apache.hyracks.api.job.JobId): 8
HashMap (java.util.HashMap): 7
Test (org.junit.Test): 6
ArrayList (java.util.ArrayList): 5
Map (java.util.Map): 5
IJobCapacityController (org.apache.hyracks.api.job.resource.IJobCapacityController): 5
INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager): 5
JobRun (org.apache.hyracks.control.cc.job.JobRun): 5
URL (java.net.URL): 4
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 4
NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState): 4
File (java.io.File): 3
HashSet (java.util.HashSet): 3
NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo): 3
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 3
PrintWriter (java.io.PrintWriter): 2
StringWriter (java.io.StringWriter): 2