
Example 41 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

Class JobManager, method prepareComplete:

@Override
public void prepareComplete(JobRun run, JobStatus status, List<Exception> exceptions) throws HyracksException {
    checkJob(run);
    if (status == JobStatus.FAILURE_BEFORE_EXECUTION) {
        run.setPendingStatus(JobStatus.FAILURE, exceptions);
        finalComplete(run);
        return;
    }
    JobId jobId = run.getJobId();
    HyracksException caughtException = null;
    if (run.getPendingStatus() != null && run.getCleanupPendingNodeIds().isEmpty()) {
        finalComplete(run);
        return;
    }
    if (run.getPendingStatus() != null) {
        LOGGER.warning("Ignoring duplicate cleanup for JobRun with id: " + jobId);
        return;
    }
    Set<String> targetNodes = run.getParticipatingNodeIds();
    run.getCleanupPendingNodeIds().addAll(targetNodes);
    if (run.getPendingStatus() != JobStatus.FAILURE && run.getPendingStatus() != JobStatus.TERMINATED) {
        run.setPendingStatus(status, exceptions);
    }
    if (targetNodes != null && !targetNodes.isEmpty()) {
        INodeManager nodeManager = ccs.getNodeManager();
        Set<String> toDelete = new HashSet<>();
        for (String n : targetNodes) {
            NodeControllerState ncs = nodeManager.getNodeControllerState(n);
            try {
                if (ncs == null) {
                    toDelete.add(n);
                } else {
                    ncs.getNodeController().cleanUpJoblet(jobId, status);
                }
            } catch (Exception e) {
                LOGGER.log(Level.SEVERE, e.getMessage(), e);
                if (caughtException == null) {
                    caughtException = new HyracksException(e);
                } else {
                    caughtException.addSuppressed(e);
                }
            }
        }
        targetNodes.removeAll(toDelete);
        run.getCleanupPendingNodeIds().removeAll(toDelete);
        if (run.getCleanupPendingNodeIds().isEmpty()) {
            finalComplete(run);
        }
    } else {
        finalComplete(run);
    }
    // rethrow the first caught exception (with later failures attached as suppressed), if any
    if (caughtException != null) {
        throw caughtException;
    }
}
Also used: INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState), JobId (org.apache.hyracks.api.job.JobId), InvocationTargetException (java.lang.reflect.InvocationTargetException), HashSet (java.util.HashSet)
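
The cleanup loop above follows a common failure-aggregation pattern: the first failure is wrapped in a HyracksException, later failures are attached via addSuppressed, and the combined exception is rethrown once all nodes have been visited. A minimal, self-contained sketch of that pattern (the CleanupErrorAggregation class and cleanupTasks list are hypothetical, not part of Hyracks):

import java.util.List;

import org.apache.hyracks.api.exceptions.HyracksException;

public class CleanupErrorAggregation {
    // Runs every task, remembering the first failure and suppressing the rest,
    // so that a single exception reports all cleanup problems to the caller.
    static void runAll(List<Runnable> cleanupTasks) throws HyracksException {
        HyracksException caught = null;
        for (Runnable task : cleanupTasks) {
            try {
                task.run();
            } catch (Exception e) {
                if (caught == null) {
                    caught = new HyracksException(e); // first failure becomes the thrown exception
                } else {
                    caught.addSuppressed(e); // later failures ride along as suppressed
                }
            }
        }
        if (caught != null) {
            throw caught;
        }
    }
}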

Example 42 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

Class JobManagerTest, method testNullJob:

@Test
public void testNullJob() throws HyracksException {
    IJobCapacityController jobCapacityController = mock(IJobCapacityController.class);
    IJobManager jobManager = new JobManager(ccConfig, mockClusterControllerService(), jobCapacityController);
    boolean invalidParameter = false;
    try {
        jobManager.add(null);
    } catch (HyracksException e) {
        invalidParameter = e.getErrorCode() == ErrorCode.INVALID_INPUT_PARAMETER;
    }
    Assert.assertTrue(invalidParameter);
    Assert.assertTrue(jobManager.getRunningJobs().isEmpty());
    Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
}
Also used: IJobCapacityController (org.apache.hyracks.api.job.resource.IJobCapacityController), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), Test (org.junit.Test)
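
The boolean-flag pattern in the test can also be expressed with Assert.assertThrows, which returns the thrown exception so the error code can be checked directly. This is only a sketch: it assumes JUnit 4.13 or later (which added assertThrows) and reuses the jobManager set up exactly as in testNullJob above:

// Sketch only; assumes JUnit 4.13+ and the same jobManager setup as testNullJob above.
HyracksException e = Assert.assertThrows(HyracksException.class, () -> jobManager.add(null));
Assert.assertEquals(ErrorCode.INVALID_INPUT_PARAMETER, e.getErrorCode());
Assert.assertTrue(jobManager.getRunningJobs().isEmpty());
Assert.assertTrue(jobManager.getPendingJobs().isEmpty());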

Example 43 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

Class TaskCompleteWork, method performEvent:

@Override
protected void performEvent(TaskAttempt ta) {
    try {
        IJobManager jobManager = ccs.getJobManager();
        JobRun run = jobManager.get(jobId);
        if (statistics != null) {
            JobProfile jobProfile = run.getJobProfile();
            Map<String, JobletProfile> jobletProfiles = jobProfile.getJobletProfiles();
            JobletProfile jobletProfile = jobletProfiles.get(nodeId);
            if (jobletProfile == null) {
                jobletProfile = new JobletProfile(nodeId);
                jobletProfiles.put(nodeId, jobletProfile);
            }
            jobletProfile.getTaskProfiles().put(taId, statistics);
        }
        run.getExecutor().notifyTaskComplete(ta);
    } catch (HyracksException e) {
        e.printStackTrace();
    }
}
Also used: JobletProfile (org.apache.hyracks.control.common.job.profiling.om.JobletProfile), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), IJobManager (org.apache.hyracks.control.cc.job.IJobManager), JobProfile (org.apache.hyracks.control.common.job.profiling.om.JobProfile), JobRun (org.apache.hyracks.control.cc.job.JobRun)
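
The null check followed by put is the usual get-or-create map idiom. On Java 8 and later the same logic could be condensed with Map.computeIfAbsent; the sketch below reuses the local variables of performEvent (jobProfile, nodeId, taId, statistics) and only illustrates the idiom, it is not how the project writes it:

// Creates the JobletProfile for nodeId on first use, then records the task statistics.
JobletProfile jobletProfile = jobProfile.getJobletProfiles().computeIfAbsent(nodeId, JobletProfile::new);
jobletProfile.getTaskProfiles().put(taId, statistics);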

Example 44 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

Class JobCapacityControllerTest, method test:

@Test
public void test() throws HyracksException {
    IResourceManager resourceManager = makeResourceManagerWithCapacity(4294967296L, 33);
    JobCapacityController capacityController = new JobCapacityController(resourceManager);
    // Verifies the correctness of the allocate method.
    Assert.assertTrue(capacityController.allocate(makeJobWithRequiredCapacity(4294967296L, 16)) == IJobCapacityController.JobSubmissionStatus.EXECUTE);
    Assert.assertTrue(capacityController.allocate(makeJobWithRequiredCapacity(2147483648L, 16)) == IJobCapacityController.JobSubmissionStatus.QUEUE);
    Assert.assertTrue(capacityController.allocate(makeJobWithRequiredCapacity(2147483648L, 32)) == IJobCapacityController.JobSubmissionStatus.QUEUE);
    boolean exceedCapacity = false;
    try {
        capacityController.allocate(makeJobWithRequiredCapacity(2147483648L, 64));
    } catch (HyracksException e) {
        exceedCapacity = e.getErrorCode() == ErrorCode.JOB_REQUIREMENTS_EXCEED_CAPACITY;
    }
    Assert.assertTrue(exceedCapacity);
    Assert.assertTrue(capacityController.allocate(makeJobWithRequiredCapacity(4294967296L, 32)) == IJobCapacityController.JobSubmissionStatus.QUEUE);
    exceedCapacity = false;
    try {
        capacityController.allocate(makeJobWithRequiredCapacity(4294967297L, 33));
    } catch (HyracksException e) {
        exceedCapacity = e.getErrorCode() == ErrorCode.JOB_REQUIREMENTS_EXCEED_CAPACITY;
    }
    Assert.assertTrue(exceedCapacity);
    // Verifies that the release method does not leak resource.
    capacityController.release(makeJobWithRequiredCapacity(4294967296L, 16));
    Assert.assertTrue(resourceManager.getCurrentCapacity().getAggregatedMemoryByteSize() == 4294967296L);
    Assert.assertTrue(resourceManager.getCurrentCapacity().getAggregatedCores() == 33);
}
Also used: IJobCapacityController (org.apache.hyracks.api.job.resource.IJobCapacityController), IResourceManager (org.apache.hyracks.control.cc.scheduler.IResourceManager), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), Test (org.junit.Test)
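
The long literals in the test are exact powers of two: 4294967296L is 4 GiB and 2147483648L is 2 GiB of aggregated memory, while the second argument of each helper is the core count; the two expected failures are the jobs that ask for 64 cores (over the 33-core capacity) and for 4294967297L bytes (one byte over 4 GiB). A purely illustrative sketch of writing those constants more readably (not taken from the test):

// Memory sizes used by the test, expressed as shifts instead of raw literals.
long fourGiB = 4L << 30; // 4294967296L
long twoGiB = 2L << 30;  // 2147483648L
// e.g. makeResourceManagerWithCapacity(fourGiB, 33) describes the 4 GiB / 33-core capacity above.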

Example 45 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

Class Scheduler, method getLocationConstraints:

/**
     * Set location constraints for a file scan operator with a list of file
     * splits. It guarantees that the maximum number of slots a machine can get is
     * at most one more than the minimum number of slots a machine can get.
     *
     * @throws HyracksException
     */
public String[] getLocationConstraints(InputSplit[] splits) throws HyracksException {
    if (splits == null) {
        /** handle the case where the splits array is null */
        return new String[] {};
    }
    int[] workloads = new int[NCs.length];
    Arrays.fill(workloads, 0);
    String[] locations = new String[splits.length];
    Map<String, IntWritable> locationToNumOfSplits = new HashMap<String, IntWritable>();
    /**
         * upper bound number of slots that a machine can get
         */
    int upperBoundSlots = splits.length % workloads.length == 0 ? (splits.length / workloads.length) : (splits.length / workloads.length + 1);
    /**
         * lower bound number of slots that a machine can get
         */
    int lowerBoundSlots = splits.length % workloads.length == 0 ? upperBoundSlots : upperBoundSlots - 1;
    try {
        Random random = new Random(System.currentTimeMillis());
        boolean[] scheduled = new boolean[splits.length];
        Arrays.fill(scheduled, false);
        /**
             * scan the splits and build the popularity map
             * give the machines with less local splits more scheduling priority
             */
        buildPopularityMap(splits, locationToNumOfSplits);
        /**
             * push data-local lower-bounds slots to each machine
             */
        scheduleLocalSlots(splits, workloads, locations, lowerBoundSlots, random, scheduled, locationToNumOfSplits);
        /**
             * push data-local upper-bounds slots to each machine
             */
        scheduleLocalSlots(splits, workloads, locations, upperBoundSlots, random, scheduled, locationToNumOfSplits);
        int dataLocalCount = 0;
        for (int i = 0; i < scheduled.length; i++) {
            if (scheduled[i]) {
                dataLocalCount++;
            }
        }
        LOGGER.info("Data local rate: " + (scheduled.length == 0 ? 0.0 : ((float) dataLocalCount / (float) (scheduled.length))));
        /**
             * push non-data-local lower-bounds slots to each machine
             */
        scheduleNonLocalSlots(splits, workloads, locations, lowerBoundSlots, scheduled);
        /**
             * push non-data-local upper-bounds slots to each machine
             */
        scheduleNonLocalSlots(splits, workloads, locations, upperBoundSlots, scheduled);
        return locations;
    } catch (IOException e) {
        throw new HyracksException(e);
    }
}
Also used: Random (java.util.Random), HashMap (java.util.HashMap), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), IOException (java.io.IOException), IntWritable (org.apache.hadoop.io.IntWritable)
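
As a concrete check of the bound arithmetic in getLocationConstraints: with, say, 10 splits and 4 node controllers (hypothetical numbers), 10 % 4 != 0, so upperBoundSlots = 10 / 4 + 1 = 3 and lowerBoundSlots = 3 - 1 = 2, which lets every machine receive either 2 or 3 splits (4 * 2 = 8 <= 10 <= 12 = 4 * 3). A standalone sketch of the computation:

// Standalone illustration of the slot-bound computation; the numbers are hypothetical.
int numSplits = 10;
int numNCs = 4;
int upperBoundSlots = numSplits % numNCs == 0 ? numSplits / numNCs : numSplits / numNCs + 1; // 3
int lowerBoundSlots = numSplits % numNCs == 0 ? upperBoundSlots : upperBoundSlots - 1;       // 2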

Aggregations

HyracksException (org.apache.hyracks.api.exceptions.HyracksException): 48
IOException (java.io.IOException): 10
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 8
JobId (org.apache.hyracks.api.job.JobId): 8
HashMap (java.util.HashMap): 7
Test (org.junit.Test): 6
ArrayList (java.util.ArrayList): 5
Map (java.util.Map): 5
IJobCapacityController (org.apache.hyracks.api.job.resource.IJobCapacityController): 5
INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager): 5
JobRun (org.apache.hyracks.control.cc.job.JobRun): 5
URL (java.net.URL): 4
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 4
NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState): 4
File (java.io.File): 3
HashSet (java.util.HashSet): 3
NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo): 3
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 3
PrintWriter (java.io.PrintWriter): 2
StringWriter (java.io.StringWriter): 2