Search in sources :

Example 6 with TaskCluster

use of org.apache.hyracks.control.cc.job.TaskCluster in project asterixdb by apache.

The class JobExecutor, method abortJob.

/**
 * Aborts every in-progress task cluster and schedules job cleanup with a
 * FAILURE status.
 *
 * @param exceptions the exceptions that caused the abort; forwarded to the cleanup work
 */
private void abortJob(List<Exception> exceptions) {
    // Iterate over a snapshot: aborting a task cluster removes it from inProgressTaskClusters.
    Set<TaskCluster> snapshot = new HashSet<>(inProgressTaskClusters);
    for (TaskCluster cluster : snapshot) {
        abortTaskCluster(findLastTaskClusterAttempt(cluster), TaskClusterAttempt.TaskClusterStatus.ABORTED);
    }
    // After aborting everything, nothing may remain in flight.
    assert inProgressTaskClusters.isEmpty();
    ccs.getWorkQueue()
            .schedule(new JobCleanupWork(ccs.getJobManager(), jobRun.getJobId(), JobStatus.FAILURE, exceptions));
}
Also used : JobCleanupWork(org.apache.hyracks.control.cc.work.JobCleanupWork) TaskCluster(org.apache.hyracks.control.cc.job.TaskCluster) HashSet(java.util.HashSet)

Example 7 with TaskCluster

use of org.apache.hyracks.control.cc.job.TaskCluster in project asterixdb by apache.

The class JobExecutor, method startRunnableActivityClusters.

/**
 * Computes the set of runnable task-cluster roots for the whole job and starts
 * them. When nothing is runnable and nothing is in progress, the job is done
 * and cleanup is scheduled with a TERMINATED status.
 *
 * @throws HyracksException if planning an activity cluster fails
 */
private void startRunnableActivityClusters() throws HyracksException {
    Set<TaskCluster> roots = new HashSet<>();
    findRunnableTaskClusterRoots(roots, jobRun.getActivityClusterGraph().getActivityClusterMap().values());
    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.fine("Runnable TC roots: " + roots + ", inProgressTaskClusters: " + inProgressTaskClusters);
    }
    if (!roots.isEmpty() || !inProgressTaskClusters.isEmpty()) {
        startRunnableTaskClusters(roots);
        return;
    }
    // No runnable roots and nothing in flight: the job has run to completion.
    ccs.getWorkQueue()
            .schedule(new JobCleanupWork(ccs.getJobManager(), jobRun.getJobId(), JobStatus.TERMINATED, null));
}
Also used : JobCleanupWork(org.apache.hyracks.control.cc.work.JobCleanupWork) TaskCluster(org.apache.hyracks.control.cc.job.TaskCluster) HashSet(java.util.HashSet)

Example 8 with TaskCluster

use of org.apache.hyracks.control.cc.job.TaskCluster in project asterixdb by apache.

The class JobExecutor, method findRunnableTaskClusterRoots.

/**
 * Recursively walks the dependency graph below {@code candidate}. If any
 * dependency is unplanned or has incomplete task-cluster roots, recurses into
 * it instead; otherwise plans {@code candidate} (if needed) and adds its
 * not-yet-completed root task clusters to {@code frontier}.
 *
 * <p>The root/completeness test was previously duplicated in two loops; it is
 * factored into {@link #isTaskClusterRoot} and {@link #isTaskClusterCompleted}.
 *
 * @param frontier  output set collecting runnable task-cluster roots
 * @param candidate the activity cluster to examine
 * @throws HyracksException if planning the activity cluster fails
 */
private void findRunnableTaskClusterRoots(Set<TaskCluster> frontier, ActivityCluster candidate)
        throws HyracksException {
    boolean depsComplete = true;
    for (ActivityCluster depAC : candidate.getDependencies()) {
        if (!isPlanned(depAC)) {
            depsComplete = false;
            findRunnableTaskClusterRoots(frontier, depAC);
        } else {
            boolean tcRootsComplete = true;
            for (TaskCluster tc : getActivityClusterPlan(depAC).getTaskClusters()) {
                if (isTaskClusterRoot(tc) && !isTaskClusterCompleted(tc)) {
                    tcRootsComplete = false;
                    break;
                }
            }
            if (!tcRootsComplete) {
                // Dependency is planned but not finished; descend into it.
                depsComplete = false;
                findRunnableTaskClusterRoots(frontier, depAC);
            }
        }
    }
    if (!depsComplete) {
        return;
    }
    if (!isPlanned(candidate)) {
        // Dependencies are satisfied; plan this cluster now and record the
        // producers of each partition for later lookup.
        ActivityClusterPlanner acp = new ActivityClusterPlanner(this);
        ActivityClusterPlan acPlan = acp.planActivityCluster(candidate);
        jobRun.getActivityClusterPlanMap().put(candidate.getId(), acPlan);
        partitionProducingTaskClusterMap.putAll(acp.getPartitionProducingTaskClusterMap());
    }
    for (TaskCluster tc : getActivityClusterPlan(candidate).getTaskClusters()) {
        if (isTaskClusterRoot(tc) && !isTaskClusterCompleted(tc)) {
            frontier.add(tc);
        }
    }
}

/**
 * A task cluster is a root of its activity cluster when it produces no
 * partitions consumed by other task clusters.
 */
private static boolean isTaskClusterRoot(TaskCluster tc) {
    return tc.getProducedPartitions().isEmpty();
}

/**
 * True when the task cluster's last attempt exists and finished with
 * COMPLETED status.
 */
private boolean isTaskClusterCompleted(TaskCluster tc) {
    TaskClusterAttempt tca = findLastTaskClusterAttempt(tc);
    return tca != null && tca.getStatus() == TaskClusterAttempt.TaskClusterStatus.COMPLETED;
}
Also used : ActivityClusterPlan(org.apache.hyracks.control.cc.job.ActivityClusterPlan) TaskClusterAttempt(org.apache.hyracks.control.cc.job.TaskClusterAttempt) TaskCluster(org.apache.hyracks.control.cc.job.TaskCluster) ActivityCluster(org.apache.hyracks.api.job.ActivityCluster)

Example 9 with TaskCluster

use of org.apache.hyracks.control.cc.job.TaskCluster in project asterixdb by apache.

The class ActivityClusterPlanner, method buildConnectorPolicyUnawareTaskClusters.

/**
 * Builds a single task cluster that contains every task of the activity
 * cluster, ignoring connector policies entirely.
 *
 * @param ac              the activity cluster being planned
 * @param activityPlanMap per-activity plans holding the tasks to group
 * @return a one-element array containing the all-encompassing task cluster
 */
private TaskCluster[] buildConnectorPolicyUnawareTaskClusters(ActivityCluster ac,
        Map<ActivityId, ActivityPlan> activityPlanMap) {
    List<Task> allTasks = new ArrayList<>();
    for (ActivityId activityId : ac.getActivityMap().keySet()) {
        for (Task task : activityPlanMap.get(activityId).getTasks()) {
            allTasks.add(task);
        }
    }
    Task[] taskArray = allTasks.toArray(new Task[allTasks.size()]);
    TaskCluster cluster = new TaskCluster(new TaskClusterId(ac.getId(), 0), ac, taskArray);
    // Back-link every task to the cluster that now owns it.
    for (Task task : cluster.getTasks()) {
        task.setTaskCluster(cluster);
    }
    return new TaskCluster[] { cluster };
}
Also used : Task(org.apache.hyracks.control.cc.job.Task) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) ActivityPlan(org.apache.hyracks.control.cc.job.ActivityPlan) ArrayList(java.util.ArrayList) TaskCluster(org.apache.hyracks.control.cc.job.TaskCluster) TaskClusterId(org.apache.hyracks.control.cc.job.TaskClusterId)

Example 10 with TaskCluster

use of org.apache.hyracks.control.cc.job.TaskCluster in project asterixdb by apache.

The class ActivityClusterPlanner, method planActivityCluster.

/**
 * Plans the given activity cluster: computes per-activity partition counts,
 * builds the activity plans, assigns connector policies, and groups the tasks
 * into task clusters.
 *
 * @param ac the activity cluster to plan
 * @return the completed plan for {@code ac}
 * @throws HyracksException if any planning step fails
 */
ActivityClusterPlan planActivityCluster(ActivityCluster ac) throws HyracksException {
    JobRun run = executor.getJobRun();
    Map<ActivityId, ActivityPartitionDetails> partitionCounts = computePartitionCounts(ac);
    Map<ActivityId, ActivityPlan> planMap = buildActivityPlanMap(ac, run, partitionCounts);
    assignConnectorPolicy(ac, planMap);
    TaskCluster[] clusters = computeTaskClusters(ac, run, planMap);
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Plan for " + ac);
        LOGGER.info("Built " + clusters.length + " Task Clusters");
        for (TaskCluster cluster : clusters) {
            LOGGER.info("Tasks: " + Arrays.toString(cluster.getTasks()));
        }
    }
    return new ActivityClusterPlan(clusters, planMap);
}
Also used : ActivityClusterPlan(org.apache.hyracks.control.cc.job.ActivityClusterPlan) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) ActivityPlan(org.apache.hyracks.control.cc.job.ActivityPlan) TaskCluster(org.apache.hyracks.control.cc.job.TaskCluster) JobRun(org.apache.hyracks.control.cc.job.JobRun)

Aggregations

TaskCluster (org.apache.hyracks.control.cc.job.TaskCluster)14 TaskClusterAttempt (org.apache.hyracks.control.cc.job.TaskClusterAttempt)7 ArrayList (java.util.ArrayList)5 ActivityId (org.apache.hyracks.api.dataflow.ActivityId)5 HashSet (java.util.HashSet)4 List (java.util.List)4 ActivityPlan (org.apache.hyracks.control.cc.job.ActivityPlan)4 Task (org.apache.hyracks.control.cc.job.Task)4 TaskAttempt (org.apache.hyracks.control.cc.job.TaskAttempt)4 HashMap (java.util.HashMap)3 Map (java.util.Map)3 ConnectorDescriptorId (org.apache.hyracks.api.dataflow.ConnectorDescriptorId)3 TaskAttemptId (org.apache.hyracks.api.dataflow.TaskAttemptId)3 TaskId (org.apache.hyracks.api.dataflow.TaskId)3 ActivityCluster (org.apache.hyracks.api.job.ActivityCluster)3 JobRun (org.apache.hyracks.control.cc.job.JobRun)3 IConnectorPolicy (org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy)2 HyracksException (org.apache.hyracks.api.exceptions.HyracksException)2 PartitionId (org.apache.hyracks.api.partitions.PartitionId)2 ActivityClusterPlan (org.apache.hyracks.control.cc.job.ActivityClusterPlan)2