Example 11 with TaskRunner

use of org.apache.hadoop.hive.ql.exec.TaskRunner in project hive by apache.

the class TaskQueue method pollFinished.

/**
 * Polls running tasks to see if a task has ended.
 *
 * @return the TaskRunner of a completed or failed task, or null if the queue has been shut down
 */
public synchronized TaskRunner pollFinished() throws InterruptedException {
    while (!shutdown) {
        Iterator<TaskRunner> it = running.iterator();
        while (it.hasNext()) {
            TaskRunner runner = it.next();
            if (runner != null && !runner.isRunning()) {
                it.remove();
                return runner;
            }
        }
        wait(SLEEP_TIME);
    }
    return null;
}
Also used : TaskRunner(org.apache.hadoop.hive.ql.exec.TaskRunner)
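
A minimal sketch of how a caller might drain this method, assuming TaskQueue lives in org.apache.hadoop.hive.ql (adjust the import for your Hive version); this is illustrative only, not the actual Hive Driver loop.

import org.apache.hadoop.hive.ql.TaskQueue;
import org.apache.hadoop.hive.ql.exec.TaskRunner;

public class PollFinishedSketch {
    // Processes runners as they complete; pollFinished() blocks between checks
    // and only returns null once the queue has been shut down.
    static void drain(TaskQueue queue) throws InterruptedException {
        TaskRunner finished;
        while ((finished = queue.pollFinished()) != null) {
            System.out.println("Task finished: " + finished.getTask().getId());
        }
    }
}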

Example 12 with TaskRunner

use of org.apache.hadoop.hive.ql.exec.TaskRunner in project hive by apache.

the class MapJoinCounterHook method run.

public void run(HookContext hookContext) {
    HiveConf conf = hookContext.getConf();
    boolean enableConvert = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVECONVERTJOIN);
    if (!enableConvert) {
        return;
    }
    QueryPlan plan = hookContext.getQueryPlan();
    String queryID = plan.getQueryId();
    // String query = SessionState.get().getCmd();
    int commonJoin = 0;
    int hintedMapJoin = 0;
    int convertedMapJoin = 0;
    int hintedMapJoinLocal = 0;
    int convertedMapJoinLocal = 0;
    int backupCommonJoin = 0;
    List<TaskRunner> list = hookContext.getCompleteTaskList();
    for (TaskRunner tskRunner : list) {
        Task tsk = tskRunner.getTask();
        int tag = tsk.getTaskTag();
        switch (tag) {
            case Task.COMMON_JOIN:
                commonJoin++;
                break;
            case Task.HINTED_MAPJOIN:
                hintedMapJoin++;
                break;
            case Task.HINTED_MAPJOIN_LOCAL:
                hintedMapJoinLocal++;
                break;
            case Task.CONVERTED_MAPJOIN:
                convertedMapJoin++;
                break;
            case Task.CONVERTED_MAPJOIN_LOCAL:
                convertedMapJoinLocal++;
                break;
            case Task.BACKUP_COMMON_JOIN:
                backupCommonJoin++;
                break;
        }
    }
    LogHelper console = SessionState.getConsole();
    console.printError("[MapJoinCounter PostHook] COMMON_JOIN: " + commonJoin + " HINTED_MAPJOIN: " + hintedMapJoin + " HINTED_MAPJOIN_LOCAL: " + hintedMapJoinLocal + " CONVERTED_MAPJOIN: " + convertedMapJoin + " CONVERTED_MAPJOIN_LOCAL: " + convertedMapJoinLocal + " BACKUP_COMMON_JOIN: " + backupCommonJoin);
}
Also used : Task(org.apache.hadoop.hive.ql.exec.Task) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) HiveConf(org.apache.hadoop.hive.conf.HiveConf) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) TaskRunner(org.apache.hadoop.hive.ql.exec.TaskRunner)
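
Because the hook bails out unless map-join conversion is enabled, it is typically wired in as a post-execution hook. A minimal sketch of that configuration in code; the hook's package name is an assumption, so substitute the fully qualified class name from your Hive build.

import org.apache.hadoop.hive.conf.HiveConf;

public class RegisterPostHookSketch {
    static HiveConf confWithMapJoinCounterHook() {
        HiveConf conf = new HiveConf();
        // hive.exec.post.hooks: comma-separated list of hooks run after query execution
        conf.setVar(HiveConf.ConfVars.POSTEXECHOOKS,
                "org.apache.hadoop.hive.ql.hooks.MapJoinCounterHook"); // package assumed
        // run() returns immediately unless map-join conversion is enabled (see above)
        conf.setBoolVar(HiveConf.ConfVars.HIVECONVERTJOIN, true);
        return conf;
    }
}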

Example 13 with TaskRunner

use of org.apache.hadoop.hive.ql.exec.TaskRunner in project ambrose by twitter.

the class AmbroseHiveFailHook method run.

@Override
public void run(HookContext hookContext) throws Exception {
    HiveConf conf = hookContext.getConf();
    Properties allConfProps = conf.getAllProperties();
    String queryId = AmbroseHiveUtil.getHiveQueryId(conf);
    EmbeddedAmbroseHiveProgressReporter reporter = getEmbeddedProgressReporter();
    List<TaskRunner> completeTaskList = hookContext.getCompleteTaskList();
    Field _taskResultField = accessTaskResultField();
    for (TaskRunner taskRunner : completeTaskList) {
        TaskResult taskResult = (TaskResult) _taskResultField.get(taskRunner);
        // get non-running, failed jobs
        if (!taskResult.isRunning() && taskResult.getExitVal() != 0) {
            Task<? extends Serializable> task = taskRunner.getTask();
            String nodeId = AmbroseHiveUtil.getNodeIdFromNodeName(conf, task.getId());
            DAGNode<Job> dagNode = reporter.getDAGNodeFromNodeId(nodeId);
            HiveJob job = (HiveJob) dagNode.getJob();
            job.setConfiguration(allConfProps);
            MapReduceJobState mrJobState = getJobState(job);
            mrJobState.setSuccessful(false);
            reporter.addJob((Job) job);
            reporter.pushEvent(queryId, new Event.JobFailedEvent(dagNode));
        }
    }
    reporter.restoreEventStack();
    String sleepTime = System.getProperty(POST_SCRIPT_SLEEP_SECS_PARAM, "10");
    try {
        int sleepTimeSeconds = Integer.parseInt(sleepTime);
        LOG.info("Script failed but sleeping for " + sleepTimeSeconds + " seconds to keep the HiveStats REST server running. Hit ctrl-c to exit.");
        Thread.sleep(sleepTimeSeconds * 1000L);
        reporter.stopServer();
    } catch (NumberFormatException e) {
        LOG.warn(POST_SCRIPT_SLEEP_SECS_PARAM + " param is not a valid number, not sleeping: " + sleepTime);
    } catch (InterruptedException e) {
        LOG.warn("Sleep interrupted", e);
    }
}
Also used : MapReduceJobState(com.twitter.ambrose.model.hadoop.MapReduceJobState) Properties(java.util.Properties) TaskRunner(org.apache.hadoop.hive.ql.exec.TaskRunner) Field(java.lang.reflect.Field) TaskResult(org.apache.hadoop.hive.ql.exec.TaskResult) Event(com.twitter.ambrose.model.Event) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Job(com.twitter.ambrose.model.Job) EmbeddedAmbroseHiveProgressReporter(com.twitter.ambrose.hive.reporter.EmbeddedAmbroseHiveProgressReporter)
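
The accessTaskResultField() helper is not shown above; a minimal sketch of what it might do, assuming TaskRunner keeps its TaskResult in a non-public field named "result" (the field name is an assumption, not confirmed by the source).

import java.lang.reflect.Field;
import org.apache.hadoop.hive.ql.exec.TaskRunner;

final class TaskResultFieldAccessSketch {
    // Reflectively exposes TaskRunner's TaskResult so the hook can read exit codes.
    static Field accessTaskResultField() throws NoSuchFieldException {
        Field f = TaskRunner.class.getDeclaredField("result"); // hypothetical field name
        f.setAccessible(true); // allow reading the non-public field
        return f;
    }
}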

Aggregations

TaskRunner (org.apache.hadoop.hive.ql.exec.TaskRunner) 13
TaskResult (org.apache.hadoop.hive.ql.exec.TaskResult) 4
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 4
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 3
SessionState (org.apache.hadoop.hive.ql.session.SessionState) 3
ImmutableMap (com.google.common.collect.ImmutableMap) 2
IOException (java.io.IOException) 2
HashMap (java.util.HashMap) 2
LinkedHashMap (java.util.LinkedHashMap) 2
LinkedHashSet (java.util.LinkedHashSet) 2
Map (java.util.Map) 2
Metrics (org.apache.hadoop.hive.common.metrics.common.Metrics) 2
QueryPlan (org.apache.hadoop.hive.ql.QueryPlan) 2
ConditionalTask (org.apache.hadoop.hive.ql.exec.ConditionalTask) 2
HookContext (org.apache.hadoop.hive.ql.hooks.HookContext) 2
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity) 2
LockException (org.apache.hadoop.hive.ql.lockmgr.LockException) 2
PerfLogger (org.apache.hadoop.hive.ql.log.PerfLogger) 2
AuthorizationException (org.apache.hadoop.hive.ql.metadata.AuthorizationException) 2
HiveSemanticAnalyzerHookContext (org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext) 2