
Example 1 with WorkflowProgressField

Use of com.twitter.ambrose.model.Event.WorkflowProgressField in project ambrose by twitter, in class AmbroseHivePreHook, method sendFilteredJobsStatus.

private void sendFilteredJobsStatus(String queryId, EmbeddedAmbroseHiveProgressReporter reporter, Map<String, DAGNode<Job>> nodeIdToDAGNode) {
    if (nodeIdToDAGNode == null) {
        return;
    }
    Map<WorkflowProgressField, String> eventData = new HashMap<Event.WorkflowProgressField, String>(1);
    int skipped = 0;
    for (DAGNode<Job> dagNode : nodeIdToDAGNode.values()) {
        Job job = dagNode.getJob();
        // filtered jobs don't have an assigned jobId; skip those that do
        if (job.getId() != null) {
            continue;
        }
        String nodeId = dagNode.getName();
        job.setId(AmbroseHiveUtil.asDisplayId(queryId, "filtered out", nodeId));
        reporter.addJobIdToProgress(nodeId, 100);
        reporter.pushEvent(queryId, new Event.JobFinishedEvent(dagNode));
        skipped++;
    }
    // sleep so that all these events are visible on the GUI before moving on
    try {
        Thread.sleep(skipped * 1000L);
    } catch (InterruptedException e) {
        LOG.warn("Sleep interrupted", e);
    }
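    // report the updated overall workflow progress now that the filtered jobs are marked finished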
    eventData.put(WorkflowProgressField.workflowProgress, Integer.toString(reporter.getOverallProgress()));
    reporter.pushEvent(queryId, new Event.WorkflowProgressEvent(eventData));
}
Also used: WorkflowProgressField (com.twitter.ambrose.model.Event.WorkflowProgressField), HashMap (java.util.HashMap), Event (com.twitter.ambrose.model.Event), Job (com.twitter.ambrose.model.Job)
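
Both examples report overall workflow progress the same way: a single-entry map keyed by WorkflowProgressField.workflowProgress, wrapped in a WorkflowProgressEvent and pushed through the reporter. The sketch below isolates that pattern; the wrapper class and method are illustrative only, while the Event and Maps calls are taken directly from the examples.

import java.util.Map;
import com.google.common.collect.Maps;
import com.twitter.ambrose.model.Event;
import com.twitter.ambrose.model.Event.WorkflowProgressField;

// Illustrative helper (not part of ambrose): builds the single-entry payload
// used by both hooks and wraps it in a WorkflowProgressEvent.
class WorkflowProgressSketch {
    static Event.WorkflowProgressEvent progressEvent(int overallProgress) {
        Map<WorkflowProgressField, String> eventData = Maps.newHashMapWithExpectedSize(1);
        eventData.put(WorkflowProgressField.workflowProgress, Integer.toString(overallProgress));
        return new Event.WorkflowProgressEvent(eventData);
    }
}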

Example 2 with WorkflowProgressField

Use of com.twitter.ambrose.model.Event.WorkflowProgressField in project ambrose by twitter, in class AmbroseHivePreHook, method waitBetween.

/**
 * Waits <tt>ambrose.wf.between.sleep.seconds</tt> before processing the
 * next statement (workflow) in the submitted script.
 *
 * @param hookContext
 * @param reporter
 * @param queryId
 */
private void waitBetween(HookContext hookContext, EmbeddedAmbroseHiveProgressReporter reporter, String queryId) {
    Configuration conf = hookContext.getConf();
    boolean justStarted = conf.getBoolean(SCRIPT_STARTED_PARAM, true);
    if (justStarted) {
        conf.setBoolean(SCRIPT_STARTED_PARAM, false);
    } else {
        // sleeping between workflows; the configured value is in seconds
        int sleepTimeSecs = conf.getInt(WF_BETWEEN_SLEEP_SECS_PARAM, 10);
        try {
            LOG.info("One workflow complete, sleeping for " + sleepTimeSecs + " sec(s) before moving to the next one, if any. Hit ctrl-c to exit.");
            Thread.sleep(sleepTimeSecs * 1000L);
            // send progress bar reset event
            Map<WorkflowProgressField, String> eventData = Maps.newHashMapWithExpectedSize(1);
            eventData.put(WorkflowProgressField.workflowProgress, "0");
            reporter.pushEvent(queryId, new Event.WorkflowProgressEvent(eventData));
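            // save this workflow's events and reset the reporter before the next statement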
            reporter.saveEventStack();
            reporter.reset();
        } catch (InterruptedException e) {
            LOG.warn("Sleep interrupted", e);
        }
    }
}
Also used: WorkflowProgressField (com.twitter.ambrose.model.Event.WorkflowProgressField), Configuration (org.apache.hadoop.conf.Configuration), Event (com.twitter.ambrose.model.Event)
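
The length of the pause comes from the Hadoop Configuration. Below is a small sketch of tuning it before the hook runs, assuming WF_BETWEEN_SLEEP_SECS_PARAM resolves to the property named in the Javadoc above (ambrose.wf.between.sleep.seconds); the wrapper class is illustrative only.

import org.apache.hadoop.conf.Configuration;

// Illustrative only: the property name is assumed from the waitBetween Javadoc;
// the hook falls back to 10 seconds when the property is not set.
class AmbroseSleepConfigSketch {
    static void shortenPause(Configuration conf) {
        // wait 5 seconds between workflows instead of the default 10
        conf.setInt("ambrose.wf.between.sleep.seconds", 5);
    }
}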

Aggregations

Event (com.twitter.ambrose.model.Event): 2 uses
WorkflowProgressField (com.twitter.ambrose.model.Event.WorkflowProgressField): 2 uses
Job (com.twitter.ambrose.model.Job): 1 use
HashMap (java.util.HashMap): 1 use
Configuration (org.apache.hadoop.conf.Configuration): 1 use
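
Written out as Java import statements, the aggregated classes above are:

import java.util.HashMap;
import org.apache.hadoop.conf.Configuration;
import com.twitter.ambrose.model.Event;
import com.twitter.ambrose.model.Event.WorkflowProgressField;
import com.twitter.ambrose.model.Job;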