Use of com.twitter.ambrose.model.DAGNode in project ambrose by twitter.
From class AmbroseCascadingGraphConverter, method convert.
/**
 * Translates every Cascading flow step in the job graph into an Ambrose
 * {@link DAGNode} keyed by step name, then links each node to its successors.
 */
public void convert() {
  // First pass: register a DAGNode for every flow step in the graph.
  Set vertices = jobsGraph.vertexSet();
  for (Object vertex : vertices) {
    BaseFlowStep flowStep = (BaseFlowStep) vertex;
    CascadingJob cascadingJob = new CascadingJob();
    cascadingJob.setFeatures(getNodeFeatures(flowStep));
    String stepName = flowStep.getName();
    dagNamesMap.put(stepName, new DAGNode<CascadingJob>(stepName, cascadingJob));
  }
  // Second pass: every node now exists in dagNamesMap, so successor
  // references can be resolved without ordering concerns.
  for (Object vertex : vertices) {
    String stepName = ((BaseFlowStep) vertex).getName();
    dagNamesMap.get(stepName).setSuccessors(getNodeSuccessors(vertex));
  }
}
Use of com.twitter.ambrose.model.DAGNode in project ambrose by twitter.
From class TestHRavenStatsReadService, method main.
/**
 * Main method for testing reading from hraven.
 *
 * @param args args[0] is the workflow id, in the form
 *             cluster!userName!appId!runId!timestamp!flowId
 * @throws IOException if reading from hraven fails
 */
public static void main(String[] args) throws IOException {
  // Guard against a missing argument instead of failing with an
  // ArrayIndexOutOfBoundsException that gives the user no hint.
  if (args.length < 1) {
    print("Usage: TestHRavenStatsReadService <workflowId>");
    return;
  }
  //cluster!userName!appId!runId!timestamp!flowId
  String workflowId = args[0];
  HRavenStatsReadService service = new HRavenStatsReadService();
  Map<String, DAGNode> dagMap = service.getDagNodeNameMap(workflowId);
  if (dagMap == null) {
    print("No dagNodeNameMap found for " + workflowId);
  } else {
    // Fixed typo in output: was "dapMap".
    print(String.format("Found %d dagMap entries", dagMap.size()));
    for (Map.Entry<String, DAGNode> entry : dagMap.entrySet()) {
      DAGNode node = entry.getValue();
      // A node may have no job attached yet; report a null jobId in that case.
      String jobId = node.getJob() != null ? node.getJob().getId() : null;
      print(String.format("%s: nodeName=%s jobId=%s successors=%s",
          entry.getKey(), node.getName(), jobId, node.getSuccessorNames()));
    }
  }
  // -1 asks for all events from the beginning of the workflow.
  List<Event> events = service.getEventsSinceId(workflowId, -1);
  print(String.format("Found %d events", events.size()));
  for (Event event : events) {
    print(String.format("%d %d %s %s",
        event.getId(), event.getTimestamp(), event.getType(), event.getPayload()));
  }
}
Use of com.twitter.ambrose.model.DAGNode in project ambrose by twitter.
From class AmbroseCascadingGraphConverter, method getNodeSuccessors.
/**
 * Resolves the DAGNode for each direct successor of the given vertex.
 *
 * @param vertex the step or node whose successor nodes will be returned.
 * @return collection of successor DAGNodes, looked up by step name.
 */
protected Collection<DAGNode<? extends Job>> getNodeSuccessors(Object vertex) {
  Collection<DAGNode<? extends Job>> successors = Sets.newHashSet();
  for (Object successor : Graphs.successorListOf(jobsGraph, vertex)) {
    String stepName = ((BaseFlowStep) successor).getName();
    successors.add(dagNamesMap.get(stepName));
  }
  return successors;
}
Use of com.twitter.ambrose.model.DAGNode in project ambrose by twitter.
From class AmbroseCascadingGraphConverter, method getNodeSuccessors (duplicate listing).
/**
 * Return a Collection of successor nodes of a certain vertex.
 *
 * @param vertex the step or node its successors nodes will be returned.
 * @return collection of successor DAGNodes for each node.
 */
protected Collection<DAGNode<? extends Job>> getNodeSuccessors(Object vertex) {
  Collection<DAGNode<? extends Job>> nodeSuccessors = Sets.newHashSet();
  // Direct successors of this vertex in the job graph (raw List from the graph API).
  List successorNodes = Graphs.successorListOf(jobsGraph, vertex);
  for (Object node : successorNodes) {
    BaseFlowStep step = (BaseFlowStep) node;
    String name = step.getName();
    // Nodes are registered by step name in convert(); look the successor up there.
    nodeSuccessors.add(dagNamesMap.get(name));
  }
  return nodeSuccessors;
}
Use of com.twitter.ambrose.model.DAGNode in project ambrose by twitter.
From class AmbroseHivePreHook, method run.
@Override
public void run(HookContext hookContext) throws Exception {
  // Identify the query this hook invocation belongs to.
  String queryId = AmbroseHiveUtil.getHiveQueryId(hookContext.getConf());
  EmbeddedAmbroseHiveProgressReporter reporter = getEmbeddedProgressReporter();
  HiveDAGTransformer transformer = new HiveDAGTransformer(hookContext);
  // Conditional tasks may be filtered out by Hive at runtime; we mark them as
  // 'complete'. (Original comment was garbled: "We them as 'complete'".)
  Map<String, DAGNode<Job>> nodeIdToDAGNode = reporter.getNodeIdToDAGNode();
  sendFilteredJobsStatus(queryId, reporter, nodeIdToDAGNode);
  // Nothing further to report when the query produced no MapReduce jobs.
  if (transformer.getTotalMRJobs() == 0) {
    return;
  }
  waitBetween(hookContext, reporter, queryId);
  // Swap in the DAG derived from the current query and publish it.
  nodeIdToDAGNode = transformer.getNodeIdToDAGNode();
  reporter.setNodeIdToDAGNode(nodeIdToDAGNode);
  reporter.setTotalMRJobs(transformer.getTotalMRJobs());
  reporter.sendDagNodeNameMap(queryId, nodeIdToDAGNode);
}
Aggregations