Use of org.apache.pig.tools.pigstats.mapreduce.MRScriptState in project ambrose by Twitter.
The class AmbrosePigProgressNotificationListener, method initialPlanNotification.
/**
 * Called after the job DAG has been created, but before any jobs are fired.
 * @param plan the MROperPlan that represents the DAG of operations. Each operation will become
 * a MapReduce job when it's launched.
 */
@Override
public void initialPlanNotification(String scriptId, OperatorPlan<?> plan) {
  log.info("initialPlanNotification - scriptId " + scriptId + " plan " + plan);
  // For Ambrose to work, the following three values must be non-null
  Preconditions.checkNotNull(pigConfig.getJobClient());
  Preconditions.checkNotNull(pigConfig.getJobGraph());
  Preconditions.checkNotNull(pigConfig.getPigProperties());
  try {
    statsWriteService.initWriteService(pigConfig.getPigProperties());
  } catch (IOException ioe) {
    throw new RuntimeException("Exception while initializing statsWriteService", ioe);
  }
  this.workflowVersion = pigConfig.getPigProperties().getProperty("pig.logical.plan.signature");
  OperatorPlan<MapReduceOper> mrPlan;
  try {
    mrPlan = (OperatorPlan<MapReduceOper>) plan;
  } catch (Exception e) {
    log.error(String.format("Failed to cast OperatorPlan: %s", plan), e);
    return;
  }
  Map<OperatorKey, MapReduceOper> planKeys = mrPlan.getKeys();
  Configuration flowConfig = new Configuration(false);
  boolean initialized = false;
  // first pass builds all nodes
  for (Map.Entry<OperatorKey, MapReduceOper> entry : planKeys.entrySet()) {
    String nodeName = entry.getKey().toString();
    MapReduceOper op = entry.getValue();
    MRScriptState scriptState = MRScriptState.get();
    String[] aliases = toArray(scriptState.getAlias(op).trim());
    String[] features = toArray(scriptState.getPigFeature(op).trim());
    if (!initialized) {
      scriptState.addSettingsToConf(op, flowConfig);
      pigConfig.getPigProperties().putAll(ConfigurationUtil.toProperties(flowConfig));
      initialized = true;
    }
    PigJob job = new PigJob();
    job.setAliases(aliases);
    job.setFeatures(features);
    job.setConfiguration(pigConfig.getPigProperties());
    DAGNode<PigJob> node = new DAGNode<PigJob>(nodeName, job);
    this.dagNodeNameMap.put(node.getName(), node);
    // this shows how we can get the basic info about all nameless jobs before any execute.
    // we can traverse the plan to build a DAG of this info
    log.info("initialPlanNotification: aliases: " + Arrays.toString(aliases)
        + ", name: " + node.getName() + ", features: " + Arrays.toString(features));
  }
  // second pass connects the edges
  for (Map.Entry<OperatorKey, MapReduceOper> entry : planKeys.entrySet()) {
    DAGNode node = this.dagNodeNameMap.get(entry.getKey().toString());
    List<DAGNode<? extends Job>> successorNodeList = Lists.newArrayList();
    List<MapReduceOper> successors = mrPlan.getSuccessors(entry.getValue());
    if (successors != null) {
      for (MapReduceOper successor : successors) {
        DAGNode<? extends Job> successorNode =
            this.dagNodeNameMap.get(successor.getOperatorKey().toString());
        successorNodeList.add(successorNode);
      }
    }
    node.setSuccessors(successorNodeList);
  }
  AmbroseUtils.sendDagNodeNameMap(statsWriteService, scriptId, dagNodeNameMap);
}
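
The callbacks above, including initialPlanNotification, only fire when the listener is attached to a Pig run. Below is a minimal sketch of that wiring, not the project's own launcher code: it assumes Ambrose's StatsWriteService interface and the package locations com.twitter.ambrose.service and com.twitter.ambrose.pig, and the class and method names AmbrosePigRunnerExample and runWithAmbrose are illustrative.

import org.apache.pig.PigRunner;
import org.apache.pig.tools.pigstats.PigStats;

import com.twitter.ambrose.pig.AmbrosePigProgressNotificationListener;
import com.twitter.ambrose.service.StatsWriteService;

public final class AmbrosePigRunnerExample {
  /**
   * Runs a Pig script with the Ambrose listener attached. PigRunner delivers the
   * progress callbacks, so initialPlanNotification(scriptId, plan) is invoked once
   * the script has been compiled into its MapReduce plan and before any job launches.
   */
  public static PigStats runWithAmbrose(String scriptPath, StatsWriteService statsWriteService) {
    // The constructor argument corresponds to the statsWriteService field used in the
    // listener above; any StatsWriteService implementation configured for your setup works.
    AmbrosePigProgressNotificationListener listener =
        new AmbrosePigProgressNotificationListener(statsWriteService);
    return PigRunner.run(new String[] { "-f", scriptPath }, listener);
  }
}

Once the run starts, the listener builds the DAGNode map shown above and pushes it to the stats write service via AmbroseUtils.sendDagNodeNameMap before any MapReduce job executes.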