Use of org.apache.tez.history.parser.datamodel.VertexInfo in project tez by apache.
The class CriticalPathAnalyzer, method createCriticalPath.
private void createCriticalPath(DagInfo dagInfo, TaskAttemptInfo lastAttempt, long lastAttemptFinishTime, Map<String, TaskAttemptInfo> attempts) {
List<CriticalPathStep> tempCP = Lists.newLinkedList();
if (lastAttempt != null) {
TaskAttemptInfo currentAttempt = lastAttempt;
CriticalPathStep currentStep = new CriticalPathStep(currentAttempt, EntityType.DAG_COMMIT);
long currentAttemptStopCriticalPathTime = lastAttemptFinishTime;
// add the commit step
if (dagInfo.getFinishTime() > 0) {
currentStep.stopCriticalPathTime = dagInfo.getFinishTime();
} else {
// the AM crashed and no DAG finished event was written
currentStep.stopCriticalPathTime = currentAttemptStopCriticalPathTime;
}
currentStep.startCriticalPathTime = currentAttemptStopCriticalPathTime;
currentStep.reason = CriticalPathDependency.COMMIT_DEPENDENCY;
tempCP.add(currentStep);
while (true) {
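// walk backwards from the last attempt: each iteration records the current attempt
// as a critical path step and then picks the predecessor (data, retry, or init
// dependency) that determined when this attempt became critical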
Preconditions.checkState(currentAttempt != null);
Preconditions.checkState(currentAttemptStopCriticalPathTime > 0);
LOG.debug("Step: " + tempCP.size() + " Attempt: " + currentAttempt.getTaskAttemptId());
currentStep = new CriticalPathStep(currentAttempt, EntityType.ATTEMPT);
currentStep.stopCriticalPathTime = currentAttemptStopCriticalPathTime;
// consider the last data event seen immediately preceding the current critical path
// stop time for this attempt
long currentStepLastDataEventTime = 0;
String currentStepLastDataTA = null;
DataDependencyEvent item = currentAttempt.getLastDataEventInfo(currentStep.stopCriticalPathTime);
if (item != null) {
currentStepLastDataEventTime = item.getTimestamp();
currentStepLastDataTA = item.getTaskAttemptId();
}
// sanity check
for (CriticalPathStep previousStep : tempCP) {
if (previousStep.type == EntityType.ATTEMPT) {
if (previousStep.attempt.getTaskAttemptId().equals(currentAttempt.getTaskAttemptId())) {
// found loop.
// this should only happen for read errors in currentAttempt
List<DataDependencyEvent> dataEvents = currentAttempt.getLastDataEvents();
// the attempt must have received both the original and the retry data events
Preconditions.checkState(dataEvents.size() > 1);
// the data event used for this step must be earlier than the last data event received
Preconditions.checkState(currentStepLastDataEventTime < dataEvents.get(dataEvents.size() - 1).getTimestamp());
}
}
}
tempCP.add(currentStep);
// find the next attempt on the critical path
boolean dataDependency = false;
// find out predecessor dependency
if (currentStepLastDataEventTime > currentAttempt.getCreationTime()) {
dataDependency = true;
}
long startCriticalPathTime = 0;
String nextAttemptId = null;
CriticalPathDependency reason = null;
if (dataDependency) {
// last data event was produced after the attempt was scheduled. use
// data dependency
// typically the case when scheduling ahead of time
LOG.debug("Has data dependency");
if (!Strings.isNullOrEmpty(currentStepLastDataTA)) {
// there is a valid data causal TA. Use it.
nextAttemptId = currentStepLastDataTA;
reason = CriticalPathDependency.DATA_DEPENDENCY;
startCriticalPathTime = currentStepLastDataEventTime;
LOG.debug("Using data dependency " + nextAttemptId);
} else {
// there is no valid data causal TA. This means the data event came from an external input
VertexInfo vertex = currentAttempt.getTaskInfo().getVertexInfo();
Preconditions.checkState(!vertex.getAdditionalInputInfoList().isEmpty(), "Vertex: " + vertex.getVertexId() + " has no external inputs but the last data event " + "TA is null for " + currentAttempt.getTaskAttemptId());
nextAttemptId = null;
reason = CriticalPathDependency.INIT_DEPENDENCY;
LOG.debug("Using init dependency");
}
} else {
// attempt was scheduled after last data event. use scheduling dependency
// typically happens for retries
LOG.debug("Has scheduling dependency");
if (!Strings.isNullOrEmpty(currentAttempt.getCreationCausalTA())) {
// there is a scheduling causal TA. Use it.
nextAttemptId = currentAttempt.getCreationCausalTA();
reason = CriticalPathDependency.RETRY_DEPENDENCY;
TaskAttemptInfo nextAttempt = attempts.get(nextAttemptId);
// guard on the resolved attempt; the id itself is known to be non-empty here
if (nextAttempt != null) {
VertexInfo currentVertex = currentAttempt.getTaskInfo().getVertexInfo();
VertexInfo nextVertex = nextAttempt.getTaskInfo().getVertexInfo();
if (!nextVertex.getVertexName().equals(currentVertex.getVertexName())) {
// cause from different vertex. Might be rerun to re-generate outputs
for (VertexInfo outVertex : currentVertex.getOutputVertices()) {
if (nextVertex.getVertexName().equals(outVertex.getVertexName())) {
// next vertex is an output vertex
reason = CriticalPathDependency.OUTPUT_RECREATE_DEPENDENCY;
break;
}
}
}
}
if (reason == CriticalPathDependency.OUTPUT_RECREATE_DEPENDENCY) {
// rescheduled due to read error. start the critical path at the read error report time.
// for now, use this attempt's creation time as a proxy for the read error report time
startCriticalPathTime = currentAttempt.getCreationTime();
} else {
// rescheduled due to own previous attempt failure
// we are critical when the previous attempt fails
Preconditions.checkState(nextAttempt != null);
Preconditions.checkState(nextAttempt.getTaskInfo().getTaskId().equals(currentAttempt.getTaskInfo().getTaskId()));
startCriticalPathTime = nextAttempt.getFinishTime();
}
LOG.debug("Using scheduling dependency " + nextAttemptId);
} else {
// there is no scheduling causal TA.
if (!Strings.isNullOrEmpty(currentStepLastDataTA)) {
// there is a data event going to the vertex. Count the time between data event and
// creation time as Initializer/Manager overhead and follow data dependency
nextAttemptId = currentStepLastDataTA;
reason = CriticalPathDependency.DATA_DEPENDENCY;
startCriticalPathTime = currentStepLastDataEventTime;
long overhead = currentAttempt.getCreationTime() - currentStepLastDataEventTime;
currentStep.notes.add("Initializer/VertexManager scheduling overhead " + SVGUtils.getTimeStr(overhead));
LOG.debug("Using data dependency " + nextAttemptId);
} else {
// there is no scheduling causal TA and no data event causal TA.
// the vertex has external input that sent the last data events
// or the vertex has external input but does not use events
// or the vertex has no external inputs or edges
nextAttemptId = null;
reason = CriticalPathDependency.INIT_DEPENDENCY;
LOG.debug("Using init dependency");
}
}
}
currentStep.startCriticalPathTime = startCriticalPathTime;
currentStep.reason = reason;
Preconditions.checkState(currentStep.stopCriticalPathTime >= currentStep.startCriticalPathTime);
if (Strings.isNullOrEmpty(nextAttemptId)) {
Preconditions.checkState(reason.equals(CriticalPathDependency.INIT_DEPENDENCY));
Preconditions.checkState(startCriticalPathTime == 0);
// no predecessor attempt found. this is the last step in the critical path
// assume the attempt's critical path starts when it is scheduled; before that is
// vertex initialization time
currentStep.startCriticalPathTime = currentStep.attempt.getCreationTime();
// add vertex init step
long initStepStopCriticalTime = currentStep.startCriticalPathTime;
currentStep = new CriticalPathStep(currentAttempt, EntityType.VERTEX_INIT);
currentStep.stopCriticalPathTime = initStepStopCriticalTime;
currentStep.startCriticalPathTime = dagInfo.getStartTime();
currentStep.reason = CriticalPathDependency.INIT_DEPENDENCY;
tempCP.add(currentStep);
if (!tempCP.isEmpty()) {
for (int i = tempCP.size() - 1; i >= 0; --i) {
criticalPath.add(tempCP.get(i));
}
}
return;
}
currentAttempt = attempts.get(nextAttemptId);
currentAttemptStopCriticalPathTime = startCriticalPathTime;
}
}
}
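Since the path is assembled backwards from the commit step, the final loop flips it into execution order. A minimal sketch of how the finished list might be inspected, reusing the CriticalPathStep fields and the SVGUtils.getTimeStr helper referenced above; dumpCriticalPath is a hypothetical name, not part of the analyzer:
private void dumpCriticalPath(List<CriticalPathStep> path) {
for (CriticalPathStep step : path) {
long duration = step.stopCriticalPathTime - step.startCriticalPathTime;
// field access mirrors the usage in createCriticalPath above
LOG.info("Step: " + step.type + " attempt: " + step.attempt.getTaskAttemptId() + " reason: " + step.reason + " duration: " + SVGUtils.getTimeStr(duration));
}
}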
Use of org.apache.tez.history.parser.datamodel.VertexInfo in project tez by apache.
The class OneOnOneEdgeAnalyzer, method analyze.
@Override
public void analyze(DagInfo dagInfo) throws TezException {
for (VertexInfo v : dagInfo.getVertices()) {
for (EdgeInfo e : v.getOutputEdges()) {
if (e.getDataMovementType() != null && e.getDataMovementType().equals(ONE_TO_ONE)) {
LOG.info("Src --> Dest : {} --> {}", e.getSourceVertex(), e.getDestinationVertex());
VertexInfo sourceVertex = e.getSourceVertex();
VertexInfo destinationVertex = e.getDestinationVertex();
Map<Integer, String> sourceTaskToContainerMap = getContainerMappingForVertex(sourceVertex);
Map<Integer, String> downStreamTaskToContainerMap = getContainerMappingForVertex(destinationVertex);
int missedCounter = 0;
List<String> result = Lists.newLinkedList();
for (Map.Entry<Integer, String> entry : sourceTaskToContainerMap.entrySet()) {
Integer taskId = entry.getKey();
String sourceContainerHost = entry.getValue();
// check on downstream vertex.
String downstreamContainerHost = downStreamTaskToContainerMap.get(taskId);
if (downstreamContainerHost != null) {
if (!sourceContainerHost.equalsIgnoreCase(downstreamContainerHost)) {
// the downstream task got scheduled on a different machine than the source
LOG.info("TaskID: {}, source: {}, downStream:{}", taskId, sourceContainerHost, downstreamContainerHost);
result.add(sourceVertex.getVertexName());
result.add(destinationVertex.getVertexName());
result.add(taskId + "");
result.add(sourceContainerHost);
result.add(downstreamContainerHost);
csvResult.addRecord(result.toArray(new String[result.size()]));
missedCounter++;
}
}
result.clear();
}
LOG.info("Total tasks:{}, miss: {}", sourceTaskToContainerMap.size(), missedCounter);
}
}
}
}
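getContainerMappingForVertex is defined elsewhere in the class. A hypothetical reconstruction, assuming the task index can be parsed from the trailing digits of the task id, reusing the getContainer().getHost() accessor seen in SlowTaskIdentifier below, and using Guava's Maps to match the Lists usage above:
private Map<Integer, String> getContainerMappingForVertex(VertexInfo vertexInfo) {
// hypothetical sketch: map each task index to the host that ran its attempt
Map<Integer, String> taskToContainerMap = Maps.newHashMap();
for (TaskAttemptInfo attempt : vertexInfo.getTaskAttempts()) {
String taskId = attempt.getTaskInfo().getTaskId();
// assumption: task ids end with the numeric task index, e.g. "task_..._000042"
int taskIndex = Integer.parseInt(taskId.substring(taskId.lastIndexOf('_') + 1));
taskToContainerMap.put(taskIndex, attempt.getContainer().getHost());
}
return taskToContainerMap;
}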
Use of org.apache.tez.history.parser.datamodel.VertexInfo in project tez by apache.
The class SkewAnalyzer, method analyzeReducers.
private void analyzeReducers(DagInfo dagInfo) {
for (VertexInfo vertexInfo : dagInfo.getVertices()) {
for (TaskAttemptInfo attemptInfo : vertexInfo.getTaskAttempts()) {
analyzeGroupSkewPerSource(attemptInfo);
analyzeRecordSkewPerSource(attemptInfo);
analyzeForParallelism(attemptInfo);
}
}
}
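The three per-attempt checks are defined elsewhere in SkewAnalyzer. A hypothetical sketch of roughly what the record-skew check could look like; the getTezCounters() accessor, the TaskCounter names, and the MAX_RATIO threshold are assumptions for illustration, not the actual implementation:
private void analyzeRecordSkewPerSource(TaskAttemptInfo attemptInfo) {
// hypothetical: flag attempts whose records-per-group ratio is unusually high
long records = attemptInfo.getTezCounters().findCounter(TaskCounter.REDUCE_INPUT_RECORDS).getValue();
long groups = attemptInfo.getTezCounters().findCounter(TaskCounter.REDUCE_INPUT_GROUPS).getValue();
if (groups > 0 && (records / (double) groups) > MAX_RATIO) {
LOG.info("Possible record skew in {}: {} records across {} groups", attemptInfo.getTaskAttemptId(), records, groups);
}
}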
Use of org.apache.tez.history.parser.datamodel.VertexInfo in project tez by apache.
The class SlowTaskIdentifier, method analyze.
@Override
public void analyze(DagInfo dagInfo) throws TezException {
List<TaskAttemptInfo> taskAttempts = Lists.newArrayList();
for (VertexInfo vertexInfo : dagInfo.getVertices()) {
taskAttempts.addAll(vertexInfo.getTaskAttempts());
}
// sort them by runtime in descending order
Collections.sort(taskAttempts, new Comparator<TaskAttemptInfo>() {
@Override
public int compare(TaskAttemptInfo o1, TaskAttemptInfo o2) {
return (o1.getTimeTaken() > o2.getTimeTaken()) ? -1 : ((o1.getTimeTaken() == o2.getTimeTaken()) ? 0 : 1);
}
});
int limit = Math.min(taskAttempts.size(), Math.max(0, config.getInt(NO_OF_TASKS, NO_OF_TASKS_DEFAULT)));
if (limit == 0) {
return;
}
for (int i = 0; i < limit; i++) {
List<String> record = Lists.newLinkedList();
record.add(taskAttempts.get(i).getTaskInfo().getVertexInfo().getVertexName());
record.add(taskAttempts.get(i).getTaskAttemptId());
record.add(taskAttempts.get(i).getContainer().getHost());
record.add(taskAttempts.get(i).getTimeTaken() + "");
record.add(taskAttempts.get(i).getStatus());
record.add(taskAttempts.get(i).getDiagnostics());
record.add(taskAttempts.get(i).getTaskInfo().getVertexInfo().getInputEdges().size() + "");
csvResult.addRecord(record.toArray(new String[record.size()]));
}
}
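On Java 8 and later, the comparator above can be written more compactly; an equivalent sketch, assuming getTimeTaken() returns a long:
// equivalent descending sort by runtime
taskAttempts.sort((o1, o2) -> Long.compare(o2.getTimeTaken(), o1.getTimeTaken()));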
Use of org.apache.tez.history.parser.datamodel.VertexInfo in project tez by apache.
The class TaskConcurrencyAnalyzer, method analyze.
@Override
public void analyze(DagInfo dagInfo) throws TezException {
// For each vertex find the concurrent tasks running at any point
for (VertexInfo vertexInfo : dagInfo.getVertices()) {
List<TaskAttemptInfo> taskAttempts = Lists.newLinkedList(vertexInfo.getTaskAttempts(true, null));
String vertexName = vertexInfo.getVertexName();
/**
 * - Get a sorted multiset of timestamps (S1, S2,...E1, E2..). It is possible for
 * multiple tasks to start/end at the same time.
 * - Walk through the set
 * - Increment concurrent tasks when a start event is encountered
 * - Decrement concurrent tasks when a finish event is encountered
 */
TreeMultiset<TimeInfo> timeInfoSet = TreeMultiset.create(new Comparator<TimeInfo>() {
@Override
public int compare(TimeInfo o1, TimeInfo o2) {
if (o1.timestamp < o2.timestamp) {
return -1;
}
if (o1.timestamp > o2.timestamp) {
return 1;
}
// equal timestamps: process START before FINISH so that attempts starting
// exactly when others finish are counted as concurrent
if (o1.eventType.equals(o2.eventType)) {
return 0;
}
return o1.eventType.equals(EventType.START) ? -1 : 1;
}
});
for (TaskAttemptInfo attemptInfo : taskAttempts) {
TimeInfo startTimeInfo = new TimeInfo(EventType.START, attemptInfo.getStartTime());
TimeInfo stopTimeInfo = new TimeInfo(EventType.FINISH, attemptInfo.getFinishTime());
timeInfoSet.add(startTimeInfo);
timeInfoSet.add(stopTimeInfo);
}
// Compute concurrent tasks in the list now.
int concurrentTasks = 0;
for (TimeInfo timeInfo : timeInfoSet.elementSet()) {
switch(timeInfo.eventType) {
case START:
concurrentTasks += timeInfoSet.count(timeInfo);
break;
case FINISH:
concurrentTasks -= timeInfoSet.count(timeInfo);
break;
default:
break;
}
timeInfo.concurrentTasks = concurrentTasks;
addToResult(vertexName, timeInfo.timestamp, timeInfo.concurrentTasks);
}
}
}
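The sweep relies on the multiset grouping identical entries: elementSet() visits each distinct TimeInfo once, and count() applies all same-timestamp starts (or finishes) in a single step. A self-contained sketch of the same sweep-line technique on plain (start, finish) pairs; maxConcurrent is an illustrative helper using java.util.ArrayList and java.util.List, not part of the analyzer:
// standalone sweep-line sketch: compute the peak number of overlapping intervals
static int maxConcurrent(List<long[]> intervals) {
List<long[]> events = new ArrayList<>();
for (long[] interval : intervals) {
events.add(new long[] { interval[0], 1 }); // start: +1
events.add(new long[] { interval[1], -1 }); // finish: -1
}
// sort by timestamp; starts sort before finishes at the same timestamp,
// matching the TimeInfo comparator above
events.sort((a, b) -> a[0] != b[0] ? Long.compare(a[0], b[0]) : Long.compare(b[1], a[1]));
int concurrent = 0;
int max = 0;
for (long[] event : events) {
concurrent += (int) event[1];
max = Math.max(max, concurrent);
}
return max;
}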