Use of org.apache.flink.runtime.rest.messages.JobVertexTaskManagersInfo in project flink by apache.
From the class JobVertexTaskManagersHandler, method createJobVertexTaskManagersInfo:
private static JobVertexTaskManagersInfo createJobVertexTaskManagersInfo(
        AccessExecutionJobVertex jobVertex, JobID jobID, @Nullable MetricFetcher metricFetcher) {
    // Build a map that groups tasks by TaskManager
    Map<String, String> taskManagerId2Host = new HashMap<>();
    Map<String, List<AccessExecutionVertex>> taskManagerVertices = new HashMap<>();
    for (AccessExecutionVertex vertex : jobVertex.getTaskVertices()) {
        TaskManagerLocation location = vertex.getCurrentAssignedResourceLocation();
        String taskManagerHost =
                location == null
                        ? "(unassigned)"
                        : location.getHostname() + ':' + location.dataPort();
        String taskmanagerId =
                location == null ? "(unassigned)" : location.getResourceID().toString();
        taskManagerId2Host.put(taskmanagerId, taskManagerHost);
        List<AccessExecutionVertex> vertices =
                taskManagerVertices.computeIfAbsent(taskmanagerId, ignored -> new ArrayList<>(4));
        vertices.add(vertex);
    }
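    // taskManagerVertices now maps each TaskManager's resource ID to the execution
    // vertices it runs, and taskManagerId2Host maps that same ID to a display string
    // of the form "hostname:dataPort" (or "(unassigned)" for unscheduled vertices).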
    final long now = System.currentTimeMillis();

    List<JobVertexTaskManagersInfo.TaskManagersInfo> taskManagersInfoList = new ArrayList<>(4);
    for (Map.Entry<String, List<AccessExecutionVertex>> entry : taskManagerVertices.entrySet()) {
        String taskmanagerId = entry.getKey();
        String host = taskManagerId2Host.get(taskmanagerId);
        List<AccessExecutionVertex> taskVertices = entry.getValue();

        int[] tasksPerState = new int[ExecutionState.values().length];
        long startTime = Long.MAX_VALUE;
        long endTime = 0;
        boolean allFinished = true;

        MutableIOMetrics counts = new MutableIOMetrics();

        for (AccessExecutionVertex vertex : taskVertices) {
            final ExecutionState state = vertex.getExecutionState();
            tasksPerState[state.ordinal()]++;

            // take the earliest start time
            long started = vertex.getStateTimestamp(ExecutionState.DEPLOYING);
            if (started > 0) {
                startTime = Math.min(startTime, started);
            }

            allFinished &= state.isTerminal();
            endTime = Math.max(endTime, vertex.getStateTimestamp(state));

            counts.addIOMetrics(
                    vertex.getCurrentExecutionAttempt(),
                    metricFetcher,
                    jobID.toString(),
                    jobVertex.getJobVertexId().toString());
        }
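        // Aggregation over this TaskManager's vertices is complete: tasksPerState counts
        // vertices per ExecutionState, startTime/endTime bracket the group's lifetime so
        // far, and counts holds the accumulated I/O metrics of the current execution attempts.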
        long duration;
        if (startTime < Long.MAX_VALUE) {
            if (allFinished) {
                duration = endTime - startTime;
            } else {
                endTime = -1L;
                duration = now - startTime;
            }
        } else {
            startTime = -1L;
            endTime = -1L;
            duration = -1L;
        }
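        // Once every task is terminal the duration is endTime - startTime; otherwise it is
        // measured against "now" and endTime is reported as -1. If no task has reached
        // DEPLOYING yet, start time, end time and duration are all reported as -1.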
        ExecutionState jobVertexState =
                ExecutionJobVertex.getAggregateJobVertexState(tasksPerState, taskVertices.size());

        final IOMetricsInfo jobVertexMetrics = new IOMetricsInfo(
                counts.getNumBytesIn(), counts.isNumBytesInComplete(),
                counts.getNumBytesOut(), counts.isNumBytesOutComplete(),
                counts.getNumRecordsIn(), counts.isNumRecordsInComplete(),
                counts.getNumRecordsOut(), counts.isNumRecordsOutComplete());

        Map<ExecutionState, Integer> statusCounts = new HashMap<>(ExecutionState.values().length);
        for (ExecutionState state : ExecutionState.values()) {
            statusCounts.put(state, tasksPerState[state.ordinal()]);
        }

        taskManagersInfoList.add(new JobVertexTaskManagersInfo.TaskManagersInfo(
                host, jobVertexState, startTime, endTime, duration,
                jobVertexMetrics, statusCounts, taskmanagerId));
    }

    return new JobVertexTaskManagersInfo(
            jobVertex.getJobVertexId(), jobVertex.getName(), now, taskManagersInfoList);
}
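The pattern above — group execution vertices by TaskManager with computeIfAbsent, then count tasks per ExecutionState for each group — is independent of the REST handler itself. Below is a minimal standalone sketch of that pattern; the State enum, Task record, GroupByTaskManagerSketch class and the "tm-1"/"tm-2" IDs are hypothetical stand-ins and are not part of Flink.

import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupByTaskManagerSketch {

    // Hypothetical, simplified stand-ins for Flink's ExecutionState and execution vertices.
    enum State { DEPLOYING, RUNNING, FINISHED, FAILED }

    record Task(String taskManagerId, State state) {}

    public static void main(String[] args) {
        List<Task> tasks = List.of(
                new Task("tm-1", State.RUNNING),
                new Task("tm-1", State.FINISHED),
                new Task("tm-2", State.DEPLOYING));

        // Group tasks by TaskManager ID, mirroring the computeIfAbsent call in the handler.
        Map<String, List<Task>> byTaskManager = new HashMap<>();
        for (Task task : tasks) {
            byTaskManager
                    .computeIfAbsent(task.taskManagerId(), ignored -> new ArrayList<>())
                    .add(task);
        }

        // For each TaskManager, count tasks per state; the handler does the same with an
        // int[] indexed by ExecutionState.ordinal() before converting it into a Map.
        for (Map.Entry<String, List<Task>> entry : byTaskManager.entrySet()) {
            Map<State, Integer> statusCounts = new EnumMap<>(State.class);
            for (State state : State.values()) {
                statusCounts.put(state, 0);
            }
            for (Task task : entry.getValue()) {
                statusCounts.merge(task.state(), 1, Integer::sum);
            }
            System.out.println(entry.getKey() + " -> " + statusCounts);
        }
    }
}

The EnumMap here replaces the handler's int[] purely for readability; both approaches keep one counter per state.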