use of org.apache.hadoop.yarn.api.records.ApplicationAttemptReport in project hadoop by apache.
the class TestYarnClient method testGetApplicationAttempt.
@Test(timeout = 10000)
public void testGetApplicationAttempt() throws YarnException, IOException {
  Configuration conf = new Configuration();
  final YarnClient client = new MockYarnClient();
  client.init(conf);
  client.start();
  List<ApplicationReport> expectedReports =
      ((MockYarnClient) client).getReports();
  ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(applicationId, 1);
  ApplicationAttemptReport report =
      client.getApplicationAttemptReport(appAttemptId);
  Assert.assertNotNull(report);
  Assert.assertEquals(report.getApplicationAttemptId().toString(),
      expectedReports.get(0).getCurrentApplicationAttemptId().toString());
  client.stop();
}
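The test above drives getApplicationAttemptReport through a MockYarnClient. For comparison, here is a minimal sketch of the same lookup against a live ResourceManager; the application and attempt ids are hypothetical placeholders, and the code assumes the application actually exists on the cluster.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class AttemptReportLookup {
  public static void main(String[] args) throws Exception {
    // Sketch only: assumes a reachable ResourceManager configured in
    // yarn-site.xml and an application that was really submitted;
    // the ids below are placeholders.
    YarnClient client = YarnClient.createYarnClient();
    client.init(new YarnConfiguration());
    client.start();
    try {
      ApplicationId appId = ApplicationId.newInstance(1234L, 5);
      ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
      ApplicationAttemptReport report = client.getApplicationAttemptReport(attemptId);
      System.out.println("Attempt " + report.getApplicationAttemptId()
          + " is in state " + report.getYarnApplicationAttemptState());
    } finally {
      client.stop();
    }
  }
}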
use of org.apache.hadoop.yarn.api.records.ApplicationAttemptReport in project hadoop by apache.
the class ClientRMService method getApplicationAttempts.
@Override
public GetApplicationAttemptsResponse getApplicationAttempts(
    GetApplicationAttemptsRequest request) throws YarnException, IOException {
  ApplicationId appId = request.getApplicationId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(appId);
  if (application == null) {
    // If the RM doesn't have the application, throw
    // ApplicationNotFoundException and let the client handle it.
    throw new ApplicationNotFoundException("Application with id '" + appId
        + "' doesn't exist in RM. Please check that the job submission "
        + "was successful.");
  }
  boolean allowAccess = checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.VIEW_APP, application);
  GetApplicationAttemptsResponse response = null;
  if (allowAccess) {
    Map<ApplicationAttemptId, RMAppAttempt> attempts =
        application.getAppAttempts();
    List<ApplicationAttemptReport> listAttempts =
        new ArrayList<ApplicationAttemptReport>();
    Iterator<Map.Entry<ApplicationAttemptId, RMAppAttempt>> iter =
        attempts.entrySet().iterator();
    while (iter.hasNext()) {
      listAttempts.add(iter.next().getValue().createApplicationAttemptReport());
    }
    response = GetApplicationAttemptsResponse.newInstance(listAttempts);
  } else {
    throw new YarnException("User " + callerUGI.getShortUserName()
        + " does not have privilege to see this application " + appId);
  }
  return response;
}
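This method is the ResourceManager side of the YarnClient#getApplicationAttempts call. A hedged client-side sketch of consuming the resulting list of ApplicationAttemptReport objects follows; the application id is a hypothetical placeholder for an application that was already submitted.

import java.util.List;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class ListAttempts {
  public static void main(String[] args) throws Exception {
    // Sketch only: the application id is a hypothetical placeholder.
    YarnClient client = YarnClient.createYarnClient();
    client.init(new YarnConfiguration());
    client.start();
    try {
      ApplicationId appId = ApplicationId.newInstance(1234L, 5);
      // Served by ClientRMService#getApplicationAttempts on the RM side.
      List<ApplicationAttemptReport> attempts = client.getApplicationAttempts(appId);
      for (ApplicationAttemptReport attempt : attempts) {
        System.out.println(attempt.getApplicationAttemptId() + " -> "
            + attempt.getYarnApplicationAttemptState());
      }
    } finally {
      client.stop();
    }
  }
}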
use of org.apache.hadoop.yarn.api.records.ApplicationAttemptReport in project hadoop by apache.
the class ClientRMService method getApplicationAttemptReport.
@Override
public GetApplicationAttemptReportResponse getApplicationAttemptReport(
    GetApplicationAttemptReportRequest request) throws YarnException, IOException {
  ApplicationAttemptId appAttemptId = request.getApplicationAttemptId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps()
      .get(appAttemptId.getApplicationId());
  if (application == null) {
    // If the RM doesn't have the application, throw
    // ApplicationNotFoundException and let the client handle it.
    throw new ApplicationNotFoundException("Application with id '"
        + request.getApplicationAttemptId().getApplicationId()
        + "' doesn't exist in RM. Please check that the job "
        + "submission was successful.");
  }
  boolean allowAccess = checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.VIEW_APP, application);
  GetApplicationAttemptReportResponse response = null;
  if (allowAccess) {
    RMAppAttempt appAttempt = application.getAppAttempts().get(appAttemptId);
    if (appAttempt == null) {
      throw new ApplicationAttemptNotFoundException(
          "ApplicationAttempt with id '" + appAttemptId + "' doesn't exist in RM.");
    }
    ApplicationAttemptReport attemptReport =
        appAttempt.createApplicationAttemptReport();
    response = GetApplicationAttemptReportResponse.newInstance(attemptReport);
  } else {
    throw new YarnException("User " + callerUGI.getShortUserName()
        + " does not have privilege to see this attempt " + appAttemptId);
  }
  return response;
}
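On the client side, the report returned by this RPC carries the attempt's AM container id, host, RPC port, tracking URL, state, and diagnostics, and the two "not found" exceptions thrown above surface to the caller. A minimal sketch of reading those fields and handling those exceptions; the helper class and method names are illustrative only.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;

public class PrintAttemptReport {
  // Sketch only: prints the fields exposed by ApplicationAttemptReport and
  // handles the two "not found" exceptions raised by ClientRMService above.
  static void printReport(YarnClient client, ApplicationAttemptId attemptId)
      throws Exception {
    try {
      ApplicationAttemptReport report = client.getApplicationAttemptReport(attemptId);
      System.out.println("AM container : " + report.getAMContainerId());
      System.out.println("AM host:port : " + report.getHost() + ":" + report.getRpcPort());
      System.out.println("Tracking URL : " + report.getTrackingUrl());
      System.out.println("State        : " + report.getYarnApplicationAttemptState());
      System.out.println("Diagnostics  : " + report.getDiagnostics());
    } catch (ApplicationNotFoundException | ApplicationAttemptNotFoundException e) {
      System.err.println("Attempt " + attemptId + " is unknown to the RM: "
          + e.getMessage());
    }
  }
}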
use of org.apache.hadoop.yarn.api.records.ApplicationAttemptReport in project hadoop by apache.
the class RMAppBlock method generateApplicationTable.
@Override
protected void generateApplicationTable(Block html,
    UserGroupInformation callerUGI, Collection<ApplicationAttemptReport> attempts) {
  // Application Attempt Table
  Hamlet.TBODY<Hamlet.TABLE<Hamlet>> tbody = html.table("#attempts").thead().tr()
      .th(".id", "Attempt ID").th(".started", "Started")
      .th(".node", "Node").th(".logs", "Logs")
      .th(".appBlacklistednodes", "Nodes blacklisted by the application",
          "Nodes blacklisted by the app")
      .th(".rmBlacklistednodes", "Nodes blacklisted by the RM for the" + " app",
          "Nodes blacklisted by the system")
      ._()._().tbody();
  RMApp rmApp = this.rm.getRMContext().getRMApps().get(this.appID);
  if (rmApp == null) {
    return;
  }
  StringBuilder attemptsTableData = new StringBuilder("[\n");
  for (final ApplicationAttemptReport appAttemptReport : attempts) {
    RMAppAttempt rmAppAttempt =
        rmApp.getRMAppAttempt(appAttemptReport.getApplicationAttemptId());
    if (rmAppAttempt == null) {
      continue;
    }
    AppAttemptInfo attemptInfo = new AppAttemptInfo(this.rm, rmAppAttempt,
        rmApp.getUser(), WebAppUtils.getHttpSchemePrefix(conf));
    Set<String> nodes = rmAppAttempt.getBlacklistedNodes();
    // nodes which are blacklisted by the application
    String appBlacklistedNodesCount = String.valueOf(nodes.size());
    // nodes which are blacklisted by the RM for AM launches
    String rmBlacklistedNodesCount = String.valueOf(
        rmAppAttempt.getAMBlacklistManager().getBlacklistUpdates()
            .getBlacklistAdditions().size());
    String nodeLink = attemptInfo.getNodeHttpAddress();
    if (nodeLink != null) {
      nodeLink = WebAppUtils.getHttpSchemePrefix(conf) + nodeLink;
    }
    String logsLink = attemptInfo.getLogsLink();
    attemptsTableData.append("[\"<a href='")
        .append(url("appattempt", rmAppAttempt.getAppAttemptId().toString()))
        .append("'>").append(String.valueOf(rmAppAttempt.getAppAttemptId()))
        .append("</a>\",\"").append(attemptInfo.getStartTime()).append("\",\"<a ")
        .append(nodeLink == null ? "#" : "href='" + nodeLink).append("'>")
        .append(nodeLink == null ? "N/A" : StringEscapeUtils
            .escapeJavaScript(StringEscapeUtils.escapeHtml(nodeLink)))
        .append("</a>\",\"<a ")
        .append(logsLink == null ? "#" : "href='" + logsLink).append("'>")
        .append(logsLink == null ? "N/A" : "Logs").append("</a>\",")
        .append("\"").append(appBlacklistedNodesCount).append("\",")
        .append("\"").append(rmBlacklistedNodesCount).append("\"],\n");
  }
  if (attemptsTableData.charAt(attemptsTableData.length() - 2) == ',') {
    attemptsTableData.delete(attemptsTableData.length() - 2,
        attemptsTableData.length() - 1);
  }
  attemptsTableData.append("]");
  html.script().$type("text/javascript")
      ._("var attemptsTableData=" + attemptsTableData)._();
  tbody._()._();
}
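The block above builds attemptsTableData by appending one JavaScript array literal per attempt, each followed by ",\n", and then trimming the dangling comma of the last row before closing the outer array. A standalone sketch of just that row-building and trimming pattern, with hypothetical row values, is shown below.

public class RowsDataSketch {
  // Sketch only: isolates the attemptsTableData pattern used by
  // generateApplicationTable, detached from the RM web UI classes.
  static String buildRows(String[][] rows) {
    StringBuilder data = new StringBuilder("[\n");
    for (String[] row : rows) {
      data.append("[");
      for (int i = 0; i < row.length; i++) {
        data.append("\"").append(row[i]).append("\"");
        if (i < row.length - 1) {
          data.append(",");
        }
      }
      data.append("],\n");
    }
    // Same trailing-comma trim as generateApplicationTable: the character two
    // positions from the end is the comma left by the last appended row.
    if (data.charAt(data.length() - 2) == ',') {
      data.delete(data.length() - 2, data.length() - 1);
    }
    return data.append("]").toString();
  }

  public static void main(String[] args) {
    System.out.println(buildRows(new String[][] {
        {"appattempt_0_0001_000001", "Logs", "0", "0"},
        {"appattempt_0_0001_000002", "Logs", "1", "0"}}));
  }
}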
use of org.apache.hadoop.yarn.api.records.ApplicationAttemptReport in project hadoop by apache.
the class TestApplicationHistoryClientService method testApplicationAttemptReport.
@Test
public void testApplicationAttemptReport() throws IOException, YarnException {
  ApplicationId appId = ApplicationId.newInstance(0, 1);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
  GetApplicationAttemptReportRequest request =
      GetApplicationAttemptReportRequest.newInstance(appAttemptId);
  GetApplicationAttemptReportResponse response =
      clientService.getApplicationAttemptReport(request);
  ApplicationAttemptReport attemptReport = response.getApplicationAttemptReport();
  Assert.assertNotNull(attemptReport);
  Assert.assertEquals("appattempt_0_0001_000001",
      attemptReport.getApplicationAttemptId().toString());
}
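This test exercises the application history service directly through its protocol request and response objects. From an application's perspective, the same historical ApplicationAttemptReport can be fetched through AHSClient; the sketch below assumes an application history / timeline service is running and already holds data for the placeholder attempt id.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.AHSClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class HistoryAttemptLookup {
  public static void main(String[] args) throws Exception {
    // Sketch only: assumes a running application history service and an
    // attempt it already knows about; the id below is a placeholder.
    AHSClient history = AHSClient.createAHSClient();
    history.init(new YarnConfiguration());
    history.start();
    try {
      ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
          ApplicationId.newInstance(0L, 1), 1);
      ApplicationAttemptReport report = history.getApplicationAttemptReport(attemptId);
      System.out.println(report.getApplicationAttemptId() + " finished in state "
          + report.getYarnApplicationAttemptState());
    } finally {
      history.stop();
    }
  }
}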