Use of java.security.PrivilegedExceptionAction in project hadoop by apache.
The class TestApplicationHistoryManagerOnTimelineStore, method testGetApplicationReportWithNotAttempt.
@Test
public void testGetApplicationReportWithNotAttempt() throws Exception {
final ApplicationId appId = ApplicationId.newInstance(0, SCALE + 1);
ApplicationReport app;
if (callerUGI == null) {
app = historyManager.getApplication(appId);
} else {
app = callerUGI.doAs(new PrivilegedExceptionAction<ApplicationReport>() {
@Override
public ApplicationReport run() throws Exception {
return historyManager.getApplication(appId);
}
});
}
Assert.assertNotNull(app);
Assert.assertEquals(appId, app.getApplicationId());
Assert.assertEquals(ApplicationAttemptId.newInstance(appId, -1), app.getCurrentApplicationAttemptId());
}
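Every snippet on this page follows the same shape: when a caller UserGroupInformation is available, the call is wrapped in a PrivilegedExceptionAction and run through UserGroupInformation.doAs so it executes with the caller's credentials; otherwise the call is made directly. Below is a minimal, self-contained sketch of that pattern; the user name "alice" and the class name DoAsSketch are placeholders, not taken from the Hadoop code above.

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsSketch {
  public static void main(String[] args) throws IOException, InterruptedException {
    // Placeholder remote user; real callers usually come from the request context.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");
    // run() executes with ugi as the current user; its return value is passed
    // back through doAs, and IOException/InterruptedException propagate out.
    String name = ugi.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        return UserGroupInformation.getCurrentUser().getShortUserName();
      }
    });
    System.out.println("Ran as: " + name);
  }
}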
Use of java.security.PrivilegedExceptionAction in project hadoop by apache.
The class FSDownload, method call.
@Override
public Path call() throws Exception {
final Path sCopy;
try {
sCopy = resource.getResource().toPath();
} catch (URISyntaxException e) {
throw new IOException("Invalid resource", e);
}
createDir(destDirPath, cachePerms);
final Path dst_work = new Path(destDirPath + "_tmp");
createDir(dst_work, cachePerms);
Path dFinal = files.makeQualified(new Path(dst_work, sCopy.getName()));
try {
Path dTmp = null == userUgi
    ? files.makeQualified(copy(sCopy, dst_work))
    : userUgi.doAs(new PrivilegedExceptionAction<Path>() {
        @Override
        public Path run() throws Exception {
          return files.makeQualified(copy(sCopy, dst_work));
        }
      });
unpack(new File(dTmp.toUri()), new File(dFinal.toUri()));
changePermissions(dFinal.getFileSystem(conf), dFinal);
files.rename(dst_work, destDirPath, Rename.OVERWRITE);
} catch (Exception e) {
try {
files.delete(destDirPath, true);
} catch (IOException ignore) {
}
throw e;
} finally {
try {
files.delete(dst_work, true);
} catch (FileNotFoundException ignore) {
}
conf = null;
resource = null;
}
return files.makeQualified(new Path(destDirPath, sCopy.getName()));
}
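call() stages the download in a sibling "_tmp" directory and only renames it onto the destination after copy, unpack and permission changes have all succeeded, so a failed download never leaves a partially written destination behind. A minimal local-filesystem sketch of that staging pattern follows; the paths are placeholders and the copy/unpack step is elided.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class TmpThenRenameSketch {
  public static void main(String[] args) throws Exception {
    FileContext files = FileContext.getLocalFSFileContext(new Configuration());
    // Placeholder destination; everything is staged in "<dest>_tmp" first.
    Path dest = new Path("/tmp/fsdownload-demo");
    Path work = new Path(dest + "_tmp");
    files.mkdir(work, FsPermission.getDirDefault(), true);
    // ... copy and unpack into `work` here, inside userUgi.doAs(...) when a
    // caller UGI is available, as call() above does ...
    files.rename(work, dest, Rename.OVERWRITE);
  }
}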
Use of java.security.PrivilegedExceptionAction in project hadoop by apache.
The class AppBlock, method generateApplicationTable.
protected void generateApplicationTable(Block html, UserGroupInformation callerUGI, Collection<ApplicationAttemptReport> attempts) {
// Application Attempt Table
TBODY<TABLE<Hamlet>> tbody = html.table("#attempts").thead().tr().th(".id", "Attempt ID").th(".started", "Started").th(".node", "Node").th(".logs", "Logs")._()._().tbody();
StringBuilder attemptsTableData = new StringBuilder("[\n");
for (final ApplicationAttemptReport appAttemptReport : attempts) {
AppAttemptInfo appAttempt = new AppAttemptInfo(appAttemptReport);
ContainerReport containerReport;
try {
final GetContainerReportRequest request = GetContainerReportRequest.newInstance(appAttemptReport.getAMContainerId());
if (callerUGI == null) {
containerReport = appBaseProt.getContainerReport(request).getContainerReport();
} else {
containerReport = callerUGI.doAs(new PrivilegedExceptionAction<ContainerReport>() {
@Override
public ContainerReport run() throws Exception {
ContainerReport report = null;
if (request.getContainerId() != null) {
try {
report = appBaseProt.getContainerReport(request).getContainerReport();
} catch (ContainerNotFoundException ex) {
LOG.warn(ex.getMessage());
}
}
return report;
}
});
}
} catch (Exception e) {
String message = "Failed to read the AM container of the application attempt " + appAttemptReport.getApplicationAttemptId() + ".";
LOG.error(message, e);
html.p()._(message)._();
return;
}
long startTime = 0L;
String logsLink = null;
String nodeLink = null;
if (containerReport != null) {
ContainerInfo container = new ContainerInfo(containerReport);
startTime = container.getStartedTime();
logsLink = containerReport.getLogUrl();
nodeLink = containerReport.getNodeHttpAddress();
}
attemptsTableData
    .append("[\"<a href='")
    .append(url("appattempt", appAttempt.getAppAttemptId()))
    .append("'>")
    .append(appAttempt.getAppAttemptId())
    .append("</a>\",\"")
    .append(startTime)
    .append("\",\"<a ")
    .append(nodeLink == null ? "#" : "href='" + nodeLink)
    .append("'>")
    .append(nodeLink == null ? "N/A"
        : StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(nodeLink)))
    .append("</a>\",\"<a ")
    .append(logsLink == null ? "#" : "href='" + logsLink)
    .append("'>")
    .append(logsLink == null ? "N/A" : "Logs")
    .append("</a>\"],\n");
}
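// Strip the trailing comma left after the last "],\n" row entry; the newline stays.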
if (attemptsTableData.charAt(attemptsTableData.length() - 2) == ',') {
attemptsTableData.delete(attemptsTableData.length() - 2, attemptsTableData.length() - 1);
}
attemptsTableData.append("]");
html.script().$type("text/javascript")._("var attemptsTableData=" + attemptsTableData)._();
tbody._()._();
}
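The rows are emitted as a JavaScript array literal assigned to attemptsTableData, so each node link is HTML-escaped and then JavaScript-escaped before being appended. A small standalone sketch of that double escaping with commons-lang StringEscapeUtils, as used in the snippet; the sample value is a placeholder.

import org.apache.commons.lang.StringEscapeUtils;

public class EscapeSketch {
  public static void main(String[] args) {
    String nodeLink = "host.example.com:8042"; // placeholder node address
    // HTML-escape first, then escape for embedding inside a JS string literal,
    // mirroring the order used when building attemptsTableData above.
    String safe = StringEscapeUtils.escapeJavaScript(
        StringEscapeUtils.escapeHtml(nodeLink));
    System.out.println(safe);
  }
}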
Use of java.security.PrivilegedExceptionAction in project hadoop by apache.
The class AppsBlock, method fetchData.
protected void fetchData() throws YarnException, IOException, InterruptedException {
reqAppStates = EnumSet.noneOf(YarnApplicationState.class);
String reqStateString = $(APP_STATE);
if (reqStateString != null && !reqStateString.isEmpty()) {
String[] appStateStrings = reqStateString.split(",");
for (String stateString : appStateStrings) {
reqAppStates.add(YarnApplicationState.valueOf(stateString.trim()));
}
}
callerUGI = getCallerUGI();
final GetApplicationsRequest request = GetApplicationsRequest.newInstance(reqAppStates);
String appsNumStr = $(APPS_NUM);
if (appsNumStr != null && !appsNumStr.isEmpty()) {
long appsNum = Long.parseLong(appsNumStr);
request.setLimit(appsNum);
}
String appStartedTimeBegainStr = $(APP_START_TIME_BEGIN);
long appStartedTimeBegain = 0;
if (appStartedTimeBegainStr != null && !appStartedTimeBegainStr.isEmpty()) {
appStartedTimeBegain = Long.parseLong(appStartedTimeBegainStr);
if (appStartedTimeBegain < 0) {
throw new BadRequestException("app.started-time.begin must be greater than 0");
}
}
String appStartedTimeEndStr = $(APP_START_TIME_END);
long appStartedTimeEnd = Long.MAX_VALUE;
if (appStartedTimeEndStr != null && !appStartedTimeEndStr.isEmpty()) {
appStartedTimeEnd = Long.parseLong(appStartedTimeEndStr);
if (appStartedTimeEnd < 0) {
throw new BadRequestException("app.started-time.end must be greater than 0");
}
}
if (appStartedTimeBegain > appStartedTimeEnd) {
throw new BadRequestException("app.started-time.end must be greater than app.started-time.begin");
}
request.setStartRange(new LongRange(appStartedTimeBegain, appStartedTimeEnd));
if (callerUGI == null) {
appReports = appBaseProt.getApplications(request).getApplicationList();
} else {
appReports = callerUGI.doAs(new PrivilegedExceptionAction<Collection<ApplicationReport>>() {
@Override
public Collection<ApplicationReport> run() throws Exception {
return appBaseProt.getApplications(request).getApplicationList();
}
});
}
}
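fetchData builds a GetApplicationsRequest from the query parameters and only then decides whether to call getApplications directly or under doAs. A minimal sketch of constructing such a request in isolation; the states, limit and time bounds are placeholder values, and the commons-lang LongRange mirrors the snippet's own usage.

import java.util.EnumSet;
import org.apache.commons.lang.math.LongRange;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;

public class AppsRequestSketch {
  public static void main(String[] args) {
    // Placeholder filters standing in for the APP_STATE, APPS_NUM and
    // app.started-time.* query parameters parsed above.
    GetApplicationsRequest request = GetApplicationsRequest.newInstance(
        EnumSet.of(YarnApplicationState.RUNNING, YarnApplicationState.FINISHED));
    request.setLimit(100L);
    request.setStartRange(new LongRange(0L, System.currentTimeMillis()));
    System.out.println(request);
  }
}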
Use of java.security.PrivilegedExceptionAction in project hadoop by apache.
The class WebServices, method getContainer.
public ContainerInfo getContainer(HttpServletRequest req, HttpServletResponse res, String appId, String appAttemptId, String containerId) {
UserGroupInformation callerUGI = getUser(req);
ApplicationId aid = parseApplicationId(appId);
ApplicationAttemptId aaid = parseApplicationAttemptId(appAttemptId);
final ContainerId cid = parseContainerId(containerId);
validateIds(aid, aaid, cid);
ContainerReport container = null;
try {
if (callerUGI == null) {
GetContainerReportRequest request = GetContainerReportRequest.newInstance(cid);
container = appBaseProt.getContainerReport(request).getContainerReport();
} else {
container = callerUGI.doAs(new PrivilegedExceptionAction<ContainerReport>() {
@Override
public ContainerReport run() throws Exception {
GetContainerReportRequest request = GetContainerReportRequest.newInstance(cid);
return appBaseProt.getContainerReport(request).getContainerReport();
}
});
}
} catch (Exception e) {
rewrapAndThrowException(e);
}
if (container == null) {
throw new NotFoundException("container with id: " + containerId + " not found");
}
return new ContainerInfo(container);
}
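getContainer first turns the three path components into typed ids before requesting the container report. A hedged sketch of that conversion using ConverterUtils follows; the id strings are placeholders, and the parseApplicationId/parseApplicationAttemptId/parseContainerId helpers in WebServices may add further checks on top of this, such as the validateIds call above.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.ConverterUtils;

public class IdParsingSketch {
  public static void main(String[] args) {
    // Placeholder id strings in the standard YARN formats.
    ApplicationId appId =
        ConverterUtils.toApplicationId("application_1400000000000_0001");
    ApplicationAttemptId attemptId =
        ConverterUtils.toApplicationAttemptId("appattempt_1400000000000_0001_000001");
    ContainerId containerId =
        ConverterUtils.toContainerId("container_1400000000000_0001_01_000001");
    System.out.println(appId + " " + attemptId + " " + containerId);
  }
}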