Use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.
The class TimelineCollectorManager, method putIfAbsent:
/**
 * Put the collector into the collection if a collector mapped by id does
 * not exist.
 *
 * @param appId Application Id for which collector needs to be put.
 * @param collector timeline collector to be put.
 * @return the collector associated with id after the potential put.
 * @throws YarnRuntimeException if there was any exception in initializing
 * and starting the app level service
 */
public TimelineCollector putIfAbsent(ApplicationId appId,
    TimelineCollector collector) {
  TimelineCollector collectorInTable = null;
  synchronized (collectors) {
    collectorInTable = collectors.get(appId);
    if (collectorInTable == null) {
      try {
        // initialize, start, and add it to the collection so it can be
        // cleaned up when the parent shuts down
        collector.init(getConfig());
        collector.setWriter(writer);
        collector.start();
        collectors.put(appId, collector);
        LOG.info("the collector for " + appId + " was added");
        collectorInTable = collector;
        postPut(appId, collectorInTable);
      } catch (Exception e) {
        throw new YarnRuntimeException(e);
      }
    } else {
      LOG.info("the collector for " + appId + " already exists!");
    }
  }
  return collectorInTable;
}
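Because any failure while initializing or starting the new collector is rethrown as an unchecked YarnRuntimeException, a caller that must survive a bad registration has to catch it explicitly. A minimal caller sketch, with hypothetical names (collectorManager, appId, LOG assumed in scope) and assuming AppLevelTimelineCollector takes the ApplicationId in its constructor:

// Hedged sketch, not from the Hadoop sources.
TimelineCollector collector = new AppLevelTimelineCollector(appId);
try {
  // Returns the already-registered collector if one exists for appId,
  // otherwise the freshly initialized and started one.
  TimelineCollector active = collectorManager.putIfAbsent(appId, collector);
} catch (YarnRuntimeException e) {
  LOG.error("Timeline collector for " + appId + " could not be started", e);
}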
Use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.
The class TimelineCollectorManager, method createTimelineWriter:
private TimelineWriter createTimelineWriter(final Configuration conf) {
  String timelineWriterClassName = conf.get(
      YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
      YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WRITER_CLASS);
  LOG.info("Using TimelineWriter: " + timelineWriterClassName);
  try {
    Class<?> timelineWriterClazz = Class.forName(timelineWriterClassName);
    if (TimelineWriter.class.isAssignableFrom(timelineWriterClazz)) {
      return (TimelineWriter) ReflectionUtils.newInstance(timelineWriterClazz,
          conf);
    } else {
      throw new YarnRuntimeException("Class: " + timelineWriterClassName
          + " not instance of " + TimelineWriter.class.getCanonicalName());
    }
  } catch (ClassNotFoundException e) {
    throw new YarnRuntimeException("Could not instantiate TimelineWriter: "
        + timelineWriterClassName, e);
  }
}
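The writer implementation is chosen purely from configuration, so swapping the storage back end is a one-property change. A hedged sketch, assuming the constant YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS used above and a hypothetical custom writer class:

// Hedged sketch: org.example.MyTimelineWriter is a hypothetical class that
// extends TimelineWriter.
Configuration conf = new YarnConfiguration();
conf.set(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
    "org.example.MyTimelineWriter");
// createTimelineWriter(conf) would now instantiate MyTimelineWriter reflectively.
// A missing class yields a YarnRuntimeException wrapping ClassNotFoundException;
// a class that is not a TimelineWriter yields a YarnRuntimeException directly.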
Use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.
The class NodeTimelineCollectorManager, method doPostPut:
@Override
protected void doPostPut(ApplicationId appId, TimelineCollector collector) {
  try {
    // Get context info from NM
    updateTimelineCollectorContext(appId, collector);
    // Report to NM if a new collector is added.
    reportNewCollectorToNM(appId);
  } catch (YarnException | IOException e) {
    // Rethrow as unchecked: the collector cannot be used if communication
    // with the NM failed.
    LOG.error("Failed to communicate with NM Collector Service for " + appId);
    throw new YarnRuntimeException(e);
  }
}
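doPostPut is the hook invoked (via postPut) right after a collector is added in putIfAbsent above, so the unchecked YarnRuntimeException thrown here propagates out of putIfAbsent and stops the caller from using a half-registered collector. A hedged subclass sketch, with a hypothetical class name and assuming TimelineCollectorManager exposes its name-taking constructor to subclasses:

// Hedged sketch, not part of Hadoop: a manager whose post-put hook only logs.
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingTimelineCollectorManager extends TimelineCollectorManager {
  private static final Logger LOG =
      LoggerFactory.getLogger(LoggingTimelineCollectorManager.class);

  public LoggingTimelineCollectorManager() {
    super(LoggingTimelineCollectorManager.class.getName());
  }

  @Override
  protected void doPostPut(ApplicationId appId, TimelineCollector collector) {
    // Unlike NodeTimelineCollectorManager, nothing here can fail, so this
    // hook never raises a YarnRuntimeException.
    LOG.info("Collector registered for " + appId);
  }
}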
Use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.
The class ApplicationHistoryServer, method serviceInit:
@Override
protected void serviceInit(Configuration conf) throws Exception {
  // do security login first.
  try {
    doSecureLogin(conf);
  } catch (IOException ie) {
    throw new YarnRuntimeException("Failed to login", ie);
  }
  // init timeline services
  timelineStore = createTimelineStore(conf);
  addIfService(timelineStore);
  secretManagerService = createTimelineDelegationTokenSecretManagerService(conf);
  addService(secretManagerService);
  timelineDataManager = createTimelineDataManager(conf);
  addService(timelineDataManager);
  // init generic history service afterwards
  aclsManager = createApplicationACLsManager(conf);
  historyManager = createApplicationHistoryManager(conf);
  ahsClientService = createApplicationHistoryClientService(historyManager);
  addService(ahsClientService);
  addService((Service) historyManager);
  DefaultMetricsSystem.initialize("ApplicationHistoryServer");
  JvmMetrics jm = JvmMetrics.initSingleton("ApplicationHistoryServer", null);
  pauseMonitor = new JvmPauseMonitor();
  addService(pauseMonitor);
  jm.setPauseMonitor(pauseMonitor);
  super.serviceInit(conf);
}
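serviceInit is not called directly; it runs as part of the standard YARN service lifecycle, and the IOException from a failed secure login is deliberately converted into an unchecked YarnRuntimeException so that it surfaces through init(). A hedged driver sketch, assuming the no-argument ApplicationHistoryServer constructor and the init/start methods inherited from the service base class (LOG assumed in scope):

// Hedged sketch of the lifecycle that triggers serviceInit()/serviceStart().
Configuration conf = new YarnConfiguration();
ApplicationHistoryServer historyServer = new ApplicationHistoryServer();
try {
  historyServer.init(conf);  // runs serviceInit(conf) shown above
  historyServer.start();     // runs serviceStart()
} catch (YarnRuntimeException e) {
  // e.g. "Failed to login" when doSecureLogin(conf) cannot obtain credentials
  LOG.error("ApplicationHistoryServer failed to start", e);
}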
Use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.
The class ApplicationHistoryServer, method startWebApp:
@SuppressWarnings("unchecked")
private void startWebApp() {
  Configuration conf = getConfig();
  TimelineAuthenticationFilter.setTimelineDelegationTokenSecretManager(
      secretManagerService.getTimelineDelegationTokenSecretManager());
  // Always load pseudo authentication filter to parse "user.name" in a URL
  // to identify an HTTP request's user in insecure mode.
  // When Kerberos authentication type is set (i.e., secure mode is turned on),
  // the customized filter will be loaded by the timeline server to do Kerberos
  // + DT authentication.
  String initializers = conf.get("hadoop.http.filter.initializers");
  boolean modifiedInitializers = false;
  initializers = initializers == null || initializers.length() == 0
      ? "" : initializers;
  if (!initializers.contains(CrossOriginFilterInitializer.class.getName())) {
    if (conf.getBoolean(
        YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED,
        YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT)) {
      if (initializers.contains(
          HttpCrossOriginFilterInitializer.class.getName())) {
        initializers = initializers.replaceAll(
            HttpCrossOriginFilterInitializer.class.getName(),
            CrossOriginFilterInitializer.class.getName());
      } else {
        if (initializers.length() != 0) {
          initializers += ",";
        }
        initializers += CrossOriginFilterInitializer.class.getName();
      }
      modifiedInitializers = true;
    }
  }
  if (!initializers.contains(
      TimelineAuthenticationFilterInitializer.class.getName())) {
    if (initializers.length() != 0) {
      initializers += ",";
    }
    initializers += TimelineAuthenticationFilterInitializer.class.getName();
    modifiedInitializers = true;
  }
  String[] parts = initializers.split(",");
  ArrayList<String> target = new ArrayList<String>();
  for (String filterInitializer : parts) {
    filterInitializer = filterInitializer.trim();
    if (filterInitializer.equals(
        AuthenticationFilterInitializer.class.getName())) {
      modifiedInitializers = true;
      continue;
    }
    target.add(filterInitializer);
  }
  String actualInitializers =
      org.apache.commons.lang.StringUtils.join(target, ",");
  if (modifiedInitializers) {
    conf.set("hadoop.http.filter.initializers", actualInitializers);
  }
  String bindAddress = WebAppUtils.getWebAppBindURL(conf,
      YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
      WebAppUtils.getAHSWebAppURLWithoutScheme(conf));
  try {
    AHSWebApp ahsWebApp = new AHSWebApp(timelineDataManager, ahsClientService);
    webApp = WebApps
        .$for("applicationhistory", ApplicationHistoryClientService.class,
            ahsClientService, "ws")
        .with(conf)
        .withAttribute(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
            conf.get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS))
        .withCSRFProtection(YarnConfiguration.TIMELINE_CSRF_PREFIX)
        .withXFSProtection(YarnConfiguration.TIMELINE_XFS_PREFIX)
        .at(bindAddress)
        .build(ahsWebApp);
    HttpServer2 httpServer = webApp.httpServer();
    String[] names =
        conf.getTrimmedStrings(YarnConfiguration.TIMELINE_SERVICE_UI_NAMES);
    WebAppContext webAppContext = httpServer.getWebAppContext();
    for (String name : names) {
      String webPath = conf.get(
          YarnConfiguration.TIMELINE_SERVICE_UI_WEB_PATH_PREFIX + name);
      String onDiskPath = conf.get(
          YarnConfiguration.TIMELINE_SERVICE_UI_ON_DISK_PATH_PREFIX + name);
      WebAppContext uiWebAppContext = new WebAppContext();
      uiWebAppContext.setContextPath(webPath);
      if (onDiskPath.endsWith(".war")) {
        uiWebAppContext.setWar(onDiskPath);
      } else {
        uiWebAppContext.setResourceBase(onDiskPath);
      }
      final String[] ALL_URLS = { "/*" };
      FilterHolder[] filterHolders =
          webAppContext.getServletHandler().getFilters();
      for (FilterHolder filterHolder : filterHolders) {
        if (!"guice".equals(filterHolder.getName())) {
          HttpServer2.defineFilter(uiWebAppContext, filterHolder.getName(),
              filterHolder.getClassName(), filterHolder.getInitParameters(),
              ALL_URLS);
        }
      }
      LOG.info("Hosting " + name + " from " + onDiskPath + " at " + webPath);
      httpServer.addHandlerAtFront(uiWebAppContext);
    }
    httpServer.start();
    conf.updateConnectAddr(YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
        YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS,
        this.getListenerAddress());
    LOG.info("Instantiating AHSWebApp at " + getPort());
  } catch (Exception e) {
    String msg = "AHSWebApp failed to start.";
    LOG.error(msg, e);
    throw new YarnRuntimeException(msg, e);
  }
}
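Most of the branching above only rewrites the hadoop.http.filter.initializers list before the web app is built; any failure while building or starting the web app is then wrapped in a YarnRuntimeException. A hedged configuration sketch showing which settings drive that rewriting (behaviour inferred from the code above, not from separate documentation):

// Hedged sketch: settings that exercise the initializer rewriting in startWebApp().
Configuration conf = new YarnConfiguration();
// With cross-origin support enabled, CrossOriginFilterInitializer is appended,
// or substituted for a pre-configured HttpCrossOriginFilterInitializer.
conf.setBoolean(
    YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED, true);
// Any AuthenticationFilterInitializer listed here is dropped, since the timeline
// server installs its own TimelineAuthenticationFilterInitializer instead.
conf.set("hadoop.http.filter.initializers",
    AuthenticationFilterInitializer.class.getName());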