Search in sources :

Example 86 with YarnRuntimeException

use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.

the class TimelineCollectorManager method putIfAbsent.

/**
 * Returns the collector mapped to the given id, first creating the mapping
 * from the supplied collector (initializing and starting it) if none exists.
 *
 * @param appId Application Id for which collector needs to be put.
 * @param collector timeline collector to be put.
 * @throws YarnRuntimeException if there was any exception in initializing
 *                              and starting the app level service
 * @return the collector associated with id after the potential put.
 */
public TimelineCollector putIfAbsent(ApplicationId appId, TimelineCollector collector) {
    TimelineCollector result;
    synchronized (collectors) {
        result = collectors.get(appId);
        if (result != null) {
            // Someone already registered a collector for this app; keep it.
            LOG.info("the collector for " + appId + " already exists!");
            return result;
        }
        try {
            // initialize, start, and add it to the collection so it can be
            // cleaned up when the parent shuts down
            collector.init(getConfig());
            collector.setWriter(writer);
            collector.start();
            collectors.put(appId, collector);
            LOG.info("the collector for " + appId + " was added");
            result = collector;
            postPut(appId, result);
        } catch (Exception e) {
            // Surface init/start failures to the caller as unchecked.
            throw new YarnRuntimeException(e);
        }
    }
    return result;
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException)

Example 87 with YarnRuntimeException

use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.

the class TimelineCollectorManager method createTimelineWriter.

/**
 * Instantiates the configured {@code TimelineWriter} implementation,
 * falling back to the default writer class when none is configured.
 *
 * @param conf configuration supplying the writer class name; also passed to
 *             the new instance via ReflectionUtils.
 * @return a new TimelineWriter instance.
 * @throws YarnRuntimeException if the configured class cannot be found or
 *                              does not implement TimelineWriter.
 */
private TimelineWriter createTimelineWriter(final Configuration conf) {
    final String timelineWriterClassName = conf.get(
        YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
        YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WRITER_CLASS);
    LOG.info("Using TimelineWriter: " + timelineWriterClassName);
    try {
        final Class<?> timelineWriterClazz = Class.forName(timelineWriterClassName);
        if (!TimelineWriter.class.isAssignableFrom(timelineWriterClazz)) {
            // Configured class exists but has the wrong type.
            throw new YarnRuntimeException("Class: " + timelineWriterClassName + " not instance of " + TimelineWriter.class.getCanonicalName());
        }
        return (TimelineWriter) ReflectionUtils.newInstance(timelineWriterClazz, conf);
    } catch (ClassNotFoundException e) {
        throw new YarnRuntimeException("Could not instantiate TimelineWriter: " + timelineWriterClassName, e);
    }
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) TimelineWriter(org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter)

Example 88 with YarnRuntimeException

use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.

the class NodeTimelineCollectorManager method doPostPut.

/**
 * Post-registration hook for a newly added collector: fetches the
 * collector's context from the NM and reports the new collector to the NM.
 *
 * @param appId application whose collector was just added.
 * @param collector the newly added collector.
 * @throws YarnRuntimeException if communication with the NM fails — the
 *                              collector cannot be used without it.
 */
@Override
protected void doPostPut(ApplicationId appId, TimelineCollector collector) {
    try {
        // Get context info from NM
        updateTimelineCollectorContext(appId, collector);
        // Report to NM if a new collector is added.
        reportNewCollectorToNM(appId);
    } catch (YarnException | IOException e) {
        // throw exception here as it cannot be used if failed communicate with NM
        String msg = "Failed to communicate with NM Collector Service for " + appId;
        // Fix: log the cause (previously only the message was logged, losing
        // the stack trace) and keep the appId context on the rethrown
        // exception instead of wrapping the bare cause.
        LOG.error(msg, e);
        throw new YarnRuntimeException(msg, e);
    }
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) IOException(java.io.IOException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException)

Example 89 with YarnRuntimeException

use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.

the class ApplicationHistoryServer method serviceInit.

/**
 * Initializes the history server's sub-services. The addService order is
 * deliberate — NOTE(review): added services are presumably started in
 * registration order and stopped in reverse by the parent composite
 * service; confirm before reordering.
 *
 * @param conf service configuration.
 * @throws Exception if any sub-service fails to initialize.
 * @throws YarnRuntimeException (unchecked) if the secure login fails.
 */
@Override
protected void serviceInit(Configuration conf) throws Exception {
    // do security login first.
    try {
        doSecureLogin(conf);
    } catch (IOException ie) {
        // The server cannot operate without a successful login; abort init.
        throw new YarnRuntimeException("Failed to login", ie);
    }
    // init timeline services
    timelineStore = createTimelineStore(conf);
    // NOTE(review): addIfService presumably registers the store only when it
    // implements Service — confirm against the base class.
    addIfService(timelineStore);
    secretManagerService = createTimelineDelegationTokenSecretManagerService(conf);
    addService(secretManagerService);
    timelineDataManager = createTimelineDataManager(conf);
    addService(timelineDataManager);
    // init generic history service afterwards
    aclsManager = createApplicationACLsManager(conf);
    historyManager = createApplicationHistoryManager(conf);
    ahsClientService = createApplicationHistoryClientService(historyManager);
    addService(ahsClientService);
    addService((Service) historyManager);
    // Metrics: register the JVM metrics source and tie the pause monitor to it.
    DefaultMetricsSystem.initialize("ApplicationHistoryServer");
    JvmMetrics jm = JvmMetrics.initSingleton("ApplicationHistoryServer", null);
    pauseMonitor = new JvmPauseMonitor();
    addService(pauseMonitor);
    jm.setPauseMonitor(pauseMonitor);
    // Let the parent initialize all registered sub-services.
    super.serviceInit(conf);
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) JvmMetrics(org.apache.hadoop.metrics2.source.JvmMetrics) IOException(java.io.IOException) JvmPauseMonitor(org.apache.hadoop.util.JvmPauseMonitor)

Example 90 with YarnRuntimeException

use of org.apache.hadoop.yarn.exceptions.YarnRuntimeException in project hadoop by apache.

the class ApplicationHistoryServer method startWebApp.

/**
 * Builds and starts the AHS/timeline web application: assembles the HTTP
 * filter-initializer list (CORS + timeline authentication), binds the web
 * app, and hosts any configured UI contexts in front of it.
 *
 * @throws YarnRuntimeException if the web app fails to start.
 */
@SuppressWarnings("unchecked")
private void startWebApp() {
    Configuration conf = getConfig();
    TimelineAuthenticationFilter.setTimelineDelegationTokenSecretManager(secretManagerService.getTimelineDelegationTokenSecretManager());
    // Always load pseudo authentication filter to parse "user.name" in an URL
    // to identify a HTTP request's user in insecure mode.
    // When Kerberos authentication type is set (i.e., secure mode is turned on),
    // the customized filter will be loaded by the timeline server to do Kerberos
    // + DT authentication.
    String initializers = conf.get("hadoop.http.filter.initializers");
    boolean modifiedInitializers = false;
    // Normalize an unset/empty property to the empty string before editing.
    initializers = initializers == null || initializers.length() == 0 ? "" : initializers;
    if (!initializers.contains(CrossOriginFilterInitializer.class.getName())) {
        if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED, YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT)) {
            // Prefer the timeline-specific CORS initializer: replace the
            // generic HTTP one if present, otherwise append.
            if (initializers.contains(HttpCrossOriginFilterInitializer.class.getName())) {
                initializers = initializers.replaceAll(HttpCrossOriginFilterInitializer.class.getName(), CrossOriginFilterInitializer.class.getName());
            } else {
                if (initializers.length() != 0) {
                    initializers += ",";
                }
                initializers += CrossOriginFilterInitializer.class.getName();
            }
            modifiedInitializers = true;
        }
    }
    // Ensure the timeline authentication filter initializer is present.
    if (!initializers.contains(TimelineAuthenticationFilterInitializer.class.getName())) {
        if (initializers.length() != 0) {
            initializers += ",";
        }
        initializers += TimelineAuthenticationFilterInitializer.class.getName();
        modifiedInitializers = true;
    }
    // Drop the plain AuthenticationFilterInitializer; the timeline
    // initializer added above takes over authentication.
    String[] parts = initializers.split(",");
    ArrayList<String> target = new ArrayList<String>();
    for (String filterInitializer : parts) {
        filterInitializer = filterInitializer.trim();
        if (filterInitializer.equals(AuthenticationFilterInitializer.class.getName())) {
            modifiedInitializers = true;
            continue;
        }
        target.add(filterInitializer);
    }
    String actualInitializers = org.apache.commons.lang.StringUtils.join(target, ",");
    // Only write the property back if we actually changed the list.
    if (modifiedInitializers) {
        conf.set("hadoop.http.filter.initializers", actualInitializers);
    }
    String bindAddress = WebAppUtils.getWebAppBindURL(conf, YarnConfiguration.TIMELINE_SERVICE_BIND_HOST, WebAppUtils.getAHSWebAppURLWithoutScheme(conf));
    try {
        AHSWebApp ahsWebApp = new AHSWebApp(timelineDataManager, ahsClientService);
        webApp = WebApps.$for("applicationhistory", ApplicationHistoryClientService.class, ahsClientService, "ws").with(conf).withAttribute(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, conf.get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS)).withCSRFProtection(YarnConfiguration.TIMELINE_CSRF_PREFIX).withXFSProtection(YarnConfiguration.TIMELINE_XFS_PREFIX).at(bindAddress).build(ahsWebApp);
        HttpServer2 httpServer = webApp.httpServer();
        // Host each configured UI as its own web-app context.
        String[] names = conf.getTrimmedStrings(YarnConfiguration.TIMELINE_SERVICE_UI_NAMES);
        WebAppContext webAppContext = httpServer.getWebAppContext();
        for (String name : names) {
            String webPath = conf.get(YarnConfiguration.TIMELINE_SERVICE_UI_WEB_PATH_PREFIX + name);
            // NOTE(review): onDiskPath (and webPath) are null if the per-UI
            // property is unset, and onDiskPath.endsWith would then NPE —
            // confirm these are validated upstream for every configured name.
            String onDiskPath = conf.get(YarnConfiguration.TIMELINE_SERVICE_UI_ON_DISK_PATH_PREFIX + name);
            WebAppContext uiWebAppContext = new WebAppContext();
            uiWebAppContext.setContextPath(webPath);
            // A .war is deployed as an archive; anything else is served as a
            // static resource directory.
            if (onDiskPath.endsWith(".war")) {
                uiWebAppContext.setWar(onDiskPath);
            } else {
                uiWebAppContext.setResourceBase(onDiskPath);
            }
            final String[] ALL_URLS = { "/*" };
            // Copy the main context's filters (except the guice dispatch
            // filter) onto the UI context so it gets the same auth/CORS chain.
            FilterHolder[] filterHolders = webAppContext.getServletHandler().getFilters();
            for (FilterHolder filterHolder : filterHolders) {
                if (!"guice".equals(filterHolder.getName())) {
                    HttpServer2.defineFilter(uiWebAppContext, filterHolder.getName(), filterHolder.getClassName(), filterHolder.getInitParameters(), ALL_URLS);
                }
            }
            LOG.info("Hosting " + name + " from " + onDiskPath + " at " + webPath);
            // Added at the front so the UI path is matched before the default
            // webapp handler.
            httpServer.addHandlerAtFront(uiWebAppContext);
        }
        httpServer.start();
        conf.updateConnectAddr(YarnConfiguration.TIMELINE_SERVICE_BIND_HOST, YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS, this.getListenerAddress());
        LOG.info("Instantiating AHSWebApp at " + getPort());
    } catch (Exception e) {
        String msg = "AHSWebApp failed to start.";
        LOG.error(msg, e);
        throw new YarnRuntimeException(msg, e);
    }
}
Also used : HttpCrossOriginFilterInitializer(org.apache.hadoop.security.HttpCrossOriginFilterInitializer) FilterHolder(org.eclipse.jetty.servlet.FilterHolder) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) TimelineAuthenticationFilterInitializer(org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer) AuthenticationFilterInitializer(org.apache.hadoop.security.AuthenticationFilterInitializer) IOException(java.io.IOException) YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) WebAppContext(org.eclipse.jetty.webapp.WebAppContext) CrossOriginFilterInitializer(org.apache.hadoop.yarn.server.timeline.webapp.CrossOriginFilterInitializer) AHSWebApp(org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp) HttpServer2(org.apache.hadoop.http.HttpServer2)

Aggregations

YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException)147 IOException (java.io.IOException)56 Configuration (org.apache.hadoop.conf.Configuration)38 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)28 Test (org.junit.Test)28 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)17 InetSocketAddress (java.net.InetSocketAddress)12 Path (org.apache.hadoop.fs.Path)12 ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)9 InvocationTargetException (java.lang.reflect.InvocationTargetException)8 Server (org.apache.hadoop.ipc.Server)8 FileSystem (org.apache.hadoop.fs.FileSystem)7 FsPermission (org.apache.hadoop.fs.permission.FsPermission)7 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)7 FileNotFoundException (java.io.FileNotFoundException)6 ArrayList (java.util.ArrayList)6 HashMap (java.util.HashMap)6 JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)6 Job (org.apache.hadoop.mapreduce.v2.app.job.Job)6 ConnectException (java.net.ConnectException)5