
Example 11 with HttpServer2

use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

the class ApplicationHistoryServer method startWebApp.

@SuppressWarnings("unchecked")
private void startWebApp() {
    Configuration conf = getConfig();
    TimelineAuthenticationFilter.setTimelineDelegationTokenSecretManager(
        secretManagerService.getTimelineDelegationTokenSecretManager());
    // Always load the pseudo authentication filter to parse "user.name" in a
    // URL and identify an HTTP request's user in insecure mode.
    // When the Kerberos authentication type is set (i.e., secure mode is on),
    // the timeline server loads the customized filter to do Kerberos + DT
    // authentication.
    String initializers = conf.get("hadoop.http.filter.initializers");
    boolean modifiedInitializers = false;
    initializers = initializers == null || initializers.length() == 0 ? "" : initializers;
    if (!initializers.contains(CrossOriginFilterInitializer.class.getName())) {
        if (conf.getBoolean(
                YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED,
                YarnConfiguration.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT)) {
            if (initializers.contains(HttpCrossOriginFilterInitializer.class.getName())) {
                initializers = initializers.replaceAll(
                        HttpCrossOriginFilterInitializer.class.getName(),
                        CrossOriginFilterInitializer.class.getName());
            } else {
                if (initializers.length() != 0) {
                    initializers += ",";
                }
                initializers += CrossOriginFilterInitializer.class.getName();
            }
            modifiedInitializers = true;
        }
    }
    if (!initializers.contains(TimelineAuthenticationFilterInitializer.class.getName())) {
        if (initializers.length() != 0) {
            initializers += ",";
        }
        initializers += TimelineAuthenticationFilterInitializer.class.getName();
        modifiedInitializers = true;
    }
    String[] parts = initializers.split(",");
    ArrayList<String> target = new ArrayList<String>();
    for (String filterInitializer : parts) {
        filterInitializer = filterInitializer.trim();
        if (filterInitializer.equals(AuthenticationFilterInitializer.class.getName())) {
            modifiedInitializers = true;
            continue;
        }
        target.add(filterInitializer);
    }
    String actualInitializers = org.apache.commons.lang.StringUtils.join(target, ",");
    if (modifiedInitializers) {
        conf.set("hadoop.http.filter.initializers", actualInitializers);
    }
    String bindAddress = WebAppUtils.getWebAppBindURL(conf,
            YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
            WebAppUtils.getAHSWebAppURLWithoutScheme(conf));
    try {
        AHSWebApp ahsWebApp = new AHSWebApp(timelineDataManager, ahsClientService);
        webApp = WebApps
                .$for("applicationhistory", ApplicationHistoryClientService.class,
                        ahsClientService, "ws")
                .with(conf)
                .withAttribute(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
                        conf.get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS))
                .withCSRFProtection(YarnConfiguration.TIMELINE_CSRF_PREFIX)
                .withXFSProtection(YarnConfiguration.TIMELINE_XFS_PREFIX)
                .at(bindAddress)
                .build(ahsWebApp);
        HttpServer2 httpServer = webApp.httpServer();
        String[] names = conf.getTrimmedStrings(YarnConfiguration.TIMELINE_SERVICE_UI_NAMES);
        WebAppContext webAppContext = httpServer.getWebAppContext();
        for (String name : names) {
            String webPath = conf.get(YarnConfiguration.TIMELINE_SERVICE_UI_WEB_PATH_PREFIX + name);
            String onDiskPath = conf.get(YarnConfiguration.TIMELINE_SERVICE_UI_ON_DISK_PATH_PREFIX + name);
            WebAppContext uiWebAppContext = new WebAppContext();
            uiWebAppContext.setContextPath(webPath);
            if (onDiskPath.endsWith(".war")) {
                uiWebAppContext.setWar(onDiskPath);
            } else {
                uiWebAppContext.setResourceBase(onDiskPath);
            }
            final String[] ALL_URLS = { "/*" };
            FilterHolder[] filterHolders = webAppContext.getServletHandler().getFilters();
            for (FilterHolder filterHolder : filterHolders) {
                if (!"guice".equals(filterHolder.getName())) {
                    HttpServer2.defineFilter(uiWebAppContext,
                            filterHolder.getName(), filterHolder.getClassName(),
                            filterHolder.getInitParameters(), ALL_URLS);
                }
            }
            LOG.info("Hosting " + name + " from " + onDiskPath + " at " + webPath);
            httpServer.addHandlerAtFront(uiWebAppContext);
        }
        httpServer.start();
        conf.updateConnectAddr(YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
                YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
                YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS,
                this.getListenerAddress());
        LOG.info("Instantiating AHSWebApp at " + getPort());
    } catch (Exception e) {
        String msg = "AHSWebApp failed to start.";
        LOG.error(msg, e);
        throw new YarnRuntimeException(msg, e);
    }
}
Also used:
HttpCrossOriginFilterInitializer(org.apache.hadoop.security.HttpCrossOriginFilterInitializer)
FilterHolder(org.eclipse.jetty.servlet.FilterHolder)
YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration)
Configuration(org.apache.hadoop.conf.Configuration)
ArrayList(java.util.ArrayList)
TimelineAuthenticationFilterInitializer(org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer)
AuthenticationFilterInitializer(org.apache.hadoop.security.AuthenticationFilterInitializer)
IOException(java.io.IOException)
YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException)
WebAppContext(org.eclipse.jetty.webapp.WebAppContext)
CrossOriginFilterInitializer(org.apache.hadoop.yarn.server.timeline.webapp.CrossOriginFilterInitializer)
AHSWebApp(org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp)
HttpServer2(org.apache.hadoop.http.HttpServer2)
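The filter-initializer juggling above works because HttpServer2 instantiates every class named in hadoop.http.filter.initializers at startup and asks each one to register its servlet filters. A minimal sketch of such an initializer follows; the class name MyHeaderFilterInitializer, the property my.custom.header, and the filter class com.example.MyHeaderFilter are hypothetical, but FilterInitializer and FilterContainer are the actual Hadoop extension points.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.http.FilterInitializer;

// Hypothetical initializer: adding its class name to
// "hadoop.http.filter.initializers" makes HttpServer2 call initFilter()
// while the server is being built.
public class MyHeaderFilterInitializer extends FilterInitializer {
    @Override
    public void initFilter(FilterContainer container, Configuration conf) {
        Map<String, String> params = new HashMap<String, String>();
        params.put("header", conf.get("my.custom.header", "X-Example"));
        // Registers the filter on every web context hosted by the server;
        // "com.example.MyHeaderFilter" is an illustrative javax.servlet.Filter.
        container.addGlobalFilter("myHeaderFilter",
            "com.example.MyHeaderFilter", params);
    }
}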

Example 12 with HttpServer2

use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

the class TestTransferFsImage method testGetImageTimeout.

/**
   * Test to verify the read timeout
   */
@Test(timeout = 10000)
public void testGetImageTimeout() throws Exception {
    HttpServer2 testServer = HttpServerFunctionalTest.createServer("hdfs");
    try {
        testServer.addServlet("ImageTransfer", ImageServlet.PATH_SPEC, TestImageTransferServlet.class);
        testServer.start();
        URL serverURL = HttpServerFunctionalTest.getServerURL(testServer);
        TransferFsImage.timeout = 2000;
        try {
            TransferFsImage.getFileClient(serverURL, "txid=1", null, null, false);
            fail("TransferImage Should fail with timeout");
        } catch (SocketTimeoutException e) {
            assertEquals("Read should timeout", "Read timed out", e.getMessage());
        }
    } finally {
        if (testServer != null) {
            testServer.stop();
        }
    }
}
Also used:
SocketTimeoutException(java.net.SocketTimeoutException)
HttpServer2(org.apache.hadoop.http.HttpServer2)
URL(java.net.URL)
HttpServerFunctionalTest(org.apache.hadoop.http.HttpServerFunctionalTest)
Test(org.junit.Test)
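The servlet wired in above, TestImageTransferServlet, is what holds the response open past TransferFsImage.timeout (2000 ms here); it is not reproduced in this snippet. A hypothetical stand-in that would trigger the same SocketTimeoutException could look like this; the class name and sleep interval are illustrative, not the actual servlet from the Hadoop test.

import java.io.IOException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical servlet: it never writes a byte within the client's 2000 ms
// read-timeout window, so getFileClient fails with "Read timed out".
public class SlowImageServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
        try {
            Thread.sleep(5000); // longer than TransferFsImage.timeout
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        resp.setStatus(HttpServletResponse.SC_OK);
    }
}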

Example 13 with HttpServer2

use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

the class TestJobEndNotifier method testNotificationOnLastRetryNormalShutdown.

@Test
public void testNotificationOnLastRetryNormalShutdown() throws Exception {
    HttpServer2 server = startHttpServer();
    // Act like it is the second attempt. Default max attempts is 2
    MRApp app = spy(new MRAppWithCustomContainerAllocator(2, 2, true, this.getClass().getName(), true, 2, true));
    doNothing().when(app).sysexit();
    JobConf conf = new JobConf();
    conf.set(JobContext.MR_JOB_END_NOTIFICATION_URL, JobEndServlet.baseUrl + "jobend?jobid=$jobId&status=$jobStatus");
    JobImpl job = (JobImpl) app.submit(conf);
    app.waitForInternalState(job, JobStateInternal.SUCCEEDED);
    // Unregistration succeeds: successfullyUnregistered is set
    app.shutDownJob();
    Assert.assertTrue(app.isLastAMRetry());
    Assert.assertEquals(1, JobEndServlet.calledTimes);
    Assert.assertEquals("jobid=" + job.getID() + "&status=SUCCEEDED", JobEndServlet.requestUri.getQuery());
    Assert.assertEquals(JobState.SUCCEEDED.toString(), JobEndServlet.foundJobState);
    server.stop();
}
Also used:
JobImpl(org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl)
HttpServer2(org.apache.hadoop.http.HttpServer2)
JobConf(org.apache.hadoop.mapred.JobConf)
Test(org.junit.Test)
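JobEndServlet itself is not shown in this snippet; the assertions only rely on its static fields (calledTimes, requestUri, foundJobState) being updated on each notification. A minimal stand-in consistent with that contract, assuming the job state arrives as the status query parameter configured above, might be:

import java.io.IOException;
import java.net.URI;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Illustrative stand-in for JobEndServlet: each job-end notification bumps a
// counter and records the request URI and the reported job status.
public class RecordingJobEndServlet extends HttpServlet {
    public static volatile int calledTimes = 0;
    public static volatile URI requestUri;
    public static volatile String foundJobState;

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
        calledTimes++;
        String query = req.getQueryString();
        requestUri = URI.create(
            req.getRequestURI() + (query == null ? "" : "?" + query));
        foundJobState = req.getParameter("status");
        resp.setStatus(HttpServletResponse.SC_OK);
    }
}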

Example 14 with HttpServer2

use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

the class TestJobEndNotifier method testAbsentNotificationOnNotLastRetryUnregistrationFailure.

@Test
public void testAbsentNotificationOnNotLastRetryUnregistrationFailure() throws Exception {
    HttpServer2 server = startHttpServer();
    MRApp app = spy(new MRAppWithCustomContainerAllocator(2, 2, false, this.getClass().getName(), true, 1, false));
    doNothing().when(app).sysexit();
    JobConf conf = new JobConf();
    conf.set(JobContext.MR_JOB_END_NOTIFICATION_URL, JobEndServlet.baseUrl + "jobend?jobid=$jobId&status=$jobStatus");
    JobImpl job = (JobImpl) app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    app.getContext().getEventHandler().handle(new JobEvent(app.getJobId(), JobEventType.JOB_AM_REBOOT));
    app.waitForInternalState(job, JobStateInternal.REBOOT);
    // Now shut down. Unregistration fails, so isLastAMRetry is recalculated
    // and this is not treated as the last AM attempt.
    app.shutDownJob();
    // Since this is not the last AM attempt, the user should see that the job
    // is still running.
    app.waitForState(job, JobState.RUNNING);
    Assert.assertFalse(app.isLastAMRetry());
    Assert.assertEquals(0, JobEndServlet.calledTimes);
    Assert.assertNull(JobEndServlet.requestUri);
    Assert.assertNull(JobEndServlet.foundJobState);
    server.stop();
}
Also used:
JobImpl(org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl)
JobEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent)
HttpServer2(org.apache.hadoop.http.HttpServer2)
JobConf(org.apache.hadoop.mapred.JobConf)
Test(org.junit.Test)

Example 15 with HttpServer2

use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

the class TestJobEndNotifier method startHttpServer.

private static HttpServer2 startHttpServer() throws Exception {
    new File(System.getProperty("build.webapps", "build/webapps") + "/test").mkdirs();
    HttpServer2 server = new HttpServer2.Builder().setName("test").addEndpoint(URI.create("http://localhost:0")).setFindPort(true).build();
    server.addServlet("jobend", "/jobend", JobEndServlet.class);
    server.start();
    JobEndServlet.calledTimes = 0;
    JobEndServlet.requestUri = null;
    JobEndServlet.baseUrl = "http://localhost:" + server.getConnectorAddress(0).getPort() + "/";
    JobEndServlet.foundJobState = null;
    return server;
}
Also used:
HttpServer2(org.apache.hadoop.http.HttpServer2)
File(java.io.File)
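Note the ephemeral-port pattern here: binding to http://localhost:0 with setFindPort(true) lets the server pick any free port, and the real port is recovered afterwards through server.getConnectorAddress(0).getPort() when building JobEndServlet.baseUrl. This keeps parallel test runs from colliding on a hard-coded port.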

Aggregations

HttpServer2 (org.apache.hadoop.http.HttpServer2): 20
Configuration (org.apache.hadoop.conf.Configuration): 7
Test (org.junit.Test): 6
IOException (java.io.IOException): 4
InetSocketAddress (java.net.InetSocketAddress): 4
HttpConfig (org.apache.hadoop.http.HttpConfig): 4
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 4
File (java.io.File): 3
SocketTimeoutException (java.net.SocketTimeoutException): 3
URI (java.net.URI): 3
URL (java.net.URL): 3
HttpServerFunctionalTest (org.apache.hadoop.http.HttpServerFunctionalTest): 3
JobConf (org.apache.hadoop.mapred.JobConf): 3
JobImpl (org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl): 3
YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException): 3
SocketException (java.net.SocketException): 2
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 2
JobEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent): 2
AccessControlList (org.apache.hadoop.security.authorize.AccessControlList): 2
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 2