
Example 16 with HttpServer2

Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

The class TestLogLevel, method createServer.

/**
   * Creates and starts a Jetty server bound to an ephemeral port to serve the
   * LogLevel servlet.
   * @param protocol "http" or "https"
   * @param isSpnego true if SPNEGO is enabled
   * @return the started HttpServer2 instance
   * @throws Exception if the Jetty server cannot be created or started
   */
private HttpServer2 createServer(String protocol, boolean isSpnego) throws Exception {
    HttpServer2.Builder builder = new HttpServer2.Builder()
        .setName("..")
        .addEndpoint(new URI(protocol + "://localhost:0"))
        .setFindPort(true)
        .setConf(conf);
    if (isSpnego) {
        // Set up server Kerberos credentials.
        // Since the server may fall back to simple authentication,
        // use ACL to make sure the connection is Kerberos/SPNEGO authenticated.
        builder.setSecurityEnabled(true)
            .setUsernameConfKey(PRINCIPAL)
            .setKeytabConfKey(KEYTAB)
            .setACL(new AccessControlList(clientPrincipal));
    }
    // If using HTTPS, configure keystore/truststore properties.
    if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) {
        builder = builder
            .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
            .keyStore(sslConf.get("ssl.server.keystore.location"),
                sslConf.get("ssl.server.keystore.password"),
                sslConf.get("ssl.server.keystore.type", "jks"))
            .trustStore(sslConf.get("ssl.server.truststore.location"),
                sslConf.get("ssl.server.truststore.password"),
                sslConf.get("ssl.server.truststore.type", "jks"));
    }
    HttpServer2 server = builder.build();
    // Enable SPNEGO for LogLevel servlet
    if (isSpnego) {
        server.addInternalServlet("logLevel", "/logLevel", LogLevel.Servlet.class, true);
    }
    server.start();
    return server;
}
Also used: AccessControlList (org.apache.hadoop.security.authorize.AccessControlList), HttpServer2 (org.apache.hadoop.http.HttpServer2), URI (java.net.URI)
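To see the same Builder flow in isolation, here is a minimal, hedged sketch of standing up an HttpServer2 on an ephemeral port and tearing it down; the "demo" name is a placeholder, and no SPNEGO or SSL is configured.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.net.NetUtils;

public class MinimalHttpServer2Demo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Port 0 plus setFindPort(true) lets Jetty pick a free ephemeral port.
        HttpServer2 server = new HttpServer2.Builder()
            .setName("demo")  // placeholder context name
            .addEndpoint(new URI("http://localhost:0"))
            .setFindPort(true)
            .setConf(conf)
            .build();
        server.start();
        // getConnectorAddress(0) reports the address actually bound.
        System.out.println("Listening at "
            + NetUtils.getHostPortString(server.getConnectorAddress(0)));
        server.stop();
    }
}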

Example 17 with HttpServer2

Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

The class TestLogLevel, method testDynamicLogLevel.

/**
   * Run both client and server using the given protocols.
   *
   * @param bindProtocol either "http" or "https" for the server
   * @param connectProtocol either "http" or "https" for the client
   * @param isSpnego true if SPNEGO is enabled
   * @param newLevel the log level the client should set
   * @throws Exception if either protocol is invalid or the round trip fails
   */
private void testDynamicLogLevel(final String bindProtocol,
        final String connectProtocol, final boolean isSpnego,
        final String newLevel) throws Exception {
    if (!LogLevel.isValidProtocol(bindProtocol)) {
        throw new Exception("Invalid server protocol " + bindProtocol);
    }
    if (!LogLevel.isValidProtocol(connectProtocol)) {
        throw new Exception("Invalid client protocol " + connectProtocol);
    }
    Level oldLevel = log.getEffectiveLevel();
    Assert.assertNotEquals("Default log level should not be ERROR.", Level.ERROR, oldLevel);
    // configs needed for SPNEGO at server side
    if (isSpnego) {
        conf.set(PRINCIPAL, KerberosTestUtils.getServerPrincipal());
        conf.set(KEYTAB, KerberosTestUtils.getKeytabFile());
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
        UserGroupInformation.setConfiguration(conf);
    }
    final HttpServer2 server = createServer(bindProtocol, isSpnego);
    // get server port
    final String authority = NetUtils.getHostPortString(server.getConnectorAddress(0));
    KerberosTestUtils.doAsClient(new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            // client command line
            getLevel(connectProtocol, authority);
            setLevel(connectProtocol, authority, newLevel);
            return null;
        }
    });
    server.stop();
    // restore log level
    GenericTestUtils.setLogLevel(log, oldLevel);
}
Also used: Level (org.apache.log4j.Level), HttpServer2 (org.apache.hadoop.http.HttpServer2), SocketException (java.net.SocketException), HadoopIllegalArgumentException (org.apache.hadoop.HadoopIllegalArgumentException), SSLException (javax.net.ssl.SSLException)
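As a hedged illustration of how this helper might be driven, a JUnit case inside the same test class could look like the following; the DEBUG level is an illustrative choice, not prescribed by the helper.

// Hypothetical JUnit caller inside TestLogLevel (requires org.junit.Test).
@Test
public void testLogLevelOverHttpsWithSpnego() throws Exception {
    // Bind and connect over HTTPS with SPNEGO enabled; raise the level to DEBUG.
    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true,
        "DEBUG");
}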

Example 18 with HttpServer2

Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

The class DFSUtil, method httpServerTemplateForNNAndJN.

/**
   * Returns an HttpServer2.Builder that the journalnode / namenode / secondary
   * namenode can use to initialize its HTTP / HTTPS server.
   */
public static HttpServer2.Builder httpServerTemplateForNNAndJN(
        Configuration conf, final InetSocketAddress httpAddr,
        final InetSocketAddress httpsAddr, String name,
        String spnegoUserNameKey, String spnegoKeytabFileKey) throws IOException {
    HttpConfig.Policy policy = getHttpPolicy(conf);
    HttpServer2.Builder builder = new HttpServer2.Builder()
        .setName(name)
        .setConf(conf)
        .setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
        .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
        .setUsernameConfKey(spnegoUserNameKey)
        .setKeytabConfKey(getSpnegoKeytabKey(conf, spnegoKeytabFileKey));
    // initialize the webserver for uploading/downloading files.
    if (UserGroupInformation.isSecurityEnabled()) {
        LOG.info("Starting web server as: " + SecurityUtil.getServerPrincipal(conf.get(spnegoUserNameKey), httpAddr.getHostName()));
    }
    if (policy.isHttpEnabled()) {
        if (httpAddr.getPort() == 0) {
            builder.setFindPort(true);
        }
        URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
        builder.addEndpoint(uri);
        LOG.info("Starting Web-server for " + name + " at: " + uri);
    }
    if (policy.isHttpsEnabled() && httpsAddr != null) {
        Configuration sslConf = loadSslConfiguration(conf);
        loadSslConfToHttpServerBuilder(builder, sslConf);
        if (httpsAddr.getPort() == 0) {
            builder.setFindPort(true);
        }
        URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
        builder.addEndpoint(uri);
        LOG.info("Starting Web-server for " + name + " at: " + uri);
    }
    return builder;
}
Also used: AccessControlList (org.apache.hadoop.security.authorize.AccessControlList), Configuration (org.apache.hadoop.conf.Configuration), HttpConfig (org.apache.hadoop.http.HttpConfig), HttpServer2 (org.apache.hadoop.http.HttpServer2), URI (java.net.URI)
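For orientation, a hedged sketch of a caller follows; the daemon name, addresses, and SPNEGO keys are placeholders rather than real Hadoop configuration, and Example 19 below shows the actual SecondaryNameNode caller.

import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.http.HttpServer2;

public class TemplateCallerSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Port 0 makes the template call setFindPort(true) and pick a free port.
        InetSocketAddress httpAddr = new InetSocketAddress("0.0.0.0", 0);
        InetSocketAddress httpsAddr = new InetSocketAddress("0.0.0.0", 0);
        HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
            httpAddr, httpsAddr, "demo",
            "demo.kerberos.internal.spnego.principal",  // placeholder keys
            "demo.keytab.file");
        HttpServer2 server = builder.build();
        server.start();
        server.stop();
    }
}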

Example 19 with HttpServer2

Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

The class SecondaryNameNode, method startInfoServer.

/**
   * Start the web server.
   */
@VisibleForTesting
public void startInfoServer() throws IOException {
    final InetSocketAddress httpAddr = getHttpAddress(conf);
    final String httpsAddrString = conf.getTrimmed(
        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_DEFAULT);
    InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
    HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
        httpAddr, httpsAddr, "secondary",
        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
    infoServer = builder.build();
    infoServer.setAttribute("secondary.name.node", this);
    infoServer.setAttribute("name.system.image", checkpointImage);
    infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
    infoServer.addInternalServlet("imagetransfer", ImageServlet.PATH_SPEC, ImageServlet.class, true);
    infoServer.start();
    LOG.info("Web server init done");
    HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
    int connIdx = 0;
    if (policy.isHttpEnabled()) {
        InetSocketAddress httpAddress = infoServer.getConnectorAddress(connIdx++);
        conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, NetUtils.getHostPortString(httpAddress));
    }
    if (policy.isHttpsEnabled()) {
        InetSocketAddress httpsAddress = infoServer.getConnectorAddress(connIdx);
        conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY, NetUtils.getHostPortString(httpsAddress));
    }
}
Also used: InetSocketAddress (java.net.InetSocketAddress), HttpConfig (org.apache.hadoop.http.HttpConfig), HttpServer2 (org.apache.hadoop.http.HttpServer2), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
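The connIdx bookkeeping above encodes an ordering convention: connectors are registered HTTP-first, so the HTTPS connector sits at index 1 when HTTP is also enabled and at index 0 otherwise. A small hedged helper (not part of Hadoop) makes that convention explicit:

import java.net.InetSocketAddress;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpServer2;

// Illustrative helper, assuming endpoints were registered HTTP-first as in
// httpServerTemplateForNNAndJN; returns the HTTPS connector's bound address.
static InetSocketAddress httpsAddressOf(HttpServer2 server,
        HttpConfig.Policy policy) {
    int idx = policy.isHttpEnabled() ? 1 : 0;
    return server.getConnectorAddress(idx);
}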

Example 20 with HttpServer2

Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.

The class NameNodeHttpServer, method start.

/**
   * @see DFSUtil#getHttpPolicy(org.apache.hadoop.conf.Configuration)
   * for information on the different configuration options and how the
   * HTTP policy is decided.
   */
void start() throws IOException {
    HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
    final String infoHost = bindAddress.getHostName();
    final InetSocketAddress httpAddr = bindAddress;
    final String httpsAddrString = conf.getTrimmed(
        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT);
    InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
    if (httpsAddr != null) {
        // If DFS_NAMENODE_HTTPS_BIND_HOST_KEY exists then it overrides the
        // host name portion of DFS_NAMENODE_HTTPS_ADDRESS_KEY.
        final String bindHost = conf.getTrimmed(DFSConfigKeys.DFS_NAMENODE_HTTPS_BIND_HOST_KEY);
        if (bindHost != null && !bindHost.isEmpty()) {
            httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
        }
    }
    HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
        httpAddr, httpsAddr, "hdfs",
        DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
        DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
    final boolean xFrameEnabled = conf.getBoolean(
        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
    final String xFrameOptionValue = conf.getTrimmed(
        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
    builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
    httpServer = builder.build();
    if (policy.isHttpsEnabled()) {
        // assume same ssl port for all datanodes
        InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.getTrimmed(
            DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY,
            infoHost + ":" + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
        httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY, datanodeSslPort.getPort());
    }
    initWebHdfs(conf);
    httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
    httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
    setupServlets(httpServer, conf);
    httpServer.start();
    int connIdx = 0;
    if (policy.isHttpEnabled()) {
        httpAddress = httpServer.getConnectorAddress(connIdx++);
        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, NetUtils.getHostPortString(httpAddress));
    }
    if (policy.isHttpsEnabled()) {
        httpsAddress = httpServer.getConnectorAddress(connIdx);
        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, NetUtils.getHostPortString(httpsAddress));
    }
}
Also used: InetSocketAddress (java.net.InetSocketAddress), HttpConfig (org.apache.hadoop.http.HttpConfig), HttpServer2 (org.apache.hadoop.http.HttpServer2)
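The bind-host override near the top of start() is a recurring Hadoop pattern: a separate *_BIND_HOST key, when present, replaces only the host portion of the advertised address, so a daemon can listen on a wildcard interface while advertising a routable name. A hedged sketch of the pattern with placeholder keys:

import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;

public final class BindHostSketch {
    // "demo.https.address" and "demo.https.bind.host" are hypothetical keys.
    static InetSocketAddress resolveBindAddress(Configuration conf) {
        InetSocketAddress addr = NetUtils.createSocketAddr(
            conf.getTrimmed("demo.https.address", "0.0.0.0:9871"));
        String bindHost = conf.getTrimmed("demo.https.bind.host");
        if (bindHost != null && !bindHost.isEmpty()) {
            // Keep the configured port; listen on the override host instead.
            addr = new InetSocketAddress(bindHost, addr.getPort());
        }
        return addr;
    }
}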

Aggregations

HttpServer2 (org.apache.hadoop.http.HttpServer2): 20
Configuration (org.apache.hadoop.conf.Configuration): 7
Test (org.junit.Test): 6
IOException (java.io.IOException): 4
InetSocketAddress (java.net.InetSocketAddress): 4
HttpConfig (org.apache.hadoop.http.HttpConfig): 4
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 4
File (java.io.File): 3
SocketTimeoutException (java.net.SocketTimeoutException): 3
URI (java.net.URI): 3
URL (java.net.URL): 3
HttpServerFunctionalTest (org.apache.hadoop.http.HttpServerFunctionalTest): 3
JobConf (org.apache.hadoop.mapred.JobConf): 3
JobImpl (org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl): 3
YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException): 3
SocketException (java.net.SocketException): 2
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 2
JobEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent): 2
AccessControlList (org.apache.hadoop.security.authorize.AccessControlList): 2
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 2