Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.
The class TestLogLevel, method createServer.
/**
* Creates and starts a Jetty server bound to an ephemeral port to run
* the LogLevel servlet.
* @param protocol "http" or "https"
* @param isSpnego true if SPNEGO is enabled
* @return a created HttpServer2 object
* @throws Exception if unable to create or start a Jetty server
*/
private HttpServer2 createServer(String protocol, boolean isSpnego)
    throws Exception {
  HttpServer2.Builder builder = new HttpServer2.Builder()
      .setName("..")
      .addEndpoint(new URI(protocol + "://localhost:0"))
      .setFindPort(true)
      .setConf(conf);
  if (isSpnego) {
    // Set up server Kerberos credentials.
    // Since the server may fall back to simple authentication,
    // use an ACL to make sure the connection is Kerberos/SPNEGO authenticated.
    builder.setSecurityEnabled(true)
        .setUsernameConfKey(PRINCIPAL)
        .setKeytabConfKey(KEYTAB)
        .setACL(new AccessControlList(clientPrincipal));
  }
  // If using HTTPS, configure keystore/truststore properties.
  if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) {
    builder = builder
        .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
        .keyStore(sslConf.get("ssl.server.keystore.location"),
            sslConf.get("ssl.server.keystore.password"),
            sslConf.get("ssl.server.keystore.type", "jks"))
        .trustStore(sslConf.get("ssl.server.truststore.location"),
            sslConf.get("ssl.server.truststore.password"),
            sslConf.get("ssl.server.truststore.type", "jks"));
  }
  HttpServer2 server = builder.build();
  // Enable SPNEGO for the LogLevel servlet.
  if (isSpnego) {
    server.addInternalServlet("logLevel", "/logLevel",
        LogLevel.Servlet.class, true);
  }
  server.start();
  return server;
}
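The helper above assumes an already-populated sslConf. A minimal sketch of one way a test could load it, assuming an ssl-server.xml resource is available on the classpath (the actual test class may set this up differently):

  // Sketch under assumptions: build a standalone Configuration holding the
  // server-side SSL properties read by createServer(). The resource name
  // "ssl-server.xml" and its presence on the classpath are assumptions.
  Configuration sslConf = new Configuration(false);
  sslConf.addResource("ssl-server.xml");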
Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.
The class TestLogLevel, method testDynamicLogLevel.
/**
 * Run both client and server using the given protocols.
 *
 * @param bindProtocol specify either http or https for the server
 * @param connectProtocol specify either http or https for the client
 * @param isSpnego true if SPNEGO is enabled
 * @param newLevel the log level the client should request
 * @throws Exception if a protocol is invalid or the client/server run fails
 */
private void testDynamicLogLevel(final String bindProtocol,
    final String connectProtocol, final boolean isSpnego,
    final String newLevel) throws Exception {
  if (!LogLevel.isValidProtocol(bindProtocol)) {
    throw new Exception("Invalid server protocol " + bindProtocol);
  }
  if (!LogLevel.isValidProtocol(connectProtocol)) {
    throw new Exception("Invalid client protocol " + connectProtocol);
  }
  Level oldLevel = log.getEffectiveLevel();
  Assert.assertNotEquals("Get default Log Level which shouldn't be ERROR.",
      Level.ERROR, oldLevel);
  // Configs needed for SPNEGO on the server side.
  if (isSpnego) {
    conf.set(PRINCIPAL, KerberosTestUtils.getServerPrincipal());
    conf.set(KEYTAB, KerberosTestUtils.getKeytabFile());
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
        "kerberos");
    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
        true);
    UserGroupInformation.setConfiguration(conf);
  }
  final HttpServer2 server = createServer(bindProtocol, isSpnego);
  // Get the server port.
  final String authority = NetUtils.getHostPortString(
      server.getConnectorAddress(0));
  KerberosTestUtils.doAsClient(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      // Client command line.
      getLevel(connectProtocol, authority);
      setLevel(connectProtocol, authority, newLevel);
      return null;
    }
  });
  server.stop();
  // Restore the log level.
  GenericTestUtils.setLogLevel(log, oldLevel);
}
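A test method might drive this helper as below; the method name, timeout, and chosen level are illustrative, not taken from the original class:

  // Hypothetical JUnit test: both sides speak HTTPS, SPNEGO is on, and the
  // client asks the server to switch the logger to DEBUG.
  @Test(timeout = 60000)
  public void testLogLevelByHttpsWithSpnego() throws Exception {
    testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS,
        true, Level.DEBUG.toString());
  }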
Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.
The class DFSUtil, method httpServerTemplateForNNAndJN.
/**
 * Return an HttpServer2.Builder that the journalnode / namenode / secondary
 * namenode can use to initialize their HTTP / HTTPS server.
 */
public static HttpServer2.Builder httpServerTemplateForNNAndJN(
    Configuration conf, final InetSocketAddress httpAddr,
    final InetSocketAddress httpsAddr, String name, String spnegoUserNameKey,
    String spnegoKeytabFileKey) throws IOException {
  HttpConfig.Policy policy = getHttpPolicy(conf);
  HttpServer2.Builder builder = new HttpServer2.Builder()
      .setName(name)
      .setConf(conf)
      .setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
      .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
      .setUsernameConfKey(spnegoUserNameKey)
      .setKeytabConfKey(getSpnegoKeytabKey(conf, spnegoKeytabFileKey));
  // Initialize the web server for uploading/downloading files.
  if (UserGroupInformation.isSecurityEnabled()) {
    LOG.info("Starting web server as: " + SecurityUtil.getServerPrincipal(
        conf.get(spnegoUserNameKey), httpAddr.getHostName()));
  }
  if (policy.isHttpEnabled()) {
    if (httpAddr.getPort() == 0) {
      builder.setFindPort(true);
    }
    URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
    builder.addEndpoint(uri);
    LOG.info("Starting Web-server for " + name + " at: " + uri);
  }
  if (policy.isHttpsEnabled() && httpsAddr != null) {
    Configuration sslConf = loadSslConfiguration(conf);
    loadSslConfToHttpServerBuilder(builder, sslConf);
    if (httpsAddr.getPort() == 0) {
      builder.setFindPort(true);
    }
    URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
    builder.addEndpoint(uri);
    LOG.info("Starting Web-server for " + name + " at: " + uri);
  }
  return builder;
}
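Which of the two endpoint branches runs is decided by the HTTP policy read on the first line. A brief sketch of selecting that policy in configuration; the accepted values are HTTP_ONLY, HTTPS_ONLY, and HTTP_AND_HTTPS:

  // Sketch: dfs.http.policy drives getHttpPolicy(conf) and therefore which
  // endpoints httpServerTemplateForNNAndJN() adds to the builder.
  Configuration conf = new Configuration();
  conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, "HTTP_AND_HTTPS");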
Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.
The class SecondaryNameNode, method startInfoServer.
/**
* Start the web server.
*/
@VisibleForTesting
public void startInfoServer() throws IOException {
  final InetSocketAddress httpAddr = getHttpAddress(conf);
  final String httpsAddrString = conf.getTrimmed(
      DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "secondary",
      DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
  infoServer = builder.build();
  infoServer.setAttribute("secondary.name.node", this);
  infoServer.setAttribute("name.system.image", checkpointImage);
  infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  infoServer.addInternalServlet("imagetransfer", ImageServlet.PATH_SPEC,
      ImageServlet.class, true);
  infoServer.start();
  LOG.info("Web server init done");
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    InetSocketAddress httpAddress = infoServer.getConnectorAddress(connIdx++);
    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
        NetUtils.getHostPortString(httpAddress));
  }
  if (policy.isHttpsEnabled()) {
    InetSocketAddress httpsAddress = infoServer.getConnectorAddress(connIdx);
    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
        NetUtils.getHostPortString(httpsAddress));
  }
}
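Because the bound addresses are written back into conf at the end, a caller can recover the real port even when the configured port was 0; a short sketch, assuming the same conf instance remains in scope:

  // Sketch: read back the address the server actually bound to, ephemeral
  // port included, after startInfoServer() has returned.
  String boundHttp =
      conf.get(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY);
  LOG.info("Secondary NameNode web server bound to " + boundHttp);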
Use of org.apache.hadoop.http.HttpServer2 in project hadoop by apache.
The class NameNodeHttpServer, method start.
/**
 * @see DFSUtil#getHttpPolicy(org.apache.hadoop.conf.Configuration)
 * for information on the different configuration options and how the
 * HTTP policy is decided.
 */
void start() throws IOException {
  HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
  final String infoHost = bindAddress.getHostName();
  final InetSocketAddress httpAddr = bindAddress;
  final String httpsAddrString = conf.getTrimmed(
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
      DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT);
  InetSocketAddress httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
  if (httpsAddr != null) {
    // If DFS_NAMENODE_HTTPS_BIND_HOST_KEY exists then it overrides the
    // host name portion of DFS_NAMENODE_HTTPS_ADDRESS_KEY.
    final String bindHost =
        conf.getTrimmed(DFSConfigKeys.DFS_NAMENODE_HTTPS_BIND_HOST_KEY);
    if (bindHost != null && !bindHost.isEmpty()) {
      httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
    }
  }
  HttpServer2.Builder builder = DFSUtil.httpServerTemplateForNNAndJN(conf,
      httpAddr, httpsAddr, "hdfs",
      DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
  final boolean xFrameEnabled = conf.getBoolean(
      DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
      DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
  final String xFrameOptionValue = conf.getTrimmed(
      DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
      DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
  builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
  httpServer = builder.build();
  if (policy.isHttpsEnabled()) {
    // Assume the same SSL port for all datanodes.
    InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(
        conf.getTrimmed(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY,
            infoHost + ":" + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
    httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY,
        datanodeSslPort.getPort());
  }
  initWebHdfs(conf);
  httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
  httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
  setupServlets(httpServer, conf);
  httpServer.start();
  int connIdx = 0;
  if (policy.isHttpEnabled()) {
    httpAddress = httpServer.getConnectorAddress(connIdx++);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
        NetUtils.getHostPortString(httpAddress));
  }
  if (policy.isHttpsEnabled()) {
    httpsAddress = httpServer.getConnectorAddress(connIdx);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
        NetUtils.getHostPortString(httpsAddress));
  }
}
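The X-Frame-Options handling above reads two keys. A minimal sketch of setting them explicitly; SAMEORIGIN matches the default, and the valid header values are expected to be DENY, SAMEORIGIN, or ALLOW-FROM per HttpServer2's X-Frame-Option handling:

  // Sketch: configure the X-Frame-Options header that start() passes to
  // the builder via configureXFrame()/setXFrameOption().
  conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, true);
  conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, "SAMEORIGIN");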