use of org.apache.hadoop.security.authentication.client.ConnectionConfigurator in project hadoop by apache.
the class TestWebHdfsTokens method testSetTokenServiceAndKind.
@Test
public void testSetTokenServiceAndKind() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    final Configuration clusterConf = new HdfsConfiguration(conf);
    SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
    clusterConf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
    // trick the NN into thinking security is enabled w/o it trying
    // to login from a keytab
    UserGroupInformation.setConfiguration(clusterConf);
    cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(0).build();
    cluster.waitActive();
    SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
    final WebHdfsFileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(clusterConf, "webhdfs");
    Whitebox.setInternalState(fs, "canRefreshDelegationToken", true);
    URLConnectionFactory factory = new URLConnectionFactory(new ConnectionConfigurator() {
      @Override
      public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
        return conn;
      }
    }) {
      @Override
      public URLConnection openConnection(URL url) throws IOException {
        return super.openConnection(new URL(url + "&service=foo&kind=bar"));
      }
    };
    Whitebox.setInternalState(fs, "connectionFactory", factory);
    Token<?> token1 = fs.getDelegationToken();
    Assert.assertEquals(new Text("bar"), token1.getKind());
    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
    Token<DelegationTokenIdentifier> token2 = fs.new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(
        op, null, new RenewerParam(null)) {
      @Override
      Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json) throws IOException {
        return JsonUtilClient.toDelegationToken(json);
      }
    }.run();
    Assert.assertEquals(new Text("bar"), token2.getKind());
    Assert.assertEquals(new Text("foo"), token2.getService());
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
use of org.apache.hadoop.security.authentication.client.ConnectionConfigurator in project hadoop by apache.
the class TestURLConnectionFactory method testConnConfiguratior.
@Test
public void testConnConfiguratior() throws IOException {
  final URL u = new URL("http://localhost");
  final List<HttpURLConnection> conns = Lists.newArrayList();
  URLConnectionFactory fc = new URLConnectionFactory(new ConnectionConfigurator() {
    @Override
    public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
      Assert.assertEquals(u, conn.getURL());
      conns.add(conn);
      return conn;
    }
  });
  fc.openConnection(u);
  Assert.assertEquals(1, conns.size());
}
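The test above only checks that the configure() hook runs exactly once per openConnection() call. A minimal standalone sketch of what such a hook is typically used for (hypothetical example, not taken from the Hadoop sources; the URL and timeout values are made up, and the hook is applied by hand here, whereas URLConnectionFactory applies it automatically inside openConnection()):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;

public class TimeoutConfiguratorSketch {
  public static void main(String[] args) throws IOException {
    ConnectionConfigurator timeouts = new ConnectionConfigurator() {
      @Override
      public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
        // Decorate every connection with socket timeouts before it is used.
        conn.setConnectTimeout(60_000);
        conn.setReadTimeout(60_000);
        return conn;
      }
    };
    // Placeholder URL; url.openConnection() does not contact the server yet.
    URL url = new URL("http://localhost:9870/webhdfs/v1/?op=LISTSTATUS");
    HttpURLConnection conn = timeouts.configure((HttpURLConnection) url.openConnection());
    System.out.println("timeouts: " + conn.getConnectTimeout() + "/" + conn.getReadTimeout());
  }
}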
use of org.apache.hadoop.security.authentication.client.ConnectionConfigurator in project hadoop by apache.
the class TimelineConnector method initSslConnConfigurator.
private static ConnectionConfigurator initSslConnConfigurator(final int timeout,
    SSLFactory sslFactory) throws IOException, GeneralSecurityException {
  final SSLSocketFactory sf;
  final HostnameVerifier hv;
  sf = sslFactory.createSSLSocketFactory();
  hv = sslFactory.getHostnameVerifier();
  return new ConnectionConfigurator() {
    @Override
    public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
      if (conn instanceof HttpsURLConnection) {
        HttpsURLConnection c = (HttpsURLConnection) conn;
        c.setSSLSocketFactory(sf);
        c.setHostnameVerifier(hv);
      }
      setTimeouts(conn, timeout);
      return conn;
    }
  };
}
use of org.apache.hadoop.security.authentication.client.ConnectionConfigurator in project hadoop by apache.
the class URLConnectionFactory method newOAuth2URLConnectionFactory.
/**
 * Construct a new URLConnectionFactory that supports OAuth2-based connections.
 * It will also try to load the SSL configuration when it is specified.
 */
public static URLConnectionFactory newOAuth2URLConnectionFactory(Configuration conf)
    throws IOException {
  ConnectionConfigurator conn;
  try {
    ConnectionConfigurator sslConnConfigurator = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
    conn = new OAuth2ConnectionConfigurator(conf, sslConnConfigurator);
  } catch (Exception e) {
    throw new IOException("Unable to load OAuth2 connection factory.", e);
  }
  return new URLConnectionFactory(conn);
}
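A hedged usage sketch of this factory. It assumes the Configuration already carries the WebHDFS OAuth2 and SSL client settings required by OAuth2ConnectionConfigurator; the host, port, and path below are placeholders:

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;

public class OAuth2FactorySketch {
  public static void main(String[] args) throws IOException {
    // Assumed to be pre-populated with the OAuth2 token provider/credential
    // and SSL client properties; the key names are not shown here.
    Configuration conf = new Configuration();
    URLConnectionFactory factory = URLConnectionFactory.newOAuth2URLConnectionFactory(conf);
    // Every connection opened through the factory passes through the
    // OAuth2ConnectionConfigurator (wrapping the SSL configurator) built above.
    HttpURLConnection conn = (HttpURLConnection) factory.openConnection(
        new URL("https://namenode.example.com:9871/webhdfs/v1/?op=LISTSTATUS"));
    System.out.println("opened: " + conn.getURL());
  }
}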
use of org.apache.hadoop.security.authentication.client.ConnectionConfigurator in project hadoop by apache.
the class URLConnectionFactory method newSslConnConfigurator.
/**
 * Create a new ConnectionConfigurator for SSL connections.
 */
private static ConnectionConfigurator newSslConnConfigurator(final int defaultTimeout,
    Configuration conf) throws IOException, GeneralSecurityException {
  final SSLFactory factory;
  final SSLSocketFactory sf;
  final HostnameVerifier hv;
  final int connectTimeout;
  final int readTimeout;
  factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
  factory.init();
  sf = factory.createSSLSocketFactory();
  hv = factory.getHostnameVerifier();
  connectTimeout = (int) conf.getTimeDuration(
      HdfsClientConfigKeys.DFS_WEBHDFS_SOCKET_CONNECT_TIMEOUT_KEY,
      defaultTimeout, TimeUnit.MILLISECONDS);
  readTimeout = (int) conf.getTimeDuration(
      HdfsClientConfigKeys.DFS_WEBHDFS_SOCKET_READ_TIMEOUT_KEY,
      defaultTimeout, TimeUnit.MILLISECONDS);
  return new ConnectionConfigurator() {
    @Override
    public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
      if (conn instanceof HttpsURLConnection) {
        HttpsURLConnection c = (HttpsURLConnection) conn;
        c.setSSLSocketFactory(sf);
        c.setHostnameVerifier(hv);
      }
      URLConnectionFactory.setTimeouts(conn, connectTimeout, readTimeout);
      return conn;
    }
  };
}
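Because newSslConnConfigurator is private, callers reach it through the factory's public entry points. A minimal sketch, assuming URLConnectionFactory.newDefaultURLConnectionFactory(Configuration) as that entry point (it falls back to a plain timeout configurator when no SSL client configuration can be loaded); the timeout values and URL are made up:

import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;

public class SslConnConfiguratorSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Override the connect/read timeouts read by newSslConnConfigurator;
    // getTimeDuration() accepts suffixed values such as "30s".
    conf.set(HdfsClientConfigKeys.DFS_WEBHDFS_SOCKET_CONNECT_TIMEOUT_KEY, "30s");
    conf.set(HdfsClientConfigKeys.DFS_WEBHDFS_SOCKET_READ_TIMEOUT_KEY, "60s");
    URLConnectionFactory factory = URLConnectionFactory.newDefaultURLConnectionFactory(conf);
    // openConnection() applies the configurator; no request is sent yet.
    HttpURLConnection conn = (HttpURLConnection) factory.openConnection(
        new URL("https://namenode.example.com:9871/webhdfs/v1/?op=GETFILESTATUS"));
    System.out.println("timeouts: " + conn.getConnectTimeout() + "/" + conn.getReadTimeout());
  }
}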