Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache, from the class TestGetConf, method testIncludeInternalNameServices.
@Test
public void testIncludeInternalNameServices() throws Exception {
  final int nsCount = 10;
  final int remoteNsCount = 4;
  HdfsConfiguration conf = new HdfsConfiguration();
  setupNameServices(conf, nsCount);
  setupAddress(conf, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, nsCount, 1000);
  setupAddress(conf, DFS_NAMENODE_RPC_ADDRESS_KEY, nsCount, 1500);
  // Restrict this cluster's internal nameservices to ns1 only.
  conf.set(DFS_INTERNAL_NAMESERVICES_KEY, "ns1");
  setupStaticHostResolution(nsCount);
  // GetConf should report only the namenode of the internal nameservice.
  String[] includedNN = new String[] { "nn1:1001" };
  verifyAddresses(conf, TestType.NAMENODE, false, includedNN);
  verifyAddresses(conf, TestType.NNRPCADDRESSES, true, includedNN);
}
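The helpers setupNameServices, setupAddress, and setupStaticHostResolution live elsewhere in TestGetConf; what matters for this example is the federation configuration they populate. Below is a minimal, hypothetical sketch of that setup, assuming only the standard dfs.nameservices and per-nameservice service RPC address keys (the helper name setupFederation and the ns/nn naming scheme are illustrative, not copied from the test):

import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

public class FederationConfSketch {
  /** Registers ns0..ns{nsCount-1} plus one service RPC address each. */
  static void setupFederation(HdfsConfiguration conf, int nsCount,
      int portBase) {
    StringBuilder nsList = new StringBuilder();
    for (int i = 0; i < nsCount; i++) {
      if (nsList.length() > 0) {
        nsList.append(',');
      }
      nsList.append("ns").append(i);
      // e.g. dfs.namenode.servicerpc-address.ns1 = nn1:1001 for portBase 1000
      conf.set(DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY + ".ns" + i,
          "nn" + i + ":" + (portBase + i));
    }
    conf.set(DFSConfigKeys.DFS_NAMESERVICES, nsList.toString());
  }
}

Setting DFS_INTERNAL_NAMESERVICES_KEY to "ns1" then tells tools such as GetConf to treat only ns1 as belonging to this cluster, which is why nn1:1001 is the only address the test expects in the output.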
Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache, from the class SaslDataTransferTestCase, method createSecureConfig.
/**
 * Creates configuration for starting a secure cluster.
 *
 * @param dataTransferProtection supported QOPs
 * @return configuration for starting a secure cluster
 * @throws Exception if there is any failure
 */
protected HdfsConfiguration createSecureConfig(
    String dataTransferProtection) throws Exception {
  HdfsConfiguration conf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, hdfsKeytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, hdfsKeytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  keystoresDir = baseDir.getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  conf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  conf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());
  return conf;
}
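A subclass test would typically feed the returned configuration into a MiniDFSCluster. A minimal sketch, assuming the standard MiniDFSCluster builder API (the test class name and method body here are illustrative scaffolding, not part of SaslDataTransferTestCase):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Test;

public class SecureClusterSketch extends SaslDataTransferTestCase {
  @Test
  public void startsSecureCluster() throws Exception {
    // Start a one-datanode secure mini-cluster from the config above.
    HdfsConfiguration conf = createSecureConfig("privacy");
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(1)
        .build();
    try {
      cluster.waitActive();
      FileSystem fs = cluster.getFileSystem();
      // ... exercise the secure data-transfer path with fs ...
    } finally {
      cluster.shutdown();
    }
  }
}

Note that KeyStoreTestUtil.setupSSLConfig generates the keystores and the ssl-client.xml/ssl-server.xml resources the HTTPS endpoints need; the final false argument keeps client certificates out of the picture.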
Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache, from the class TestSaslDataTransfer, method TestPeerFromSocketAndKeyReadTimeout.
/**
 * Verifies that peerFromSocketAndKey honors socket read timeouts.
 */
@Test(timeout = 60000)
public void TestPeerFromSocketAndKeyReadTimeout() throws Exception {
  HdfsConfiguration conf =
      createSecureConfig("authentication,integrity,privacy");
  AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false);
  SaslDataTransferClient saslClient = new SaslDataTransferClient(conf,
      DataTransferSaslUtil.getSaslPropertiesResolver(conf),
      TrustedChannelResolver.getInstance(conf), fallbackToSimpleAuth);
  DatanodeID fakeDatanodeId = new DatanodeID("127.0.0.1", "localhost",
      "beefbeef-beef-beef-beef-beefbeefbeef", 1, 2, 3, 4);
  DataEncryptionKeyFactory dataEncKeyFactory = new DataEncryptionKeyFactory() {

    @Override
    public DataEncryptionKey newDataEncryptionKey() {
      return new DataEncryptionKey(123, "456", new byte[8], new byte[8],
          1234567, "fakeAlgorithm");
    }
  };
  ServerSocket serverSocket = null;
  Socket socket = null;
  try {
    serverSocket = new ServerSocket(0, -1);
    socket = new Socket(serverSocket.getInetAddress(),
        serverSocket.getLocalPort());
    // The server never answers, so the SASL handshake read must hit the
    // 1 ms timeout passed as the last argument.
    Peer peer = DFSUtilClient.peerFromSocketAndKey(saslClient, socket,
        dataEncKeyFactory, new Token(), fakeDatanodeId, 1);
    peer.close();
    Assert.fail("Expected DFSClient#peerFromSocketAndKey to time out.");
  } catch (SocketTimeoutException e) {
    GenericTestUtils.assertExceptionContains("Read timed out", e);
  } finally {
    IOUtils.cleanup(null, socket, serverSocket);
  }
}
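The trick the test relies on: the ServerSocket never accepts or writes anything, so the client's SASL handshake blocks on a read until its 1 ms timeout expires. The same mechanism in isolation, as a plain-Java sketch (class and output are illustrative):

import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;

public class ReadTimeoutSketch {
  public static void main(String[] args) throws Exception {
    ServerSocket server = new ServerSocket(0);
    Socket client = new Socket(server.getInetAddress(), server.getLocalPort());
    client.setSoTimeout(1); // read timeout in milliseconds
    try {
      client.getInputStream().read(); // blocks: the server never writes
    } catch (SocketTimeoutException expected) {
      // "Read timed out" is the message the test asserts on
      System.out.println(expected.getMessage());
    } finally {
      client.close();
      server.close();
    }
  }
}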
Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache, from the class TestSaslDataTransfer, method testDataNodeAbortsIfNotHttpsOnly.
@Test
public void testDataNodeAbortsIfNotHttpsOnly() throws Exception {
  HdfsConfiguration clusterConf = createSecureConfig("authentication");
  // Weaken the policy from HTTPS_ONLY; a secure DataNode must refuse to start.
  clusterConf.set(DFS_HTTP_POLICY_KEY,
      HttpConfig.Policy.HTTP_AND_HTTPS.name());
  exception.expect(RuntimeException.class);
  exception.expectMessage("Cannot start secure DataNode");
  startCluster(clusterConf);
}
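The exception field is a JUnit 4 rule declared elsewhere in TestSaslDataTransfer; presumably the standard declaration, sketched here:

import org.junit.Rule;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionSketch {
  // The usual JUnit 4 declaration the test's "exception" field implies.
  @Rule
  public ExpectedException exception = ExpectedException.none();
}

With the rule armed, the test passes only if the subsequent startCluster call throws a RuntimeException whose message contains "Cannot start secure DataNode".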
Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache, from the class TestSaslDataTransfer, method testPrivacy.
@Test
public void testPrivacy() throws Exception {
  HdfsConfiguration clusterConf =
      createSecureConfig("authentication,integrity,privacy");
  startCluster(clusterConf);
  HdfsConfiguration clientConf = new HdfsConfiguration(clusterConf);
  clientConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "privacy");
  doTest(clientConf);
}
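doTest is defined elsewhere in TestSaslDataTransfer; presumably it performs a write/read round trip with the client configuration so that block transfers run under the negotiated QOP ("privacy" here). A hypothetical sketch of such a round trip, assuming a MiniDFSCluster started by startCluster (the path and payload are illustrative):

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Assert;

public class RoundTripSketch {
  /** Write a file with clientConf and read it back, byte for byte. */
  static void roundTrip(MiniDFSCluster cluster, HdfsConfiguration clientConf)
      throws Exception {
    FileSystem fs = FileSystem.get(cluster.getURI(), clientConf);
    Path path = new Path("/test-privacy");
    byte[] data = "hello, secure world".getBytes(StandardCharsets.UTF_8);
    try (FSDataOutputStream out = fs.create(path)) {
      out.write(data); // travels encrypted under the "privacy" QOP
    }
    byte[] readBack = new byte[data.length];
    try (FSDataInputStream in = fs.open(path)) {
      in.readFully(readBack);
    }
    Assert.assertArrayEquals(data, readBack);
  }
}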