Example 26 with HdfsConfiguration

Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

From the class TestGetConf, method testIncludeInternalNameServices:

@Test
public void testIncludeInternalNameServices() throws Exception {
    final int nsCount = 10;
    final int remoteNsCount = 4;
    HdfsConfiguration conf = new HdfsConfiguration();
    setupNameServices(conf, nsCount);
    setupAddress(conf, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, nsCount, 1000);
    setupAddress(conf, DFS_NAMENODE_RPC_ADDRESS_KEY, nsCount, 1500);
    conf.set(DFS_INTERNAL_NAMESERVICES_KEY, "ns1");
    setupStaticHostResolution(nsCount);
    String[] includedNN = new String[] { "nn1:1001" };
    verifyAddresses(conf, TestType.NAMENODE, false, includedNN);
    verifyAddresses(conf, TestType.NNRPCADDRESSES, true, includedNN);
}
Also used: HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), Test (org.junit.Test)
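The setupNameServices and setupAddress helpers are defined elsewhere in TestGetConf. As a hedged sketch of what helpers like these plausibly do (the "ns" naming scheme and the port arithmetic below are assumptions, not the actual Hadoop code):

static void setupNameServices(HdfsConfiguration conf, int nsCount) {
    // Build "ns0,ns1,...,ns9" and register it as the federated nameservice list.
    StringBuilder nsList = new StringBuilder();
    for (int i = 0; i < nsCount; i++) {
        if (nsList.length() > 0) {
            nsList.append(',');
        }
        nsList.append("ns").append(i);
    }
    conf.set(DFSConfigKeys.DFS_NAMESERVICES, nsList.toString());
}

static void setupAddress(HdfsConfiguration conf, String key, int nsCount, int portBase) {
    for (int i = 0; i < nsCount; i++) {
        // Per-nameservice key, e.g. dfs.namenode.rpc-address.ns0 = nn0:1500
        conf.set(key + ".ns" + i, "nn" + i + ":" + (portBase + i));
    }
}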

Example 27 with HdfsConfiguration

Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

From the class SaslDataTransferTestCase, method createSecureConfig:

/**
   * Creates configuration for starting a secure cluster.
   *
   * @param dataTransferProtection supported QOPs
   * @return configuration for starting a secure cluster
   * @throws Exception if there is any failure
   */
protected HdfsConfiguration createSecureConfig(String dataTransferProtection) throws Exception {
    HdfsConfiguration conf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
    conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, hdfsKeytab);
    conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, hdfsKeytab);
    conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
    conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    keystoresDir = baseDir.getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass());
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
    conf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getClientSSLConfigFileName());
    conf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getServerSSLConfigFileName());
    return conf;
}
Also used: HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)
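A hedged usage sketch: a test subclass might feed the returned configuration straight into a MiniDFSCluster (real Hadoop test infrastructure); the surrounding test body here is illustrative, not taken from the Hadoop sources.

HdfsConfiguration conf = createSecureConfig("privacy");
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
try {
    // Block until the NameNode and DataNodes have registered.
    cluster.waitActive();
    // ... exercise SASL-protected reads and writes via cluster.getFileSystem() ...
} finally {
    cluster.shutdown();
}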

Example 28 with HdfsConfiguration

Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

From the class TestSaslDataTransfer, method TestPeerFromSocketAndKeyReadTimeout:

/**
   * Verifies that peerFromSocketAndKey honors socket read timeouts: the
   * server below never answers the SASL handshake, so the client's blocking
   * read must fail with a SocketTimeoutException.
   */
@Test(timeout = 60000)
public void TestPeerFromSocketAndKeyReadTimeout() throws Exception {
    HdfsConfiguration conf = createSecureConfig("authentication,integrity,privacy");
    AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false);
    SaslDataTransferClient saslClient = new SaslDataTransferClient(conf, DataTransferSaslUtil.getSaslPropertiesResolver(conf), TrustedChannelResolver.getInstance(conf), fallbackToSimpleAuth);
    DatanodeID fakeDatanodeId = new DatanodeID("127.0.0.1", "localhost", "beefbeef-beef-beef-beef-beefbeefbeef", 1, 2, 3, 4);
    DataEncryptionKeyFactory dataEncKeyFactory = new DataEncryptionKeyFactory() {

        @Override
        public DataEncryptionKey newDataEncryptionKey() {
            return new DataEncryptionKey(123, "456", new byte[8], new byte[8], 1234567, "fakeAlgorithm");
        }
    };
    ServerSocket serverSocket = null;
    Socket socket = null;
    try {
        serverSocket = new ServerSocket(0, -1);
        socket = new Socket(serverSocket.getInetAddress(), serverSocket.getLocalPort());
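        // Nothing on the server side ever accepts the connection or writes a
        // SASL response, so the 1 ms read timeout (final argument) must fire.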
        Peer peer = DFSUtilClient.peerFromSocketAndKey(saslClient, socket, dataEncKeyFactory, new Token(), fakeDatanodeId, 1);
        peer.close();
        Assert.fail("Expected DFSClient#peerFromSocketAndKey to time out.");
    } catch (SocketTimeoutException e) {
        GenericTestUtils.assertExceptionContains("Read timed out", e);
    } finally {
        IOUtils.cleanup(null, socket, serverSocket);
    }
}
Also used: AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean), DatanodeID (org.apache.hadoop.hdfs.protocol.DatanodeID), DataEncryptionKey (org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey), SocketTimeoutException (java.net.SocketTimeoutException), Peer (org.apache.hadoop.hdfs.net.Peer), ServerSocket (java.net.ServerSocket), Token (org.apache.hadoop.security.token.Token), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), Socket (java.net.Socket), Test (org.junit.Test)

Example 29 with HdfsConfiguration

Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

From the class TestSaslDataTransfer, method testDataNodeAbortsIfNotHttpsOnly:

@Test
public void testDataNodeAbortsIfNotHttpsOnly() throws Exception {
    HdfsConfiguration clusterConf = createSecureConfig("authentication");
    clusterConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
    exception.expect(RuntimeException.class);
    exception.expectMessage("Cannot start secure DataNode");
    startCluster(clusterConf);
}
Also used: HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), Test (org.junit.Test)
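For contrast, a hedged sketch of the variant that should start cleanly. createSecureConfig already sets dfs.http.policy to HTTPS_ONLY, so simply not overriding it satisfies the secure-DataNode startup check (sketch only, not part of the original test class):

HdfsConfiguration clusterConf = createSecureConfig("authentication");
// dfs.http.policy remains HTTPS_ONLY from createSecureConfig, so the
// DataNode's secure-startup check passes and the cluster comes up.
startCluster(clusterConf);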

Example 30 with HdfsConfiguration

Use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

From the class TestSaslDataTransfer, method testPrivacy:

@Test
public void testPrivacy() throws Exception {
    HdfsConfiguration clusterConf = createSecureConfig("authentication,integrity,privacy");
    startCluster(clusterConf);
    HdfsConfiguration clientConf = new HdfsConfiguration(clusterConf);
    clientConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "privacy");
    doTest(clientConf);
}
Also used: HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), Test (org.junit.Test)
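startCluster and doTest are defined elsewhere in TestSaslDataTransfer. A plausible sketch of a doTest-style helper (the body below is an assumption built on real DFSTestUtil APIs): it writes a file and reads it back so the negotiated QOP is exercised in both directions.

private void doTest(HdfsConfiguration clientConf) throws Exception {
    // "cluster" is assumed to be the MiniDFSCluster field set by startCluster.
    FileSystem fs = FileSystem.get(cluster.getURI(), clientConf);
    Path path = new Path("/file1");
    // Write through the SASL-protected data transfer channel...
    DFSTestUtil.createFile(fs, path, 1024, (short) 3, 0xBEEFL);
    // ...and read back to verify the round trip.
    DFSTestUtil.readFile(fs, path);
}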

Aggregations

HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 454 usages
Configuration (org.apache.hadoop.conf.Configuration): 311 usages
Test (org.junit.Test): 311 usages
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 267 usages
Path (org.apache.hadoop.fs.Path): 152 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 94 usages
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 92 usages
File (java.io.File): 72 usages
IOException (java.io.IOException): 69 usages
Before (org.junit.Before): 56 usages
ExtendedBlock (org.apache.hadoop.hdfs.protocol.ExtendedBlock): 40 usages
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 35 usages
MetricsRecordBuilder (org.apache.hadoop.metrics2.MetricsRecordBuilder): 33 usages
DataNode (org.apache.hadoop.hdfs.server.datanode.DataNode): 30 usages
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock): 27 usages
RandomAccessFile (java.io.RandomAccessFile): 22 usages
ArrayList (java.util.ArrayList): 20 usages
NameNodeFile (org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile): 20 usages
URI (java.net.URI): 19 usages
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 19 usages
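The dominant pairing in the counts above is HdfsConfiguration with MiniDFSCluster inside a JUnit test. A minimal self-contained skeleton of that pattern (class name and paths are illustrative):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Assert;
import org.junit.Test;

public class HdfsConfigurationUsageTest {

    @Test
    public void testMiniClusterRoundTrip() throws Exception {
        HdfsConfiguration conf = new HdfsConfiguration();
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
        try {
            cluster.waitActive();
            FileSystem fs = cluster.getFileSystem();
            // Trivial round trip against the in-process cluster.
            Path dir = new Path("/test");
            fs.mkdirs(dir);
            Assert.assertTrue(fs.exists(dir));
        } finally {
            cluster.shutdown();
        }
    }
}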