Search in sources:

Example 31 with MiniKdc

use of org.apache.hadoop.minikdc.MiniKdc in project hadoop by apache.

The class TestTrashWithSecureEncryptionZones, method init.

/**
 * One-time secure test fixture setup: starts a MiniKdc, provisions Kerberos
 * principals and a shared keytab, wires Kerberos/SSL/SASL settings into the
 * HDFS configuration, launches a Kerberos-secured MiniKMS for encryption-zone
 * keys, and finally brings up a secured MiniDFSCluster with a test key and a
 * short client trash interval.
 *
 * @throws Exception if any component (KDC, KMS, DFS cluster) fails to start
 */
@BeforeClass
public static void init() throws Exception {
    // Start from an empty working directory for keytabs/keystores/KMS config.
    baseDir = getTestDir();
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();
    baseConf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, baseConf);
    UserGroupInformation.setConfiguration(baseConf);
    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
    File keytabFile = new File(baseDir, "test.keytab");
    keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    // All four test identities share the single keytab created above.
    kdc.createPrincipal(keytabFile, HDFS_USER_NAME + "/" + krbInstance, SPNEGO_USER_NAME + "/" + krbInstance, OOZIE_USER_NAME + "/" + krbInstance, OOZIE_PROXIED_USER_NAME + "/" + krbInstance);
    hdfsPrincipal = HDFS_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    spnegoPrincipal = SPNEGO_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    // Set a small (2 = 4 * 0.5) KMSClient EDEK cache size to trigger
    // on demand refill upon the 3rd file creation
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_SIZE, "4");
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK, "0.5");
    // Generate SSL keystores/truststores; the ssl-*.xml resources live in the
    // classpath directory of the sibling KMS test class.
    String keystoresDir = baseDir.getAbsolutePath();
    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureEncryptionZoneWithKMS.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
    baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getClientSSLConfigFileName());
    baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getServerSSLConfigFileName());
    File kmsFile = new File(baseDir, "kms-site.xml");
    if (kmsFile.exists()) {
        FileUtil.fullyDelete(kmsFile);
    }
    // Write a kms-site.xml pointing the KMS at a local JCEKS keystore and
    // securing it with the same keytab/realm as HDFS.
    Configuration kmsConf = new Configuration(true);
    kmsConf.set(KMSConfiguration.KEY_PROVIDER_URI, "jceks://file@" + new Path(baseDir.toString(), "kms.keystore").toUri());
    kmsConf.set("hadoop.kms.authentication.type", "kerberos");
    kmsConf.set("hadoop.kms.authentication.kerberos.keytab", keytab);
    kmsConf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
    kmsConf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
    kmsConf.set("hadoop.kms.acl.GENERATE_EEK", "hdfs");
    // try-with-resources: the original leaked the FileWriter if writeXml threw
    // before the explicit close().
    try (Writer writer = new FileWriter(kmsFile)) {
        kmsConf.writeXml(writer);
    }
    // Start MiniKMS
    MiniKMS.Builder miniKMSBuilder = new MiniKMS.Builder();
    miniKMS = miniKMSBuilder.setKmsConfDir(baseDir).build();
    miniKMS.start();
    baseConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH, getKeyProviderURI());
    baseConf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
    conf = new HdfsConfiguration(baseConf);
    cluster = new MiniDFSCluster.Builder(conf).build();
    // Wait cluster to be active (the original called waitActive() a second,
    // redundant time after this).
    cluster.waitActive();
    fs = cluster.getFileSystem();
    dfsAdmin = new HdfsAdmin(cluster.getURI(), conf);
    // Create a test key
    DFSTestUtil.createKey(TEST_KEY, cluster, conf);
    clientConf = new Configuration(conf);
    // Short trash interval so deleted files land in .Trash during the tests.
    clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);
    shell = new FsShell(clientConf);
    System.setProperty("user.name", HDFS_USER_NAME);
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) KMSConfiguration(org.apache.hadoop.crypto.key.kms.server.KMSConfiguration) FileWriter(java.io.FileWriter) Properties(java.util.Properties) FsShell(org.apache.hadoop.fs.FsShell) MiniKMS(org.apache.hadoop.crypto.key.kms.server.MiniKMS) HdfsAdmin(org.apache.hadoop.hdfs.client.HdfsAdmin) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) File(java.io.File) FileWriter(java.io.FileWriter) Writer(java.io.Writer) BeforeClass(org.junit.BeforeClass)

Example 32 with MiniKdc

use of org.apache.hadoop.minikdc.MiniKdc in project hadoop by apache.

The class SaslDataTransferTestCase, method initKdc.

/**
 * Starts a fresh MiniKdc and provisions two identities: a randomly named
 * regular user, and an "hdfs" superuser whose keytab also carries the SPNEGO
 * HTTP principal. Populates the static keytab-path and principal-name fields
 * consumed by subclasses.
 *
 * @throws Exception if the KDC fails to start or a principal cannot be created
 */
@BeforeClass
public static void initKdc() throws Exception {
    // Fresh working directory per test-class run.
    baseDir = GenericTestUtils.getTestDir(SaslDataTransferTestCase.class.getSimpleName());
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());

    Properties miniKdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(miniKdcConf, baseDir);
    kdc.start();

    // Ordinary user principal; random name avoids collisions across runs.
    String user = RandomStringUtils.randomAlphabetic(8);
    File userKeytab = new File(baseDir, user + ".keytab");
    userKeyTab = userKeytab.getAbsolutePath();
    kdc.createPrincipal(userKeytab, user + "/localhost");
    userPrincipal = user + "/localhost@" + kdc.getRealm();

    // Superuser "hdfs" shares its keytab with the SPNEGO HTTP principal.
    String superUser = "hdfs";
    File superKeytab = new File(baseDir, superUser + ".keytab");
    hdfsKeytab = superKeytab.getAbsolutePath();
    kdc.createPrincipal(superKeytab, superUser + "/localhost", "HTTP/localhost");
    hdfsPrincipal = superUser + "/localhost@" + kdc.getRealm();
    spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
Also used : MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Properties(java.util.Properties) File(java.io.File) BeforeClass(org.junit.BeforeClass)

Example 33 with MiniKdc

use of org.apache.hadoop.minikdc.MiniKdc in project bookkeeper by apache.

The class GSSAPIBookKeeperTest, method startMiniKdc.

/**
 * Per-test Kerberos setup: starts a MiniKdc, creates server and client
 * principals with their own keytabs, writes a JAAS login configuration
 * (with "Bookie" and "BookKeeper" sections) and a krb5.conf pointing at the
 * MiniKdc, then installs both via system properties and refreshes the JAAS
 * configuration so subsequent logins pick them up.
 *
 * @throws Exception if the KDC cannot start or the config files cannot be written
 */
@Before
public void startMiniKdc() throws Exception {
    conf = MiniKdc.createConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();
    // Derive the principal host component from the bookie's own address so
    // the server principal matches the name the bookie advertises.
    ServerConfiguration bookieConf = newServerConfiguration();
    bookieConf.setUseHostNameAsBookieID(true);
    String localhostName = Bookie.getBookieAddress(bookieConf).getHostName();
    String principalServerNoRealm = "bookkeeper/" + localhostName;
    String principalServer = "bookkeeper/" + localhostName + "@" + kdc.getRealm();
    LOG.info("principalServer: " + principalServer);
    String principalClientNoRealm = "bookkeeperclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();
    LOG.info("principalClient: " + principalClient);
    // Separate keytabs for client and server identities.
    File keytabClient = new File(kerberosWorkDir.getRoot(), "bookkeeperclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);
    File keytabServer = new File(kerberosWorkDir.getRoot(), "bookkeeperserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);
    File jaasFile = new File(kerberosWorkDir.getRoot(), "jaas.conf");
    // NOTE(review): both keyTab= entries below emit `keyTab="<path>` followed
    // directly by a newline with no closing quote — this looks like a missing
    // `\"` in the concatenation; confirm against a known-good jaas.conf.
    try (FileWriter writer = new FileWriter(jaasFile)) {
        writer.write("\n" + "Bookie {\n" + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n" + "  useKeyTab=true\n" + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\n" + "  storeKey=true\n" + // won't test useTicketCache=true on JUnit tests
        "  useTicketCache=false\n" + "  principal=\"" + principalServer + "\";\n" + "};\n" + "\n" + "\n" + "\n" + "BookKeeper {\n" + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n" + "  useKeyTab=true\n" + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\n" + "  storeKey=true\n" + "  useTicketCache=false\n" + "  principal=\"" + principalClient + "\";\n" + "};\n");
    }
    // Minimal krb5.conf: default realm plus the MiniKdc's host:port as KDC.
    File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n" + " default_realm = " + kdc.getRealm() + "\n" + "\n" + "\n" + "[realms]\n" + " " + kdc.getRealm() + "  = {\n" + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n" + " }");
    }
    // Point the JVM at the generated files and force JAAS to re-read them.
    System.setProperty("java.security.auth.login.config", jaasFile.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
    javax.security.auth.login.Configuration.getConfiguration().refresh();
}
Also used : ServerConfiguration(org.apache.bookkeeper.conf.ServerConfiguration) FileWriter(java.io.FileWriter) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) File(java.io.File) Before(org.junit.Before)

Example 34 with MiniKdc

use of org.apache.hadoop.minikdc.MiniKdc in project activemq-artemis by apache.

The class JMSSaslGssapiTest, method setUpKerberos.

/**
 * Boots a MiniKdc in a temporary "kdc" folder and creates the "client" and
 * "amqp/localhost" principals at the keytab path expected by the
 * minikdc-krb5.conf template. When debug is enabled, raises the SASL and
 * proton JUL loggers (and their handlers) to FINEST.
 *
 * @throws Exception if the KDC cannot be started or principals created
 */
@Before
public void setUpKerberos() throws Exception {
    kdc = new MiniKdc(MiniKdc.createConf(), temporaryFolder.newFolder("kdc"));
    kdc.start();
    // hard coded match, default_keytab_name in minikdc-krb5.conf template
    File keytab = new File("target/test.krb5.keytab");
    kdc.createPrincipal(keytab, "client", "amqp/localhost");
    if (!debug) {
        return;
    }
    // Verbose SASL/proton logging to the console for troubleshooting.
    java.util.logging.Logger[] verboseLoggers = new java.util.logging.Logger[] { java.util.logging.Logger.getLogger("javax.security.sasl"), java.util.logging.Logger.getLogger("org.apache.qpid.proton") };
    for (java.util.logging.Logger logger : verboseLoggers) {
        logger.setLevel(java.util.logging.Level.FINEST);
        logger.addHandler(new java.util.logging.ConsoleHandler());
        for (java.util.logging.Handler handler : logger.getHandlers()) {
            handler.setLevel(java.util.logging.Level.FINEST);
        }
    }
}
Also used : MiniKdc(org.apache.hadoop.minikdc.MiniKdc) File(java.io.File) Before(org.junit.Before)

Example 35 with MiniKdc

use of org.apache.hadoop.minikdc.MiniKdc in project activemq-artemis by apache.

The class CoreClientOverOneWaySSLKerb5Test, method setUp.

// Package protected ---------------------------------------------
/**
 * Extends the base test setup by launching a MiniKdc (default MiniKdc
 * configuration) inside a fresh temporary "kdc" folder.
 *
 * @throws Exception if the base setup or KDC startup fails
 */
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    kdc = new MiniKdc(MiniKdc.createConf(), temporaryFolder.newFolder("kdc"));
    kdc.start();
}
Also used : MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Before(org.junit.Before)

Aggregations

MiniKdc (org.apache.hadoop.minikdc.MiniKdc)41 File (java.io.File)33 Properties (java.util.Properties)18 BeforeClass (org.junit.BeforeClass)15 Configuration (org.apache.hadoop.conf.Configuration)10 FileWriter (java.io.FileWriter)5 IOException (java.io.IOException)5 Before (org.junit.Before)5 BindException (java.net.BindException)4 Closeable (java.io.Closeable)3 Writer (java.io.Writer)3 HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil)3 AuthenticationTokenIdentifier (org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier)3 Text (org.apache.hadoop.io.Text)3 Job (org.apache.hadoop.mapreduce.Job)3 Credentials (org.apache.hadoop.security.Credentials)3 Token (org.apache.hadoop.security.token.Token)3 TokenIdentifier (org.apache.hadoop.security.token.TokenIdentifier)3 Test (org.junit.Test)3 ApplicationProperties (org.apache.atlas.ApplicationProperties)2