Use of org.apache.hadoop.minikdc.MiniKdc in project hadoop by apache.
The class TestTrashWithSecureEncryptionZones, method init().
@BeforeClass
public static void init() throws Exception {
    baseDir = getTestDir();
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();
    baseConf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, baseConf);
    UserGroupInformation.setConfiguration(baseConf);
    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
    File keytabFile = new File(baseDir, "test.keytab");
    keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    kdc.createPrincipal(keytabFile,
        HDFS_USER_NAME + "/" + krbInstance,
        SPNEGO_USER_NAME + "/" + krbInstance,
        OOZIE_USER_NAME + "/" + krbInstance,
        OOZIE_PROXIED_USER_NAME + "/" + krbInstance);
    hdfsPrincipal = HDFS_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    spnegoPrincipal = SPNEGO_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    // Set a small KMSClient EDEK cache (size 4, low watermark 0.5, so a
    // refill is triggered once fewer than 4 * 0.5 = 2 EDEKs remain). This
    // forces an on-demand refill upon the 3rd file creation.
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_SIZE, "4");
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK, "0.5");
    String keystoresDir = baseDir.getAbsolutePath();
    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureEncryptionZoneWithKMS.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
    baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getClientSSLConfigFileName());
    baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getServerSSLConfigFileName());
    File kmsFile = new File(baseDir, "kms-site.xml");
    if (kmsFile.exists()) {
        FileUtil.fullyDelete(kmsFile);
    }
    Configuration kmsConf = new Configuration(true);
    kmsConf.set(KMSConfiguration.KEY_PROVIDER_URI,
        "jceks://file@" + new Path(baseDir.toString(), "kms.keystore").toUri());
    kmsConf.set("hadoop.kms.authentication.type", "kerberos");
    kmsConf.set("hadoop.kms.authentication.kerberos.keytab", keytab);
    kmsConf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
    kmsConf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
    kmsConf.set("hadoop.kms.acl.GENERATE_EEK", "hdfs");
    Writer writer = new FileWriter(kmsFile);
    kmsConf.writeXml(writer);
    writer.close();
    // Start MiniKMS
    MiniKMS.Builder miniKMSBuilder = new MiniKMS.Builder();
    miniKMS = miniKMSBuilder.setKmsConfDir(baseDir).build();
    miniKMS.start();
    baseConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH, getKeyProviderURI());
    baseConf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
    conf = new HdfsConfiguration(baseConf);
    cluster = new MiniDFSCluster.Builder(conf).build();
    cluster.waitActive();
    fs = cluster.getFileSystem();
    dfsAdmin = new HdfsAdmin(cluster.getURI(), conf);
    // Wait for the cluster to become active.
    cluster.waitActive();
    // Create a test key.
    DFSTestUtil.createKey(TEST_KEY, cluster, conf);
    clientConf = new Configuration(conf);
    clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);
    shell = new FsShell(clientConf);
    System.setProperty("user.name", HDFS_USER_NAME);
}
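A matching teardown is needed so the mini-cluster, the KMS, and the KDC do not leak into later test classes. The method below is a hedged sketch, not the class's actual cleanup; it assumes the static fields from init() and stops components in reverse start order:

@AfterClass
public static void destroy() throws Exception {
    // Reverse order of startup: DFS cluster first, then the KMS it
    // depends on, then the KDC, then the on-disk test directory.
    if (cluster != null) {
        cluster.shutdown();
    }
    if (miniKMS != null) {
        miniKMS.stop();
    }
    if (kdc != null) {
        kdc.stop();
    }
    FileUtil.fullyDelete(baseDir);
}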
Use of org.apache.hadoop.minikdc.MiniKdc in project hadoop by apache.
The class SaslDataTransferTestCase, method initKdc().
@BeforeClass
public static void initKdc() throws Exception {
    baseDir = GenericTestUtils.getTestDir(SaslDataTransferTestCase.class.getSimpleName());
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();
    String userName = RandomStringUtils.randomAlphabetic(8);
    File userKeytabFile = new File(baseDir, userName + ".keytab");
    userKeyTab = userKeytabFile.getAbsolutePath();
    kdc.createPrincipal(userKeytabFile, userName + "/localhost");
    userPrincipal = userName + "/localhost@" + kdc.getRealm();
    String superUserName = "hdfs";
    File hdfsKeytabFile = new File(baseDir, superUserName + ".keytab");
    hdfsKeytab = hdfsKeytabFile.getAbsolutePath();
    kdc.createPrincipal(hdfsKeytabFile, superUserName + "/localhost", "HTTP/localhost");
    hdfsPrincipal = superUserName + "/localhost@" + kdc.getRealm();
    spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}
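These fields are only half of the story: the principals and keytabs still have to be wired into an HdfsConfiguration before a secure mini-cluster can start. Below is a hedged sketch of such a helper, mirroring the configuration keys used in the first example above; the real test base class defines its own variant, which may differ:

public static HdfsConfiguration createSecureConfig(String dataTransferProtection) throws Exception {
    HdfsConfiguration conf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
    conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, hdfsKeytab);
    conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, hdfsKeytab);
    conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    // SASL protection level for the data transfer protocol:
    // "authentication", "integrity", or "privacy".
    conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection);
    return conf;
}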
Use of org.apache.hadoop.minikdc.MiniKdc in project bookkeeper by apache.
The class GSSAPIBookKeeperTest, method startMiniKdc().
@Before
public void startMiniKdc() throws Exception {
    conf = MiniKdc.createConf();
    kdc = new MiniKdc(conf, kdcDir.getRoot());
    kdc.start();
    ServerConfiguration bookieConf = newServerConfiguration();
    bookieConf.setUseHostNameAsBookieID(true);
    String localhostName = Bookie.getBookieAddress(bookieConf).getHostName();
    String principalServerNoRealm = "bookkeeper/" + localhostName;
    String principalServer = "bookkeeper/" + localhostName + "@" + kdc.getRealm();
    LOG.info("principalServer: " + principalServer);
    String principalClientNoRealm = "bookkeeperclient/" + localhostName;
    String principalClient = principalClientNoRealm + "@" + kdc.getRealm();
    LOG.info("principalClient: " + principalClient);
    File keytabClient = new File(kerberosWorkDir.getRoot(), "bookkeeperclient.keytab");
    kdc.createPrincipal(keytabClient, principalClientNoRealm);
    File keytabServer = new File(kerberosWorkDir.getRoot(), "bookkeeperserver.keytab");
    kdc.createPrincipal(keytabServer, principalServerNoRealm);
    File jaasFile = new File(kerberosWorkDir.getRoot(), "jaas.conf");
    try (FileWriter writer = new FileWriter(jaasFile)) {
        // JAAS requires the keyTab value to be a properly closed quoted string.
        writer.write("\n"
            + "Bookie {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabServer.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            // won't test useTicketCache=true on JUnit tests
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalServer + "\";\n"
            + "};\n"
            + "\n"
            + "BookKeeper {\n"
            + "  com.sun.security.auth.module.Krb5LoginModule required debug=true\n"
            + "  useKeyTab=true\n"
            + "  keyTab=\"" + keytabClient.getAbsolutePath() + "\"\n"
            + "  storeKey=true\n"
            + "  useTicketCache=false\n"
            + "  principal=\"" + principalClient + "\";\n"
            + "};\n");
    }
    File krb5file = new File(kerberosWorkDir.getRoot(), "krb5.conf");
    try (FileWriter writer = new FileWriter(krb5file)) {
        writer.write("[libdefaults]\n"
            + " default_realm = " + kdc.getRealm() + "\n"
            + "\n"
            + "[realms]\n"
            + " " + kdc.getRealm() + " = {\n"
            + "  kdc = " + kdc.getHost() + ":" + kdc.getPort() + "\n"
            + " }");
    }
    System.setProperty("java.security.auth.login.config", jaasFile.getAbsolutePath());
    System.setProperty("java.security.krb5.conf", krb5file.getAbsolutePath());
    javax.security.auth.login.Configuration.getConfiguration().refresh();
}
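This setup mutates process-wide JVM state: two system properties and the installed JAAS Configuration. Without a matching teardown, later tests in the same JVM inherit the Kerberos settings. A minimal sketch, assuming the kdc field above; the class's real teardown may do more:

@After
public void stopMiniKdc() {
    // Undo the global state set in startMiniKdc() before stopping the KDC.
    System.clearProperty("java.security.auth.login.config");
    System.clearProperty("java.security.krb5.conf");
    if (kdc != null) {
        kdc.stop();
    }
}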
Use of org.apache.hadoop.minikdc.MiniKdc in project activemq-artemis by apache.
The class JMSSaslGssapiTest, method setUpKerberos().
@Before
public void setUpKerberos() throws Exception {
    kdc = new MiniKdc(MiniKdc.createConf(), temporaryFolder.newFolder("kdc"));
    kdc.start();
    // Hard-coded to match default_keytab_name in the minikdc-krb5.conf template.
    File userKeyTab = new File("target/test.krb5.keytab");
    kdc.createPrincipal(userKeyTab, "client", "amqp/localhost");
    if (debug) {
        for (java.util.logging.Logger logger : new java.util.logging.Logger[] {
                java.util.logging.Logger.getLogger("javax.security.sasl"),
                java.util.logging.Logger.getLogger("org.apache.qpid.proton") }) {
            logger.setLevel(java.util.logging.Level.FINEST);
            logger.addHandler(new java.util.logging.ConsoleHandler());
            for (java.util.logging.Handler handler : logger.getHandlers()) {
                handler.setLevel(java.util.logging.Level.FINEST);
            }
        }
    }
}
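After this setup, the test exercises a SASL GSSAPI handshake over AMQP. The following is only a hedged illustration of the client side using the Qpid JMS client: the broker URL and port are assumptions, amqp.saslMechanisms is the Qpid JMS URI option that pins the SASL mechanism, and "client" is the principal created against the MiniKdc above:

import javax.jms.Connection;
import javax.jms.Session;
import org.apache.qpid.jms.JmsConnectionFactory;

public class GssapiConnectSketch {
    public static void main(String[] args) throws Exception {
        // Pin SASL negotiation to GSSAPI; the Krb5LoginModule configured via
        // JAAS supplies the "client" credentials from the MiniKdc keytab.
        JmsConnectionFactory factory =
            new JmsConnectionFactory("amqp://localhost:5672?amqp.saslMechanisms=GSSAPI");
        Connection connection = factory.createConnection("client", null);
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        // ... produce and consume as in any JMS test ...
        connection.close();
    }
}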
Use of org.apache.hadoop.minikdc.MiniKdc in project activemq-artemis by apache.
The class CoreClientOverOneWaySSLKerb5Test, method setUp().
// Package protected ---------------------------------------------
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    kdc = new MiniKdc(MiniKdc.createConf(), temporaryFolder.newFolder("kdc"));
    kdc.start();
}
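The KDC started here should also be stopped, or its listener thread and port persist across tests. A minimal tearDown sketch, assuming the kdc field above:

@Override
@After
public void tearDown() throws Exception {
    try {
        if (kdc != null) {
            kdc.stop();
        }
    } finally {
        super.tearDown();
    }
}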