
Example 36 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in the Apache Knox project.

From the class SecureClusterTest, the method initKdc:

private static void initKdc() throws Exception {
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
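    // The casts below imply that the enclosing class declares the kdc field with
    // a broader type than MiniKdc (e.g. Object), so each use narrows it back.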
    ((MiniKdc) kdc).start();
    configuration = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
    UserGroupInformation.setConfiguration(configuration);
    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
    userName = UserGroupInformation.createUserForTesting("guest", new String[] { "users" }).getUserName();
    File keytabFile = new File(baseDir, userName + ".keytab");
    String keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse-resolve "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    ((MiniKdc) kdc).createPrincipal(keytabFile, userName + "/" + krbInstance, "HTTP/" + krbInstance);
    String hdfsPrincipal = userName + "/" + krbInstance + "@" + ((MiniKdc) kdc).getRealm();
    String spnegoPrincipal = "HTTP/" + krbInstance + "@" + ((MiniKdc) kdc).getRealm();
    configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
    configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
    configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
    configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
    configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
    configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
    configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
    configuration.setBoolean("dfs.permissions", true);
    String keystoresDir = baseDir.getAbsolutePath();
    File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
    File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
    configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, sslClientConfFile.getName());
    configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, sslServerConfFile.getName());
    setupKnox(keytab, hdfsPrincipal);
}
Also used: org.apache.hadoop.minikdc.MiniKdc, java.util.Properties, org.apache.hadoop.hdfs.HdfsConfiguration, java.io.File
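
A test that starts a MiniKdc this way normally stops it once the suite finishes. A minimal teardown sketch, assuming the same static kdc field as above (the JUnit method name is illustrative):

@AfterClass
public static void shutdownKdc() {
    // Stopping the MiniKdc shuts down the embedded KDC and frees its port.
    if (kdc != null) {
        ((MiniKdc) kdc).stop();
    }
}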

Example 37 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in the Apache HBase project.

From the class TestSaslFanOutOneBlockAsyncDFSOutput, the method setupMiniKdc:

/**
 * Sets up {@link MiniKdc} for testing security. Uses {@link HBaseKerberosUtils} to set the given
 * keytab file as {@link HBaseKerberosUtils#KRB_KEYTAB_FILE}.
 */
private static MiniKdc setupMiniKdc(File keytabFile) throws Exception {
    Properties conf = MiniKdc.createConf();
    conf.put(MiniKdc.DEBUG, true);
    MiniKdc kdc = null;
    File dir = null;
    // There is a time lag between selecting a free port and binding to it. Another
    // service may grab the port in the meantime, which results in a BindException.
    boolean bindException;
    int numTries = 0;
    do {
        try {
            bindException = false;
            dir = new File(UTIL.getDataTestDir("kdc").toUri().getPath());
            kdc = new MiniKdc(conf, dir);
            kdc.start();
        } catch (BindException e) {
            // clean directory
            FileUtils.deleteDirectory(dir);
            numTries++;
            if (numTries == 3) {
                LOG.error("Failed setting up MiniKDC. Tried " + numTries + " times.");
                throw e;
            }
            LOG.error("BindException encountered when setting up MiniKdc. Trying again.");
            bindException = true;
        }
    } while (bindException);
    System.setProperty(SecurityConstants.REGIONSERVER_KRB_KEYTAB_FILE, keytabFile.getAbsolutePath());
    return kdc;
}
Also used: org.apache.hadoop.minikdc.MiniKdc, java.net.BindException, java.util.Properties, java.io.File
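
A sketch of how a test class might drive this helper over the JUnit 4 lifecycle; the field and principal names are illustrative, and UTIL is the same test utility used inside setupMiniKdc above:

private static MiniKdc KDC;
private static File KEYTAB;

@BeforeClass
public static void setUp() throws Exception {
    KEYTAB = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
    KDC = setupMiniKdc(KEYTAB);
    // The service principal and the SPNEGO principal share a single keytab.
    KDC.createPrincipal(KEYTAB, "hbase/localhost", "HTTP/localhost");
}

@AfterClass
public static void tearDown() {
    if (KDC != null) {
        KDC.stop();
    }
}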

Example 38 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in the Apache HBase project.

From the class TestLogLevel, the method setupMiniKdc:

/**
 * Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc().
 */
private static MiniKdc setupMiniKdc() throws Exception {
    Properties conf = MiniKdc.createConf();
    conf.put(MiniKdc.DEBUG, true);
    MiniKdc kdc = null;
    File dir = null;
    // There is a time lag between selecting a free port and binding to it. Another
    // service may grab the port in the meantime, which results in a BindException.
    boolean bindException;
    int numTries = 0;
    do {
        try {
            bindException = false;
            dir = new File(HTU.getDataTestDir("kdc").toUri().getPath());
            kdc = new MiniKdc(conf, dir);
            kdc.start();
        } catch (BindException e) {
            // clean directory
            FileUtils.deleteDirectory(dir);
            numTries++;
            if (numTries == 3) {
                log.error("Failed setting up MiniKDC. Tried " + numTries + " times.");
                throw e;
            }
            log.error("BindException encountered when setting up MiniKdc. Trying again.");
            bindException = true;
        }
    } while (bindException);
    return kdc;
}
Also used: org.apache.hadoop.minikdc.MiniKdc, java.net.BindException, java.util.Properties, java.io.File
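
Since this retry-on-BindException loop appears nearly verbatim in several test classes, it could be factored into a small shared helper. A sketch of such an extraction (a hypothetical helper, not part of HBase):

// Hypothetical helper: starts a MiniKdc in dir, retrying when another process
// grabs the chosen port before MiniKdc can bind it.
static MiniKdc startKdcWithRetries(Properties conf, File dir, int maxTries) throws Exception {
    for (int attempt = 1; ; attempt++) {
        try {
            MiniKdc kdc = new MiniKdc(conf, dir);
            kdc.start();
            return kdc;
        } catch (BindException e) {
            // MiniKdc may leave partial state behind; clear it before retrying.
            FileUtils.deleteDirectory(dir);
            if (attempt >= maxTries) {
                throw e;
            }
        }
    }
}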

Example 39 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in the Apache HBase project.

From the class TestTableMapReduceUtil, the method testInitCredentialsForCluster2:

@Test
@SuppressWarnings("unchecked")
public void testInitCredentialsForCluster2() throws Exception {
    HBaseTestingUtil util1 = new HBaseTestingUtil();
    HBaseTestingUtil util2 = new HBaseTestingUtil();
    File keytab = new File(util1.getDataTestDir("keytab").toUri().getPath());
    MiniKdc kdc = util1.setupMiniKdc(keytab);
    try {
        String username = UserGroupInformation.getLoginUser().getShortUserName();
        String userPrincipal = username + "/localhost";
        kdc.createPrincipal(keytab, userPrincipal, HTTP_PRINCIPAL);
        loginUserFromKeytab(userPrincipal + '@' + kdc.getRealm(), keytab.getAbsolutePath());
        try (Closeable util1Closeable = startSecureMiniCluster(util1, kdc, userPrincipal);
            Closeable util2Closeable = startSecureMiniCluster(util2, kdc, userPrincipal)) {
            Configuration conf1 = util1.getConfiguration();
            Job job = Job.getInstance(conf1);
            TableMapReduceUtil.initCredentialsForCluster(job, util2.getConfiguration());
            Credentials credentials = job.getCredentials();
            Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
            assertEquals(1, tokens.size());
            String clusterId = ZKClusterId.readClusterIdZNode(util2.getZooKeeperWatcher());
            Token<AuthenticationTokenIdentifier> tokenForCluster = (Token<AuthenticationTokenIdentifier>) credentials.getToken(new Text(clusterId));
            assertEquals(userPrincipal + '@' + kdc.getRealm(), tokenForCluster.decodeIdentifier().getUsername());
        }
    } finally {
        kdc.stop();
    }
}
Also used: org.apache.hadoop.security.token.TokenIdentifier, org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier, org.apache.hadoop.conf.Configuration, java.io.Closeable, org.apache.hadoop.security.token.Token, org.apache.hadoop.io.Text, org.apache.hadoop.hbase.HBaseTestingUtil, org.apache.hadoop.minikdc.MiniKdc, org.apache.hadoop.mapreduce.Job, java.io.File, org.apache.hadoop.security.Credentials, org.junit.Test
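
Outside the test, the call under test is what a cross-cluster MapReduce job would make before submission, so that the job ships a delegation token for the remote cluster. A minimal sketch, with imports as in the surrounding examples; the cluster key string is a placeholder:

Configuration localConf = HBaseConfiguration.create();
// Derive a configuration pointing at the peer cluster from its cluster key.
Configuration peerConf = HBaseConfiguration.createClusterConf(localConf,
    "peer-zk:2181:/hbase"); // placeholder cluster key
Job job = Job.getInstance(localConf, "copy-to-peer");
// Obtains an HBase authentication token from the peer cluster and stores it
// in the job's Credentials, keyed by the peer's cluster ID.
TableMapReduceUtil.initCredentialsForCluster(job, peerConf);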

Example 40 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in the Apache Flink project.

From the class SecureTestEnvironment, the method prepare:

public static void prepare(TemporaryFolder tempFolder, String... additionalPrincipals) {
    checkArgument(additionalPrincipals != null, "Valid principals must be provided");
    try {
        File baseDirForSecureRun = tempFolder.newFolder();
        LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);
        Properties kdcConf = MiniKdc.createConf();
        if (LOG.isDebugEnabled()) {
            kdcConf.setProperty(MiniKdc.DEBUG, "true");
        }
        kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, HOST_NAME);
        kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
        kdc.start();
        LOG.info("Started Mini KDC");
        File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
        testKeytab = keytabFile.getAbsolutePath();
        testZkServerPrincipal = "zookeeper/" + HOST_NAME;
        testZkClientPrincipal = "zk-client/" + HOST_NAME;
        testKafkaServerPrincipal = "kafka/" + HOST_NAME;
        hadoopServicePrincipal = "hadoop/" + HOST_NAME;
        testPrincipal = "client/" + HOST_NAME;
        String[] embeddedPrincipals = { testZkServerPrincipal, testZkClientPrincipal, testKafkaServerPrincipal, hadoopServicePrincipal, testPrincipal };
        String[] principals = ArrayUtils.addAll(embeddedPrincipals, additionalPrincipals);
        kdc.createPrincipal(keytabFile, principals);
        testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
        testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
        testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
        hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();
        testPrincipal = testPrincipal + "@" + kdc.getRealm();
        LOG.info("-------------------------------------------------------------------");
        LOG.info("Test Principal: {}", testPrincipal);
        LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
        LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
        LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
        LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
        LOG.info("Test Keytab: {}", testKeytab);
        LOG.info("-------------------------------------------------------------------");
        // The security context is established so that non-Hadoop applications that
        // require JAAS-based SASL/Kerberos authentication can work. For Hadoop-specific
        // applications, the context can be reinitialized with the Hadoop configuration
        // by calling ctx.setHadoopConfiguration() so that the UGI implementation works
        // properly. See the Yarn test case module for reference.
        Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
        flinkConfig.setBoolean(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
        flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient," + KerberosUtils.getDefaultKerberosInitAppEntryName());
        SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
        TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());
        populateJavaPropertyVariables();
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while preparing secure environment.", e);
    }
}
Also used: org.apache.flink.configuration.Configuration, org.apache.flink.runtime.security.SecurityConfiguration, org.apache.flink.configuration.GlobalConfiguration, org.apache.hadoop.minikdc.MiniKdc, java.util.Properties, java.io.File
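
prepare() has a natural counterpart that tears the environment down between suites. A minimal cleanup sketch, assuming the same static fields; which system properties to clear is an assumption about what populateJavaPropertyVariables() sets:

public static void cleanup() {
    if (kdc != null) {
        kdc.stop();
        LOG.info("Stopped Mini KDC");
        kdc = null;
    }
    // Assumption: prepare() exported the JAAS/krb5 settings as system
    // properties; clear them so later tests start from a clean slate.
    System.clearProperty("java.security.auth.login.config");
    System.clearProperty("java.security.krb5.conf");
    testKeytab = null;
    testPrincipal = null;
}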

Aggregations

MiniKdc (org.apache.hadoop.minikdc.MiniKdc): 41 uses
File (java.io.File): 33 uses
Properties (java.util.Properties): 18 uses
BeforeClass (org.junit.BeforeClass): 15 uses
Configuration (org.apache.hadoop.conf.Configuration): 10 uses
FileWriter (java.io.FileWriter): 5 uses
IOException (java.io.IOException): 5 uses
Before (org.junit.Before): 5 uses
BindException (java.net.BindException): 4 uses
Closeable (java.io.Closeable): 3 uses
Writer (java.io.Writer): 3 uses
HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil): 3 uses
AuthenticationTokenIdentifier (org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier): 3 uses
Text (org.apache.hadoop.io.Text): 3 uses
Job (org.apache.hadoop.mapreduce.Job): 3 uses
Credentials (org.apache.hadoop.security.Credentials): 3 uses
Token (org.apache.hadoop.security.token.Token): 3 uses
TokenIdentifier (org.apache.hadoop.security.token.TokenIdentifier): 3 uses
Test (org.junit.Test): 3 uses
ApplicationProperties (org.apache.atlas.ApplicationProperties): 2 uses