
Example 21 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in project atlas by apache.

Class BaseSecurityTest, method startKDC:

protected File startKDC() throws Exception {
    // Create an isolated working directory for this test's KDC instance.
    File target = Files.createTempDirectory("sectest").toFile();
    File kdcWorkDir = new File(target, "kdc");
    // Default MiniKdc configuration, with Kerberos debug logging enabled.
    Properties kdcConf = MiniKdc.createConf();
    kdcConf.setProperty(MiniKdc.DEBUG, "true");
    kdc = new MiniKdc(kdcConf, kdcWorkDir);
    kdc.start();
    Assert.assertNotNull(kdc.getRealm());
    return kdcWorkDir;
}
Also used : MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Properties(java.util.Properties) ApplicationProperties(org.apache.atlas.ApplicationProperties) File(java.io.File)
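
A minimal follow-on sketch, not part of the Atlas source, showing how a test built on startKDC() might provision a keytab; the helper name, principal names, and keytab file name are illustrative assumptions, while createPrincipal, getRealm and stop are the real MiniKdc API.

protected File createTestKeytab() throws Exception {
    // Start the embedded KDC and place the keytab inside its working directory.
    File kdcWorkDir = startKDC();
    File keytab = new File(kdcWorkDir, "test.keytab");
    // MiniKdc#createPrincipal(File, String...) registers the principals in the
    // KDC and writes their keys into the keytab file.
    kdc.createPrincipal(keytab, "testuser/localhost", "HTTP/localhost");
    return keytab;
}

The fully qualified name to log in with would then be "testuser/localhost@" + kdc.getRealm(), and kdc.stop() belongs in the test teardown.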

Example 22 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in project hbase by apache.

Class TestShadeSaslAuthenticationProvider, method setupCluster:

@BeforeClass
public static void setupCluster() throws Exception {
    KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
    final MiniKdc kdc = UTIL.setupMiniKdc(KEYTAB_FILE);
    // Adds our test impls instead of creating service loader entries which
    // might inadvertently get them loaded on a real cluster.
    CONF.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY, ShadeSaslClientAuthenticationProvider.class.getName());
    CONF.setStrings(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY, ShadeSaslServerAuthenticationProvider.class.getName());
    CONF.set(SaslClientAuthenticationProviders.SELECTOR_KEY, ShadeProviderSelector.class.getName());
    CLUSTER = createCluster(UTIL, KEYTAB_FILE, kdc, Collections.singletonMap("user1", USER1_PASSWORD));
    CLUSTER.startup();
}
Also used : MiniKdc(org.apache.hadoop.minikdc.MiniKdc) File(java.io.File) BeforeClass(org.junit.BeforeClass)
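
A hypothetical companion teardown, not taken from the hbase test: it assumes CLUSTER exposes a shutdown() matching the startup() call above and that UTIL provides cleanupTestDir().

@AfterClass
public static void teardownCluster() throws Exception {
    // Stop the mini cluster started in setupCluster(). In a fuller version the
    // MiniKdc would also be kept in a field and stopped here.
    CLUSTER.shutdown();
    UTIL.cleanupTestDir();
}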

Example 23 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in project hbase by apache.

Class TestTableMapReduceUtil, method testInitCredentialsForCluster4:

@Test
@SuppressWarnings("unchecked")
public void testInitCredentialsForCluster4() throws Exception {
    HBaseTestingUtil util1 = new HBaseTestingUtil();
    // util1 plays the role of the insecure cluster.
    // Do not start util1: a secured mini cluster and an insecure mini cluster cannot run at once.
    HBaseTestingUtil util2 = new HBaseTestingUtil();
    File keytab = new File(util2.getDataTestDir("keytab").toUri().getPath());
    MiniKdc kdc = util2.setupMiniKdc(keytab);
    try {
        String username = UserGroupInformation.getLoginUser().getShortUserName();
        String userPrincipal = username + "/localhost";
        kdc.createPrincipal(keytab, userPrincipal, HTTP_PRINCIPAL);
        loginUserFromKeytab(userPrincipal + '@' + kdc.getRealm(), keytab.getAbsolutePath());
        try (Closeable util2Closeable = startSecureMiniCluster(util2, kdc, userPrincipal)) {
            Configuration conf1 = util1.getConfiguration();
            Job job = Job.getInstance(conf1);
            TableMapReduceUtil.initCredentialsForCluster(job, util2.getConfiguration());
            Credentials credentials = job.getCredentials();
            Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
            assertEquals(1, tokens.size());
            String clusterId = ZKClusterId.readClusterIdZNode(util2.getZooKeeperWatcher());
            Token<AuthenticationTokenIdentifier> tokenForCluster = (Token<AuthenticationTokenIdentifier>) credentials.getToken(new Text(clusterId));
            assertEquals(userPrincipal + '@' + kdc.getRealm(), tokenForCluster.decodeIdentifier().getUsername());
        }
    } finally {
        kdc.stop();
    }
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) Configuration(org.apache.hadoop.conf.Configuration) Closeable(java.io.Closeable) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Job(org.apache.hadoop.mapreduce.Job) File(java.io.File) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)
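
For contrast, a minimal sketch (not taken from the hbase test) of the single-cluster path: when the job reads and writes the same secure cluster, TableMapReduceUtil.initCredentials(job) obtains the delegation token from the job's own configuration and no peer configuration is passed in.

// Hypothetical single-cluster variant of the credential setup above.
Job job = Job.getInstance(util2.getConfiguration());
TableMapReduceUtil.initCredentials(job);
// As in the cross-cluster case, the obtained token ends up in job.getCredentials().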

Example 24 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in project hbase by apache.

Class TestTableMapReduceUtil, method testInitCredentialsForCluster3:

@Test
public void testInitCredentialsForCluster3() throws Exception {
    HBaseTestingUtil util1 = new HBaseTestingUtil();
    File keytab = new File(util1.getDataTestDir("keytab").toUri().getPath());
    MiniKdc kdc = util1.setupMiniKdc(keytab);
    try {
        String username = UserGroupInformation.getLoginUser().getShortUserName();
        String userPrincipal = username + "/localhost";
        kdc.createPrincipal(keytab, userPrincipal, HTTP_PRINCIPAL);
        loginUserFromKeytab(userPrincipal + '@' + kdc.getRealm(), keytab.getAbsolutePath());
        try (Closeable util1Closeable = startSecureMiniCluster(util1, kdc, userPrincipal)) {
            HBaseTestingUtil util2 = new HBaseTestingUtil();
            // util2 plays the role of the insecure cluster.
            // Do not start util2: a secured mini cluster and an insecure mini cluster cannot run at once.
            Configuration conf1 = util1.getConfiguration();
            Job job = Job.getInstance(conf1);
            TableMapReduceUtil.initCredentialsForCluster(job, util2.getConfiguration());
            Credentials credentials = job.getCredentials();
            Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
            assertTrue(tokens.isEmpty());
        }
    } finally {
        kdc.stop();
    }
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) Configuration(org.apache.hadoop.conf.Configuration) Closeable(java.io.Closeable) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Token(org.apache.hadoop.security.token.Token) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Job(org.apache.hadoop.mapreduce.Job) File(java.io.File) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)
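
The empty token collection follows from the peer configuration: a delegation token is only requested when the peer cluster declares Kerberos authentication. A standalone illustration of that condition, using the standard hbase.security.authentication key (paraphrased, not lifted from initCredentialsForCluster):

// util2 was never switched to Kerberos, so its configuration still reports
// "simple" authentication and no token is requested for it.
Configuration peerConf = util2.getConfiguration();
boolean peerIsSecure = "kerberos".equalsIgnoreCase(peerConf.get("hbase.security.authentication", "simple"));
assertFalse(peerIsSecure);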

Example 25 with MiniKdc

Use of org.apache.hadoop.minikdc.MiniKdc in project hbase by apache.

Class HBaseTestingUtil, method setupMiniKdc:

/**
 * Sets up {@link MiniKdc} for testing security. Uses {@link HBaseKerberosUtils} to set the given
 * keytab file as {@link HBaseKerberosUtils#KRB_KEYTAB_FILE}. FYI, there is also the easier-to-use
 * kerby KDC server and utility for using it,
 * {@link org.apache.hadoop.hbase.util.SimpleKdcServerUtil}. The kerby KDC server is preferred;
 * less baggage. It came in in HBASE-5291.
 */
public MiniKdc setupMiniKdc(File keytabFile) throws Exception {
    Properties conf = MiniKdc.createConf();
    // setProperty stores a String value; Properties#getProperty ignores non-String values stored via put.
    conf.setProperty(MiniKdc.DEBUG, "true");
    MiniKdc kdc = null;
    File dir = null;
    // There is time lag between selecting a port and trying to bind with it. It's possible that
    // another service captures the port in between which'll result in BindException.
    boolean bindException;
    int numTries = 0;
    do {
        try {
            bindException = false;
            dir = new File(getDataTestDir("kdc").toUri().getPath());
            kdc = new MiniKdc(conf, dir);
            kdc.start();
        } catch (BindException e) {
            // clean directory
            FileUtils.deleteDirectory(dir);
            numTries++;
            if (numTries == 3) {
                LOG.error("Failed setting up MiniKDC. Tried " + numTries + " times.");
                throw e;
            }
            LOG.error("BindException encountered when setting up MiniKdc. Trying again.");
            bindException = true;
        }
    } while (bindException);
    HBaseKerberosUtils.setKeytabFileForTesting(keytabFile.getAbsolutePath());
    return kdc;
}
Also used : MiniKdc(org.apache.hadoop.minikdc.MiniKdc) BindException(java.net.BindException) Properties(java.util.Properties) File(java.io.File) HFile(org.apache.hadoop.hbase.io.hfile.HFile)
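
The calling pattern for this helper is the one already shown in Examples 23 and 24, condensed here into a minimal sketch; the principal names are illustrative.

HBaseTestingUtil util = new HBaseTestingUtil();
File keytab = new File(util.getDataTestDir("keytab").toUri().getPath());
MiniKdc kdc = util.setupMiniKdc(keytab);
try {
    // Register the test principals in the fresh KDC and log in from the keytab.
    kdc.createPrincipal(keytab, "testuser/localhost", "HTTP/localhost");
    UserGroupInformation.loginUserFromKeytab("testuser/localhost@" + kdc.getRealm(), keytab.getAbsolutePath());
    // ... run the secured test ...
} finally {
    kdc.stop();
}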

Aggregations

MiniKdc (org.apache.hadoop.minikdc.MiniKdc): 41 usages
File (java.io.File): 33 usages
Properties (java.util.Properties): 18 usages
BeforeClass (org.junit.BeforeClass): 15 usages
Configuration (org.apache.hadoop.conf.Configuration): 10 usages
FileWriter (java.io.FileWriter): 5 usages
IOException (java.io.IOException): 5 usages
Before (org.junit.Before): 5 usages
BindException (java.net.BindException): 4 usages
Closeable (java.io.Closeable): 3 usages
Writer (java.io.Writer): 3 usages
HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil): 3 usages
AuthenticationTokenIdentifier (org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier): 3 usages
Text (org.apache.hadoop.io.Text): 3 usages
Job (org.apache.hadoop.mapreduce.Job): 3 usages
Credentials (org.apache.hadoop.security.Credentials): 3 usages
Token (org.apache.hadoop.security.token.Token): 3 usages
TokenIdentifier (org.apache.hadoop.security.token.TokenIdentifier): 3 usages
Test (org.junit.Test): 3 usages
ApplicationProperties (org.apache.atlas.ApplicationProperties): 2 usages