Search in sources:

Example 1 with KMSClientProvider

Use of org.apache.hadoop.crypto.key.kms.KMSClientProvider in the Apache Hadoop project.

From the class TestEncryptionZonesWithKMS, method testCreateEZPopulatesEDEKCache.

@Test(timeout = 120000)
public void testCreateEZPopulatesEDEKCache() throws Exception {
    // Creating an encryption zone should eagerly populate the provider's
    // EDEK (encrypted data encryption key) cache for the zone's key.
    final Path zonePath = new Path("/TestEncryptionZone");
    fsWrapper.mkdir(zonePath, FsPermission.getDirDefault(), false);
    dfsAdmin.createEncryptionZone(zonePath, TEST_KEY, NO_TRASH);
    // Reach into the NameNode's provider to obtain the underlying
    // KMSClientProvider. The cast below is an ordinary reference cast, not an
    // unchecked generic cast, so the former @SuppressWarnings("unchecked")
    // was unnecessary and has been removed.
    KMSClientProvider kcp = (KMSClientProvider) Whitebox.getInternalState(
        cluster.getNamesystem().getProvider(), "extension");
    // Message added for a clearer failure, matching the style of the
    // warm-up test's assertion.
    assertTrue("EDEK cache should be populated after creating encryption zone",
        kcp.getEncKeyQueueSize(TEST_KEY) > 0);
}
Also used : Path(org.apache.hadoop.fs.Path) KMSClientProvider(org.apache.hadoop.crypto.key.kms.KMSClientProvider) Test(org.junit.Test)

Example 2 with KMSClientProvider

Use of org.apache.hadoop.crypto.key.kms.KMSClientProvider in the Apache Hadoop project.

From the class TestEncryptionZonesWithKMS, method testWarmupEDEKCacheOnStartup.

@Test(timeout = 120000)
public void testWarmupEDEKCacheOnStartup() throws Exception {
    // Create two encryption zones backed by two distinct keys so the warm-up
    // path has more than one key queue to refill after restart.
    Path zonePath = new Path("/TestEncryptionZone");
    fsWrapper.mkdir(zonePath, FsPermission.getDirDefault(), false);
    dfsAdmin.createEncryptionZone(zonePath, TEST_KEY, NO_TRASH);
    final String anotherKey = "k2";
    zonePath = new Path("/TestEncryptionZone2");
    DFSTestUtil.createKey(anotherKey, cluster, conf);
    fsWrapper.mkdir(zonePath, FsPermission.getDirDefault(), false);
    dfsAdmin.createEncryptionZone(zonePath, anotherKey, NO_TRASH);
    // Renamed from "spy": this is the real provider pulled out via Whitebox,
    // not a Mockito spy, and the name now matches the sibling test. The cast
    // is an ordinary reference cast, so the former
    // @SuppressWarnings("unchecked") was unnecessary and has been removed.
    KMSClientProvider kcp = (KMSClientProvider) Whitebox.getInternalState(
        cluster.getNamesystem().getProvider(), "extension");
    assertTrue("key queue is empty after creating encryption zone",
        kcp.getEncKeyQueueSize(TEST_KEY) > 0);
    // Restart the NameNode with a zero initial delay for the EDEK cache
    // loader, then wait for the background warm-up to repopulate the cache.
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_EDEKCACHELOADER_INITIAL_DELAY_MS_KEY, 0);
    cluster.restartNameNode(true);
    GenericTestUtils.waitFor(new Supplier<Boolean>() {

        @Override
        public Boolean get() {
            // The restart creates a fresh provider instance, so it must be
            // re-fetched inside the polling loop rather than captured above.
            final KMSClientProvider kspy = (KMSClientProvider) Whitebox.getInternalState(
                cluster.getNamesystem().getProvider(), "extension");
            return kspy.getEncKeyQueueSize(TEST_KEY) > 0;
        }
    }, 1000, 60000);
}
Also used : Path(org.apache.hadoop.fs.Path) KMSClientProvider(org.apache.hadoop.crypto.key.kms.KMSClientProvider) Test(org.junit.Test)

Example 3 with KMSClientProvider

Use of org.apache.hadoop.crypto.key.kms.KMSClientProvider in the Apache Hadoop project.

From the class TestKMS, method testKMSTimeout.

/**
   * Test the configurable timeout in the KMSClientProvider.  Open up a
   * socket, but don't accept connections for it.  This leads to a timeout
   * when the KMS client attempts to connect.
   * @throws Exception
   */
@Test
public void testKMSTimeout() throws Exception {
    File confDir = getTestDir();
    Configuration conf = createBaseKMSConf(confDir);
    // 1-second client timeout so each attempt fails fast.
    conf.setInt(KMSClientProvider.TIMEOUT_ATTR, 1);
    writeConf(confDir, conf);
    ServerSocket sock;
    int port;
    try {
        // Bind but never accept() — connect attempts will hang until timeout.
        sock = new ServerSocket(0, 50, InetAddress.getByName("localhost"));
        port = sock.getLocalPort();
    } catch (Exception e) {
        /* Problem creating socket?  Just bail. */
        return;
    }
    // FIX: close the socket in a finally block; previously a failed assertion
    // or unexpected exception leaked the listening socket.
    try {
        URL url = new URL("http://localhost:" + port + "/kms");
        URI uri = createKMSUri(url);

        // getKeys() must time out.
        boolean caughtTimeout = false;
        try {
            KeyProvider kp = createProvider(uri, conf);
            kp.getKeys();
        } catch (SocketTimeoutException e) {
            caughtTimeout = true;
        } catch (IOException e) {
            // FIX: Assert.fail() instead of assertTrue(msg, false).
            Assert.fail("Caught unexpected exception" + e.toString());
        }
        // FIX: the original only asserted after the third attempt, so a
        // missing timeout in the first two calls went undetected.
        Assert.assertTrue("getKeys() did not time out", caughtTimeout);

        // generateEncryptedKey() must time out.
        caughtTimeout = false;
        try {
            KeyProvider kp = createProvider(uri, conf);
            KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp)
                .generateEncryptedKey("a");
        } catch (SocketTimeoutException e) {
            caughtTimeout = true;
        } catch (IOException e) {
            Assert.fail("Caught unexpected exception" + e.toString());
        }
        Assert.assertTrue("generateEncryptedKey() did not time out",
            caughtTimeout);

        // decryptEncryptedKey() must time out.
        caughtTimeout = false;
        try {
            KeyProvider kp = createProvider(uri, conf);
            KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp)
                .decryptEncryptedKey(new KMSClientProvider.KMSEncryptedKeyVersion(
                    "a", "a", new byte[] { 1, 2 }, "EEK", new byte[] { 1, 2 }));
        } catch (SocketTimeoutException e) {
            caughtTimeout = true;
        } catch (IOException e) {
            Assert.fail("Caught unexpected exception" + e.toString());
        }
        Assert.assertTrue("decryptEncryptedKey() did not time out",
            caughtTimeout);
    } finally {
        sock.close();
    }
}
Also used : KeyProvider(org.apache.hadoop.crypto.key.KeyProvider) Configuration(org.apache.hadoop.conf.Configuration) ServerSocket(java.net.ServerSocket) IOException(java.io.IOException) URI(java.net.URI) KMSClientProvider(org.apache.hadoop.crypto.key.kms.KMSClientProvider) LoadBalancingKMSClientProvider(org.apache.hadoop.crypto.key.kms.LoadBalancingKMSClientProvider) AuthorizationException(org.apache.hadoop.security.authorize.AuthorizationException) SocketTimeoutException(java.net.SocketTimeoutException) IOException(java.io.IOException) URL(java.net.URL) SocketTimeoutException(java.net.SocketTimeoutException) File(java.io.File) Test(org.junit.Test)

Example 4 with KMSClientProvider

Use of org.apache.hadoop.crypto.key.kms.KMSClientProvider in the Apache Hadoop project.

From the class TestKMS, method testKMSProviderCaching.

// Verifies that KMSClientProvider.invalidateCache() drains the client-side
// EDEK cache so that subsequently generated keys reflect the latest key
// version, using a mocked EncryptedKeyVersion planted directly in the
// internal queue to detect stale entries.
@Test
public void testKMSProviderCaching() throws Exception {
    Configuration conf = new Configuration();
    File confDir = getTestDir();
    conf = createBaseKMSConf(confDir, conf);
    // Grant full ACL on key "k1" so the test client can create/use it.
    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k1.ALL", "*");
    writeConf(confDir, conf);
    runServer(null, null, confDir, new KMSCallable<Void>() {

        @Override
        public Void call() throws Exception {
            final String keyName = "k1";
            final String mockVersionName = "mock";
            final Configuration conf = new Configuration();
            final URI uri = createKMSUri(getKMSUrl());
            KMSClientProvider kmscp = createKMSClientProvider(uri, conf);
            // get the reference to the internal cache, to test invalidation.
            // Whitebox reaches into private fields; "encKeyVersionQueue" is
            // the provider's ValueQueue and "keyQueues" its per-key cache.
            ValueQueue vq = (ValueQueue) Whitebox.getInternalState(kmscp, "encKeyVersionQueue");
            LoadingCache<String, LinkedBlockingQueue<EncryptedKeyVersion>> kq = ((LoadingCache<String, LinkedBlockingQueue<EncryptedKeyVersion>>) Whitebox.getInternalState(vq, "keyQueues"));
            // A sentinel EDEK whose version name marks it as a stale/planted
            // entry, distinguishable from real "k1@0" versions.
            EncryptedKeyVersion mockEKV = Mockito.mock(EncryptedKeyVersion.class);
            when(mockEKV.getEncryptionKeyName()).thenReturn(keyName);
            when(mockEKV.getEncryptionKeyVersionName()).thenReturn(mockVersionName);
            // createKey()
            KeyProvider.Options options = new KeyProvider.Options(conf);
            options.setCipher("AES/CTR/NoPadding");
            options.setBitLength(128);
            options.setDescription("l1");
            KeyProvider.KeyVersion kv0 = kmscp.createKey(keyName, options);
            assertNotNull(kv0.getVersionName());
            // Fresh key: generated EDEKs carry the real version "k1@0".
            assertEquals("Default key version name is incorrect.", "k1@0", kmscp.generateEncryptedKey(keyName).getEncryptionKeyVersionName());
            // Invalidate first, then plant the mock: the next generate call
            // should consume the planted entry (cache was emptied, so the
            // queue head is the mock).
            kmscp.invalidateCache(keyName);
            kq.get(keyName).put(mockEKV);
            assertEquals("Key version incorrect after invalidating cache + putting" + " mock key.", mockVersionName, kmscp.generateEncryptedKey(keyName).getEncryptionKeyVersionName());
            // test new version is returned after invalidation.
            // Opposite order: plant the mock, THEN invalidate. Invalidation
            // must discard the planted entry so only real "k1@0" versions
            // are ever returned — repeated 100x to catch races.
            for (int i = 0; i < 100; ++i) {
                kq.get(keyName).put(mockEKV);
                kmscp.invalidateCache(keyName);
                assertEquals("Cache invalidation guarantee failed.", "k1@0", kmscp.generateEncryptedKey(keyName).getEncryptionKeyVersionName());
            }
            return null;
        }
    });
}
Also used : KeyProvider(org.apache.hadoop.crypto.key.KeyProvider) Options(org.apache.hadoop.crypto.key.KeyProvider.Options) Configuration(org.apache.hadoop.conf.Configuration) KeyVersion(org.apache.hadoop.crypto.key.KeyProvider.KeyVersion) EncryptedKeyVersion(org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion) URI(java.net.URI) KMSClientProvider(org.apache.hadoop.crypto.key.kms.KMSClientProvider) LoadBalancingKMSClientProvider(org.apache.hadoop.crypto.key.kms.LoadBalancingKMSClientProvider) AuthorizationException(org.apache.hadoop.security.authorize.AuthorizationException) SocketTimeoutException(java.net.SocketTimeoutException) IOException(java.io.IOException) EncryptedKeyVersion(org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion) ValueQueue(org.apache.hadoop.crypto.key.kms.ValueQueue) Options(org.apache.hadoop.crypto.key.KeyProvider.Options) LoadingCache(com.google.common.cache.LoadingCache) File(java.io.File) Test(org.junit.Test)

Aggregations

KMSClientProvider (org.apache.hadoop.crypto.key.kms.KMSClientProvider)4 Test (org.junit.Test)4 File (java.io.File)2 IOException (java.io.IOException)2 SocketTimeoutException (java.net.SocketTimeoutException)2 URI (java.net.URI)2 Configuration (org.apache.hadoop.conf.Configuration)2 KeyProvider (org.apache.hadoop.crypto.key.KeyProvider)2 LoadBalancingKMSClientProvider (org.apache.hadoop.crypto.key.kms.LoadBalancingKMSClientProvider)2 Path (org.apache.hadoop.fs.Path)2 AuthorizationException (org.apache.hadoop.security.authorize.AuthorizationException)2 LoadingCache (com.google.common.cache.LoadingCache)1 ServerSocket (java.net.ServerSocket)1 URL (java.net.URL)1 KeyVersion (org.apache.hadoop.crypto.key.KeyProvider.KeyVersion)1 Options (org.apache.hadoop.crypto.key.KeyProvider.Options)1 EncryptedKeyVersion (org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion)1 ValueQueue (org.apache.hadoop.crypto.key.kms.ValueQueue)1