Use of org.apache.hadoop.crypto.key.KeyProvider in project hbase by apache.
The class TestSaslFanOutOneBlockAsyncDFSOutput, method setUpKeyProvider.
private static void setUpKeyProvider(Configuration conf) throws Exception {
  // Back the key provider with a JCEKS keystore in the test data directory.
  URI keyProviderUri =
    new URI("jceks://file" + TEST_UTIL.getDataTestDir("test.jks").toUri().toString());
  conf.set("dfs.encryption.key.provider.uri", keyProviderUri.toString());
  // Create the test key, persist the keystore, and release the provider.
  KeyProvider keyProvider = KeyProviderFactory.get(keyProviderUri, conf);
  keyProvider.createKey(TEST_KEY_NAME, KeyProvider.options(conf));
  keyProvider.flush();
  keyProvider.close();
}
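As a rough illustration (not part of the hbase test), the JCEKS store persisted by setUpKeyProvider could be reopened through the same URI and the key read back; conf and TEST_KEY_NAME are assumed to be the same values used above.

// Hedged sketch: re-open the JCEKS provider and confirm the key round-trips.
KeyProvider readBack =
  KeyProviderFactory.get(new URI(conf.get("dfs.encryption.key.provider.uri")), conf);
KeyProvider.KeyVersion current = readBack.getCurrentKey(TEST_KEY_NAME);
assert current != null && current.getMaterial().length > 0; // material persisted by flush()
readBack.close();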
Use of org.apache.hadoop.crypto.key.KeyProvider in project cdap by caskdata.
The class KMSSecureStore, method getSecureData.
/**
 * Returns the data stored in the secure store. Makes two calls to the provider, one to get the
 * metadata and another to get the data.
 *
 * @param namespace The namespace this key belongs to.
 * @param name Name of the key.
 * @return An object representing the securely stored data associated with the name.
 * @throws NamespaceNotFoundException If the specified namespace does not exist.
 * @throws IOException If there was a problem getting the key or the metadata from the underlying
 *                     key provider.
 */
// Unfortunately KeyProvider does not specify the underlying cause except in the message, so we
// cannot throw a more specific exception.
@Override
public SecureStoreData getSecureData(String namespace, String name) throws Exception {
  checkNamespaceExists(namespace);
  String keyName = getKeyName(namespace, name);
  KeyProvider.Metadata metadata = provider.getMetadata(keyName);
  // The provider returns null if the key is not found.
  if (metadata == null) {
    throw new NotFoundException(new SecureKeyId(namespace, name));
  }
  SecureStoreMetadata meta =
    SecureStoreMetadata.of(name, metadata.getDescription(), metadata.getAttributes());
  KeyProvider.KeyVersion keyVersion = provider.getCurrentKey(keyName);
  return new SecureStoreData(meta, keyVersion.getMaterial());
}
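As the javadoc above notes, retrieval is two provider calls: one for the metadata and one for the current key version. A hedged sketch of that pattern against a bare KeyProvider (the provider and keyName variables are placeholders, not CDAP identifiers):

KeyProvider.Metadata md = provider.getMetadata(keyName);      // call 1: metadata, null if absent
if (md == null) {
  throw new IOException("No such key: " + keyName);
}
KeyProvider.KeyVersion kv = provider.getCurrentKey(keyName);  // call 2: current key material
String description = md.getDescription();
Map<String, String> attributes = md.getAttributes();
byte[] material = kv.getMaterial();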
Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class TestLoadBalancingKMSClientProvider, method testLoadBalancing.
@Test
public void testLoadBalancing() throws Exception {
  Configuration conf = new Configuration();
  // Each mocked provider "creates" a key whose name identifies the provider that served the call.
  KMSClientProvider p1 = mock(KMSClientProvider.class);
  when(p1.createKey(Mockito.anyString(), Mockito.any(Options.class)))
    .thenReturn(new KMSClientProvider.KMSKeyVersion("p1", "v1", new byte[0]));
  KMSClientProvider p2 = mock(KMSClientProvider.class);
  when(p2.createKey(Mockito.anyString(), Mockito.any(Options.class)))
    .thenReturn(new KMSClientProvider.KMSKeyVersion("p2", "v2", new byte[0]));
  KMSClientProvider p3 = mock(KMSClientProvider.class);
  when(p3.createKey(Mockito.anyString(), Mockito.any(Options.class)))
    .thenReturn(new KMSClientProvider.KMSKeyVersion("p3", "v3", new byte[0]));
  KeyProvider kp = new LoadBalancingKMSClientProvider(new KMSClientProvider[] { p1, p2, p3 }, 0, conf);
  // Successive createKey calls rotate round-robin across the three delegates.
  assertEquals("p1", kp.createKey("test1", new Options(conf)).getName());
  assertEquals("p2", kp.createKey("test2", new Options(conf)).getName());
  assertEquals("p3", kp.createKey("test3", new Options(conf)).getName());
  assertEquals("p1", kp.createKey("test4", new Options(conf)).getName());
}
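Outside of tests, the load-balancing provider is normally obtained from a multi-host KMS URI rather than constructed directly. A hedged sketch, with placeholder host names:

// Assumed client-side configuration; the semicolon-separated authority yields a
// LoadBalancingKMSClientProvider that spreads requests across kms1, kms2 and kms3.
Configuration clientConf = new Configuration();
clientConf.set("hadoop.security.key.provider.path", "kms://http@kms1;kms2;kms3:9600/kms");
KeyProvider provider = KeyProviderFactory.getProviders(clientConf).get(0);
provider.getKeys(); // served by one of the three KMS instances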
Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class TestLoadBalancingKMSClientProvider, method testLoadBalancingWithAllBadNodes.
@Test
public void testLoadBalancingWithAllBadNodes() throws Exception {
  Configuration conf = new Configuration();
  // Every mocked provider fails, so the load-balancing wrapper has no healthy delegate left.
  KMSClientProvider p1 = mock(KMSClientProvider.class);
  when(p1.createKey(Mockito.anyString(), Mockito.any(Options.class))).thenThrow(new IOException("p1"));
  KMSClientProvider p2 = mock(KMSClientProvider.class);
  when(p2.createKey(Mockito.anyString(), Mockito.any(Options.class))).thenThrow(new IOException("p2"));
  KMSClientProvider p3 = mock(KMSClientProvider.class);
  when(p3.createKey(Mockito.anyString(), Mockito.any(Options.class))).thenThrow(new IOException("p3"));
  KMSClientProvider p4 = mock(KMSClientProvider.class);
  when(p4.createKey(Mockito.anyString(), Mockito.any(Options.class))).thenThrow(new IOException("p4"));
  when(p1.getKMSUrl()).thenReturn("p1");
  when(p2.getKMSUrl()).thenReturn("p2");
  when(p3.getKMSUrl()).thenReturn("p3");
  when(p4.getKMSUrl()).thenReturn("p4");
  KeyProvider kp = new LoadBalancingKMSClientProvider(new KMSClientProvider[] { p1, p2, p3, p4 }, 0, conf);
  try {
    kp.createKey("test3", new Options(conf)).getName();
    fail("Should fail since all providers threw an IOException");
  } catch (Exception e) {
    assertTrue(e instanceof IOException);
  }
}
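On JUnit 4.13+ the same expectation can be written with assertThrows instead of the try/catch/fail idiom; a hedged alternative, behaviourally identical to the block above:

IOException thrown = assertThrows(IOException.class,
  () -> kp.createKey("test3", new Options(conf)));
// With every delegate throwing, the load-balancing provider surfaces an IOException to the caller.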
Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class TestKMS, method testTGTRenewal.
@Test
public void testTGTRenewal() throws Exception {
  tearDownMiniKdc();
  Properties kdcConf = MiniKdc.createConf();
  kdcConf.setProperty(MiniKdc.MAX_TICKET_LIFETIME, "3");
  kdcConf.setProperty(MiniKdc.MIN_TICKET_LIFETIME, "3");
  setUpMiniKdc(kdcConf);
  Configuration conf = new Configuration();
  conf.set("hadoop.security.authentication", "kerberos");
  final File testDir = getTestDir();
  conf = createBaseKMSConf(testDir, conf);
  conf.set("hadoop.kms.authentication.type", "kerberos");
  conf.set("hadoop.kms.authentication.kerberos.keytab", keytab.getAbsolutePath());
  conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
  conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
  conf.set("hadoop.kms.proxyuser.client.users", "*");
  conf.set("hadoop.kms.proxyuser.client.hosts", "*");
  writeConf(testDir, conf);
  runServer(null, null, testDir, new KMSCallable<Void>() {

    @Override
    public Void call() throws Exception {
      final Configuration conf = new Configuration();
      final URI uri = createKMSUri(getKMSUrl());
      UserGroupInformation.setShouldRenewImmediatelyForTests(true);
      UserGroupInformation.loginUserFromKeytab("client", keytab.getAbsolutePath());
      final UserGroupInformation clientUgi = UserGroupInformation.getCurrentUser();
      clientUgi.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
          // Verify getKeys can relogin
          Thread.sleep(3100);
          KeyProvider kp = createProvider(uri, conf);
          kp.getKeys();
          // Verify addDelegationTokens can relogin
          // (different code path inside KMSClientProvider than getKeys)
          Thread.sleep(3100);
          kp = createProvider(uri, conf);
          ((KeyProviderDelegationTokenExtension.DelegationTokenExtension) kp)
            .addDelegationTokens("myuser", new Credentials());
          // Verify getKeys can relogin with proxy user
          UserGroupInformation anotherUgi = UserGroupInformation.createProxyUser("client1", clientUgi);
          anotherUgi.doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
              Thread.sleep(3100);
              KeyProvider kp = createProvider(uri, conf);
              kp.getKeys();
              return null;
            }
          });
          return null;
        }
      });
      return null;
    }
  });
}
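Stripped of the nested doAs blocks, the pattern the test exercises is: log in from a keytab, let the 3-second TGT expire, then issue a KMS call and rely on KMSClientProvider to relogin transparently. A hedged distillation, where kmsUri is a placeholder, the 3100 ms sleep mirrors the test, and createProvider is the TestKMS helper used above:

UserGroupInformation.loginUserFromKeytab("client", keytab.getAbsolutePath());
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
  Thread.sleep(3100);                                     // outlive the 3-second ticket
  createProvider(kmsUri, new Configuration()).getKeys();  // should trigger a relogin, not fail
  return null;
});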