Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class TestKMS, method doWebHDFSProxyUserTest. The test verifies KMS proxy-user authorization: an authenticated "client" user impersonates "foo", "foo1", and "bar", and key creation succeeds or is rejected with "Forbidden" according to the per-key ACLs.
public void doWebHDFSProxyUserTest(final boolean kerberos) throws Exception {
  Configuration conf = new Configuration();
  if (kerberos) {
    conf.set("hadoop.security.authentication", "kerberos");
  }
  UserGroupInformation.setConfiguration(conf);
  final File testDir = getTestDir();
  conf = createBaseKMSConf(testDir, conf);
  if (kerberos) {
    conf.set("hadoop.kms.authentication.type", "kerberos");
  }
  conf.set("hadoop.kms.authentication.kerberos.keytab", keytab.getAbsolutePath());
  conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
  conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
  conf.set("hadoop.security.kms.client.timeout", "300");
  conf.set("hadoop.kms.proxyuser.client.users", "foo,bar");
  conf.set("hadoop.kms.proxyuser.client.hosts", "*");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "kaa.ALL", "foo");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "kbb.ALL", "foo1");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "kcc.ALL", "bar");
  writeConf(testDir, conf);
  runServer(null, null, testDir, new KMSCallable<Void>() {
    @Override
    public Void call() throws Exception {
      final Configuration conf = new Configuration();
      conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
      final URI uri = createKMSUri(getKMSUrl());
      UserGroupInformation proxyUgi = null;
      if (kerberos) {
        // proxyuser client using kerberos credentials
        proxyUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI("client", keytab.getAbsolutePath());
      } else {
        proxyUgi = UserGroupInformation.createRemoteUser("client");
      }
      final UserGroupInformation clientUgi = proxyUgi;
      clientUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          // authorized proxyuser
          UserGroupInformation fooUgi = UserGroupInformation.createProxyUser("foo", clientUgi);
          fooUgi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
              KeyProvider kp = createProvider(uri, conf);
              Assert.assertNotNull(kp.createKey("kaa", new KeyProvider.Options(conf)));
              return null;
            }
          });
          // unauthorized proxyuser
          UserGroupInformation foo1Ugi = UserGroupInformation.createProxyUser("foo1", clientUgi);
          foo1Ugi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
              try {
                KeyProvider kp = createProvider(uri, conf);
                kp.createKey("kbb", new KeyProvider.Options(conf));
                Assert.fail();
              } catch (Exception ex) {
                Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("Forbidden"));
              }
              return null;
            }
          });
          // authorized proxyuser
          UserGroupInformation barUgi = UserGroupInformation.createProxyUser("bar", clientUgi);
          barUgi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
              KeyProvider kp = createProvider(uri, conf);
              Assert.assertNotNull(kp.createKey("kcc", new KeyProvider.Options(conf)));
              return null;
            }
          });
          return null;
        }
      });
      return null;
    }
  });
}
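The proxy-user pattern above distills to a few UserGroupInformation calls. A minimal sketch (the user names here are illustrative, not from the source):

// Sketch: the real user authenticates, then impersonates an end user.
UserGroupInformation realUser = UserGroupInformation.createRemoteUser("client");
UserGroupInformation proxy = UserGroupInformation.createProxyUser("foo", realUser);
proxy.doAs(new PrivilegedExceptionAction<Void>() {
  @Override
  public Void run() throws Exception {
    // Code here executes as "foo"; the KMS checks both the
    // hadoop.kms.proxyuser.* settings and the per-key ACLs.
    return null;
  }
});

Whether a given doAs call is permitted is decided server-side, which is why the unauthorized case above surfaces as a "Forbidden" error from the KMS rather than as a local exception.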
Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class TestKMS, method doKMSWithZK. This helper starts an in-process ZooKeeper TestingServer and runs a Kerberos-secured KMS whose signer secret provider and/or delegation-token secret manager are backed by ZooKeeper, depending on the flags.
public void doKMSWithZK(boolean zkDTSM, boolean zkSigner) throws Exception {
  TestingServer zkServer = null;
  try {
    zkServer = new TestingServer();
    zkServer.start();
    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    final File testDir = getTestDir();
    conf = createBaseKMSConf(testDir, conf);
    conf.set("hadoop.kms.authentication.type", "kerberos");
    conf.set("hadoop.kms.authentication.kerberos.keytab", keytab.getAbsolutePath());
    conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
    conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
    if (zkSigner) {
      conf.set("hadoop.kms.authentication.signer.secret.provider", "zookeeper");
      conf.set("hadoop.kms.authentication.signer.secret.provider.zookeeper.path", "/testKMSWithZKDTSM");
      conf.set("hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string", zkServer.getConnectString());
    }
    if (zkDTSM) {
      conf.set("hadoop.kms.authentication.zk-dt-secret-manager.enable", "true");
    }
    if (zkDTSM && !zkSigner) {
      conf.set("hadoop.kms.authentication.zk-dt-secret-manager.zkConnectionString", zkServer.getConnectString());
      conf.set("hadoop.kms.authentication.zk-dt-secret-manager.znodeWorkingPath", "testZKPath");
      conf.set("hadoop.kms.authentication.zk-dt-secret-manager.zkAuthType", "none");
    }
    for (KMSACLs.Type type : KMSACLs.Type.values()) {
      conf.set(type.getAclConfigKey(), type.toString());
    }
    conf.set(KMSACLs.Type.CREATE.getAclConfigKey(), KMSACLs.Type.CREATE.toString() + ",SET_KEY_MATERIAL");
    conf.set(KMSACLs.Type.ROLLOVER.getAclConfigKey(), KMSACLs.Type.ROLLOVER.toString() + ",SET_KEY_MATERIAL");
    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k0.ALL", "*");
    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k1.ALL", "*");
    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k2.ALL", "*");
    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k3.ALL", "*");
    writeConf(testDir, conf);
    KMSCallable<KeyProvider> c = new KMSCallable<KeyProvider>() {
      @Override
      public KeyProvider call() throws Exception {
        final Configuration conf = new Configuration();
        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
        final URI uri = createKMSUri(getKMSUrl());
        final KeyProvider kp = doAs("SET_KEY_MATERIAL", new PrivilegedExceptionAction<KeyProvider>() {
          @Override
          public KeyProvider run() throws Exception {
            KeyProvider kp = createProvider(uri, conf);
            kp.createKey("k1", new byte[16], new KeyProvider.Options(conf));
            kp.createKey("k2", new byte[16], new KeyProvider.Options(conf));
            kp.createKey("k3", new byte[16], new KeyProvider.Options(conf));
            return kp;
          }
        });
        return kp;
      }
    };
    runServer(null, null, testDir, c);
  } finally {
    if (zkServer != null) {
      zkServer.stop();
      zkServer.close();
    }
  }
}
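This helper is parameterized rather than a test itself; presumably small @Test wrappers drive the flag combinations. A sketch under that assumption (the wrapper names are guesses, not from this excerpt):

@Test
public void testKMSWithZKSigner() throws Exception {
  // ZooKeeper-backed signer secret provider only
  doKMSWithZK(false, true);
}

@Test
public void testKMSWithZKDTSM() throws Exception {
  // ZooKeeper-backed delegation-token secret manager only
  doKMSWithZK(true, false);
}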
Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class TestKeyAuthorizationKeyProvider, method testOpsWhenACLAttributeExists. The test mocks KeyACLs so that users u1, u2, u3, and sudo hold MANAGEMENT, GENERATE_EEK, DECRYPT_EEK, and ALL permissions respectively on "testKey", then checks that each user can perform exactly the operations their ACL grants.
@Test
public void testOpsWhenACLAttributeExists() throws Exception {
  final Configuration conf = new Configuration();
  KeyProvider kp = new UserProvider.Factory().createProvider(new URI("user:///"), conf);
  KeyACLs mock = mock(KeyACLs.class);
  when(mock.isACLPresent("testKey", KeyOpType.MANAGEMENT)).thenReturn(true);
  when(mock.isACLPresent("testKey", KeyOpType.GENERATE_EEK)).thenReturn(true);
  when(mock.isACLPresent("testKey", KeyOpType.DECRYPT_EEK)).thenReturn(true);
  when(mock.isACLPresent("testKey", KeyOpType.ALL)).thenReturn(true);
  UserGroupInformation u1 = UserGroupInformation.createRemoteUser("u1");
  UserGroupInformation u2 = UserGroupInformation.createRemoteUser("u2");
  UserGroupInformation u3 = UserGroupInformation.createRemoteUser("u3");
  UserGroupInformation sudo = UserGroupInformation.createRemoteUser("sudo");
  when(mock.hasAccessToKey("testKey", u1, KeyOpType.MANAGEMENT)).thenReturn(true);
  when(mock.hasAccessToKey("testKey", u2, KeyOpType.GENERATE_EEK)).thenReturn(true);
  when(mock.hasAccessToKey("testKey", u3, KeyOpType.DECRYPT_EEK)).thenReturn(true);
  when(mock.hasAccessToKey("testKey", sudo, KeyOpType.ALL)).thenReturn(true);
  final KeyProviderCryptoExtension kpExt = new KeyAuthorizationKeyProvider(KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp), mock);
  // u1 holds MANAGEMENT: create/roll/delete must succeed, generateEncryptedKey must not.
  final KeyVersion barKv = u1.doAs(new PrivilegedExceptionAction<KeyVersion>() {
    @Override
    public KeyVersion run() throws Exception {
      Options opt = newOptions(conf);
      Map<String, String> m = new HashMap<String, String>();
      m.put("key.acl.name", "testKey");
      opt.setAttributes(m);
      try {
        KeyVersion kv = kpExt.createKey("foo", SecureRandom.getSeed(16), opt);
        kpExt.rollNewVersion(kv.getName());
        kpExt.rollNewVersion(kv.getName(), SecureRandom.getSeed(16));
        kpExt.deleteKey(kv.getName());
      } catch (IOException ioe) {
        Assert.fail("User should be Authorized !!");
      }
      KeyVersion retkv = null;
      try {
        retkv = kpExt.createKey("bar", SecureRandom.getSeed(16), opt);
        kpExt.generateEncryptedKey(retkv.getName());
        Assert.fail("User should NOT be Authorized to generate EEK !!");
      } catch (IOException ioe) {
        // expected: u1 lacks GENERATE_EEK
      }
      Assert.assertNotNull(retkv);
      return retkv;
    }
  });
  // u2 holds GENERATE_EEK only.
  final EncryptedKeyVersion barEKv = u2.doAs(new PrivilegedExceptionAction<EncryptedKeyVersion>() {
    @Override
    public EncryptedKeyVersion run() throws Exception {
      try {
        kpExt.deleteKey(barKv.getName());
        Assert.fail("User should NOT be Authorized to perform any other operation !!");
      } catch (IOException ioe) {
        // expected: u2 lacks MANAGEMENT
      }
      return kpExt.generateEncryptedKey(barKv.getName());
    }
  });
  // u3 holds DECRYPT_EEK only.
  u3.doAs(new PrivilegedExceptionAction<KeyVersion>() {
    @Override
    public KeyVersion run() throws Exception {
      try {
        kpExt.deleteKey(barKv.getName());
        Assert.fail("User should NOT be Authorized to perform any other operation !!");
      } catch (IOException ioe) {
        // expected: u3 lacks MANAGEMENT
      }
      return kpExt.decryptEncryptedKey(barEKv);
    }
  });
  // sudo holds ALL: every operation must succeed.
  sudo.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
      Options opt = newOptions(conf);
      Map<String, String> m = new HashMap<String, String>();
      m.put("key.acl.name", "testKey");
      opt.setAttributes(m);
      try {
        KeyVersion kv = kpExt.createKey("foo", SecureRandom.getSeed(16), opt);
        kpExt.rollNewVersion(kv.getName());
        kpExt.rollNewVersion(kv.getName(), SecureRandom.getSeed(16));
        EncryptedKeyVersion ekv = kpExt.generateEncryptedKey(kv.getName());
        kpExt.decryptEncryptedKey(ekv);
        kpExt.deleteKey(kv.getName());
      } catch (IOException ioe) {
        Assert.fail("User should be Allowed to do everything !!");
      }
      return null;
    }
  });
}
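The newOptions helper is not shown in this excerpt. A plausible reconstruction, assuming it simply fixes a cipher and bit length for the test keys (both values are assumptions):

// Hypothetical reconstruction of the newOptions(conf) helper used above.
private static KeyProvider.Options newOptions(Configuration conf) {
  KeyProvider.Options options = new KeyProvider.Options(conf);
  options.setCipher("AES/CTR/NoPadding"); // assumed cipher
  options.setBitLength(128); // assumed key length
  return options;
}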
Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class KMSUtil, method createKeyProvider.
/**
 * Creates a new KeyProvider from the given Configuration
 * and configuration key name.
 *
 * @param conf Configuration
 * @param configKeyName The configuration key name
 * @return new KeyProvider, or null if no provider was found.
 * @throws IOException if the KeyProvider is improperly specified in
 * the Configuration
 */
public static KeyProvider createKeyProvider(final Configuration conf, final String configKeyName) throws IOException {
  LOG.debug("Creating key provider with config key {}", configKeyName);
  final String providerUriStr = conf.getTrimmed(configKeyName, "");
  // No provider set in conf
  if (providerUriStr.isEmpty()) {
    return null;
  }
  final URI providerUri;
  try {
    providerUri = new URI(providerUriStr);
  } catch (URISyntaxException e) {
    throw new IOException(e);
  }
  KeyProvider keyProvider = KeyProviderFactory.get(providerUri, conf);
  if (keyProvider == null) {
    throw new IOException("Could not instantiate KeyProvider from " + configKeyName + " setting of '" + providerUriStr + "'");
  }
  if (keyProvider.isTransient()) {
    throw new IOException("KeyProvider " + keyProvider.toString() + " was found but it is a transient provider.");
  }
  return keyProvider;
}
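A minimal caller sketch, assuming the standard hadoop.security.key.provider.path configuration key (the KMS URI is an example value, not from the source):

Configuration conf = new Configuration();
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
    "kms://http@localhost:16000/kms"); // example provider URI
KeyProvider provider = KMSUtil.createKeyProvider(conf,
    CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH);
if (provider != null) {
  System.out.println("available keys: " + provider.getKeys());
}

Note the contract: a missing setting returns null, while a malformed URI or a transient provider raises IOException, so callers must handle both outcomes.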
Use of org.apache.hadoop.crypto.key.KeyProvider in project hadoop by apache.
The class TestEncryptionZones, method testVersionAndSuiteNegotiation. The test rewrites DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS to verify that file creation in an encryption zone fails when the client advertises no (or only unknown) crypto protocol versions, and succeeds once ENCRYPTION_ZONES is included.
@Test
public void testVersionAndSuiteNegotiation() throws Exception {
  final HdfsAdmin dfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
  final Path zone = new Path("/zone");
  fs.mkdirs(zone);
  dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);
  // Create a file in an EZ, which should succeed
  DFSTestUtil.createFile(fs, new Path(zone, "success1"), 0, (short) 1, 0xFEED);
  // Pass no supported versions, fail
  DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[] {};
  try {
    DFSTestUtil.createFile(fs, new Path(zone, "fail"), 0, (short) 1, 0xFEED);
    fail("Created a file without specifying a crypto protocol version");
  } catch (UnknownCryptoProtocolVersionException e) {
    assertExceptionContains("No crypto protocol versions", e);
  }
  // Pass some unknown versions, fail
  DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[] { CryptoProtocolVersion.UNKNOWN, CryptoProtocolVersion.UNKNOWN };
  try {
    DFSTestUtil.createFile(fs, new Path(zone, "fail"), 0, (short) 1, 0xFEED);
    fail("Created a file without specifying a known crypto protocol version");
  } catch (UnknownCryptoProtocolVersionException e) {
    assertExceptionContains("No crypto protocol versions", e);
  }
  // Pass some unknown versions and a good one, success
  DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[] { CryptoProtocolVersion.UNKNOWN, CryptoProtocolVersion.UNKNOWN, CryptoProtocolVersion.ENCRYPTION_ZONES };
  DFSTestUtil.createFile(fs, new Path(zone, "success2"), 0, (short) 1, 0xFEED);
  DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[] { CryptoProtocolVersion.ENCRYPTION_ZONES, CryptoProtocolVersion.UNKNOWN, CryptoProtocolVersion.UNKNOWN };
  DFSTestUtil.createFile(fs, new Path(zone, "success3"), 4096, (short) 1, 0xFEED);
  // Check KeyProvider state
  // Flushing the KP on the NN, since it caches, and init a test one
  cluster.getNamesystem().getProvider().flush();
  KeyProvider provider = KeyProviderFactory.get(new URI(conf.getTrimmed(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH)), conf);
  List<String> keys = provider.getKeys();
  assertEquals("Expected NN to have created one key per zone", 1, keys.size());
  List<KeyProvider.KeyVersion> allVersions = Lists.newArrayList();
  for (String key : keys) {
    List<KeyProvider.KeyVersion> versions = provider.getKeyVersions(key);
    assertEquals("Should only have one key version per key", 1, versions.size());
    allVersions.addAll(versions);
  }
  // Check that the specified CipherSuite was correctly saved on the NN
  for (int i = 2; i <= 3; i++) {
    FileEncryptionInfo feInfo = getFileEncryptionInfo(new Path(zone.toString() + "/success" + i));
    assertEquals(feInfo.getCipherSuite(), CipherSuite.AES_CTR_NOPADDING);
  }
  DFSClient old = fs.dfs;
  try {
    testCipherSuiteNegotiation(fs, conf);
  } finally {
    fs.dfs = old;
  }
}
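Because DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS is a mutable static, rewriting it leaks into later tests unless it is restored. A defensive sketch (an assumed pattern, not taken from the source):

// Save and restore the static so other tests see the default version set.
CryptoProtocolVersion[] saved = DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS;
try {
  DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS =
      new CryptoProtocolVersion[] { CryptoProtocolVersion.ENCRYPTION_ZONES };
  // ... create files in the zone and assert on the negotiated suite ...
} finally {
  DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = saved;
}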