Example 76 with URI

Use of java.net.URI in the Apache Hadoop project.

The class TestKeyProviderFactory, method testJksProvider.

@Test
public void testJksProvider() throws Exception {
    Configuration conf = new Configuration();
    final Path jksPath = new Path(testRootDir.toString(), "test.jks");
    final String ourUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
    File file = new File(testRootDir, "test.jks");
    file.delete();
    conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, ourUrl);
    checkSpecificProvider(conf, ourUrl);
    // START : Test flush error by failure injection
    conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, ourUrl.replace(JavaKeyStoreProvider.SCHEME_NAME, FailureInjectingJavaKeyStoreProvider.SCHEME_NAME));
    // get a new instance of the provider to ensure it was saved correctly
    KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
    // inject failure during keystore write
    FailureInjectingJavaKeyStoreProvider fProvider = (FailureInjectingJavaKeyStoreProvider) provider;
    fProvider.setWriteFail(true);
    provider.createKey("key5", new byte[] { 1 }, KeyProvider.options(conf).setBitLength(8));
    assertNotNull(provider.getCurrentKey("key5"));
    try {
        provider.flush();
        Assert.fail("Should not succeed");
    } catch (Exception e) {
        // Ignore: flush is expected to fail because of the injected write error
    }
    // Should be reset to pre-flush state
    Assert.assertNull(provider.getCurrentKey("key5"));
    // Un-inject last failure and
    // inject failure during keystore backup
    fProvider.setWriteFail(false);
    fProvider.setBackupFail(true);
    provider.createKey("key6", new byte[] { 1 }, KeyProvider.options(conf).setBitLength(8));
    assertNotNull(provider.getCurrentKey("key6"));
    try {
        provider.flush();
        Assert.fail("Should not succeed");
    } catch (Exception e) {
        // Ignore: flush is expected to fail because of the injected backup error
    }
    // Should be reset to pre-flush state
    Assert.assertNull(provider.getCurrentKey("key6"));
    // END : Test flush error by failure injection
    conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, ourUrl.replace(FailureInjectingJavaKeyStoreProvider.SCHEME_NAME, JavaKeyStoreProvider.SCHEME_NAME));
    Path path = ProviderUtils.unnestUri(new URI(ourUrl));
    FileSystem fs = path.getFileSystem(conf);
    FileStatus s = fs.getFileStatus(path);
    assertTrue(s.getPermission().toString().equals("rw-------"));
    assertTrue(file + " should exist", file.isFile());
    // Corrupt the file and check whether JKS can reload from the _OLD file
    File oldFile = new File(file.getPath() + "_OLD");
    file.renameTo(oldFile);
    file.delete();
    file.createNewFile();
    assertTrue(oldFile.exists());
    provider = KeyProviderFactory.getProviders(conf).get(0);
    assertTrue(file.exists());
    assertTrue(oldFile + " should be deleted", !oldFile.exists());
    verifyAfterReload(file, provider);
    assertTrue(!oldFile.exists());
    // _NEW and current file should not exist together
    File newFile = new File(file.getPath() + "_NEW");
    newFile.createNewFile();
    try {
        provider = KeyProviderFactory.getProviders(conf).get(0);
        Assert.fail("_NEW and current file should not exist together !!");
    } catch (Exception e) {
        // Ignore: loading must fail while both _NEW and the current file exist
    } finally {
        if (newFile.exists()) {
            newFile.delete();
        }
    }
    // Load from _NEW file
    file.renameTo(newFile);
    file.delete();
    try {
        provider = KeyProviderFactory.getProviders(conf).get(0);
        Assert.assertFalse(newFile.exists());
        Assert.assertFalse(oldFile.exists());
    } catch (Exception e) {
        Assert.fail("JKS should load from _NEW file !!");
    // Ignore
    }
    verifyAfterReload(file, provider);
    // _NEW exists but is corrupt; must load from _OLD
    newFile.createNewFile();
    file.renameTo(oldFile);
    file.delete();
    try {
        provider = KeyProviderFactory.getProviders(conf).get(0);
        Assert.assertFalse(newFile.exists());
        Assert.assertFalse(oldFile.exists());
    } catch (Exception e) {
        Assert.fail("JKS should load from _OLD file !!");
    // Ignore
    } finally {
        if (newFile.exists()) {
            newFile.delete();
        }
    }
    verifyAfterReload(file, provider);
    // check permission retention after explicit change
    fs.setPermission(path, new FsPermission("777"));
    checkPermissionRetention(conf, ourUrl, path);
    // Check that an uppercase keyname results in an error
    provider = KeyProviderFactory.getProviders(conf).get(0);
    try {
        provider.createKey("UPPERCASE", KeyProvider.options(conf));
        Assert.fail("Expected failure on creating key name with uppercase " + "characters");
    } catch (IllegalArgumentException e) {
        GenericTestUtils.assertExceptionContains("Uppercase key names", e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) FsPermission(org.apache.hadoop.fs.permission.FsPermission) File(java.io.File) URI(java.net.URI) IOException(java.io.IOException) Test(org.junit.Test)
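
For reference, a minimal sketch of the nested-URI convention this test relies on: the keystore URI wraps a file location under the provider scheme, and ProviderUtils.unnestUri recovers the underlying path. The "jceks" scheme string and the /tmp path below are illustrative assumptions, not values taken from the test.

import java.net.URI;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.ProviderUtils;

public class UnnestUriSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical keystore location; "jceks" stands in for the value of
        // JavaKeyStoreProvider.SCHEME_NAME used when building ourUrl above.
        URI nested = new URI("jceks://file/tmp/test.jks");
        // unnestUri strips the outer provider scheme, leaving the Path of the
        // underlying keystore file (roughly file:/tmp/test.jks).
        Path keystore = ProviderUtils.unnestUri(nested);
        System.out.println(keystore);
    }
}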

Example 77 with URI

Use of java.net.URI in the Apache Hadoop project.

The class TestLoadBalancingKMSClientProvider, method testCreation.

@Test
public void testCreation() throws Exception {
    Configuration conf = new Configuration();
    KeyProvider kp = new KMSClientProvider.Factory().createProvider(new URI("kms://http@host1/kms/foo"), conf);
    assertTrue(kp instanceof KMSClientProvider);
    assertEquals("http://host1/kms/foo/v1/", ((KMSClientProvider) kp).getKMSUrl());
    kp = new KMSClientProvider.Factory().createProvider(new URI("kms://http@host1;host2;host3/kms/foo"), conf);
    assertTrue(kp instanceof LoadBalancingKMSClientProvider);
    KMSClientProvider[] providers = ((LoadBalancingKMSClientProvider) kp).getProviders();
    assertEquals(3, providers.length);
    assertEquals(Sets.newHashSet("http://host1/kms/foo/v1/", "http://host2/kms/foo/v1/", "http://host3/kms/foo/v1/"), Sets.newHashSet(providers[0].getKMSUrl(), providers[1].getKMSUrl(), providers[2].getKMSUrl()));
    kp = new KMSClientProvider.Factory().createProvider(new URI("kms://http@host1;host2;host3:9600/kms/foo"), conf);
    assertTrue(kp instanceof LoadBalancingKMSClientProvider);
    providers = ((LoadBalancingKMSClientProvider) kp).getProviders();
    assertEquals(3, providers.length);
    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/", "http://host2:9600/kms/foo/v1/", "http://host3:9600/kms/foo/v1/"), Sets.newHashSet(providers[0].getKMSUrl(), providers[1].getKMSUrl(), providers[2].getKMSUrl()));
}
Also used : KeyProvider(org.apache.hadoop.crypto.key.KeyProvider) Configuration(org.apache.hadoop.conf.Configuration) URI(java.net.URI) Test(org.junit.Test)
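
A minimal sketch (not the actual KMSClientProvider factory logic) of how the semicolon-delimited authority in a kms:// URI can expand into one URL per host, matching the sets asserted above. It assumes a trailing port applies to every host, and the /v1/ suffix simply mirrors the expected URLs.

import java.net.URI;
import java.util.ArrayList;
import java.util.List;

public class KmsUriExpansionSketch {

    // Expands kms://http@host1;host2;host3:9600/kms/foo into per-host URLs.
    // Assumes the authority always has the "protocol@hosts" form.
    static List<String> expand(URI kmsUri) {
        // Authority looks like "http@host1;host2;host3:9600".
        String authority = kmsUri.getAuthority();
        String protocol = authority.substring(0, authority.indexOf('@'));
        String hostPart = authority.substring(authority.indexOf('@') + 1);
        // An optional port after the last host is applied to every host.
        int port = -1;
        int colon = hostPart.lastIndexOf(':');
        if (colon != -1) {
            port = Integer.parseInt(hostPart.substring(colon + 1));
            hostPart = hostPart.substring(0, colon);
        }
        List<String> urls = new ArrayList<>();
        for (String host : hostPart.split(";")) {
            urls.add(protocol + "://" + host + (port == -1 ? "" : ":" + port)
                + kmsUri.getPath() + "/v1/");
        }
        return urls;
    }

    public static void main(String[] args) throws Exception {
        // Prints the same three URLs the test expects for the ported variant.
        System.out.println(expand(new URI("kms://http@host1;host2;host3:9600/kms/foo")));
    }
}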

Example 78 with URI

Use of java.net.URI in the Apache Hadoop project.

The class FCStatisticsBaseTest, method getSchemeAuthorityUri.

protected URI getSchemeAuthorityUri() {
    URI uri = getFsUri();
    String schemeAuthString = uri.getScheme() + "://";
    if (uri.getAuthority() == null) {
        schemeAuthString += "/";
    } else {
        schemeAuthString += uri.getAuthority();
    }
    return URI.create(schemeAuthString);
}
Also used : URI(java.net.URI)
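
To illustrate what the helper returns, a small standalone example using plain java.net.URI; the hdfs:// and file:// URIs below are made up for illustration.

import java.net.URI;

public class SchemeAuthoritySketch {
    public static void main(String[] args) {
        URI withAuthority = URI.create("hdfs://namenode:8020/user/alice");
        // scheme + "://" + authority -> hdfs://namenode:8020
        System.out.println(withAuthority.getScheme() + "://" + withAuthority.getAuthority());

        // A local file URI of this form has no authority, so getAuthority()
        // returns null and the helper above falls back to "file:///".
        URI noAuthority = URI.create("file:///tmp/data");
        System.out.println(noAuthority.getAuthority()); // null
    }
}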

Example 79 with URI

Use of java.net.URI in the Apache Hadoop project.

The class TestKeyProviderCryptoExtension, method testDefaultCryptoExtensionSelectionWithCachingKeyProvider.

@Test
public void testDefaultCryptoExtensionSelectionWithCachingKeyProvider() throws Exception {
    Configuration config = new Configuration();
    KeyProvider localKp = new UserProvider.Factory().createProvider(new URI("user:///"), config);
    localKp = new CachingKeyProvider(localKp, 30000, 30000);
    EncryptedKeyVersion localEkv = getEncryptedKeyVersion(config, localKp);
    Assert.assertEquals(ENCRYPTION_KEY_NAME + "@0", localEkv.getEncryptionKeyVersionName());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) EncryptedKeyVersion(org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion) URI(java.net.URI) Test(org.junit.Test)

Example 80 with URI

Use of java.net.URI in the Apache Hadoop project.

The class TestAfsCheckPath, method testCheckPathWithTheSameNonDefaultPort.

@Test
public void testCheckPathWithTheSameNonDefaultPort() throws URISyntaxException {
    URI uri = new URI("dummy://dummy-host:" + OTHER_PORT);
    AbstractFileSystem afs = new DummyFileSystem(uri);
    afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT));
}
Also used : URI(java.net.URI) Test(org.junit.Test)
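
A quick sketch of the URI components such a port check relies on; the port value below is a hypothetical stand-in for the OTHER_PORT constant defined in TestAfsCheckPath.

import java.net.URI;
import java.net.URISyntaxException;

public class UriPortSketch {
    public static void main(String[] args) throws URISyntaxException {
        int otherPort = 1234; // stand-in for OTHER_PORT
        URI uri = new URI("dummy://dummy-host:" + otherPort);
        System.out.println(uri.getScheme()); // dummy
        System.out.println(uri.getHost());   // dummy-host
        System.out.println(uri.getPort());   // 1234
        // checkPath succeeds above because the checked path carries the same
        // explicit, non-default port as the filesystem's own URI.
    }
}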
