Search in sources:

Example 26 with Location

use of org.apache.twill.filesystem.Location in project cdap by caskdata.

The following code is from class DefaultAuthorizationEnforcerTest, method setupClass.

@BeforeClass
public static void setupClass() throws IOException {
    // Package the in-memory authorizer into a deployment jar whose manifest
    // names it as the main class, then point the authorization-extension
    // configuration at that jar so the tests load it as the external authorizer.
    Manifest authManifest = new Manifest();
    authManifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, InMemoryAuthorizer.class.getName());
    Location authExtensionJar = AppJarHelper.createDeploymentJar(locationFactory, InMemoryAuthorizer.class, authManifest);
    CCONF.set(Constants.Security.Authorization.EXTENSION_JAR_PATH, authExtensionJar.toString());
}
Also used : Manifest(java.util.jar.Manifest) Location(org.apache.twill.filesystem.Location) BeforeClass(org.junit.BeforeClass)

Example 27 with Location

use of org.apache.twill.filesystem.Location in project cdap by caskdata.

The following code is from class SparkCredentialsUpdaterTest, method testCleanup.

@Test
public void testCleanup() throws IOException, InterruptedException {
    Location credsDir = Locations.toLocation(TEMPORARY_FOLDER.newFolder());
    // Build an updater whose automatic refresh interval (1 day) is far beyond the
    // test duration, with a 3-second file expiry and a retention limit of 3 files,
    // so only the explicit run() calls below produce credential files.
    SparkCredentialsUpdater credentialsUpdater = new SparkCredentialsUpdater(createCredentialsSupplier(), credsDir, "credentials", TimeUnit.DAYS.toMillis(1), TimeUnit.SECONDS.toMillis(3), 3) {

        @Override
        long getNextUpdateDelay(Credentials credentials) throws IOException {
            // Push the self-scheduled refresh far into the future.
            return TimeUnit.DAYS.toMillis(1);
        }
    };
    credentialsUpdater.startAndWait();
    try {
        // Each explicit run() writes one more file; keep this loop short so the
        // files all stay younger than the 3-second expiry while it executes.
        for (int expectedCount = 1; expectedCount <= 5; expectedCount++) {
            Assert.assertEquals(expectedCount, credsDir.list().size());
            credentialsUpdater.run();
        }
        // Wait out the expiry window so the existing files become stale.
        TimeUnit.SECONDS.sleep(3);
        // The next run() triggers cleanup: only three files should remain
        // (2 expired-but-retained files plus the newly written one).
        credentialsUpdater.run();
        Assert.assertEquals(3, credsDir.list().size());
    } finally {
        credentialsUpdater.stopAndWait();
    }
}
Also used : Credentials(org.apache.hadoop.security.Credentials) Location(org.apache.twill.filesystem.Location) Test(org.junit.Test)

Example 28 with Location

use of org.apache.twill.filesystem.Location in project cdap by caskdata.

The following code is from class SparkCredentialsUpdaterTest, method testUpdater.

@Test
public void testUpdater() throws Exception {
    Location credsDir = Locations.toLocation(TEMPORARY_FOLDER.newFolder());
    // Build an updater whose automatic refresh and cleanup intervals (1 day) and
    // retention limit (MAX_VALUE) guarantee that neither fires during the test;
    // credential files are produced only by the explicit run() calls below.
    SparkCredentialsUpdater credentialsUpdater = new SparkCredentialsUpdater(createCredentialsSupplier(), credsDir, "credentials", TimeUnit.DAYS.toMillis(1), TimeUnit.DAYS.toMillis(1), Integer.MAX_VALUE) {

        @Override
        long getNextUpdateDelay(Credentials credentials) throws IOException {
            // Push the self-scheduled refresh far into the future.
            return TimeUnit.DAYS.toMillis(1);
        }
    };
    // Nothing should exist before the updater is started.
    Assert.assertTrue(credsDir.list().isEmpty());
    UserGroupInformation.getCurrentUser().addToken(new Token<>(Bytes.toBytes("id"), Bytes.toBytes("pass"), new Text("kind"), new Text("service")));
    credentialsUpdater.startAndWait();
    try {
        List<Location> expectedFiles = new ArrayList<>();
        expectedFiles.add(credsDir.append("credentials-1"));
        for (int round = 1; round <= 10; round++) {
            Assert.assertEquals(expectedFiles, listAndSort(credsDir));
            // Deserialize the most recently written credentials file.
            Location newestFile = expectedFiles.get(expectedFiles.size() - 1);
            Credentials storedCredentials = new Credentials();
            try (DataInputStream input = new DataInputStream(newestFile.getInputStream())) {
                storedCredentials.readTokenStorageStream(input);
            }
            // Every token held by the current user must appear in that file.
            Credentials currentUserCredentials = UserGroupInformation.getCurrentUser().getCredentials();
            for (Token<? extends TokenIdentifier> userToken : currentUserCredentials.getAllTokens()) {
                Assert.assertEquals(userToken, storedCredentials.getToken(userToken.getService()));
            }
            // Add a fresh token and force another update, which writes the next
            // generation file; record it as expected for the next iteration.
            UserGroupInformation.getCurrentUser().addToken(new Token<>(Bytes.toBytes("id" + round), Bytes.toBytes("pass" + round), new Text("kind" + round), new Text("service" + round)));
            credentialsUpdater.run();
            expectedFiles.add(credsDir.append("credentials-" + (round + 1)));
        }
    } finally {
        credentialsUpdater.stopAndWait();
    }
}
Also used : ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text) DataInputStream(java.io.DataInputStream) Credentials(org.apache.hadoop.security.Credentials) Location(org.apache.twill.filesystem.Location) Test(org.junit.Test)

Example 29 with Location

use of org.apache.twill.filesystem.Location in project cdap by caskdata.

The following code is from class SparkCredentialsUpdater, method run.

@Override
public void run() {
    // Writes the current credentials to a new generation file, then schedules
    // the next run. On any failure, reschedules itself with a short retry delay.
    // Delay used if an exception is thrown before a real delay is computed below;
    // defaults to the configured update interval.
    long nextUpdateTime = updateIntervalMs;
    try {
        if (generation == 0) {
            // First run: resume numbering from the newest existing file
            // rather than starting over at 1.
            generation = findLatestGeneration();
        }
        // Write to the next generation file. It's ok to skip some generation if the write failed.
        generation++;
        Location credentialsFile = credentialsDir.append(fileNamePrefix + SPARK_YARN_CREDS_COUNTER_DELIM + generation);
        Location tempFile = credentialsDir.append(credentialsFile.getName() + SPARK_YARN_CREDS_TEMP_EXTENSION);
        // Writes the credentials to temp location, then rename to the final one,
        // so readers never observe a partially written file.
        Credentials credentials = credentialsSupplier.get();
        try (DataOutputStream os = new DataOutputStream(tempFile.getOutputStream("600"))) {
            credentials.writeTokenStorageToStream(os);
        }
        // renameTo returns the destination Location on success; anything else
        // (including null) means the rename failed.
        if (!credentialsFile.equals(tempFile.renameTo(credentialsFile))) {
            throw new IOException("Failed to rename from " + tempFile + " to " + credentialsFile);
        }
        LOG.debug("Credentials written to {}", credentialsFile);
        // Schedule the next update.
        // Use the same logic as the Spark executor to calculate the update time.
        nextUpdateTime = getNextUpdateDelay(credentials);
        LOG.debug("Next credentials refresh at {}ms later", nextUpdateTime);
        scheduler.schedule(this, nextUpdateTime, TimeUnit.MILLISECONDS);
        // Remove expired files beyond the retention limit after the new file
        // is safely in place.
        cleanup();
    } catch (Exception e) {
        // Retry time is the min(1 minute, update interval)
        long retryDelay = Math.min(60000, nextUpdateTime);
        LOG.warn("Exception raised when saving credentials. Retry in {}ms", retryDelay, e);
        scheduler.schedule(this, retryDelay, TimeUnit.MILLISECONDS);
    }
}
Also used : DataOutputStream(java.io.DataOutputStream) IOException(java.io.IOException) Credentials(org.apache.hadoop.security.Credentials) IOException(java.io.IOException) Location(org.apache.twill.filesystem.Location)

Example 30 with Location

use of org.apache.twill.filesystem.Location in project cdap by caskdata.

The following code is from class LocationsTest, method absolutePathTests.

@Test
public void absolutePathTests() throws IOException {
    // The create/resolve/assert round-trip was previously duplicated three
    // times inline; it is extracted into a helper so each factory flavor is a
    // single, clearly labeled call.
    // Test HDFS:
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://1.2.3.4:8020/");
    assertAbsolutePathRoundTrip(new FileContextLocationFactory(conf, TEST_BASE_PATH));
    // Test file: (local file system through the Hadoop FileContext API)
    conf = new Configuration();
    conf.set("fs.defaultFS", "file:///");
    assertAbsolutePathRoundTrip(new FileContextLocationFactory(conf, TEST_BASE_PATH));
    // Test LocalLocation
    assertAbsolutePathRoundTrip(new LocalLocationFactory(new File(TEST_BASE_PATH)));
}

/**
 * Verifies that resolving the absolute path of a location created from {@code TEST_PATH}
 * through {@code Locations.getLocationFromAbsolutePath} yields a location with the same URI.
 *
 * @param locationFactory the factory under test
 * @throws IOException if the location cannot be created or resolved
 */
private void assertAbsolutePathRoundTrip(LocationFactory locationFactory) throws IOException {
    Location original = locationFactory.create(TEST_PATH);
    Location resolved = Locations.getLocationFromAbsolutePath(locationFactory, original.toURI().getPath());
    Assert.assertEquals(original.toURI(), resolved.toURI());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) FileContextLocationFactory(org.apache.twill.filesystem.FileContextLocationFactory) LocalLocationFactory(org.apache.twill.filesystem.LocalLocationFactory) File(java.io.File) LocalLocationFactory(org.apache.twill.filesystem.LocalLocationFactory) FileContextLocationFactory(org.apache.twill.filesystem.FileContextLocationFactory) LocationFactory(org.apache.twill.filesystem.LocationFactory) Location(org.apache.twill.filesystem.Location) Test(org.junit.Test)

Aggregations

Location (org.apache.twill.filesystem.Location)272 Test (org.junit.Test)110 IOException (java.io.IOException)67 File (java.io.File)45 FileSet (co.cask.cdap.api.dataset.lib.FileSet)32 LocationFactory (org.apache.twill.filesystem.LocationFactory)32 LocalLocationFactory (org.apache.twill.filesystem.LocalLocationFactory)31 PartitionedFileSet (co.cask.cdap.api.dataset.lib.PartitionedFileSet)27 StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent)27 CConfiguration (co.cask.cdap.common.conf.CConfiguration)20 HashMap (java.util.HashMap)20 NamespaceId (co.cask.cdap.proto.id.NamespaceId)19 Manifest (java.util.jar.Manifest)18 StreamId (co.cask.cdap.proto.id.StreamId)17 ArrayList (java.util.ArrayList)15 DatasetFramework (co.cask.cdap.data2.dataset2.DatasetFramework)13 OutputStream (java.io.OutputStream)13 TimePartitionedFileSet (co.cask.cdap.api.dataset.lib.TimePartitionedFileSet)11 ApplicationManager (co.cask.cdap.test.ApplicationManager)11 HashSet (java.util.HashSet)11