use of org.apache.twill.filesystem.Location in project cdap by caskdata.
The setupClass method of the DefaultAuthorizationEnforcerTest class.
@BeforeClass
public static void setupClass() throws IOException {
  // Build a manifest whose Main-Class attribute names the in-memory authorizer implementation.
  Manifest authorizerManifest = new Manifest();
  authorizerManifest.getMainAttributes().put(Attributes.Name.MAIN_CLASS, InMemoryAuthorizer.class.getName());
  // Package the authorizer into a deployment jar and register that jar as the
  // authorization extension in the test CConfiguration.
  Location authorizerJar = AppJarHelper.createDeploymentJar(locationFactory, InMemoryAuthorizer.class, authorizerManifest);
  CCONF.set(Constants.Security.Authorization.EXTENSION_JAR_PATH, authorizerJar.toString());
}
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
The testCleanup method of the SparkCredentialsUpdaterTest class.
@Test
public void testCleanup() throws IOException, InterruptedException {
  Location credentialsDir = Locations.toLocation(TEMPORARY_FOLDER.newFolder());
  // Update interval is one day so no automatic refresh fires during the test;
  // the expiry is three seconds so cleanup behavior can be exercised quickly.
  SparkCredentialsUpdater updater =
    new SparkCredentialsUpdater(createCredentialsSupplier(), credentialsDir, "credentials",
                                TimeUnit.DAYS.toMillis(1), TimeUnit.SECONDS.toMillis(3), 3) {
      @Override
      long getNextUpdateDelay(Credentials credentials) throws IOException {
        // Push the next scheduled update far beyond the test's lifetime.
        return TimeUnit.DAYS.toMillis(1);
      }
    };
  updater.startAndWait();
  try {
    // Each manual run() writes one more credentials file; nothing has expired yet,
    // so the file count grows by one per iteration.
    int expectedCount = 1;
    while (expectedCount <= 5) {
      Assert.assertEquals(expectedCount, credentialsDir.list().size());
      updater.run();
      expectedCount++;
    }
    // Wait past the three-second expiry window so the older files become eligible for cleanup.
    TimeUnit.SECONDS.sleep(3);
    // The next run writes a new file and prunes expired ones, leaving
    // three files (2 older than the expire time, 1 new).
    updater.run();
    Assert.assertEquals(3, credentialsDir.list().size());
  } finally {
    updater.stopAndWait();
  }
}
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
The testUpdater method of the SparkCredentialsUpdaterTest class.
@Test
public void testUpdater() throws Exception {
  Location credentialsDir = Locations.toLocation(TEMPORARY_FOLDER.newFolder());
  // Both the update interval and the expiry are a full day and the cleanup limit is
  // unbounded, so neither auto-refresh nor file cleanup happens during the test.
  SparkCredentialsUpdater updater =
    new SparkCredentialsUpdater(createCredentialsSupplier(), credentialsDir, "credentials",
                                TimeUnit.DAYS.toMillis(1), TimeUnit.DAYS.toMillis(1), Integer.MAX_VALUE) {
      @Override
      long getNextUpdateDelay(Credentials credentials) throws IOException {
        return TimeUnit.DAYS.toMillis(1);
      }
    };
  // Before the updater starts, the directory must be empty.
  Assert.assertTrue(credentialsDir.list().isEmpty());
  UserGroupInformation.getCurrentUser().addToken(
    new Token<>(Bytes.toBytes("id"), Bytes.toBytes("pass"), new Text("kind"), new Text("service")));
  updater.startAndWait();
  try {
    List<Location> expectedFiles = new ArrayList<>();
    expectedFiles.add(credentialsDir.append("credentials-1"));
    for (int round = 1; round <= 10; round++) {
      Assert.assertEquals(expectedFiles, listAndSort(credentialsDir));
      // Read the credentials back from the newest generation file.
      Location latest = expectedFiles.get(expectedFiles.size() - 1);
      Credentials persisted = new Credentials();
      try (DataInputStream input = new DataInputStream(latest.getInputStream())) {
        persisted.readTokenStorageStream(input);
      }
      // Every token held by the current user must have been persisted.
      Credentials userCredentials = UserGroupInformation.getCurrentUser().getCredentials();
      for (Token<? extends TokenIdentifier> token : userCredentials.getAllTokens()) {
        Assert.assertEquals(token, persisted.getToken(token.getService()));
      }
      // Add one more token and trigger another write; a new generation file should appear.
      UserGroupInformation.getCurrentUser().addToken(
        new Token<>(Bytes.toBytes("id" + round), Bytes.toBytes("pass" + round),
                    new Text("kind" + round), new Text("service" + round)));
      updater.run();
      expectedFiles.add(credentialsDir.append("credentials-" + (round + 1)));
    }
  } finally {
    updater.stopAndWait();
  }
}
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
The run method of the SparkCredentialsUpdater class.
@Override
public void run() {
  // Default to the configured interval so that a failure occurring before
  // getNextUpdateDelay is computed still retries within a bounded time.
  long nextUpdateTime = updateIntervalMs;
  try {
    if (generation == 0) {
      generation = findLatestGeneration();
    }
    // Write to the next generation file. It's ok to skip some generation if the write failed.
    generation++;
    Location credentialsFile = credentialsDir.append(fileNamePrefix + SPARK_YARN_CREDS_COUNTER_DELIM + generation);
    Location tempFile = credentialsDir.append(credentialsFile.getName() + SPARK_YARN_CREDS_TEMP_EXTENSION);
    // Write the credentials to a temp location, then rename to the final one so that
    // readers never observe a partially written credentials file.
    Credentials credentials = credentialsSupplier.get();
    try (DataOutputStream os = new DataOutputStream(tempFile.getOutputStream("600"))) {
      credentials.writeTokenStorageToStream(os);
    }
    if (!credentialsFile.equals(tempFile.renameTo(credentialsFile))) {
      throw new IOException("Failed to rename from " + tempFile + " to " + credentialsFile);
    }
    LOG.debug("Credentials written to {}", credentialsFile);
    // Schedule the next update.
    // Use the same logic as the Spark executor to calculate the update time.
    nextUpdateTime = getNextUpdateDelay(credentials);
    LOG.debug("Next credentials refresh at {}ms later", nextUpdateTime);
    scheduler.schedule(this, nextUpdateTime, TimeUnit.MILLISECONDS);
    // Cleanup failures must not propagate to the outer catch: the next run is already
    // scheduled above, and rescheduling from the catch would double-schedule this task.
    try {
      cleanup();
    } catch (Exception e) {
      LOG.warn("Exception raised when cleaning up old credentials files", e);
    }
  } catch (Exception e) {
    // Retry time is the min(1 minute, update interval)
    long retryDelay = Math.min(60000, nextUpdateTime);
    LOG.warn("Exception raised when saving credentials. Retry in {}ms", retryDelay, e);
    scheduler.schedule(this, retryDelay, TimeUnit.MILLISECONDS);
  }
}
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
The absolutePathTests method of the LocationsTest class.
@Test
public void absolutePathTests() throws IOException {
  // HDFS-backed FileContext location factory.
  Configuration conf = new Configuration();
  conf.set("fs.defaultFS", "hdfs://1.2.3.4:8020/");
  assertAbsolutePathRoundTrip(new FileContextLocationFactory(conf, TEST_BASE_PATH));
  // Local file system through FileContext.
  conf = new Configuration();
  conf.set("fs.defaultFS", "file:///");
  assertAbsolutePathRoundTrip(new FileContextLocationFactory(conf, TEST_BASE_PATH));
  // Local file system through LocalLocationFactory.
  assertAbsolutePathRoundTrip(new LocalLocationFactory(new File(TEST_BASE_PATH)));
}

/**
 * Asserts that a location created from {@code TEST_PATH} and the location resolved back from
 * its absolute path via {@code Locations.getLocationFromAbsolutePath} have the same URI.
 *
 * @param locationFactory the factory under test
 * @throws IOException if location creation fails
 */
private void assertAbsolutePathRoundTrip(LocationFactory locationFactory) throws IOException {
  Location original = locationFactory.create(TEST_PATH);
  Location resolved = Locations.getLocationFromAbsolutePath(locationFactory, original.toURI().getPath());
  Assert.assertEquals(original.toURI(), resolved.toURI());
}
Aggregations