Usage of co.cask.cdap.app.runtime.spark.SparkCredentialsUpdater in the project cdap by caskdata.
The following is the createCredentialsUpdater method of the class SparkDriverService.
/**
 * Creates a {@link SparkCredentialsUpdater} for {@link Credentials} in a secure environment. Returns
 * {@code null} when security is disabled (no token renewal interval configured) or when an
 * {@link IOException} from the {@link LocationFactory} prevents creation.
 */
@Nullable
private SparkCredentialsUpdater createCredentialsUpdater(Configuration hConf, SparkExecutionClient client) {
  try {
    SparkConf conf = new SparkConf();
    long renewalIntervalMs = conf.getLong("spark.yarn.token.renewal.interval", -1L);
    if (renewalIntervalMs <= 0) {
      // No renewal interval configured means security is off; nothing to update.
      return null;
    }

    // This env variable is set by Spark for all known Spark versions.
    // If it is missing, exception will be thrown.
    URI stagingDirURI = URI.create(System.getenv("SPARK_YARN_STAGING_DIR"));
    LocationFactory locationFactory = new FileContextLocationFactory(hConf);

    // Resolve the credentials directory relative to the home location when the staging dir is relative.
    Location credentialsDir;
    if (stagingDirURI.isAbsolute()) {
      credentialsDir = locationFactory.create(stagingDirURI.getPath());
    } else {
      credentialsDir = locationFactory.getHomeLocation().append(stagingDirURI.getPath());
    }
    LOG.info("Credentials DIR: {}", credentialsDir);

    int retentionDays = conf.getInt("spark.yarn.credentials.file.retention.days", 5);
    int retentionCount = conf.getInt("spark.yarn.credentials.file.retention.count", 5);
    Location credentialsFile = credentialsDir.append("credentials-" + UUID.randomUUID());

    // Update this property so that the executor will pick it up. It can't get set from the client side,
    // otherwise the AM process will try to look for keytab file
    SparkRuntimeEnv.setProperty("spark.yarn.credentials.file", credentialsFile.toURI().toString());

    return new SparkCredentialsUpdater(createCredentialsSupplier(client, credentialsDir), credentialsDir,
                                       credentialsFile.getName(), renewalIntervalMs,
                                       TimeUnit.DAYS.toMillis(retentionDays), retentionCount);
  } catch (IOException e) {
    LOG.warn("Failed to create credentials updater. Credentials update disabled", e);
    return null;
  }
}
Aggregations