Use of co.cask.cdap.internal.app.spark.DefaultSparkConfigurer in project cdap by caskdata:
the addSpark method of the class DefaultAppConfigurer.
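For context, this method is reached when a CDAP application registers a Spark program from its configure() method; AbstractApplication.addSpark delegates to the application configurer, which DefaultAppConfigurer implements. A minimal hedged sketch of the caller side, assuming a hypothetical Spark implementation MySparkProgram defined elsewhere:

import co.cask.cdap.api.app.AbstractApplication;

// Hypothetical application; MySparkProgram is an assumed Spark implementation
// and is not defined in this snippet.
public class ExampleApp extends AbstractApplication {
  @Override
  public void configure() {
    setName("ExampleApp");
    // Ends up in DefaultAppConfigurer.addSpark, shown below.
    addSpark(new MySparkProgram());
  }
}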
@Override
public void addSpark(Spark spark) {
  Preconditions.checkArgument(spark != null, "Spark cannot be null.");
  DefaultSparkConfigurer configurer = null;

  // It is a bit hacky here to look for the DefaultExtendedSparkConfigurer implementation through the
  // SparkRunnerClassLoader directly (CDAP-11797)
  ClassLoader sparkRunnerClassLoader = ClassLoaders.findByName(
    spark.getClass().getClassLoader(),
    "co.cask.cdap.app.runtime.spark.classloader.SparkRunnerClassLoader");

  if (sparkRunnerClassLoader != null) {
    try {
      configurer = (DefaultSparkConfigurer) sparkRunnerClassLoader
        .loadClass("co.cask.cdap.app.deploy.spark.DefaultExtendedSparkConfigurer")
        .getConstructor(Spark.class, Id.Namespace.class, Id.Artifact.class,
                        ArtifactRepository.class, PluginInstantiator.class)
        .newInstance(spark, deployNamespace, artifactId, artifactRepository, pluginInstantiator);
    } catch (Exception e) {
      // Ignore it; the configurer will default to DefaultSparkConfigurer
      LOG.trace("No DefaultExtendedSparkConfigurer found. Fallback to DefaultSparkConfigurer.", e);
    }
  }

  if (configurer == null) {
    configurer = new DefaultSparkConfigurer(spark, deployNamespace, artifactId,
                                            artifactRepository, pluginInstantiator);
  }

  spark.configure(configurer);
  addDatasetsAndPlugins(configurer);
  SparkSpecification spec = configurer.createSpecification();
  sparks.put(spec.getName(), spec);
}
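The reflective lookup above is a general fallback-loading pattern: try to instantiate an optional, richer implementation by name, and quietly fall back to a plain default when the class is absent. A minimal self-contained sketch of that pattern outside CDAP; Greeter, DefaultGreeter, and the class name "com.example.FancyGreeter" are hypothetical stand-ins, not CDAP classes:

// Sketch of the load-by-name-with-fallback pattern used in addSpark above.
public class FallbackLoading {

  interface Greeter {
    String greet(String name);
  }

  // Plain default, analogous to DefaultSparkConfigurer.
  static class DefaultGreeter implements Greeter {
    @Override
    public String greet(String name) {
      return "Hello, " + name;
    }
  }

  static Greeter newGreeter(ClassLoader classLoader) {
    try {
      // Try the optional extended implementation first; it may not be on the classpath.
      // "com.example.FancyGreeter" is a hypothetical class name.
      return (Greeter) classLoader
        .loadClass("com.example.FancyGreeter")
        .getConstructor()
        .newInstance();
    } catch (Exception e) {
      // Same strategy as addSpark: swallow the failure and fall back to the default.
      return new DefaultGreeter();
    }
  }

  public static void main(String[] args) {
    Greeter greeter = newGreeter(FallbackLoading.class.getClassLoader());
    System.out.println(greeter.greet("CDAP"));  // prints "Hello, CDAP"
  }
}

Loading by name like this avoids a hard compile-time dependency from the application configurer on the Spark runtime module: the extended configurer is used only when the SparkRunnerClassLoader is actually present.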