use of io.cdap.cdap.internal.app.runtime.ProgramClassLoader in project cdap by caskdata.
the class PluginClassLoader method createParent.
static ClassLoader createParent(ClassLoader templateClassLoader) {
  // Find the ProgramClassLoader from the template ClassLoader
  ClassLoader programClassLoader = templateClassLoader;
  while (programClassLoader != null && !(programClassLoader instanceof ProgramClassLoader)) {
    programClassLoader = programClassLoader.getParent();
  }
  // This shouldn't happen
  Preconditions.checkArgument(programClassLoader != null, "Cannot find ProgramClassLoader");
  // Package-filtered view of the template classloader, from which only classes in "Export-Packages" are loadable.
  Manifest manifest = ((ProgramClassLoader) programClassLoader).getManifest();
  Set<String> exportPackages = ManifestFields.getExportPackages(manifest);
  ClassLoader filteredTemplateClassLoader = new PackageFilterClassLoader(templateClassLoader, exportPackages::contains);
  // The returned parent delegates to the program classloader's parent first, then to the template's
  // export-packages; the plugin's own lib jars are added by the PluginClassLoader itself.
  return new CombineClassLoader(programClassLoader.getParent(), filteredTemplateClassLoader);
}
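For context, here is a minimal, self-contained sketch (not CDAP code) of the kind of package filtering that PackageFilterClassLoader performs above: a parent classloader that only exposes classes whose package is in an allowed set and delegates everything else to a backing classloader. The class name and structure are illustrative assumptions.

import java.util.Set;

// Illustrative only: refuses to load classes whose package is not in the allowed set,
// otherwise delegates to the backing ClassLoader.
public class AllowedPackagesClassLoader extends ClassLoader {
  private final ClassLoader delegate;
  private final Set<String> allowedPackages;

  public AllowedPackagesClassLoader(ClassLoader delegate, Set<String> allowedPackages) {
    super(null); // no implicit parent (besides bootstrap); loading goes through findClass below
    this.delegate = delegate;
    this.allowedPackages = allowedPackages;
  }

  @Override
  protected Class<?> findClass(String name) throws ClassNotFoundException {
    int idx = name.lastIndexOf('.');
    String pkg = idx >= 0 ? name.substring(0, idx) : "";
    if (!allowedPackages.contains(pkg)) {
      throw new ClassNotFoundException("Package not exported: " + pkg);
    }
    return delegate.loadClass(name);
  }
}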
use of io.cdap.cdap.internal.app.runtime.ProgramClassLoader in project cdap by caskdata.
the class Programs method create.
/**
* Creates a new {@link Program} using information from an existing program. The new program has the same
* runtime dependencies and must be from the same application as the original program.
*
* @param cConf the CDAP configuration
* @param originalProgram the original program
* @param programId the new program id
* @param programRunner the {@link ProgramRunner} that will execute the new program. If it is provided and
*                      implements {@link ProgramClassLoaderProvider}, the {@link ClassLoader} created for
*                      the {@link Program} will be determined by it; otherwise the {@link ClassLoader} will
*                      only have visibility into cdap-api and Hadoop classes.
* @return a new {@link Program} instance for the given programId
*/
public static Program create(CConfiguration cConf, Program originalProgram, ProgramId programId,
                             @Nullable ProgramRunner programRunner) {
  ClassLoader classLoader = originalProgram.getClassLoader();
  // The classloader should be ProgramClassLoader
  Preconditions.checkArgument(classLoader instanceof ProgramClassLoader,
                              "Program %s doesn't use ProgramClassLoader", originalProgram);
  // The new program should be in the same namespace and app
  ProgramId originalId = originalProgram.getId();
  Preconditions.checkArgument(originalId.getNamespaceId().equals(programId.getNamespaceId()),
                              "Program %s is not in the same namespace as %s", programId, originalId);
  Preconditions.checkArgument(originalId.getParent().equals(programId.getParent()),
                              "Program %s is not in the same application as %s", programId, originalId);
  // Make sure the program is defined in the app
  ApplicationSpecification appSpec = originalProgram.getApplicationSpecification();
  ensureProgramInApplication(appSpec, programId);
  return Programs.create(cConf, programRunner, new ProgramDescriptor(programId, appSpec),
                         originalProgram.getJarLocation(), ((ProgramClassLoader) classLoader).getDir());
}
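A hedged usage sketch of this method, assuming an existing Program from the same application; the ProgramId constructor arguments and the worker name are illustrative assumptions, not taken from CDAP samples.

// Illustrative only: derive a sibling ProgramId in the same namespace and application,
// then obtain a Program for it that reuses the original program's artifact.
ProgramId originalId = originalProgram.getId();
ProgramId siblingId = new ProgramId(originalId.getNamespace(), originalId.getApplication(),
                                    ProgramType.WORKER, "eventProcessor");
Program sibling = Programs.create(cConf, originalProgram, siblingId, programRunner);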
use of io.cdap.cdap.internal.app.runtime.ProgramClassLoader in project cdap by caskdata.
the class ArtifactClassLoaderFactory method createClassLoader.
/**
* Creates a classloader that loads classes from a directory where an artifact jar has been expanded, with access
* to the packages that all program types have access to. The classloader created is only for artifact inspection
* purposes and shouldn't be used for program execution, since it doesn't have the proper class filtering for the
* specific program type being executed.
*
* @param unpackDir the directory where the artifact jar has been expanded
* @return a closeable classloader based off the specified artifact; on closing the returned {@link ClassLoader},
* all temporary resources created for the classloader will be removed
*/
CloseableClassLoader createClassLoader(File unpackDir) {
  ProgramRunner programRunner = null;
  try {
    // Try to create a ProgramClassLoader from the Spark runtime system if it is available.
    // It is needed because we don't know what program types an artifact might have.
    // TODO: CDAP-5613. We shouldn't always expose the Spark classes.
    programRunner = programRunnerFactory.create(ProgramType.SPARK);
  } catch (Exception e) {
    // If Spark is not supported, the exception is expected. We'll use the default filter.
    LOG.trace("Spark is not supported. Not using ProgramClassLoader from Spark", e);
  }
  ProgramClassLoader programClassLoader = null;
  if (programRunner instanceof ProgramClassLoaderProvider) {
    programClassLoader = new ProgramClassLoader(
      cConf, unpackDir, ((ProgramClassLoaderProvider) programRunner).createProgramClassLoaderParent());
  }
  if (programClassLoader == null) {
    programClassLoader = new ProgramClassLoader(cConf, unpackDir, FilterClassLoader.create(getClass().getClassLoader()));
  }
  final ClassLoader finalProgramClassLoader = programClassLoader;
  final ProgramRunner finalProgramRunner = programRunner;
  return new CloseableClassLoader(programClassLoader, () -> {
    Closeables.closeQuietly((Closeable) finalProgramClassLoader);
    if (finalProgramRunner instanceof Closeable) {
      Closeables.closeQuietly((Closeable) finalProgramRunner);
    }
  });
}
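A hedged usage sketch, assuming a caller in the same package (createClassLoader is package-private) and a hypothetical plugin class name; closing the returned CloseableClassLoader releases the temporary resources, as stated in the Javadoc above.

// Illustrative only: inspect a class from an expanded artifact, then release temporary resources.
void inspectArtifact(ArtifactClassLoaderFactory factory, File unpackDir) throws Exception {
  try (CloseableClassLoader classLoader = factory.createClassLoader(unpackDir)) {
    // "com.example.MyPlugin" is a hypothetical class name, not part of CDAP.
    Class<?> pluginClass = classLoader.loadClass("com.example.MyPlugin");
    System.out.println("Inspected " + pluginClass.getName());
  }
}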
use of io.cdap.cdap.internal.app.runtime.ProgramClassLoader in project cdap by caskdata.
the class SparkRuntimeContextProvider method createProgram.
private static Program createProgram(CConfiguration cConf, SparkRuntimeContextConfig contextConfig) throws IOException {
  File programJar = new File(PROGRAM_JAR_NAME);
  File programDir = new File(PROGRAM_JAR_EXPANDED_NAME);
  ClassLoader parentClassLoader = new FilterClassLoader(SparkRuntimeContextProvider.class.getClassLoader(),
                                                        SparkResourceFilters.SPARK_PROGRAM_CLASS_LOADER_FILTER);
  ClassLoader classLoader = new ProgramClassLoader(cConf, programDir, parentClassLoader);
  return new DefaultProgram(new ProgramDescriptor(contextConfig.getProgramId(), contextConfig.getApplicationSpecification()),
                            Locations.toLocation(programJar), classLoader);
}
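A hedged sketch of how the resulting Program might be inspected; getId and getClassLoader come from the Program interface used above, while the surrounding call site is assumed to live inside SparkRuntimeContextProvider, since createProgram is private.

// Illustrative only: the created Program carries the id from the context config and the
// ProgramClassLoader wired up above.
Program program = createProgram(cConf, contextConfig);
System.out.println("Program: " + program.getId());
System.out.println("ClassLoader: " + program.getClassLoader().getClass().getName());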
use of io.cdap.cdap.internal.app.runtime.ProgramClassLoader in project cdap by caskdata.
the class PluginInstantiatorTest method testResourceOrder.
/**
 * If a plugin has the same resource in its main jar and in its library jars, the main jar copy should take precedence.
 */
@Test
public void testResourceOrder() throws IOException {
  File appDir = TMP_FOLDER.newFolder();
  File pluginsDir = TMP_FOLDER.newFolder();
  File pluginDir = TMP_FOLDER.newFolder();
  File pluginArchive = TMP_FOLDER.newFile();
  File jarDir = TMP_FOLDER.newFolder();
  ArtifactId artifactId = new ArtifactId("dummy", new ArtifactVersion("1.0"), ArtifactScope.USER);
  CConfiguration cConf = CConfiguration.create();
  cConf.set(Constants.CFG_LOCAL_DATA_DIR, TMP_FOLDER.newFolder().getAbsolutePath());
  ProgramClassLoader programClassLoader = new ProgramClassLoader(cConf, appDir, this.getClass().getClassLoader());
  PluginInstantiator pluginInstantiator = new PluginInstantiator(cConf, programClassLoader, pluginsDir);
  // Same resource name in the library jar and in the plugin's top-level directory, with different contents.
  FileUtils.write(new File(jarDir, "test.class"), "jarData");
  FileUtils.write(new File(pluginDir, "test.class"), "pluginData");
  BundleJarUtil.createJar(jarDir, new File(pluginDir, "library.jar"));
  BundleJarUtil.createJar(pluginDir, pluginArchive);
  pluginInstantiator.addArtifact(Locations.toLocation(pluginArchive), artifactId);
  PluginClassLoader loader = pluginInstantiator.getPluginClassLoader(artifactId, Collections.emptyList());
  // The plugin's own copy wins over the copy packaged inside library.jar.
  Assert.assertEquals("pluginData", IOUtils.toString(loader.getResource("test.class")));
  pluginInstantiator.close();
}
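As a hedged follow-up (not part of the original test), one could also enumerate every copy of the resource that the plugin classloader can see, for example by adding the following before pluginInstantiator.close(); the exact set and order of URLs returned is an assumption about the classloader's configuration.

// Illustrative only: list every visible copy of the resource, e.g. the plugin-dir copy
// and the copy packaged inside library.jar.
Enumeration<URL> copies = loader.getResources("test.class");
while (copies.hasMoreElements()) {
  URL url = copies.nextElement();
  System.out.println(url + " -> " + IOUtils.toString(url));
}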