Use of co.cask.cdap.app.runtime.ProgramRunner in project cdap by caskdata.
Example from class ArtifactClassLoaderFactory, method createClassLoader.
/**
 * Creates a classloader that loads classes from a directory where an artifact jar has been expanded, with access
 * to packages that all program types have access to. The classloader created is only for artifact inspection
 * purposes and shouldn't be used for program execution, as it doesn't have the proper class filtering for the
 * specific program type of the program being executed.
 *
 * @param unpackDir the directory where the artifact jar has been expanded
 * @return a closeable classloader based off the specified artifact; on closing the returned {@link ClassLoader},
 * all temporary resources created for the classloader will be removed
 * @throws IOException if there was an error copying or unpacking the artifact
 */
CloseableClassLoader createClassLoader(File unpackDir) throws IOException {
  // Try to create a ProgramRunner from the Spark runtime system if it is available, so that its
  // ProgramClassLoader parent can be used. We don't know up front which program types an artifact contains.
  // TODO: CDAP-5613. We shouldn't always expose the Spark classes.
  ProgramRunner sparkRunner = null;
  try {
    sparkRunner = programRunnerFactory.create(ProgramType.SPARK);
  } catch (Exception e) {
    // Expected when Spark isn't supported; the default filter is used below instead.
    LOG.trace("Spark is not supported. Not using ProgramClassLoader from Spark", e);
  }
  // Parent selection: Spark-provided parent when available, otherwise the default filtered classloader.
  ClassLoader parent = sparkRunner instanceof ProgramClassLoaderProvider
    ? ((ProgramClassLoaderProvider) sparkRunner).createProgramClassLoaderParent()
    : FilterClassLoader.create(getClass().getClassLoader());
  final ProgramClassLoader artifactClassLoader = new ProgramClassLoader(cConf, unpackDir, parent);
  final ProgramRunner runnerToClose = sparkRunner;
  return new CloseableClassLoader(artifactClassLoader, new Closeable() {
    @Override
    public void close() {
      // Release the classloader's temporary resources first, then the runner that supplied its parent.
      Closeables.closeQuietly((Closeable) artifactClassLoader);
      if (runnerToClose instanceof Closeable) {
        Closeables.closeQuietly((Closeable) runnerToClose);
      }
    }
  });
}
Use of co.cask.cdap.app.runtime.ProgramRunner in project cdap by caskdata.
Example from class DistributedProgramRuntimeService, method createController.
/**
 * Creates a {@link ProgramController} for the given program by looking up its {@link ProgramRunner}.
 * Returns {@code null} if the program type is unsupported or the runner is not a
 * {@link DistributedProgramRunner}; both cases indicate a broken installation and are logged as errors.
 */
@Nullable
private ProgramController createController(ProgramDescriptor programDescriptor, TwillController controller, RunId runId) {
  ProgramId programId = programDescriptor.getProgramId();
  ProgramRunner programRunner;
  try {
    programRunner = programRunnerFactory.create(programId.getType());
  } catch (IllegalArgumentException e) {
    // This shouldn't happen. If it does, CDAP was incorrectly installed such that some program
    // type is not supported (maybe due to a version mismatch during an upgrade).
    LOG.error("Unsupported program type {} for program {}. "
                + "It is likely caused by incorrect CDAP installation or upgrade to incompatible CDAP version",
              programId.getType(), programId);
    return null;
  }
  if (programRunner instanceof DistributedProgramRunner) {
    return ((DistributedProgramRunner) programRunner).createProgramController(controller, programDescriptor, runId);
  }
  // Also unexpected: this means the CDAP core or a runtime provider extension was wrongly implemented.
  ResourceReport resourceReport = controller.getResourceReport();
  LOG.error("Unable to create ProgramController for program {} for twill application {}. It is likely caused by "
              + "invalid CDAP program runtime extension.",
            programId,
            resourceReport == null ? "'unknown twill application'" : resourceReport.getApplicationId());
  return null;
}
Use of co.cask.cdap.app.runtime.ProgramRunner in project cdap by caskdata.
Example from class DistributedWorkflowProgramRunner, method setupLaunchConfig.
/**
 * Sets up the launch configuration for a Workflow. Each MapReduce/Spark program type that appears in the
 * workflow contributes its own launch setup (via the corresponding {@link DistributedProgramRunner}), and
 * the collected class acceptors are combined before a single Workflow driver runnable is installed.
 *
 * @param launchConfig the launch configuration to populate
 * @param program the workflow program being launched
 * @param options options for the program launch
 * @param cConf the CDAP configuration
 * @param hConf the Hadoop configuration
 * @param tempDir a temporary directory available during setup
 * @throws IOException if a delegate program runner fails to set up its launch context
 */
@Override
protected void setupLaunchConfig(LaunchConfig launchConfig, Program program, ProgramOptions options, CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
WorkflowSpecification spec = program.getApplicationSpecification().getWorkflows().get(program.getName());
List<ClassAcceptor> acceptors = new ArrayList<>();
// Only interested in MapReduce and Spark nodes
Set<SchedulableProgramType> runnerTypes = EnumSet.of(SchedulableProgramType.MAPREDUCE, SchedulableProgramType.SPARK);
for (WorkflowActionNode node : Iterables.filter(spec.getNodeIdMap().values(), WorkflowActionNode.class)) {
// For each type, we only need one node to set up the launch context; remove() returns false once
// the type has already been handled (or was never of interest), so later nodes of that type are skipped.
ScheduleProgramInfo programInfo = node.getProgram();
if (!runnerTypes.remove(programInfo.getProgramType())) {
continue;
}
// Find the ProgramRunner of the given type and setup the launch context
ProgramType programType = ProgramType.valueOfSchedulableType(programInfo.getProgramType());
ProgramRunner runner = programRunnerFactory.create(programType);
try {
if (runner instanceof DistributedProgramRunner) {
// Call setupLaunchConfig with the corresponding program
ProgramId programId = program.getId().getParent().program(programType, programInfo.getProgramName());
((DistributedProgramRunner) runner).setupLaunchConfig(launchConfig, Programs.create(cConf, program, programId, runner), options, cConf, hConf, tempDir);
// Remember this runner's acceptor so all delegates' bundling rules can be combined below.
acceptors.add(launchConfig.getClassAcceptor());
}
} finally {
// Always release the runner we created, even if setup failed.
if (runner instanceof Closeable) {
Closeables.closeQuietly((Closeable) runner);
}
}
}
// Set the class acceptor: a class is accepted only if every delegate runner's acceptor accepts it.
launchConfig.setClassAcceptor(new AndClassAcceptor(acceptors));
// Clear any runnables added by the delegates and install the single runnable for the workflow driver.
launchConfig.clearRunnables();
Resources defaultResources = findDriverResources(program.getApplicationSpecification().getSpark(), program.getApplicationSpecification().getMapReduce(), spec);
launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), 1, options.getArguments().asMap(), defaultResources, 0);
}
Use of co.cask.cdap.app.runtime.ProgramRunner in project cdap by caskdata.
Example from class DefaultProgramWorkflowRunner, method create.
/**
 * Creates a {@link Runnable} that executes the named program inside the workflow.
 * On any failure, the {@link ProgramRunner} created here is closed before the exception
 * is propagated, so its resources are not leaked.
 *
 * @param name the name of the program within the workflow
 * @return a runnable that runs the program when invoked
 */
@Override
public Runnable create(String name) {
  ProgramRunner programRunner = programRunnerFactory.create(programType);
  try {
    ProgramId programId = workflowProgram.getId().getParent().program(programType, name);
    Program program = Programs.create(cConf, workflowProgram, programId, programRunner);
    return getProgramRunnable(name, programRunner, program);
  } catch (Exception e) {
    closeProgramRunner(programRunner);
    // Equivalent of the deprecated Guava Throwables.propagate(e): rethrow unchecked
    // exceptions untouched, wrap checked ones in a RuntimeException.
    if (e instanceof RuntimeException) {
      throw (RuntimeException) e;
    }
    throw new RuntimeException(e);
  }
}
Use of co.cask.cdap.app.runtime.ProgramRunner in project cdap by caskdata.
Example from class AppFabricTestHelper, method submit.
/**
 * Submits a program execution.
 *
 * @param app the application containing the program
 * @param programClassName name of the program class
 * @param userArgs runtime arguments
 * @param folderSupplier a Supplier of temporary folder
 * @return a {@link ProgramController} for controlling the program execution.
 */
public static ProgramController submit(ApplicationWithPrograms app, String programClassName, Arguments userArgs, Supplier<File> folderSupplier) throws Exception {
  ProgramRunnerFactory runnerFactory = injector.getInstance(ProgramRunnerFactory.class);
  ProgramRunner runner = null;
  Program program = null;
  for (ProgramDescriptor programDescriptor : app.getPrograms()) {
    if (programDescriptor.getSpecification().getClassName().equals(programClassName)) {
      runner = runnerFactory.create(programDescriptor.getProgramId().getType());
      program = createProgram(programDescriptor, app.getArtifactLocation(), runner, folderSupplier);
      break;
    }
  }
  // Fail fast with a descriptive message when the requested program class isn't part of the application.
  // Previously only `program` was checked without a message, so a missing match surfaced as a bare
  // assertion failure (or an NPE on `runner.run` if assertions were mishandled).
  Assert.assertNotNull("No program with class name " + programClassName + " found in application " + app, program);
  Assert.assertNotNull("No program runner created for program class " + programClassName, runner);
  BasicArguments systemArgs = new BasicArguments(ImmutableMap.of(ProgramOptionConstants.RUN_ID, RunIds.generate().getId(), ProgramOptionConstants.HOST, InetAddress.getLoopbackAddress().getCanonicalHostName(), ProgramOptionConstants.ARTIFACT_ID, Joiner.on(":").join(app.getArtifactId().toIdParts())));
  return runner.run(program, new SimpleProgramOptions(program.getId(), systemArgs, userArgs));
}
Aggregations