Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
The class DistributedProgramRunner, method updateProgramOptions.
/**
* Creates a new instance of {@link ProgramOptions} with artifact localization information and with
* extra system arguments, while maintaining other fields of the given {@link ProgramOptions}.
*
* @param options the original {@link ProgramOptions}.
* @param localizeResources a {@link Map} of {@link LocalizeResource} to be localized to the remote container
* @param tempDir a local temporary directory for creating files for artifact localization.
* @param extraSystemArgs a set of extra system arguments to be added/updated
* @return a new instance of {@link ProgramOptions}
* @throws IOException if creating local copies of the artifact files fails
*/
private ProgramOptions updateProgramOptions(ProgramOptions options, Map<String, LocalizeResource> localizeResources,
                                            File tempDir, Map<String, String> extraSystemArgs) throws IOException {
  Arguments systemArgs = options.getArguments();
  Map<String, String> newSystemArgs = new HashMap<>(systemArgs.asMap());
  newSystemArgs.putAll(extraSystemArgs);

  if (systemArgs.hasOption(ProgramOptionConstants.PLUGIN_ARCHIVE)) {
    // If the archive already exists locally, we just need to re-localize it to remote containers
    File archiveFile = new File(systemArgs.getOption(ProgramOptionConstants.PLUGIN_ARCHIVE));
    // Localize plugins to two files, one expanded into a directory, one not.
    localizeResources.put(PLUGIN_DIR, new LocalizeResource(archiveFile, true));
    localizeResources.put(PLUGIN_ARCHIVE, new LocalizeResource(archiveFile, false));
  } else if (systemArgs.hasOption(ProgramOptionConstants.PLUGIN_DIR)) {
    // If there is a plugin directory, then we need to create an archive and localize it to remote containers
    File localDir = new File(systemArgs.getOption(ProgramOptionConstants.PLUGIN_DIR));
    File archiveFile = new File(tempDir, PLUGIN_DIR + ".jar");
    // Store all artifact jars into a new jar file for localization without compression
    try (JarOutputStream jarOut = new JarOutputStream(new FileOutputStream(archiveFile))) {
      jarOut.setLevel(0);
      BundleJarUtil.addToArchive(localDir, jarOut);
    }
    // Localize plugins to two files, one expanded into a directory, one not.
    localizeResources.put(PLUGIN_DIR, new LocalizeResource(archiveFile, true));
    localizeResources.put(PLUGIN_ARCHIVE, new LocalizeResource(archiveFile, false));
  }

  // Add/rename the entries in the system arguments
  if (localizeResources.containsKey(PLUGIN_DIR)) {
    newSystemArgs.put(ProgramOptionConstants.PLUGIN_DIR, PLUGIN_DIR);
  }
  if (localizeResources.containsKey(PLUGIN_ARCHIVE)) {
    newSystemArgs.put(ProgramOptionConstants.PLUGIN_ARCHIVE, PLUGIN_ARCHIVE);
  }
  return new SimpleProgramOptions(options.getProgramId(), new BasicArguments(newSystemArgs),
                                  options.getUserArguments(), options.isDebug());
}
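The two LocalizeResource entries intentionally point at the same jar: the PLUGIN_DIR entry is expanded into a directory on the remote container, while PLUGIN_ARCHIVE ships as-is, and jarOut.setLevel(0) stores the entries without compression, as the source comment notes. Below is a rough JDK-only sketch of what the BundleJarUtil.addToArchive call accomplishes here; the class name, method name, and paths are illustrative, not CDAP APIs.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.stream.Stream;

public final class UncompressedJarSketch {

  // Packs every regular file under sourceDir into jarFile without compression.
  static void packUncompressed(Path sourceDir, Path jarFile) throws IOException {
    try (OutputStream out = Files.newOutputStream(jarFile);
         JarOutputStream jarOut = new JarOutputStream(out)) {
      jarOut.setLevel(0); // store entries uncompressed, mirroring the snippet above
      try (Stream<Path> files = Files.walk(sourceDir)) {
        // Plain for-loop (not forEach) so the IOException from Files.copy propagates.
        for (Path path : (Iterable<Path>) files.filter(Files::isRegularFile)::iterator) {
          // Entry names are relative to the source directory, with '/' separators
          jarOut.putNextEntry(new JarEntry(sourceDir.relativize(path).toString().replace('\\', '/')));
          Files.copy(path, jarOut);
          jarOut.closeEntry();
        }
      }
    }
  }

  public static void main(String[] args) throws IOException {
    packUncompressed(Path.of("plugins"), Path.of("artifacts.jar")); // illustrative paths
  }
}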
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
The class DistributedWorkflowProgramRunner, method setupLaunchConfig.
@Override
protected void setupLaunchConfig(ProgramLaunchConfig launchConfig, Program program, ProgramOptions options,
                                 CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
  WorkflowSpecification spec = program.getApplicationSpecification().getWorkflows().get(program.getName());
  List<ClassAcceptor> acceptors = new ArrayList<>();
  acceptors.add(launchConfig.getClassAcceptor());

  // Only interested in MapReduce and Spark nodes,
  // because CUSTOM_ACTION types run inside the driver.
  Set<SchedulableProgramType> runnerTypes = EnumSet.of(SchedulableProgramType.MAPREDUCE, SchedulableProgramType.SPARK);
  Iterable<ScheduleProgramInfo> programInfos = spec.getNodeIdMap().values().stream()
    .filter(WorkflowActionNode.class::isInstance)
    .map(WorkflowActionNode.class::cast)
    .map(WorkflowActionNode::getProgram)
    .filter(programInfo -> runnerTypes.contains(programInfo.getProgramType()))::iterator;

  // Can't use Stream.forEach as we want to preserve the IOException being thrown
  for (ScheduleProgramInfo programInfo : programInfos) {
    ProgramType programType = ProgramType.valueOfSchedulableType(programInfo.getProgramType());
    ProgramRunner runner = programRunnerFactory.create(programType);
    try {
      if (runner instanceof DistributedProgramRunner) {
        // Call setupLaunchConfig with the corresponding program.
        // A new ProgramOptions needs to be constructed with the scope extracted for the given program.
        ProgramId programId = program.getId().getParent().program(programType, programInfo.getProgramName());
        Map<String, String> programUserArgs = RuntimeArguments.extractScope(programId.getType().getScope(),
                                                                            programId.getProgram(),
                                                                            options.getUserArguments().asMap());
        ProgramOptions programOptions = new SimpleProgramOptions(programId, options.getArguments(),
                                                                 new BasicArguments(programUserArgs));
        ((DistributedProgramRunner) runner).setupLaunchConfig(launchConfig,
                                                              Programs.create(cConf, program, programId, runner),
                                                              programOptions, cConf, hConf, tempDir);
        acceptors.add(launchConfig.getClassAcceptor());
      }
    } finally {
      if (runner instanceof Closeable) {
        Closeables.closeQuietly((Closeable) runner);
      }
    }
  }

  // Set the class acceptor
  launchConfig.setClassAcceptor(new AndClassAcceptor(acceptors));

  // Find the default resource requirements based on the programs inside the workflow;
  // give the Workflow driver at least 768 MB of container memory.
  Map<String, Resources> runnablesResources = Maps.transformValues(launchConfig.getRunnables(), this::getResources);
  Resources defaultResources = maxResources(new Resources(768),
                                            findDriverResources(spec.getNodes(), runnablesResources));

  // Clear and set the runnable for the workflow driver.
  launchConfig.clearRunnables();
  // Extract scoped runtime arguments that are only meant for the workflow, not for child nodes.
  Map<String, String> runtimeArgs = RuntimeArguments.extractScope("task", "workflow", options.getUserArguments().asMap());
  launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), 1, runtimeArgs, defaultResources, 0);
}
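The "task" / "workflow" pair on the last extractScope call shows the runtime-argument scoping convention at work: a key written as task.workflow.<key> applies only to the workflow driver, while unscoped keys apply everywhere. Below is a deliberately simplified sketch of that convention; it is not CDAP's implementation, which handles additional cases and precedence rules.

import java.util.HashMap;
import java.util.Map;

public final class ScopeSketch {

  // Simplified scope extraction: keys prefixed with "<scope>.<name>." are unwrapped
  // and override unscoped keys of the same name; everything else passes through.
  static Map<String, String> extractScope(String scope, String name, Map<String, String> args) {
    String prefix = scope + "." + name + ".";
    Map<String, String> result = new HashMap<>();
    for (Map.Entry<String, String> entry : args.entrySet()) {
      if (entry.getKey().startsWith(prefix)) {
        result.put(entry.getKey().substring(prefix.length()), entry.getValue()); // scoped value wins
      } else {
        result.putIfAbsent(entry.getKey(), entry.getValue());
      }
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, String> userArgs = Map.of(
        "memory", "512",
        "task.workflow.memory", "1024",          // meant only for the workflow driver
        "mapreduce.Aggregator.memory", "2048");  // meant for a child node; passes through here
    // Prints {memory=1024, mapreduce.Aggregator.memory=2048} (map order may vary)
    System.out.println(extractScope("task", "workflow", userArgs));
  }
}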
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
The class WorkerProgramRunnerTest, method startProgram.
private ProgramController startProgram(ApplicationWithPrograms app, Class<?> programClass) throws Throwable {
  final AtomicReference<Throwable> errorCause = new AtomicReference<>();
  final ProgramController controller = AppFabricTestHelper.submit(app, programClass.getName(),
                                                                  new BasicArguments(), TEMP_FOLDER_SUPPLIER);
  runningPrograms.add(controller);
  controller.addListener(new AbstractListener() {
    @Override
    public void error(Throwable cause) {
      errorCause.set(cause);
    }

    @Override
    public void killed() {
      errorCause.set(new RuntimeException("Killed"));
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  return controller;
}
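startProgram only registers the failure listener and returns; the caller decides how long to wait and when to stop the run. A hypothetical caller might poll the controller state roughly as below, assuming ProgramController's getState()/stop() API; the worker class and timeout are illustrative and not part of this excerpt.

// Hypothetical caller: start the worker, wait for it to come up, then stop it.
private void runWorkerBriefly(ApplicationWithPrograms app) throws Throwable {
  ProgramController controller = startProgram(app, AppWithWorker.TableWriter.class); // illustrative program class
  long deadline = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(10);        // illustrative timeout
  while (controller.getState() != ProgramController.State.ALIVE) {
    if (System.currentTimeMillis() > deadline) {
      throw new TimeoutException("Worker did not reach ALIVE state in time");
    }
    TimeUnit.MILLISECONDS.sleep(100);
  }
  controller.stop().get(); // stop() returns a future that completes once the program has stopped
}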
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
The class DefaultRuntimeJobTest, method testInjector.
@Test
public void testInjector() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  cConf.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().toString());
  LocationFactory locationFactory = new LocalLocationFactory(TEMP_FOLDER.newFile());
  DefaultRuntimeJob defaultRuntimeJob = new DefaultRuntimeJob();
  Arguments systemArgs = new BasicArguments(Collections.singletonMap(SystemArguments.PROFILE_NAME, "test"));
  Node node = new Node("test", Node.Type.MASTER, "127.0.0.1", System.currentTimeMillis(), Collections.emptyMap());
  Cluster cluster = new Cluster("test", ClusterStatus.RUNNING, Collections.singleton(node), Collections.emptyMap());
  ProgramRunId programRunId = NamespaceId.DEFAULT.app("app").workflow("workflow").run(RunIds.generate());
  SimpleProgramOptions programOpts = new SimpleProgramOptions(programRunId.getParent(), systemArgs, new BasicArguments());

  Injector injector = Guice.createInjector(defaultRuntimeJob.createModules(new RuntimeJobEnvironment() {

    @Override
    public LocationFactory getLocationFactory() {
      return locationFactory;
    }

    @Override
    public TwillRunner getTwillRunner() {
      return new NoopTwillRunnerService();
    }

    @Override
    public Map<String, String> getProperties() {
      return Collections.emptyMap();
    }
  }, cConf, programRunId, programOpts));

  injector.getInstance(LogAppenderInitializer.class);
  defaultRuntimeJob.createCoreServices(injector, systemArgs, cluster);
}
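Note the two argument channels flowing into SimpleProgramOptions: the second constructor parameter carries system arguments (framework-level settings such as the profile name), while the third carries user runtime arguments. A small sketch of the distinction, using only calls that appear in these snippets; the user-argument key and value are illustrative.

// System arguments configure the runtime; user arguments are handed to the program itself.
Arguments systemArgs = new BasicArguments(Collections.singletonMap(SystemArguments.PROFILE_NAME, "test"));
Arguments userArgs = new BasicArguments(Collections.singletonMap("input.path", "/tmp/in")); // illustrative key
ProgramOptions opts = new SimpleProgramOptions(programRunId.getParent(), systemArgs, userArgs);

String profile = opts.getArguments().getOption(SystemArguments.PROFILE_NAME); // "test"
Map<String, String> forProgram = opts.getUserArguments().asMap();             // {input.path=/tmp/in}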
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
The class MapReduceWithMultipleInputsTest, method testMapperOutputTypeChecking.
@Test
public void testMapperOutputTypeChecking() throws Exception {
  final ApplicationWithPrograms app = deployApp(AppWithMapReduceUsingInconsistentMappers.class);
  // The MapReduce with consistent mapper types will succeed.
  Assert.assertTrue(runProgram(app, AppWithMapReduceUsingInconsistentMappers.MapReduceWithConsistentMapperTypes.class,
                               new BasicArguments()));
  // The MapReduce jobs with mapper classes of inconsistent output types will fail,
  // whether the mappers are set through the CDAP APIs or directly on the job.
  Assert.assertFalse(runProgram(app, AppWithMapReduceUsingInconsistentMappers.MapReduceWithInconsistentMapperTypes.class,
                                new BasicArguments()));
  Assert.assertFalse(runProgram(app, AppWithMapReduceUsingInconsistentMappers.MapReduceWithInconsistentMapperTypes2.class,
                                new BasicArguments()));
}
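The runProgram helper is not shown in this excerpt. Built only from pieces that appear above (AppFabricTestHelper.submit, AbstractListener, Threads.SAME_THREAD_EXECUTOR), a plausible shape for it would block until a terminal state and report success; the completed() callback and the latch wiring below are assumptions, not the actual helper.

// Plausible sketch of the runProgram helper used above (not the actual implementation).
private boolean runProgram(ApplicationWithPrograms app, Class<?> programClass, BasicArguments args) throws Exception {
  ProgramController controller = AppFabricTestHelper.submit(app, programClass.getName(), args, TEMP_FOLDER_SUPPLIER);
  CountDownLatch done = new CountDownLatch(1);
  AtomicBoolean success = new AtomicBoolean();
  controller.addListener(new AbstractListener() {
    @Override
    public void completed() { // assumed callback for successful completion
      success.set(true);
      done.countDown();
    }

    @Override
    public void error(Throwable cause) {
      done.countDown();
    }

    @Override
    public void killed() {
      done.countDown();
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  done.await(5, TimeUnit.MINUTES); // illustrative timeout
  return success.get();
}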