Use of io.cdap.cdap.app.runtime.Arguments in project cdap by caskdata.
Class AbstractProgramTwillRunnable, method doInitialize.
/**
 * Prepares this instance to execute a program.
 *
 * @param programOptionFile a JSON file containing the serialized {@link ProgramOptions}
 * @throws Exception if failed to initialize
 */
private void doInitialize(File programOptionFile) throws Exception {
  controllerFuture = new CompletableFuture<>();
  programCompletion = new CompletableFuture<>();

  // Setup process-wide settings
  Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler());
  System.setSecurityManager(new ProgramContainerSecurityManager(System.getSecurityManager()));
  SLF4JBridgeHandler.removeHandlersForRootLogger();
  SLF4JBridgeHandler.install();

  // Create the ProgramOptions
  programOptions = createProgramOptions(programOptionFile);
  programRunId = programOptions.getProgramId().run(ProgramRunners.getRunId(programOptions));

  Arguments systemArgs = programOptions.getArguments();
  LoggingContextAccessor.setLoggingContext(
    LoggingContextHelper.getLoggingContextWithRunId(programRunId, systemArgs.asMap()));

  ClusterMode clusterMode = ProgramRunners.getClusterMode(programOptions);

  // Load configurations
  Configuration hConf = new Configuration();
  if (clusterMode == ClusterMode.ON_PREMISE) {
    hConf.clear();
    hConf.addResource(new File(systemArgs.getOption(ProgramOptionConstants.HADOOP_CONF_FILE)).toURI().toURL());
  }
  UserGroupInformation.setConfiguration(hConf);

  CConfiguration cConf = CConfiguration.create();
  cConf.clear();
  cConf.addResource(new File(systemArgs.getOption(ProgramOptionConstants.CDAP_CONF_FILE)).toURI().toURL());

  maxStopSeconds = cConf.getLong(io.cdap.cdap.common.conf.Constants.AppFabric.PROGRAM_MAX_STOP_SECONDS);

  Injector injector = Guice.createInjector(createModule(cConf, hConf, programOptions, programRunId));

  // Initialize the log appender
  logAppenderInitializer = injector.getInstance(LogAppenderInitializer.class);
  logAppenderInitializer.initialize();
  SystemArguments.setLogLevel(programOptions.getUserArguments(), logAppenderInitializer);

  // Set up the proxy selector when running in isolated (active monitoring) mode
  oldProxySelector = ProxySelector.getDefault();
  if (clusterMode == ClusterMode.ISOLATED) {
    RuntimeMonitors.setupMonitoring(injector, programOptions);
  }

  // Create the list of core services. They will be started in the run method
  // and shut down when the run method completes.
  coreServices = createCoreServices(injector, programOptions);

  // Create the ProgramRunner
  programRunner = createProgramRunner(injector);

  // Create the Program instance
  Location programJarLocation =
    Locations.toLocation(new File(systemArgs.getOption(ProgramOptionConstants.PROGRAM_JAR)));
  ApplicationSpecification appSpec =
    readJsonFile(new File(systemArgs.getOption(ProgramOptionConstants.APP_SPEC_FILE)), ApplicationSpecification.class);

  // Expand the program jar for creating the classloader
  ClassLoaderFolder classLoaderFolder = BundleJarUtil.prepareClassLoaderFolder(
    programJarLocation, () -> new File("expanded." + System.currentTimeMillis() + programJarLocation.getName()));

  program = Programs.create(cConf, programRunner,
                            new ProgramDescriptor(programOptions.getProgramId(), appSpec),
                            programJarLocation, classLoaderFolder.getDir());
}
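The Arguments access pattern above can be exercised in isolation. The following is a minimal sketch, not taken from the CDAP sources: it builds a BasicArguments the way the container would receive one, then reads options back through the same asMap/hasOption/getOption calls doInitialize uses. The file names are hypothetical placeholder values.

// Hypothetical sketch: the ProgramOptionConstants keys are real, the values are made up.
Map<String, String> raw = new HashMap<>();
raw.put(ProgramOptionConstants.CDAP_CONF_FILE, "cConf.xml");     // hypothetical value
raw.put(ProgramOptionConstants.HADOOP_CONF_FILE, "hConf.xml");   // hypothetical value
Arguments systemArgs = new BasicArguments(raw);

// hasOption guards optional entries before getOption is called
if (systemArgs.hasOption(ProgramOptionConstants.HADOOP_CONF_FILE)) {
  File hadoopConfFile = new File(systemArgs.getOption(ProgramOptionConstants.HADOOP_CONF_FILE));
}
// asMap exposes every option at once, e.g. for building a logging context
Map<String, String> view = systemArgs.asMap();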
Use of io.cdap.cdap.app.runtime.Arguments in project cdap by caskdata.
Class DistributedProgramRunner, method updateProgramOptions.
/**
 * Creates a new instance of {@link ProgramOptions} with artifact localization information and with
 * extra system arguments, while maintaining other fields of the given {@link ProgramOptions}.
 *
 * @param options the original {@link ProgramOptions}
 * @param localizeResources a {@link Map} of {@link LocalizeResource} to be localized to the remote container
 * @param tempDir a local temporary directory for creating files for artifact localization
 * @param extraSystemArgs a set of extra system arguments to be added/updated
 * @return a new instance of {@link ProgramOptions}
 * @throws IOException if failed to create local copy of artifact files
 */
private ProgramOptions updateProgramOptions(ProgramOptions options,
                                            Map<String, LocalizeResource> localizeResources,
                                            File tempDir,
                                            Map<String, String> extraSystemArgs) throws IOException {
  Arguments systemArgs = options.getArguments();

  Map<String, String> newSystemArgs = new HashMap<>(systemArgs.asMap());
  newSystemArgs.putAll(extraSystemArgs);

  if (systemArgs.hasOption(ProgramOptionConstants.PLUGIN_ARCHIVE)) {
    // If the archive already exists locally, we just need to re-localize it to remote containers
    File archiveFile = new File(systemArgs.getOption(ProgramOptionConstants.PLUGIN_ARCHIVE));
    // Localize plugins to two files, one expanded into a directory, one not
    localizeResources.put(PLUGIN_DIR, new LocalizeResource(archiveFile, true));
    localizeResources.put(PLUGIN_ARCHIVE, new LocalizeResource(archiveFile, false));
  } else if (systemArgs.hasOption(ProgramOptionConstants.PLUGIN_DIR)) {
    // If there is a plugin directory, then we need to create an archive and localize it to remote containers
    File localDir = new File(systemArgs.getOption(ProgramOptionConstants.PLUGIN_DIR));
    File archiveFile = new File(tempDir, PLUGIN_DIR + ".jar");

    // Store all artifact jars into a new jar file for localization without compression
    try (JarOutputStream jarOut = new JarOutputStream(new FileOutputStream(archiveFile))) {
      jarOut.setLevel(0);
      BundleJarUtil.addToArchive(localDir, jarOut);
    }

    // Localize plugins to two files, one expanded into a directory, one not
    localizeResources.put(PLUGIN_DIR, new LocalizeResource(archiveFile, true));
    localizeResources.put(PLUGIN_ARCHIVE, new LocalizeResource(archiveFile, false));
  }

  // Add/rename the entries in the system arguments
  if (localizeResources.containsKey(PLUGIN_DIR)) {
    newSystemArgs.put(ProgramOptionConstants.PLUGIN_DIR, PLUGIN_DIR);
  }
  if (localizeResources.containsKey(PLUGIN_ARCHIVE)) {
    newSystemArgs.put(ProgramOptionConstants.PLUGIN_ARCHIVE, PLUGIN_ARCHIVE);
  }

  return new SimpleProgramOptions(options.getProgramId(), new BasicArguments(newSystemArgs),
                                  options.getUserArguments(), options.isDebug());
}
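A hypothetical call site, not from the CDAP sources, showing how the merged options come back: one extra system argument is added (the HOST key is real, the value is illustrative) and the localization map may gain the plugin entries the method populates.

// Hypothetical sketch: options and tempDir would come from the launch context.
Map<String, LocalizeResource> localizeResources = new HashMap<>();
Map<String, String> extra =
  Collections.singletonMap(ProgramOptionConstants.HOST, "example-host");  // illustrative value
ProgramOptions updated = updateProgramOptions(options, localizeResources, tempDir, extra);
// updated now carries the merged system arguments; localizeResources may have
// gained PLUGIN_DIR / PLUGIN_ARCHIVE entries destined for the remote containers.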
Use of io.cdap.cdap.app.runtime.Arguments in project cdap by caskdata.
Class ProgramRunners, method getApplicationPrincipal.
/**
 * Returns the application principal if there is one.
 *
 * @param programOptions the program options to extract information from
 * @return the application principal or {@code null} if no application principal is available
 */
@Nullable
public static KerberosPrincipalId getApplicationPrincipal(ProgramOptions programOptions) {
  Arguments systemArgs = programOptions.getArguments();
  boolean hasAppPrincipal = Boolean.parseBoolean(systemArgs.getOption(ProgramOptionConstants.APP_PRINCIPAL_EXISTS));
  return hasAppPrincipal ? new KerberosPrincipalId(systemArgs.getOption(ProgramOptionConstants.PRINCIPAL)) : null;
}
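Because the method is @Nullable, callers must branch on the result. A short hypothetical usage sketch (the LOG field is assumed to exist at the call site):

// Hypothetical caller, not from the CDAP sources.
KerberosPrincipalId appPrincipal = ProgramRunners.getApplicationPrincipal(programOptions);
if (appPrincipal == null) {
  // No application-level principal was provided in the system arguments.
} else {
  LOG.debug("Running with application principal {}", appPrincipal);  // LOG assumed in scope
}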
Use of io.cdap.cdap.app.runtime.Arguments in project cdap by caskdata.
Class DefaultRuntimeJobTest, method testInjector.
@Test
public void testInjector() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  cConf.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().toString());
  LocationFactory locationFactory = new LocalLocationFactory(TEMP_FOLDER.newFile());

  DefaultRuntimeJob defaultRuntimeJob = new DefaultRuntimeJob();
  Arguments systemArgs = new BasicArguments(Collections.singletonMap(SystemArguments.PROFILE_NAME, "test"));

  Node node = new Node("test", Node.Type.MASTER, "127.0.0.1", System.currentTimeMillis(), Collections.emptyMap());
  Cluster cluster = new Cluster("test", ClusterStatus.RUNNING, Collections.singleton(node), Collections.emptyMap());

  ProgramRunId programRunId = NamespaceId.DEFAULT.app("app").workflow("workflow").run(RunIds.generate());
  SimpleProgramOptions programOpts = new SimpleProgramOptions(programRunId.getParent(), systemArgs, new BasicArguments());

  Injector injector = Guice.createInjector(defaultRuntimeJob.createModules(new RuntimeJobEnvironment() {

    @Override
    public LocationFactory getLocationFactory() {
      return locationFactory;
    }

    @Override
    public TwillRunner getTwillRunner() {
      return new NoopTwillRunnerService();
    }

    @Override
    public Map<String, String> getProperties() {
      return Collections.emptyMap();
    }
  }, cConf, programRunId, programOpts));

  injector.getInstance(LogAppenderInitializer.class);
  defaultRuntimeJob.createCoreServices(injector, systemArgs, cluster);
}
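The test builds its Arguments from a single profile entry. A minimal sketch of the round trip that construction relies on, assuming JUnit's Assert is in scope:

// Sketch: a BasicArguments built from a map returns the same values via getOption.
Arguments args = new BasicArguments(Collections.singletonMap(SystemArguments.PROFILE_NAME, "test"));
Assert.assertTrue(args.hasOption(SystemArguments.PROFILE_NAME));                  // org.junit.Assert assumed
Assert.assertEquals("test", args.getOption(SystemArguments.PROFILE_NAME));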
Use of io.cdap.cdap.app.runtime.Arguments in project cdap by caskdata.
Class SparkProgramRunner, method run.
@Override
public ProgramController run(Program program, ProgramOptions options) {
  LOG.trace("Starting Spark program {} with SparkProgramRunner of ClassLoader {}",
            program.getId(), getClass().getClassLoader());

  // Get the RunId first. It is used for the creation of the ClassLoader closing thread.
  Arguments arguments = options.getArguments();
  RunId runId = ProgramRunners.getRunId(options);

  Deque<Closeable> closeables = new LinkedList<>();
  try {
    // Extract and verify parameters
    ApplicationSpecification appSpec = program.getApplicationSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");

    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");

    SparkSpecification spec = appSpec.getSpark().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());

    String host = options.getArguments().getOption(ProgramOptionConstants.HOST);
    Preconditions.checkArgument(host != null, "No hostname is provided");

    // Get the WorkflowProgramInfo if it is started by Workflow
    WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
    DatasetFramework programDatasetFramework = workflowInfo == null
      ? datasetFramework
      : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);

    // Setup dataset framework context, if required
    if (programDatasetFramework instanceof ProgramContextAware) {
      ProgramId programId = program.getId();
      ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
    }

    PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
    if (pluginInstantiator != null) {
      closeables.addFirst(pluginInstantiator);
    }

    SparkRuntimeContext runtimeContext = new SparkRuntimeContext(
      new Configuration(hConf), program, options, cConf, host, txClient, programDatasetFramework,
      metricsCollectionService, workflowInfo, pluginInstantiator, secureStore, secureStoreManager,
      accessEnforcer, authenticationContext, messagingService, serviceAnnouncer, pluginFinder,
      locationFactory, metadataReader, metadataPublisher, namespaceQueryAdmin, fieldLineageWriter,
      remoteClientFactory, () -> { });
    closeables.addFirst(runtimeContext);

    Spark spark;
    try {
      spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
    } catch (Exception e) {
      LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
      throw Throwables.propagate(e);
    }

    boolean isLocal = SparkRuntimeContextConfig.isLocal(options);
    SparkSubmitter submitter;
    // If MasterEnvironment is not available, use the non-master-env Spark submitters
    MasterEnvironment masterEnv = MasterEnvironments.getMasterEnvironment();
    if (masterEnv != null && cConf.getBoolean(Constants.Environment.PROGRAM_SUBMISSION_MASTER_ENV_ENABLED, true)) {
      submitter = new MasterEnvironmentSparkSubmitter(cConf, locationFactory, host, runtimeContext, masterEnv, options);
    } else {
      submitter = isLocal
        ? new LocalSparkSubmitter()
        : new DistributedSparkSubmitter(hConf, locationFactory, host, runtimeContext,
                                        options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE));
    }

    Service sparkRuntimeService = new SparkRuntimeService(cConf, spark, getPluginArchive(options), runtimeContext,
                                                          submitter, locationFactory, isLocal, fieldLineageWriter,
                                                          masterEnv, commonNettyHttpServiceFactory);
    sparkRuntimeService.addListener(createRuntimeServiceListener(closeables), Threads.SAME_THREAD_EXECUTOR);

    ProgramController controller = new SparkProgramController(sparkRuntimeService, runtimeContext);

    LOG.debug("Starting Spark Job. Context: {}", runtimeContext);
    if (isLocal || UserGroupInformation.isSecurityEnabled()) {
      sparkRuntimeService.start();
    } else {
      ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), sparkRuntimeService);
    }
    return controller;
  } catch (Throwable t) {
    closeAllQuietly(closeables);
    throw Throwables.propagate(t);
  }
}
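The runner reads several options straight off options.getArguments(). A hedged sketch of that lookup pattern, using only the constants that appear in the snippet above; the null-handling comments are illustrative:

// Illustrative only: the host option is required by the runner, while the
// scheduler queue is optional and getOption returns null when it is unset.
Arguments arguments = options.getArguments();
String host = arguments.getOption(ProgramOptionConstants.HOST);
Preconditions.checkArgument(host != null, "No hostname is provided");
String schedulerQueue = arguments.getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE);  // may be null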