Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
In class MapReduceProgramRunner, the method run:
@Override
public ProgramController run(final Program program, ProgramOptions options) {
  // Extract and verify parameters
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification.");

  ProgramType processorType = program.getType();
  Preconditions.checkNotNull(processorType, "Missing processor type.");
  Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE, "Only MAPREDUCE process type is supported.");

  MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
  Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName());

  Arguments arguments = options.getArguments();
  RunId runId = ProgramRunners.getRunId(options);

  WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
  DatasetFramework programDatasetFramework = workflowInfo == null
    ? datasetFramework
    : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);

  // Setup dataset framework context, if required
  if (programDatasetFramework instanceof ProgramContextAware) {
    ProgramId programId = program.getId();
    ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
  }

  MapReduce mapReduce;
  try {
    mapReduce = new InstantiatorFactory(false).get(TypeToken.of(program.<MapReduce>getMainClass())).create();
  } catch (Exception e) {
    LOG.error("Failed to instantiate MapReduce class for {}", spec.getClassName(), e);
    throw Throwables.propagate(e);
  }

  // List of all Closeable resources that need to be cleaned up
  List<Closeable> closeables = new ArrayList<>();
  try {
    PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
    if (pluginInstantiator != null) {
      closeables.add(pluginInstantiator);
    }

    final BasicMapReduceContext context =
      new BasicMapReduceContext(program, options, cConf, spec, workflowInfo, discoveryServiceClient,
                                metricsCollectionService, txSystemClient, programDatasetFramework, streamAdmin,
                                getPluginArchive(options), pluginInstantiator, secureStore, secureStoreManager,
                                messagingService);

    Reflections.visit(mapReduce, mapReduce.getClass(),
                      new PropertyFieldSetter(context.getSpecification().getProperties()),
                      new MetricsFieldSetter(context.getMetrics()),
                      new DataSetFieldSetter(context));

    // Note: this sets the logging context on the thread level
    LoggingContextAccessor.setLoggingContext(context.getLoggingContext());

    // Set the job queue in hConf if it is provided
    Configuration hConf = new Configuration(this.hConf);
    String schedulerQueue = options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE);
    if (schedulerQueue != null && !schedulerQueue.isEmpty()) {
      hConf.set(JobContext.QUEUE_NAME, schedulerQueue);
    }

    Service mapReduceRuntimeService =
      new MapReduceRuntimeService(injector, cConf, hConf, mapReduce, spec, context, program.getJarLocation(),
                                  locationFactory, streamAdmin, txSystemClient, authorizationEnforcer,
                                  authenticationContext);
    mapReduceRuntimeService.addListener(
      createRuntimeServiceListener(program.getId(), runId, closeables, arguments, options.getUserArguments()),
      Threads.SAME_THREAD_EXECUTOR);

    final ProgramController controller = new MapReduceProgramController(mapReduceRuntimeService, context);

    LOG.debug("Starting MapReduce Job: {}", context);
    // In non-secure distributed mode, start the job as the configured HDFS user; otherwise the YARN user would
    // be running the job, but the data directory will be owned by cdap.
    if (MapReduceTaskContextProvider.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
      mapReduceRuntimeService.start();
    } else {
      ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), mapReduceRuntimeService);
    }
    return controller;
  } catch (Exception e) {
    closeAllQuietly(closeables);
    throw Throwables.propagate(e);
  }
}
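Every usage on this page follows the same pattern: Reflections.visit walks the program instance and applies a set of field setters, with PropertyFieldSetter copying values from the program specification's properties into @Property-annotated fields. A minimal sketch of what that one step amounts to, written against plain java.lang.reflect rather than CDAP's visitor machinery (the @Property annotation and PropertyInjector below are illustrative stand-ins, not CDAP's own types; the real setter also converts primitive and boxed types):

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.util.Map;

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@interface Property { }

final class PropertyInjector {
  private PropertyInjector() { }

  // Walks the class hierarchy and copies values from the properties map into
  // @Property-annotated String fields, keyed by field name.
  static void inject(Object target, Map<String, String> properties) throws IllegalAccessException {
    for (Class<?> type = target.getClass(); type != null && type != Object.class; type = type.getSuperclass()) {
      for (Field field : type.getDeclaredFields()) {
        if (field.isAnnotationPresent(Property.class) && properties.containsKey(field.getName())) {
          field.setAccessible(true);
          field.set(target, properties.get(field.getName()));
        }
      }
    }
  }
}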
Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
In class WorkerDriver, the method startUp:
@Override
protected void startUp() throws Exception {
  LoggingContextAccessor.setLoggingContext(context.getLoggingContext());

  // Instantiate worker instance
  Class<?> workerClass = program.getClassLoader().loadClass(spec.getClassName());
  @SuppressWarnings("unchecked")
  TypeToken<Worker> workerType = (TypeToken<Worker>) TypeToken.of(workerClass);
  worker = new InstantiatorFactory(false).get(workerType).create();

  // Fields injection
  Reflections.visit(worker, workerType.getType(),
                    new MetricsFieldSetter(context.getMetrics()),
                    new PropertyFieldSetter(spec.getProperties()));

  LOG.debug("Starting Worker Program {}", program.getId());

  // Initialize worker
  TransactionControl txControl = Transactions.getTransactionControl(TransactionControl.EXPLICIT, Worker.class,
                                                                    worker, "initialize", WorkerContext.class);
  try {
    context.initializeProgram(worker, context, txControl, false);
  } catch (LinkageError e) {
    // Wrap LinkageError: otherwise listeners of this Guava Service may not be called if the initialization
    // of the user program is missing dependencies (CDAP-2543)
    throw new Exception(e.getMessage(), e);
  }
}
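On the user-program side, both the field injection and the transaction-control lookup above are driven by annotations on the Worker class itself. A hedged sketch of a Worker this driver could start (the class name, field name, and behavior are assumptions for illustration, not code from the CDAP repo):

import co.cask.cdap.api.annotation.Property;
import co.cask.cdap.api.annotation.TransactionControl;
import co.cask.cdap.api.annotation.TransactionPolicy;
import co.cask.cdap.api.worker.AbstractWorker;
import co.cask.cdap.api.worker.WorkerContext;

public class PollingWorker extends AbstractWorker {

  // Populated by PropertyFieldSetter from WorkerSpecification.getProperties(), keyed by field name
  @Property
  private String pollIntervalSeconds;

  @Override
  @TransactionPolicy(TransactionControl.EXPLICIT)
  public void initialize(WorkerContext context) throws Exception {
    super.initialize(context);
    // Transactions.getTransactionControl resolves EXPLICIT from this annotation,
    // matching the default that WorkerDriver passes in
  }

  @Override
  public void run() {
    // ... poll on the configured interval ...
  }
}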
Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
In class CustomActionExecutor, the method createAction:
@SuppressWarnings("unchecked")
@Deprecated
private WorkflowAction createAction(BasicWorkflowContext context, InstantiatorFactory instantiator,
                                    ClassLoader classLoader) throws Exception {
  Class<?> clz = Class.forName(context.getSpecification().getClassName(), true, classLoader);
  Preconditions.checkArgument(WorkflowAction.class.isAssignableFrom(clz), "%s is not a WorkflowAction.", clz);
  WorkflowAction action = instantiator.get(TypeToken.of((Class<? extends WorkflowAction>) clz)).create();
  Metrics metrics = new ProgramUserMetrics(
    context.getProgramMetrics().childContext(Constants.Metrics.Tag.NODE, context.getSpecification().getName()));
  Reflections.visit(action, action.getClass(),
                    new PropertyFieldSetter(context.getSpecification().getProperties()),
                    new DataSetFieldSetter(context),
                    new MetricsFieldSetter(metrics));
  return action;
}
Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
In class CustomActionExecutor, the method createCustomAction:
@SuppressWarnings("unchecked")
private CustomAction createCustomAction(BasicCustomActionContext context, InstantiatorFactory instantiator,
                                        ClassLoader classLoader) throws Exception {
  Class<?> clz = Class.forName(context.getSpecification().getClassName(), true, classLoader);
  Preconditions.checkArgument(CustomAction.class.isAssignableFrom(clz), "%s is not a CustomAction.", clz);
  CustomAction action = instantiator.get(TypeToken.of((Class<? extends CustomAction>) clz)).create();
  Reflections.visit(action, action.getClass(),
                    new PropertyFieldSetter(context.getSpecification().getProperties()),
                    new DataSetFieldSetter(context),
                    new MetricsFieldSetter(context.getMetrics()));
  return action;
}
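The two factory methods differ only in the API they serve: createAction supports the deprecated WorkflowAction interface, while createCustomAction builds its CustomAction replacement; both apply the same three field setters. A hedged sketch of a custom action those setters would populate (the class name, field names, and metric name are illustrative assumptions):

import co.cask.cdap.api.annotation.Property;
import co.cask.cdap.api.customaction.AbstractCustomAction;
import co.cask.cdap.api.metrics.Metrics;

public class NotifyAction extends AbstractCustomAction {

  // Set by PropertyFieldSetter from CustomActionSpecification.getProperties()
  @Property
  private String recipient;

  // Set by MetricsFieldSetter; emitted metrics carry this workflow node's tag
  private Metrics metrics;

  @Override
  public void run() throws Exception {
    metrics.count("notifications.sent", 1);
    // ... notify the configured recipient ...
  }
}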
Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
In class SparkRuntimeService, the method startUp:
@Override
protected void startUp() throws Exception {
  // Additional spark job initialization at run-time.
  // This context is for calling initialize and onFinish on the Spark program.

  // Fields injection for the Spark program.
  // It has to be done in here instead of in SparkProgramRunner for the @UseDataSet injection,
  // since the dataset cache being used in Spark is a MultiThreadDatasetCache.
  // The AbstractExecutionThreadService guarantees that startUp(), run() and shutDown() all happen in the same thread.
  Reflections.visit(spark, spark.getClass(),
                    new PropertyFieldSetter(runtimeContext.getSparkSpecification().getProperties()),
                    new DataSetFieldSetter(runtimeContext.getDatasetCache()),
                    new MetricsFieldSetter(runtimeContext));

  // Creates a temporary directory locally for storing all generated files.
  File tempDir = DirUtils.createTempDir(
    new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR), cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsoluteFile());
  tempDir.mkdirs();
  this.cleanupTask = createCleanupTask(tempDir, System.getProperties());

  try {
    initialize();
    SparkRuntimeContextConfig contextConfig = new SparkRuntimeContextConfig(runtimeContext.getConfiguration());

    final File jobJar = generateJobJar(tempDir, contextConfig.isLocal(), cConf);
    final List<LocalizeResource> localizeResources = new ArrayList<>();
    String metricsConfPath;
    String classpath = "";

    if (contextConfig.isLocal()) {
      // In local mode, always copy (or link if local) user requested resources
      copyUserResources(context.getLocalizeResources(), tempDir);

      File metricsConf = SparkMetricsSink.writeConfig(new File(tempDir, CDAP_METRICS_PROPERTIES));
      metricsConfPath = metricsConf.getAbsolutePath();
    } else {
      // Localize all user requested files in distributed mode
      distributedUserResources(context.getLocalizeResources(), localizeResources);

      // Localize the program jar and the expanded program jar
      File programJar = Locations.linkOrCopy(runtimeContext.getProgram().getJarLocation(),
                                             new File(tempDir, SparkRuntimeContextProvider.PROGRAM_JAR_NAME));
      File expandedProgramJar = Locations.linkOrCopy(runtimeContext.getProgram().getJarLocation(),
                                                     new File(tempDir,
                                                              SparkRuntimeContextProvider.PROGRAM_JAR_EXPANDED_NAME));
      // Localize both the unexpanded and expanded program jar
      localizeResources.add(new LocalizeResource(programJar));
      localizeResources.add(new LocalizeResource(expandedProgramJar, true));

      // Localize plugins
      if (pluginArchive != null) {
        localizeResources.add(new LocalizeResource(pluginArchive, true));
      }

      // Create and localize the launcher jar, which is for setting up services and classloader for spark containers
      localizeResources.add(new LocalizeResource(createLauncherJar(tempDir)));

      // Create the metrics conf file in the current directory since the same value for the "spark.metrics.conf"
      // config needs to be used for both driver and executor processes.
      // Also localize the metrics conf file to the executor nodes.
      File metricsConf = SparkMetricsSink.writeConfig(new File(CDAP_METRICS_PROPERTIES));
      metricsConfPath = metricsConf.getName();
      localizeResources.add(new LocalizeResource(metricsConf));

      // Localize the cConf file
      localizeResources.add(new LocalizeResource(saveCConf(cConf, tempDir)));

      // Preserve and localize runtime information in the hConf
      Configuration hConf = contextConfig.set(runtimeContext, pluginArchive).getConfiguration();
      localizeResources.add(new LocalizeResource(saveHConf(hConf, tempDir)));

      // Joiner for creating the classpath for spark containers
      Joiner joiner = Joiner.on(File.pathSeparator).skipNulls();

      // Localize the spark.jar archive, which contains all CDAP and dependency jars
      File sparkJar = new File(tempDir, CDAP_SPARK_JAR);
      classpath = joiner.join(Iterables.transform(buildDependencyJar(sparkJar), new Function<String, String>() {
        @Override
        public String apply(String name) {
          return Paths.get("$PWD", CDAP_SPARK_JAR, name).toString();
        }
      }));
      localizeResources.add(new LocalizeResource(sparkJar, true));

      // Localize logback if there is one. It is placed at the beginning of the classpath
      File logbackJar = ProgramRunners.createLogbackJar(new File(tempDir, "logback.xml.jar"));
      if (logbackJar != null) {
        localizeResources.add(new LocalizeResource(logbackJar));
        classpath = joiner.join(Paths.get("$PWD", logbackJar.getName()), classpath);
      }

      // Localize extra jars and append them to the end of the classpath
      List<String> extraJars = new ArrayList<>();
      for (URI jarURI : CConfigurationUtil.getExtraJars(cConf)) {
        extraJars.add(Paths.get("$PWD", LocalizationUtils.getLocalizedName(jarURI)).toString());
        localizeResources.add(new LocalizeResource(jarURI, false));
      }
      classpath = joiner.join(classpath, joiner.join(extraJars));
    }

    final Map<String, String> configs = createSubmitConfigs(tempDir, metricsConfPath, classpath,
                                                            context.getLocalizeResources(), contextConfig.isLocal());
    submitSpark = new Callable<ListenableFuture<RunId>>() {
      @Override
      public ListenableFuture<RunId> call() throws Exception {
        // This happens when stop() was called while starting
        if (!isRunning()) {
          return immediateCancelledFuture();
        }
        return sparkSubmitter.submit(runtimeContext, configs, localizeResources, jobJar, runtimeContext.getRunId());
      }
    };
  } catch (LinkageError e) {
    // Wrap LinkageError: otherwise listeners of this Guava Service may not be called if the initialization
    // of the user program is missing dependencies (CDAP-2543)
    throw new Exception(e.getMessage(), e);
  } catch (Throwable t) {
    cleanupTask.run();
    throw t;
  }
}
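The classpath handed to createSubmitConfigs is built entirely from $PWD-relative entries, because the localized archives only exist in the YARN container's working directory at runtime: YARN expands the literal $PWD when the container launches. A small standalone sketch of that assembly, using Guava's Joiner the same way the method above does (the jar names are made up for illustration):

import com.google.common.base.Joiner;
import java.io.File;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

public class ClasspathSketch {
  public static void main(String[] args) {
    Joiner joiner = Joiner.on(File.pathSeparator).skipNulls();

    // Entries inside the localized spark.jar archive, resolved against the container's $PWD
    List<String> dependencyEntries = Arrays.asList(
      Paths.get("$PWD", "spark.jar", "cdap-common.jar").toString(),
      Paths.get("$PWD", "spark.jar", "guava.jar").toString());
    String classpath = joiner.join(dependencyEntries);

    // Logback is prepended so its configuration wins over anything later on the classpath
    classpath = joiner.join(Paths.get("$PWD", "logback.xml.jar"), classpath);

    // Extra jars from cConf are appended at the end
    classpath = joiner.join(classpath, Paths.get("$PWD", "extra-lib.jar"));

    System.out.println(classpath);
  }
}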