Use of io.cdap.cdap.common.lang.PropertyFieldSetter in project cdap by cdapio.
The class WorkerDriver, method startUp.
@Override
protected void startUp() throws Exception {
  LoggingContextAccessor.setLoggingContext(context.getLoggingContext());

  // Instantiate the worker instance
  Class<?> workerClass = program.getClassLoader().loadClass(spec.getClassName());
  @SuppressWarnings("unchecked")
  TypeToken<Worker> workerType = (TypeToken<Worker>) TypeToken.of(workerClass);
  worker = new InstantiatorFactory(false).get(workerType).create();

  // Field injection
  Reflections.visit(worker, workerType.getType(),
                    new MetricsFieldSetter(context.getMetrics()),
                    new PropertyFieldSetter(spec.getProperties()));

  LOG.debug("Starting Worker Program {}", program.getId());

  // Initialize the worker. Workers always use explicit transaction control.
  TransactionControl txControl = Transactions.getTransactionControl(
      TransactionControl.EXPLICIT, Worker.class, worker, "initialize", WorkerContext.class);
  context.initializeProgram(worker, txControl, false);
}
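The field injection above targets fields marked with CDAP's @Property annotation. Below is a minimal sketch of a worker that would receive a value through PropertyFieldSetter; the class name, property name, and configure-time wiring are illustrative assumptions, not code from the snippet above.

import io.cdap.cdap.api.annotation.Property;
import io.cdap.cdap.api.worker.AbstractWorker;

import java.util.Collections;

public class PollingWorker extends AbstractWorker {

  // Assumed to be populated by PropertyFieldSetter from the worker
  // specification's properties map, keyed by field name.
  @Property
  private String endpoint;

  @Override
  protected void configure() {
    setName("PollingWorker");
    // Hypothetical wiring: make the value available via spec.getProperties()
    setProperties(Collections.singletonMap("endpoint", "http://localhost:8080"));
  }

  @Override
  public void run() {
    // By the time run() is called, startUp() has executed Reflections.visit(...),
    // so 'endpoint' holds the configured value.
  }
}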
Use of io.cdap.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
The class CustomActionExecutor, method createCustomAction.
@SuppressWarnings("unchecked")
private CustomAction createCustomAction(BasicCustomActionContext context, InstantiatorFactory instantiator,
                                        ClassLoader classLoader) throws Exception {
  Class<?> clz = Class.forName(context.getSpecification().getClassName(), true, classLoader);
  Preconditions.checkArgument(CustomAction.class.isAssignableFrom(clz), "%s is not a CustomAction.", clz);
  CustomAction action = instantiator.get(TypeToken.of((Class<? extends CustomAction>) clz)).create();
  Reflections.visit(action, action.getClass(),
                    new PropertyFieldSetter(context.getSpecification().getProperties()),
                    new DataSetFieldSetter(context),
                    new MetricsFieldSetter(context.getMetrics()));
  return action;
}
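The field setters passed to Reflections.visit all follow the same pattern: walk the fields of the instance and fill in those that match. The following is a simplified, self-contained illustration of the PropertyFieldSetter idea; it is a sketch of the mechanism, not the actual CDAP implementation, which is a FieldVisitor driven by Reflections.visit and also handles primitive type conversion.

import io.cdap.cdap.api.annotation.Property;

import java.lang.reflect.Field;
import java.util.Map;

final class PropertyInjectionSketch {

  // Copy values from a properties map into @Property-annotated String fields,
  // keyed by field name, walking up the class hierarchy.
  static void inject(Object instance, Map<String, String> properties) throws IllegalAccessException {
    for (Class<?> type = instance.getClass(); type != null; type = type.getSuperclass()) {
      for (Field field : type.getDeclaredFields()) {
        if (!field.isAnnotationPresent(Property.class) || field.getType() != String.class) {
          continue; // the real setter also converts primitive types; omitted here
        }
        String value = properties.get(field.getName());
        if (value != null) {
          field.setAccessible(true);
          field.set(instance, value);
        }
      }
    }
  }
}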
Use of io.cdap.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
The class MapReduceProgramRunner, method run.
@Override
public ProgramController run(final Program program, ProgramOptions options) {
  // Extract and verify parameters
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification.");

  ProgramType processorType = program.getType();
  Preconditions.checkNotNull(processorType, "Missing processor type.");
  Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE,
                              "Only MAPREDUCE process type is supported.");

  MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
  Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName());

  Arguments arguments = options.getArguments();
  RunId runId = ProgramRunners.getRunId(options);

  WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
  DatasetFramework programDatasetFramework = workflowInfo == null
    ? datasetFramework
    : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);

  // Set up the dataset framework context, if required
  if (programDatasetFramework instanceof ProgramContextAware) {
    ProgramId programId = program.getId();
    ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
  }

  MapReduce mapReduce;
  try {
    mapReduce = new InstantiatorFactory(false).get(TypeToken.of(program.<MapReduce>getMainClass())).create();
  } catch (Exception e) {
    LOG.error("Failed to instantiate MapReduce class for {}", spec.getClassName(), e);
    throw Throwables.propagate(e);
  }

  // List of all Closeable resources that need to be cleaned up
  List<Closeable> closeables = new ArrayList<>();
  try {
    PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
    if (pluginInstantiator != null) {
      closeables.add(pluginInstantiator);
    }

    final BasicMapReduceContext context = new BasicMapReduceContext(
        program, options, cConf, spec, workflowInfo, discoveryServiceClient, metricsCollectionService,
        txSystemClient, programDatasetFramework, getPluginArchive(options), pluginInstantiator,
        secureStore, secureStoreManager, messagingService, metadataReader, metadataPublisher,
        namespaceQueryAdmin, fieldLineageWriter, remoteClientFactory);
    closeables.add(context);

    Reflections.visit(mapReduce, mapReduce.getClass(),
                      new PropertyFieldSetter(context.getSpecification().getProperties()),
                      new MetricsFieldSetter(context.getMetrics()),
                      new DataSetFieldSetter(context));

    // Note: this sets the logging context at the thread level
    LoggingContextAccessor.setLoggingContext(context.getLoggingContext());

    // Set the job queue in hConf if it is provided
    Configuration hConf = new Configuration(this.hConf);
    String schedulerQueue = options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE);
    if (schedulerQueue != null && !schedulerQueue.isEmpty()) {
      hConf.set(JobContext.QUEUE_NAME, schedulerQueue);
    }

    ClusterMode clusterMode = ProgramRunners.getClusterMode(options);
    Service mapReduceRuntimeService = new MapReduceRuntimeService(
        injector, cConf, hConf, mapReduce, spec, context, program.getJarLocation(), locationFactory,
        clusterMode, fieldLineageWriter);
    mapReduceRuntimeService.addListener(createRuntimeServiceListener(closeables), Threads.SAME_THREAD_EXECUTOR);

    ProgramController controller = new MapReduceProgramController(mapReduceRuntimeService, context);

    LOG.debug("Starting MapReduce Job: {}", context);
    // In non-secure, non-local mode, start the job as the configured HDFS user; otherwise
    // the user running the program runner (e.g. the YARN user) would
    // be running the job, but the data directory will be owned by cdap.
    if (MapReduceTaskContextProvider.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
      mapReduceRuntimeService.start();
    } else {
      ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), mapReduceRuntimeService);
    }
    return controller;
  } catch (Exception e) {
    closeAllQuietly(closeables);
    throw Throwables.propagate(e);
  }
}
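If anything fails after resources have been registered, the catch block releases them through closeAllQuietly before rethrowing. A plausible shape for that helper is sketched below; this is an assumption, since the actual implementation lives elsewhere in MapReduceProgramRunner.

import java.io.Closeable;
import java.io.IOException;
import java.util.List;

final class CloseablesSketch {

  // Close everything, swallowing IOExceptions so that cleanup cannot mask
  // the original failure the caller is about to rethrow.
  static void closeAllQuietly(List<Closeable> closeables) {
    for (Closeable closeable : closeables) {
      try {
        closeable.close();
      } catch (IOException e) {
        // Ignored by design; an exception is already being propagated.
      }
    }
  }
}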
Use of io.cdap.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
The class ReducerWrapper, method run.
@SuppressWarnings("unchecked")
@Override
public void run(Context context) throws IOException, InterruptedException {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader);

  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context);
  long metricsReportInterval = basicMapReduceContext.getMetricsReportIntervalMillis();
  final ReduceTaskMetricsWriter reduceTaskMetricsWriter =
      new ReduceTaskMetricsWriter(basicMapReduceContext.getProgramMetrics(), context);

  // This is a hook for periodic flushing of changes buffered by datasets (to avoid OOME)
  WrappedReducer.Context flushingContext =
      createAutoFlushingContext(context, basicMapReduceContext, reduceTaskMetricsWriter);
  basicMapReduceContext.setHadoopContext(flushingContext);

  String userReducer = context.getConfiguration().get(ATTR_REDUCER_CLASS);
  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  Reducer delegate = createReducerInstance(programClassLoader, userReducer);

  // Inject runtime components, such as datasets and metrics
  try {
    Reflections.visit(delegate, delegate.getClass(),
                      new PropertyFieldSetter(basicMapReduceContext.getSpecification().getProperties()),
                      new MetricsFieldSetter(basicMapReduceContext.getMetrics()),
                      new DataSetFieldSetter(basicMapReduceContext));
  } catch (Throwable t) {
    LOG.error("Failed to inject fields to {}.", delegate.getClass(), t);
    throw Throwables.propagate(t);
  }

  ClassLoader oldClassLoader;
  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle) delegate).initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    } catch (Exception e) {
      LOG.error("Failed to initialize reducer with {}", basicMapReduceContext, e);
      throw Throwables.propagate(e);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }

  oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
  try {
    delegate.run(flushingContext);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }

  // The transaction is not finished yet, but all operations should be dispatched
  // (some could still be buffered in memory by the tx agent)
  try {
    basicMapReduceContext.flushOperations();
  } catch (Exception e) {
    LOG.error("Failed to flush operations at the end of reducer of " + basicMapReduceContext, e);
    throw Throwables.propagate(e);
  }

  // Close all writers created by MultipleOutputs
  basicMapReduceContext.closeMultiOutputs();

  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle<? extends RuntimeContext>) delegate).destroy();
    } catch (Exception e) {
      LOG.error("Error during destroy of reducer {}", basicMapReduceContext, e);
      // Do nothing; try to finish
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }

  reduceTaskMetricsWriter.reportMetrics();
}
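Every call into user code above is bracketed by the same swap-and-restore of the thread's context classloader. That pattern can be captured in a small try-with-resources helper, sketched below; the snippet itself uses CDAP's ClassLoaders utility directly, and this helper is an illustration, not part of the codebase.

// Swap the thread's context classloader on construction and restore the
// previous one on close, so the pattern composes with try-with-resources.
final class ContextClassLoaderSwap implements AutoCloseable {

  private final ClassLoader previous;

  ContextClassLoaderSwap(ClassLoader newClassLoader) {
    Thread current = Thread.currentThread();
    this.previous = current.getContextClassLoader();
    current.setContextClassLoader(newClassLoader);
  }

  @Override
  public void close() {
    Thread.currentThread().setContextClassLoader(previous);
  }
}

With this in place, the delegate call could be written as: try (ContextClassLoaderSwap ignored = new ContextClassLoaderSwap(weakReferenceClassLoader)) { delegate.run(flushingContext); }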