use of co.cask.cdap.internal.app.runtime.BasicProgramContext in project cdap by caskdata.
the class MapReduceTaskContextProvider method createCacheLoader.
/**
 * Creates a {@link CacheLoader} for the task context cache.
 */
private CacheLoader<ContextCacheKey, BasicMapReduceTaskContext> createCacheLoader(final Injector injector) {
  final DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  final DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  final SecureStore secureStore = injector.getInstance(SecureStore.class);
  final SecureStoreManager secureStoreManager = injector.getInstance(SecureStoreManager.class);
  final MessagingService messagingService = injector.getInstance(MessagingService.class);
  // Multiple instances of BasicMapReduceTaskContext can share the same program.
  final AtomicReference<Program> programRef = new AtomicReference<>();
  return new CacheLoader<ContextCacheKey, BasicMapReduceTaskContext>() {
    @Override
    public BasicMapReduceTaskContext load(ContextCacheKey key) throws Exception {
      MapReduceContextConfig contextConfig = new MapReduceContextConfig(key.getConfiguration());
      MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(key.getConfiguration());
      Program program = programRef.get();
      if (program == null) {
        // Creating a program is relatively cheap, so just create one and do a compare-and-set.
        programRef.compareAndSet(null, createProgram(contextConfig, classLoader.getProgramClassLoader()));
        program = programRef.get();
      }
      WorkflowProgramInfo workflowInfo = contextConfig.getWorkflowProgramInfo();
      DatasetFramework programDatasetFramework = workflowInfo == null
        ? datasetFramework
        : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo,
                                                                   program.getApplicationSpecification());
      // Setup dataset framework context, if required
      if (programDatasetFramework instanceof ProgramContextAware) {
        ProgramRunId programRunId = program.getId().run(ProgramRunners.getRunId(contextConfig.getProgramOptions()));
        ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programRunId));
      }
      MapReduceSpecification spec = program.getApplicationSpecification().getMapReduce().get(program.getName());
      MetricsCollectionService metricsCollectionService = null;
      MapReduceMetrics.TaskType taskType = null;
      String taskId = null;
      TaskAttemptID taskAttemptId = key.getTaskAttemptID();
      // The taskAttemptId can be null if the context is created from a org.apache.hadoop.mapreduce.Partitioner
      // or from a org.apache.hadoop.io.RawComparator.
      if (taskAttemptId != null) {
        taskId = taskAttemptId.getTaskID().toString();
        if (MapReduceMetrics.TaskType.hasType(taskAttemptId.getTaskType())) {
          taskType = MapReduceMetrics.TaskType.from(taskAttemptId.getTaskType());
          // The metrics collection service is only needed when the task is a mapper or a reducer.
          metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
        }
      }
      CConfiguration cConf = injector.getInstance(CConfiguration.class);
      TransactionSystemClient txClient = injector.getInstance(TransactionSystemClient.class);
      return new BasicMapReduceTaskContext(
        program, contextConfig.getProgramOptions(), cConf, taskType, taskId, spec,
        workflowInfo, discoveryServiceClient, metricsCollectionService, txClient,
        contextConfig.getTx(), programDatasetFramework, classLoader.getPluginInstantiator(),
        contextConfig.getLocalizedResources(), secureStore, secureStoreManager,
        authorizationEnforcer, authenticationContext, messagingService);
    }
  };
}
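The loader is designed to back a Guava LoadingCache keyed by ContextCacheKey, so each task attempt reuses a single context instead of rebuilding it per record. A minimal sketch of that wiring, assuming an already-constructed ContextCacheKey named key; the expiry policy is an illustrative assumption, not taken from the provider above:

// Sketch only. Uses com.google.common.cache.CacheBuilder / LoadingCache and
// java.util.concurrent.TimeUnit; the eviction settings are hypothetical.
LoadingCache<ContextCacheKey, BasicMapReduceTaskContext> taskContexts = CacheBuilder.newBuilder()
  .expireAfterAccess(10, TimeUnit.MINUTES)  // hypothetical expiry, tune to task lifetime
  .build(createCacheLoader(injector));

// Each task then fetches (or lazily creates) its context by configuration and attempt id.
BasicMapReduceTaskContext taskContext = taskContexts.getUnchecked(key);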
use of co.cask.cdap.internal.app.runtime.BasicProgramContext in project cdap by caskdata.
the class WorkerProgramRunner method run.
@Override
public ProgramController run(Program program, ProgramOptions options) {
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification.");
  int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
  Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
  int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
  Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");
  RunId runId = ProgramRunners.getRunId(options);
  ProgramType programType = program.getType();
  Preconditions.checkNotNull(programType, "Missing processor type.");
  Preconditions.checkArgument(programType == ProgramType.WORKER, "Only Worker process type is supported.");
  WorkerSpecification workerSpec = appSpec.getWorkers().get(program.getName());
  Preconditions.checkArgument(workerSpec != null, "Missing Worker specification for %s", program.getId());
  String instances = options.getArguments().getOption(ProgramOptionConstants.INSTANCES,
                                                      String.valueOf(workerSpec.getInstances()));
  WorkerSpecification newWorkerSpec = new WorkerSpecification(workerSpec.getClassName(), workerSpec.getName(),
                                                              workerSpec.getDescription(),
                                                              workerSpec.getProperties(), workerSpec.getDatasets(),
                                                              workerSpec.getResources(),
                                                              Integer.valueOf(instances));
  // Setup dataset framework context, if required
  if (datasetFramework instanceof ProgramContextAware) {
    ProgramId programId = program.getId();
    ((ProgramContextAware) datasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
  }
  final PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
  try {
    BasicWorkerContext context = new BasicWorkerContext(newWorkerSpec, program, options, cConf,
                                                        instanceId, instanceCount, metricsCollectionService,
                                                        datasetFramework, txClient, discoveryServiceClient,
                                                        streamWriterFactory, pluginInstantiator,
                                                        secureStore, secureStoreManager, messagingService);
    WorkerDriver worker = new WorkerDriver(program, newWorkerSpec, context);
    // Add a service listener to make sure the plugin instantiator is closed when the worker driver finishes.
    worker.addListener(new ServiceListenerAdapter() {
      @Override
      public void terminated(Service.State from) {
        Closeables.closeQuietly(pluginInstantiator);
      }

      @Override
      public void failed(Service.State from, Throwable failure) {
        Closeables.closeQuietly(pluginInstantiator);
      }
    }, Threads.SAME_THREAD_EXECUTOR);
    ProgramController controller = new WorkerControllerServiceAdapter(worker, program.getId(), runId,
                                                                      workerSpec.getName() + "-" + instanceId);
    worker.start();
    return controller;
  } catch (Throwable t) {
    Closeables.closeQuietly(pluginInstantiator);
    throw t;
  }
}
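The listener registration above is what guarantees pluginInstantiator is closed on both normal termination and failure, without the caller having to track the driver's lifecycle. The same idiom works against any Guava Service; a self-contained sketch, where MyService and the Closeable resource are hypothetical stand-ins (modern Guava's Service.Listener and MoreExecutors.directExecutor() take the place of Twill's ServiceListenerAdapter and Threads.SAME_THREAD_EXECUTOR):

import com.google.common.util.concurrent.AbstractIdleService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Service;

import java.io.Closeable;
import java.io.IOException;

public class CleanupListenerExample {

  // A trivial service standing in for WorkerDriver.
  static class MyService extends AbstractIdleService {
    @Override
    protected void startUp() { /* acquire runtime state */ }

    @Override
    protected void shutDown() { /* release runtime state */ }
  }

  private static void closeQuietly(Closeable closeable) {
    try {
      closeable.close();
    } catch (IOException e) {
      // best-effort cleanup: ignore
    }
  }

  public static void main(String[] args) {
    final Closeable resource = new Closeable() {
      @Override
      public void close() {
        System.out.println("resource closed");
      }
    };
    Service service = new MyService();
    // Close the resource on both exit paths: clean termination and failure.
    service.addListener(new Service.Listener() {
      @Override
      public void terminated(Service.State from) {
        closeQuietly(resource);
      }

      @Override
      public void failed(Service.State from, Throwable failure) {
        closeQuietly(resource);
      }
    }, MoreExecutors.directExecutor());
    service.startAsync().awaitRunning();
    service.stopAsync().awaitTerminated();
  }
}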
use of co.cask.cdap.internal.app.runtime.BasicProgramContext in project cdap by caskdata.
the class FlowletProgramRunner method run.
@SuppressWarnings("unchecked")
@Override
public ProgramController run(Program program, ProgramOptions options) {
  BasicFlowletContext flowletContext = null;
  try {
    // Extract and verify parameters
    String flowletName = options.getName();
    int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
    Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
    int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
    Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");
    RunId runId = ProgramRunners.getRunId(options);
    ApplicationSpecification appSpec = program.getApplicationSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");
    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported.");
    String processorName = program.getName();
    Preconditions.checkNotNull(processorName, "Missing processor name.");
    FlowSpecification flowSpec = appSpec.getFlows().get(processorName);
    FlowletDefinition flowletDef = flowSpec.getFlowlets().get(flowletName);
    Preconditions.checkNotNull(flowletDef, "Definition missing for flowlet \"%s\"", flowletName);
    Class<?> clz = Class.forName(flowletDef.getFlowletSpec().getClassName(), true, program.getClassLoader());
    Preconditions.checkArgument(Flowlet.class.isAssignableFrom(clz), "%s is not a Flowlet.", clz);
    // Setup dataset framework context, if required
    ProgramId programId = program.getId();
    FlowletId flowletId = programId.flowlet(flowletName);
    ProgramRunId run = programId.run(runId);
    ProgramContext programContext = new BasicProgramContext(run, flowletId);
    if (dsFramework instanceof ProgramContextAware) {
      ((ProgramContextAware) dsFramework).setContext(programContext);
    }
    Class<? extends Flowlet> flowletClass = (Class<? extends Flowlet>) clz;
    // Creates flowlet context
    flowletContext = new BasicFlowletContext(program, options, flowletId, instanceId, instanceCount,
                                             flowletDef.getDatasets(), flowletDef.getFlowletSpec(),
                                             metricsCollectionService, discoveryServiceClient, txClient,
                                             dsFramework, secureStore, secureStoreManager, messageService, cConf);
    // Creates tx related objects
    DataFabricFacade dataFabricFacade = dataFabricFacadeFactory.create(program, flowletContext.getDatasetCache());
    if (dataFabricFacade instanceof ProgramContextAware) {
      ((ProgramContextAware) dataFabricFacade).setContext(programContext);
    }
    // Creates QueueSpecification
    Table<Node, String, Set<QueueSpecification>> queueSpecs =
      new SimpleQueueSpecificationGenerator(new ApplicationId(program.getNamespaceId(),
                                                              program.getApplicationId())).create(flowSpec);
    Flowlet flowlet = new InstantiatorFactory(false).get(TypeToken.of(flowletClass)).create();
    TypeToken<? extends Flowlet> flowletType = TypeToken.of(flowletClass);
    // Set the context classloader to the cdap classloader. It is needed for the DatumWriterFactory
    // to be able to load cdap classes.
    Thread.currentThread().setContextClassLoader(FlowletProgramRunner.class.getClassLoader());
    // Inject DataSet, OutputEmitter, Metric fields
    ImmutableList.Builder<ProducerSupplier> queueProducerSupplierBuilder = ImmutableList.builder();
    Reflections.visit(flowlet, flowlet.getClass(),
                      new PropertyFieldSetter(flowletDef.getFlowletSpec().getProperties()),
                      new DataSetFieldSetter(flowletContext),
                      new MetricsFieldSetter(flowletContext.getMetrics()),
                      new OutputEmitterFieldSetter(outputEmitterFactory(flowletContext, flowletName,
                                                                        dataFabricFacade,
                                                                        queueProducerSupplierBuilder,
                                                                        queueSpecs)));
    ImmutableList.Builder<ConsumerSupplier<?>> queueConsumerSupplierBuilder = ImmutableList.builder();
    Collection<ProcessSpecification<?>> processSpecs =
      createProcessSpecification(flowletContext, flowletType, processMethodFactory(flowlet),
                                 processSpecificationFactory(flowletContext, dataFabricFacade, queueReaderFactory,
                                                             flowletName, queueSpecs,
                                                             queueConsumerSupplierBuilder,
                                                             createSchemaCache(program)),
                                 Lists.<ProcessSpecification<?>>newLinkedList());
    List<ConsumerSupplier<?>> consumerSuppliers = queueConsumerSupplierBuilder.build();
    // Create the flowlet driver
    AtomicReference<FlowletProgramController> controllerRef = new AtomicReference<>();
    Service serviceHook = createServiceHook(flowletName, consumerSuppliers, controllerRef);
    FlowletRuntimeService driver = new FlowletRuntimeService(flowlet, flowletContext, processSpecs,
                                                             createCallback(flowlet, flowletDef.getFlowletSpec()),
                                                             dataFabricFacade, serviceHook);
    FlowletProgramController controller = new FlowletProgramController(program.getId(), flowletName,
                                                                       flowletContext, driver,
                                                                       queueProducerSupplierBuilder.build(),
                                                                       consumerSuppliers);
    controllerRef.set(controller);
    LOG.info("Starting flowlet: {}", flowletContext);
    driver.start();
    LOG.info("Flowlet started: {}", flowletContext);
    return controller;
  } catch (Exception e) {
    // Something went wrong before the flowlet started; make sure we release the resources
    // of the flowlet context.
    if (flowletContext != null) {
      flowletContext.close();
    }
    throw Throwables.propagate(e);
  }
}
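All three call sites follow the same contract: before a shared component does work on behalf of a program, the runner checks whether it implements ProgramContextAware and, if so, injects a BasicProgramContext carrying the current ProgramRunId. A sketch of what a component on the receiving end might look like; the RunAwareAuditor class is hypothetical, only setContext(ProgramContext) mirrors the CDAP interface, and getProgramRunId() is assumed to be the accessor ProgramContext exposes:

// Hypothetical component that scopes its output to the current program run.
public final class RunAwareAuditor implements ProgramContextAware {

  private volatile ProgramContext programContext;

  @Override
  public void setContext(ProgramContext programContext) {
    this.programContext = programContext;
  }

  /** Logs an action scoped to the current program run, if a context was injected. */
  public void audit(String action) {
    ProgramContext context = programContext;
    // getProgramRunId() is an assumption about the ProgramContext accessor.
    String scope = (context == null) ? "unscoped" : String.valueOf(context.getProgramRunId());
    System.out.println(scope + ": " + action);
  }
}

// Wiring, identical in shape to the three runners above:
// if (auditor instanceof ProgramContextAware) {
//   ((ProgramContextAware) auditor).setContext(new BasicProgramContext(programId.run(runId)));
// }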