Example 6 with BasicProgramContext

Use of co.cask.cdap.internal.app.runtime.BasicProgramContext in project cdap by caskdata.

In class MapReduceTaskContextProvider, method createCacheLoader:

/**
   * Creates a {@link CacheLoader} for the task context cache.
   */
private CacheLoader<ContextCacheKey, BasicMapReduceTaskContext> createCacheLoader(final Injector injector) {
    final DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
    final DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
    final SecureStore secureStore = injector.getInstance(SecureStore.class);
    final SecureStoreManager secureStoreManager = injector.getInstance(SecureStoreManager.class);
    final MessagingService messagingService = injector.getInstance(MessagingService.class);
    // Multiple instances of BasicMapReduceTaskContext can share the same program.
    final AtomicReference<Program> programRef = new AtomicReference<>();
    return new CacheLoader<ContextCacheKey, BasicMapReduceTaskContext>() {

        @Override
        public BasicMapReduceTaskContext load(ContextCacheKey key) throws Exception {
            MapReduceContextConfig contextConfig = new MapReduceContextConfig(key.getConfiguration());
            MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(key.getConfiguration());
            Program program = programRef.get();
            if (program == null) {
                // Creation of the program is relatively cheap, so just create it and do a compare-and-set.
                programRef.compareAndSet(null, createProgram(contextConfig, classLoader.getProgramClassLoader()));
                program = programRef.get();
            }
            WorkflowProgramInfo workflowInfo = contextConfig.getWorkflowProgramInfo();
            DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, program.getApplicationSpecification());
            // Setup dataset framework context, if required
            if (programDatasetFramework instanceof ProgramContextAware) {
                ProgramRunId programRunId = program.getId().run(ProgramRunners.getRunId(contextConfig.getProgramOptions()));
                ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programRunId));
            }
            MapReduceSpecification spec = program.getApplicationSpecification().getMapReduce().get(program.getName());
            MetricsCollectionService metricsCollectionService = null;
            MapReduceMetrics.TaskType taskType = null;
            String taskId = null;
            TaskAttemptID taskAttemptId = key.getTaskAttemptID();
            // taskAttemptId can be null if this is called from a org.apache.hadoop.io.RawComparator
            if (taskAttemptId != null) {
                taskId = taskAttemptId.getTaskID().toString();
                if (MapReduceMetrics.TaskType.hasType(taskAttemptId.getTaskType())) {
                    taskType = MapReduceMetrics.TaskType.from(taskAttemptId.getTaskType());
                    // the metrics collection service is only needed for mapper and reducer tasks
                    metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
                }
            }
            CConfiguration cConf = injector.getInstance(CConfiguration.class);
            TransactionSystemClient txClient = injector.getInstance(TransactionSystemClient.class);
            return new BasicMapReduceTaskContext(program, contextConfig.getProgramOptions(), cConf, taskType, taskId, spec, workflowInfo, discoveryServiceClient, metricsCollectionService, txClient, contextConfig.getTx(), programDatasetFramework, classLoader.getPluginInstantiator(), contextConfig.getLocalizedResources(), secureStore, secureStoreManager, authorizationEnforcer, authenticationContext, messagingService);
        }
    };
}
Also used : DiscoveryServiceClient(org.apache.twill.discovery.DiscoveryServiceClient) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) NameMappedDatasetFramework(co.cask.cdap.internal.app.runtime.workflow.NameMappedDatasetFramework) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) SecureStoreManager(co.cask.cdap.api.security.store.SecureStoreManager) MapReduceMetrics(co.cask.cdap.app.metrics.MapReduceMetrics) Program(co.cask.cdap.app.program.Program) DefaultProgram(co.cask.cdap.app.program.DefaultProgram) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) MapReduceSpecification(co.cask.cdap.api.mapreduce.MapReduceSpecification) AtomicReference(java.util.concurrent.atomic.AtomicReference) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) SecureStore(co.cask.cdap.api.security.store.SecureStore) CConfiguration(co.cask.cdap.common.conf.CConfiguration) MessagingService(co.cask.cdap.messaging.MessagingService) WorkflowProgramInfo(co.cask.cdap.internal.app.runtime.workflow.WorkflowProgramInfo) CacheLoader(com.google.common.cache.CacheLoader) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
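
Two idioms in createCacheLoader are worth noting: the Guava CacheLoader builds one BasicMapReduceTaskContext per ContextCacheKey on demand, and the AtomicReference compare-and-set lets concurrent loads share a single Program instance without locking. Below is a minimal sketch of that combination; Program and programFor are hypothetical stand-ins for illustration, not the real CDAP API.

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.atomic.AtomicReference;

public class SharedProgramCache {

    // Hypothetical stand-in for CDAP's Program, for illustration only.
    static final class Program {
        final String name;
        Program(String name) { this.name = name; }
    }

    // Shared across all cache loads: the first successful CAS publishes the
    // instance; later loads reuse it.
    private final AtomicReference<Program> programRef = new AtomicReference<>();

    private final LoadingCache<String, Program> cache = CacheBuilder.newBuilder()
        .build(new CacheLoader<String, Program>() {
            @Override
            public Program load(String taskId) {
                Program program = programRef.get();
                if (program == null) {
                    // Creation is cheap, so just create and compare-and-set;
                    // losing the race wastes at most one throwaway instance.
                    programRef.compareAndSet(null, new Program("shared"));
                    program = programRef.get();
                }
                return program;
            }
        });

    public Program programFor(String taskId) {
        // getUnchecked is safe here because load() throws no checked exception.
        return cache.getUnchecked(taskId);
    }
}

The lock-free CAS is a reasonable fit precisely because, as the comment in the original notes, creating a Program is relatively cheap: an occasional wasted instance costs less than contending on a lock.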

Example 7 with BasicProgramContext

Use of co.cask.cdap.internal.app.runtime.BasicProgramContext in project cdap by caskdata.

In class WorkerProgramRunner, method run:

@Override
public ProgramController run(Program program, ProgramOptions options) {
    ApplicationSpecification appSpec = program.getApplicationSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");
    int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
    Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
    int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
    Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");
    RunId runId = ProgramRunners.getRunId(options);
    ProgramType programType = program.getType();
    Preconditions.checkNotNull(programType, "Missing processor type.");
    Preconditions.checkArgument(programType == ProgramType.WORKER, "Only Worker process type is supported.");
    WorkerSpecification workerSpec = appSpec.getWorkers().get(program.getName());
    Preconditions.checkArgument(workerSpec != null, "Missing Worker specification for %s", program.getId());
    String instances = options.getArguments().getOption(ProgramOptionConstants.INSTANCES, String.valueOf(workerSpec.getInstances()));
    WorkerSpecification newWorkerSpec = new WorkerSpecification(workerSpec.getClassName(), workerSpec.getName(), workerSpec.getDescription(), workerSpec.getProperties(), workerSpec.getDatasets(), workerSpec.getResources(), Integer.valueOf(instances));
    // Setup dataset framework context, if required
    if (datasetFramework instanceof ProgramContextAware) {
        ProgramId programId = program.getId();
        ((ProgramContextAware) datasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
    }
    final PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
    try {
        BasicWorkerContext context = new BasicWorkerContext(newWorkerSpec, program, options, cConf, instanceId, instanceCount, metricsCollectionService, datasetFramework, txClient, discoveryServiceClient, streamWriterFactory, pluginInstantiator, secureStore, secureStoreManager, messagingService);
        WorkerDriver worker = new WorkerDriver(program, newWorkerSpec, context);
        // Add a service listener to make sure the plugin instantiator is closed when the worker driver finishes.
        worker.addListener(new ServiceListenerAdapter() {

            @Override
            public void terminated(Service.State from) {
                Closeables.closeQuietly(pluginInstantiator);
            }

            @Override
            public void failed(Service.State from, Throwable failure) {
                Closeables.closeQuietly(pluginInstantiator);
            }
        }, Threads.SAME_THREAD_EXECUTOR);
        ProgramController controller = new WorkerControllerServiceAdapter(worker, program.getId(), runId, workerSpec.getName() + "-" + instanceId);
        worker.start();
        return controller;
    } catch (Throwable t) {
        Closeables.closeQuietly(pluginInstantiator);
        throw t;
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) ProgramController(co.cask.cdap.app.runtime.ProgramController) WorkerSpecification(co.cask.cdap.api.worker.WorkerSpecification) MessagingService(co.cask.cdap.messaging.MessagingService) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Service(com.google.common.util.concurrent.Service) ServiceListenerAdapter(org.apache.twill.internal.ServiceListenerAdapter) ProgramId(co.cask.cdap.proto.id.ProgramId) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) PluginInstantiator(co.cask.cdap.internal.app.runtime.plugin.PluginInstantiator) ProgramType(co.cask.cdap.proto.ProgramType) RunId(org.apache.twill.api.RunId) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
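
The listener attached to the WorkerDriver is what ties the PluginInstantiator's lifetime to the worker's: the instantiator is closed on normal termination and on failure alike, while the try/catch covers the window before the listener is registered. The same cleanup pattern can be sketched with plain Guava services (Twill's ServiceListenerAdapter is just a convenience base class whose callbacks default to no-ops); the resource below is a hypothetical stand-in for the PluginInstantiator.

import com.google.common.util.concurrent.AbstractIdleService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Service;
import java.io.Closeable;
import java.io.IOException;

public class CloseOnStop {

    // Best-effort close, standing in for Closeables.closeQuietly in the original.
    static void closeQuietly(Closeable closeable) {
        try {
            closeable.close();
        } catch (IOException e) {
            // cleanup is best-effort; swallow the exception
        }
    }

    public static void main(String[] args) {
        // Hypothetical resource standing in for the PluginInstantiator.
        Closeable resource = () -> System.out.println("resource closed");

        // A do-nothing service standing in for the WorkerDriver.
        Service worker = new AbstractIdleService() {
            @Override protected void startUp() { }
            @Override protected void shutDown() { }
        };

        // Close the resource on both normal termination and failure,
        // mirroring the two overrides in WorkerProgramRunner above.
        worker.addListener(new Service.Listener() {
            @Override
            public void terminated(Service.State from) {
                closeQuietly(resource);
            }

            @Override
            public void failed(Service.State from, Throwable failure) {
                closeQuietly(resource);
            }
        }, MoreExecutors.directExecutor());

        worker.startAsync().awaitRunning();
        worker.stopAsync().awaitTerminated();
    }
}

MoreExecutors.directExecutor() plays the role of Threads.SAME_THREAD_EXECUTOR in the original: the callback runs on the thread driving the state transition, so the close completes before awaitTerminated() returns.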

Example 8 with BasicProgramContext

Use of co.cask.cdap.internal.app.runtime.BasicProgramContext in project cdap by caskdata.

In class FlowletProgramRunner, method run:

@SuppressWarnings("unchecked")
@Override
public ProgramController run(Program program, ProgramOptions options) {
    BasicFlowletContext flowletContext = null;
    try {
        // Extract and verify parameters
        String flowletName = options.getName();
        int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
        Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
        int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
        Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");
        RunId runId = ProgramRunners.getRunId(options);
        ApplicationSpecification appSpec = program.getApplicationSpecification();
        Preconditions.checkNotNull(appSpec, "Missing application specification.");
        ProgramType processorType = program.getType();
        Preconditions.checkNotNull(processorType, "Missing processor type.");
        Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported.");
        String processorName = program.getName();
        Preconditions.checkNotNull(processorName, "Missing processor name.");
        FlowSpecification flowSpec = appSpec.getFlows().get(processorName);
        FlowletDefinition flowletDef = flowSpec.getFlowlets().get(flowletName);
        Preconditions.checkNotNull(flowletDef, "Definition missing for flowlet \"%s\"", flowletName);
        Class<?> clz = Class.forName(flowletDef.getFlowletSpec().getClassName(), true, program.getClassLoader());
        Preconditions.checkArgument(Flowlet.class.isAssignableFrom(clz), "%s is not a Flowlet.", clz);
        // Setup dataset framework context, if required
        ProgramId programId = program.getId();
        FlowletId flowletId = programId.flowlet(flowletName);
        ProgramRunId run = programId.run(runId);
        ProgramContext programContext = new BasicProgramContext(run, flowletId);
        if (dsFramework instanceof ProgramContextAware) {
            ((ProgramContextAware) dsFramework).setContext(programContext);
        }
        Class<? extends Flowlet> flowletClass = (Class<? extends Flowlet>) clz;
        // Creates flowlet context
        flowletContext = new BasicFlowletContext(program, options, flowletId, instanceId, instanceCount, flowletDef.getDatasets(), flowletDef.getFlowletSpec(), metricsCollectionService, discoveryServiceClient, txClient, dsFramework, secureStore, secureStoreManager, messageService, cConf);
        // Creates tx related objects
        DataFabricFacade dataFabricFacade = dataFabricFacadeFactory.create(program, flowletContext.getDatasetCache());
        if (dataFabricFacade instanceof ProgramContextAware) {
            ((ProgramContextAware) dataFabricFacade).setContext(programContext);
        }
        // Creates QueueSpecification
        Table<Node, String, Set<QueueSpecification>> queueSpecs = new SimpleQueueSpecificationGenerator(new ApplicationId(program.getNamespaceId(), program.getApplicationId())).create(flowSpec);
        Flowlet flowlet = new InstantiatorFactory(false).get(TypeToken.of(flowletClass)).create();
        TypeToken<? extends Flowlet> flowletType = TypeToken.of(flowletClass);
        // Set the context classloader to the cdap classloader. It is needed for the DatumWriterFactory
        // to be able to load cdap classes
        Thread.currentThread().setContextClassLoader(FlowletProgramRunner.class.getClassLoader());
        // Inject DataSet, OutputEmitter, Metric fields
        ImmutableList.Builder<ProducerSupplier> queueProducerSupplierBuilder = ImmutableList.builder();
        Reflections.visit(flowlet, flowlet.getClass(), new PropertyFieldSetter(flowletDef.getFlowletSpec().getProperties()), new DataSetFieldSetter(flowletContext), new MetricsFieldSetter(flowletContext.getMetrics()), new OutputEmitterFieldSetter(outputEmitterFactory(flowletContext, flowletName, dataFabricFacade, queueProducerSupplierBuilder, queueSpecs)));
        ImmutableList.Builder<ConsumerSupplier<?>> queueConsumerSupplierBuilder = ImmutableList.builder();
        Collection<ProcessSpecification<?>> processSpecs = createProcessSpecification(flowletContext, flowletType, processMethodFactory(flowlet), processSpecificationFactory(flowletContext, dataFabricFacade, queueReaderFactory, flowletName, queueSpecs, queueConsumerSupplierBuilder, createSchemaCache(program)), Lists.<ProcessSpecification<?>>newLinkedList());
        List<ConsumerSupplier<?>> consumerSuppliers = queueConsumerSupplierBuilder.build();
        // Create the flowlet driver
        AtomicReference<FlowletProgramController> controllerRef = new AtomicReference<>();
        Service serviceHook = createServiceHook(flowletName, consumerSuppliers, controllerRef);
        FlowletRuntimeService driver = new FlowletRuntimeService(flowlet, flowletContext, processSpecs, createCallback(flowlet, flowletDef.getFlowletSpec()), dataFabricFacade, serviceHook);
        FlowletProgramController controller = new FlowletProgramController(program.getId(), flowletName, flowletContext, driver, queueProducerSupplierBuilder.build(), consumerSuppliers);
        controllerRef.set(controller);
        LOG.info("Starting flowlet: {}", flowletContext);
        driver.start();
        LOG.info("Flowlet started: {}", flowletContext);
        return controller;
    } catch (Exception e) {
        // Something went wrong before the flowlet started; make sure all resources
        // of the flowlet context are released.
        if (flowletContext != null) {
            flowletContext.close();
        }
        throw Throwables.propagate(e);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) Set(java.util.Set) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableList(com.google.common.collect.ImmutableList) Node(co.cask.cdap.app.queue.QueueSpecificationGenerator.Node) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) ProgramContext(co.cask.cdap.data.ProgramContext) InstantiatorFactory(co.cask.cdap.common.lang.InstantiatorFactory) FlowletDefinition(co.cask.cdap.api.flow.FlowletDefinition) FlowSpecification(co.cask.cdap.api.flow.FlowSpecification) MetricsFieldSetter(co.cask.cdap.internal.app.runtime.MetricsFieldSetter) ProgramType(co.cask.cdap.proto.ProgramType) RunId(org.apache.twill.api.RunId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) Flowlet(co.cask.cdap.api.flow.flowlet.Flowlet) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Service(com.google.common.util.concurrent.Service) MessagingService(co.cask.cdap.messaging.MessagingService) AbstractService(com.google.common.util.concurrent.AbstractService) AtomicReference(java.util.concurrent.atomic.AtomicReference) ProgramId(co.cask.cdap.proto.id.ProgramId) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) DataSetFieldSetter(co.cask.cdap.internal.app.runtime.DataSetFieldSetter) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) IOException(java.io.IOException) SimpleQueueSpecificationGenerator(co.cask.cdap.internal.app.queue.SimpleQueueSpecificationGenerator) PropertyFieldSetter(co.cask.cdap.common.lang.PropertyFieldSetter) FlowletId(co.cask.cdap.proto.id.FlowletId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) DataFabricFacade(co.cask.cdap.internal.app.runtime.DataFabricFacade) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
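
Each example above shares one preparatory step: before the program runs, the runner checks whether the DatasetFramework (and, in the flowlet case, the DataFabricFacade) implements ProgramContextAware and, if so, hands it a BasicProgramContext identifying the run. A stripped-down sketch of this optional-capability pattern follows; all types here are simplified, hypothetical stand-ins for the CDAP interfaces.

// Hypothetical, simplified stand-ins for the CDAP types, for illustration only.
interface ProgramContext {
    String getProgramRunId();
}

final class BasicProgramContext implements ProgramContext {
    private final String programRunId;
    BasicProgramContext(String programRunId) { this.programRunId = programRunId; }
    @Override public String getProgramRunId() { return programRunId; }
}

// The optional capability: implementations that need per-run context opt in.
interface ProgramContextAware {
    void setContext(ProgramContext context);
}

interface DatasetFramework { /* dataset operations elided */ }

// One implementation opts in by also implementing ProgramContextAware.
final class ContextAwareDatasetFramework implements DatasetFramework, ProgramContextAware {
    private volatile ProgramContext context;
    @Override public void setContext(ProgramContext context) { this.context = context; }
}

public class ContextAwareDemo {
    public static void main(String[] args) {
        DatasetFramework framework = new ContextAwareDatasetFramework();
        // Setup dataset framework context, if required: the same instanceof
        // check used by all three runners above.
        if (framework instanceof ProgramContextAware) {
            ((ProgramContextAware) framework).setContext(new BasicProgramContext("run-1234"));
        }
    }
}

The instanceof check keeps the runners decoupled from concrete framework implementations: a framework that never needs per-run context simply does not implement the interface, and the calling code stays the same.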

Aggregations

ProgramContextAware (co.cask.cdap.data.ProgramContextAware): 8
BasicProgramContext (co.cask.cdap.internal.app.runtime.BasicProgramContext): 8
MetricsCollectionService (co.cask.cdap.api.metrics.MetricsCollectionService): 7
MessagingService (co.cask.cdap.messaging.MessagingService): 7
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 6
PluginInstantiator (co.cask.cdap.internal.app.runtime.plugin.PluginInstantiator): 6
ProgramType (co.cask.cdap.proto.ProgramType): 6
ProgramId (co.cask.cdap.proto.id.ProgramId): 6
Service (com.google.common.util.concurrent.Service): 6
RunId (org.apache.twill.api.RunId): 6
ProgramController (co.cask.cdap.app.runtime.ProgramController): 5
CConfiguration (co.cask.cdap.common.conf.CConfiguration): 4
DatasetFramework (co.cask.cdap.data2.dataset2.DatasetFramework): 4
NameMappedDatasetFramework (co.cask.cdap.internal.app.runtime.workflow.NameMappedDatasetFramework): 4
WorkflowProgramInfo (co.cask.cdap.internal.app.runtime.workflow.WorkflowProgramInfo): 4
InstantiatorFactory (co.cask.cdap.common.lang.InstantiatorFactory): 3
ProgramRunId (co.cask.cdap.proto.id.ProgramRunId): 3
Closeable (java.io.Closeable): 3
IOException (java.io.IOException): 3
Configuration (org.apache.hadoop.conf.Configuration): 3