
Example 1 with DataSetFieldSetter

Use of co.cask.cdap.internal.app.runtime.DataSetFieldSetter in project cdap by caskdata.

In class MapReduceProgramRunner, method run:

@Override
public ProgramController run(final Program program, ProgramOptions options) {
    // Extract and verify parameters
    ApplicationSpecification appSpec = program.getApplicationSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");
    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE, "Only MAPREDUCE process type is supported.");
    MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName());
    Arguments arguments = options.getArguments();
    RunId runId = ProgramRunners.getRunId(options);
    WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
    DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
    // Setup dataset framework context, if required
    if (programDatasetFramework instanceof ProgramContextAware) {
        ProgramId programId = program.getId();
        ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
    }
    MapReduce mapReduce;
    try {
        mapReduce = new InstantiatorFactory(false).get(TypeToken.of(program.<MapReduce>getMainClass())).create();
    } catch (Exception e) {
        LOG.error("Failed to instantiate MapReduce class for {}", spec.getClassName(), e);
        throw Throwables.propagate(e);
    }
    // List of all Closeable resources that need to be cleaned up
    List<Closeable> closeables = new ArrayList<>();
    try {
        PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
        if (pluginInstantiator != null) {
            closeables.add(pluginInstantiator);
        }
        final BasicMapReduceContext context = new BasicMapReduceContext(program, options, cConf, spec, workflowInfo, discoveryServiceClient, metricsCollectionService, txSystemClient, programDatasetFramework, streamAdmin, getPluginArchive(options), pluginInstantiator, secureStore, secureStoreManager, messagingService);
        Reflections.visit(mapReduce, mapReduce.getClass(), new PropertyFieldSetter(context.getSpecification().getProperties()), new MetricsFieldSetter(context.getMetrics()), new DataSetFieldSetter(context));
        // Note: this sets the logging context at the thread level
        LoggingContextAccessor.setLoggingContext(context.getLoggingContext());
        // Set the job queue in hConf if it is provided
        Configuration hConf = new Configuration(this.hConf);
        String schedulerQueue = options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE);
        if (schedulerQueue != null && !schedulerQueue.isEmpty()) {
            hConf.set(JobContext.QUEUE_NAME, schedulerQueue);
        }
        Service mapReduceRuntimeService = new MapReduceRuntimeService(injector, cConf, hConf, mapReduce, spec, context, program.getJarLocation(), locationFactory, streamAdmin, txSystemClient, authorizationEnforcer, authenticationContext);
        mapReduceRuntimeService.addListener(createRuntimeServiceListener(program.getId(), runId, closeables, arguments, options.getUserArguments()), Threads.SAME_THREAD_EXECUTOR);
        final ProgramController controller = new MapReduceProgramController(mapReduceRuntimeService, context);
        LOG.debug("Starting MapReduce Job: {}", context);
        // If security is not enabled, start the job as the user configured to access HDFS;
        // otherwise the YARN user that launched the program runner would
        // be running the job, but the data directory will be owned by cdap.
        if (MapReduceTaskContextProvider.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
            mapReduceRuntimeService.start();
        } else {
            ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), mapReduceRuntimeService);
        }
        return controller;
    } catch (Exception e) {
        closeAllQuietly(closeables);
        throw Throwables.propagate(e);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) Closeable(java.io.Closeable) ArrayList(java.util.ArrayList) MapReduce(co.cask.cdap.api.mapreduce.MapReduce) NameMappedDatasetFramework(co.cask.cdap.internal.app.runtime.workflow.NameMappedDatasetFramework) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) InstantiatorFactory(co.cask.cdap.common.lang.InstantiatorFactory) MetricsFieldSetter(co.cask.cdap.internal.app.runtime.MetricsFieldSetter) ProgramType(co.cask.cdap.proto.ProgramType) RunId(org.apache.twill.api.RunId) ProgramController(co.cask.cdap.app.runtime.ProgramController) MapReduceSpecification(co.cask.cdap.api.mapreduce.MapReduceSpecification) Arguments(co.cask.cdap.app.runtime.Arguments) MessagingService(co.cask.cdap.messaging.MessagingService) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Service(com.google.common.util.concurrent.Service) ProgramId(co.cask.cdap.proto.id.ProgramId) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) DataSetFieldSetter(co.cask.cdap.internal.app.runtime.DataSetFieldSetter) PropertyFieldSetter(co.cask.cdap.common.lang.PropertyFieldSetter) WorkflowProgramInfo(co.cask.cdap.internal.app.runtime.workflow.WorkflowProgramInfo) PluginInstantiator(co.cask.cdap.internal.app.runtime.plugin.PluginInstantiator) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
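All five usages share one pattern: Reflections.visit walks the program instance's class hierarchy and offers each field to a chain of setters (PropertyFieldSetter, MetricsFieldSetter, DataSetFieldSetter, and so on), which inject runtime objects into matching fields. The sketch below is a minimal, self-contained illustration of that visitor idea; SimpleFieldInjector and its match-by-type rule are simplifications invented for this example, whereas the real DataSetFieldSetter resolves a dataset per field (from its @UseDataSet annotation) through the program context.

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

final class SimpleFieldInjector {

    // Walk the class hierarchy, as Reflections.visit does, and assign 'value'
    // to every non-static field whose declared type can hold it.
    static void inject(Object target, Object value) throws IllegalAccessException {
        for (Class<?> type = target.getClass(); type != Object.class; type = type.getSuperclass()) {
            for (Field field : type.getDeclaredFields()) {
                if (!Modifier.isStatic(field.getModifiers()) && field.getType().isInstance(value)) {
                    field.setAccessible(true);
                    field.set(target, value);
                }
            }
        }
    }
}

For instance, SimpleFieldInjector.inject(mapReduce, metrics) would populate any Metrics-typed field of the MapReduce program, which is roughly what MetricsFieldSetter does in the run method above.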

Example 2 with DataSetFieldSetter

Use of co.cask.cdap.internal.app.runtime.DataSetFieldSetter in project cdap by caskdata.

In class CustomActionExecutor, method createAction:

@SuppressWarnings("unchecked")
@Deprecated
private WorkflowAction createAction(BasicWorkflowContext context, InstantiatorFactory instantiator, ClassLoader classLoader) throws Exception {
    Class<?> clz = Class.forName(context.getSpecification().getClassName(), true, classLoader);
    Preconditions.checkArgument(WorkflowAction.class.isAssignableFrom(clz), "%s is not a WorkflowAction.", clz);
    WorkflowAction action = instantiator.get(TypeToken.of((Class<? extends WorkflowAction>) clz)).create();
    Metrics metrics = new ProgramUserMetrics(context.getProgramMetrics().childContext(Constants.Metrics.Tag.NODE, context.getSpecification().getName()));
    Reflections.visit(action, action.getClass(), new PropertyFieldSetter(context.getSpecification().getProperties()), new DataSetFieldSetter(context), new MetricsFieldSetter(metrics));
    return action;
}
Also used : ProgramUserMetrics(co.cask.cdap.app.metrics.ProgramUserMetrics) Metrics(co.cask.cdap.api.metrics.Metrics) PropertyFieldSetter(co.cask.cdap.common.lang.PropertyFieldSetter) MetricsFieldSetter(co.cask.cdap.internal.app.runtime.MetricsFieldSetter) WorkflowAction(co.cask.cdap.api.workflow.WorkflowAction) ProgramUserMetrics(co.cask.cdap.app.metrics.ProgramUserMetrics) DataSetFieldSetter(co.cask.cdap.internal.app.runtime.DataSetFieldSetter)

Example 3 with DataSetFieldSetter

Use of co.cask.cdap.internal.app.runtime.DataSetFieldSetter in project cdap by caskdata.

In class CustomActionExecutor, method createCustomAction:

@SuppressWarnings("unchecked")
private CustomAction createCustomAction(BasicCustomActionContext context, InstantiatorFactory instantiator, ClassLoader classLoader) throws Exception {
    Class<?> clz = Class.forName(context.getSpecification().getClassName(), true, classLoader);
    Preconditions.checkArgument(CustomAction.class.isAssignableFrom(clz), "%s is not a CustomAction.", clz);
    CustomAction action = instantiator.get(TypeToken.of((Class<? extends CustomAction>) clz)).create();
    Reflections.visit(action, action.getClass(), new PropertyFieldSetter(context.getSpecification().getProperties()), new DataSetFieldSetter(context), new MetricsFieldSetter(context.getMetrics()));
    return action;
}
Also used : PropertyFieldSetter(co.cask.cdap.common.lang.PropertyFieldSetter) MetricsFieldSetter(co.cask.cdap.internal.app.runtime.MetricsFieldSetter) CustomAction(co.cask.cdap.api.customaction.CustomAction) AbstractCustomAction(co.cask.cdap.api.customaction.AbstractCustomAction) DataSetFieldSetter(co.cask.cdap.internal.app.runtime.DataSetFieldSetter)
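Examples 2 and 3 are two sides of the same migration: createAction serves the deprecated WorkflowAction API, while createCustomAction serves its CustomAction replacement, and both inject fields the same way. From the user's side, the injected fields look roughly like this sketch; MarkerAction, the dataset name "markers", and the metric name are invented for illustration, and the sketch assumes the field-level @UseDataSet annotation is what DataSetFieldSetter keys on.

import co.cask.cdap.api.annotation.UseDataSet;
import co.cask.cdap.api.customaction.AbstractCustomAction;
import co.cask.cdap.api.dataset.lib.KeyValueTable;
import co.cask.cdap.api.metrics.Metrics;

public class MarkerAction extends AbstractCustomAction {

    // Populated by DataSetFieldSetter before the action runs.
    @UseDataSet("markers")
    private KeyValueTable markers;

    // Populated by MetricsFieldSetter.
    private Metrics metrics;

    @Override
    public void run() {
        markers.write("last-run", Long.toString(System.currentTimeMillis()));
        metrics.count("marker.runs", 1);
    }
}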

Example 4 with DataSetFieldSetter

Use of co.cask.cdap.internal.app.runtime.DataSetFieldSetter in project cdap by caskdata.

In class SparkRuntimeService, method startUp:

@Override
protected void startUp() throws Exception {
    // Additional Spark job initialization at run-time.
    // This context is for calling initialize() and onFinish() on the Spark program.
    // Field injection for the Spark program has to be done here instead of in
    // SparkProgramRunner for the @UseDataSet injection, since the dataset cache used in
    // Spark is a MultiThreadDatasetCache, and the AbstractExecutionThreadService
    // guarantees that startUp(), run() and shutDown() all happen in the same thread.
    Reflections.visit(spark, spark.getClass(), new PropertyFieldSetter(runtimeContext.getSparkSpecification().getProperties()), new DataSetFieldSetter(runtimeContext.getDatasetCache()), new MetricsFieldSetter(runtimeContext));
    // Creates a temporary directory locally for storing all generated files.
    File tempDir = DirUtils.createTempDir(new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR), cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsoluteFile());
    tempDir.mkdirs();
    this.cleanupTask = createCleanupTask(tempDir, System.getProperties());
    try {
        initialize();
        SparkRuntimeContextConfig contextConfig = new SparkRuntimeContextConfig(runtimeContext.getConfiguration());
        final File jobJar = generateJobJar(tempDir, contextConfig.isLocal(), cConf);
        final List<LocalizeResource> localizeResources = new ArrayList<>();
        String metricsConfPath;
        String classpath = "";
        if (contextConfig.isLocal()) {
            // In local mode, always copy (or link, if local) user-requested resources
            copyUserResources(context.getLocalizeResources(), tempDir);
            File metricsConf = SparkMetricsSink.writeConfig(new File(tempDir, CDAP_METRICS_PROPERTIES));
            metricsConfPath = metricsConf.getAbsolutePath();
        } else {
            // Localize all user-requested files in distributed mode
            distributedUserResources(context.getLocalizeResources(), localizeResources);
            // Localize program jar and the expanding program jar
            File programJar = Locations.linkOrCopy(runtimeContext.getProgram().getJarLocation(), new File(tempDir, SparkRuntimeContextProvider.PROGRAM_JAR_NAME));
            File expandedProgramJar = Locations.linkOrCopy(runtimeContext.getProgram().getJarLocation(), new File(tempDir, SparkRuntimeContextProvider.PROGRAM_JAR_EXPANDED_NAME));
            // Localize both the unexpanded and expanded program jar
            localizeResources.add(new LocalizeResource(programJar));
            localizeResources.add(new LocalizeResource(expandedProgramJar, true));
            // Localize plugins
            if (pluginArchive != null) {
                localizeResources.add(new LocalizeResource(pluginArchive, true));
            }
            // Create and localize the launcher jar, which is for setting up services and classloader for spark containers
            localizeResources.add(new LocalizeResource(createLauncherJar(tempDir)));
            // Create metrics conf file in the current directory since
            // the same value for the "spark.metrics.conf" config needs to be used for both driver and executor processes
            // Also localize the metrics conf file to the executor nodes
            File metricsConf = SparkMetricsSink.writeConfig(new File(CDAP_METRICS_PROPERTIES));
            metricsConfPath = metricsConf.getName();
            localizeResources.add(new LocalizeResource(metricsConf));
            // Localize the cConf file
            localizeResources.add(new LocalizeResource(saveCConf(cConf, tempDir)));
            // Preserve and localize runtime information in the hConf
            Configuration hConf = contextConfig.set(runtimeContext, pluginArchive).getConfiguration();
            localizeResources.add(new LocalizeResource(saveHConf(hConf, tempDir)));
            // Joiner for creating classpath for spark containers
            Joiner joiner = Joiner.on(File.pathSeparator).skipNulls();
            // Localize the spark.jar archive, which contains all CDAP and dependency jars
            File sparkJar = new File(tempDir, CDAP_SPARK_JAR);
            classpath = joiner.join(Iterables.transform(buildDependencyJar(sparkJar), new Function<String, String>() {

                @Override
                public String apply(String name) {
                    return Paths.get("$PWD", CDAP_SPARK_JAR, name).toString();
                }
            }));
            localizeResources.add(new LocalizeResource(sparkJar, true));
            // Localize logback if there is one. It is placed at the beginning of the classpath
            File logbackJar = ProgramRunners.createLogbackJar(new File(tempDir, "logback.xml.jar"));
            if (logbackJar != null) {
                localizeResources.add(new LocalizeResource(logbackJar));
                classpath = joiner.join(Paths.get("$PWD", logbackJar.getName()), classpath);
            }
            // Localize extra jars and append to the end of the classpath
            List<String> extraJars = new ArrayList<>();
            for (URI jarURI : CConfigurationUtil.getExtraJars(cConf)) {
                extraJars.add(Paths.get("$PWD", LocalizationUtils.getLocalizedName(jarURI)).toString());
                localizeResources.add(new LocalizeResource(jarURI, false));
            }
            classpath = joiner.join(classpath, joiner.join(extraJars));
        }
        final Map<String, String> configs = createSubmitConfigs(tempDir, metricsConfPath, classpath, context.getLocalizeResources(), contextConfig.isLocal());
        submitSpark = new Callable<ListenableFuture<RunId>>() {

            @Override
            public ListenableFuture<RunId> call() throws Exception {
                // This happens when stop() was called while starting
                if (!isRunning()) {
                    return immediateCancelledFuture();
                }
                return sparkSubmitter.submit(runtimeContext, configs, localizeResources, jobJar, runtimeContext.getRunId());
            }
        };
    } catch (LinkageError e) {
        // Wrap the LinkageError in an Exception so startup fails cleanly when the initialization
        // of the user program is missing dependencies (CDAP-2543)
        throw new Exception(e.getMessage(), e);
    } catch (Throwable t) {
        cleanupTask.run();
        throw t;
    }
}
Also used : Joiner(com.google.common.base.Joiner) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) URI(java.net.URI) DataSetFieldSetter(co.cask.cdap.internal.app.runtime.DataSetFieldSetter) URISyntaxException(java.net.URISyntaxException) UnsupportedTypeException(co.cask.common.internal.io.UnsupportedTypeException) IOException(java.io.IOException) PropertyFieldSetter(co.cask.cdap.common.lang.PropertyFieldSetter) MetricsFieldSetter(co.cask.cdap.internal.app.runtime.MetricsFieldSetter) LocalizeResource(co.cask.cdap.internal.app.runtime.distributed.LocalizeResource) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) File(java.io.File)
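One detail worth isolating from the distributed branch above is the classpath assembly: Joiner.on(File.pathSeparator).skipNulls() joins "$PWD"-relative entries from the spark.jar archive, prepends the logback jar, and appends any extra jars. A standalone sketch of just that string-building, with made-up jar names, looks like this:

import com.google.common.base.Joiner;
import java.io.File;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

public class ClasspathSketch {
    public static void main(String[] args) {
        Joiner joiner = Joiner.on(File.pathSeparator).skipNulls();

        // Entries inside the localized spark.jar archive; the names are illustrative.
        List<String> dependencyJars = Arrays.asList("cdap-api.jar", "guava.jar");
        String classpath = joiner.join(
            dependencyJars.stream()
                .map(name -> Paths.get("$PWD", "spark.jar", name).toString())
                .iterator());

        // Logback goes to the front, as in startUp() above.
        classpath = joiner.join(Paths.get("$PWD", "logback.xml.jar"), classpath);

        // Extra jars are appended at the end.
        classpath = joiner.join(classpath, Paths.get("$PWD", "extra.jar"));

        System.out.println(classpath);
    }
}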

Example 5 with DataSetFieldSetter

Use of co.cask.cdap.internal.app.runtime.DataSetFieldSetter in project cdap by caskdata.

In class FlowletProgramRunner, method run:

@SuppressWarnings("unchecked")
@Override
public ProgramController run(Program program, ProgramOptions options) {
    BasicFlowletContext flowletContext = null;
    try {
        // Extract and verify parameters
        String flowletName = options.getName();
        int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
        Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
        int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
        Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");
        RunId runId = ProgramRunners.getRunId(options);
        ApplicationSpecification appSpec = program.getApplicationSpecification();
        Preconditions.checkNotNull(appSpec, "Missing application specification.");
        ProgramType processorType = program.getType();
        Preconditions.checkNotNull(processorType, "Missing processor type.");
        Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported.");
        String processorName = program.getName();
        Preconditions.checkNotNull(processorName, "Missing processor name.");
        FlowSpecification flowSpec = appSpec.getFlows().get(processorName);
        FlowletDefinition flowletDef = flowSpec.getFlowlets().get(flowletName);
        Preconditions.checkNotNull(flowletDef, "Definition missing for flowlet \"%s\"", flowletName);
        Class<?> clz = Class.forName(flowletDef.getFlowletSpec().getClassName(), true, program.getClassLoader());
        Preconditions.checkArgument(Flowlet.class.isAssignableFrom(clz), "%s is not a Flowlet.", clz);
        // Setup dataset framework context, if required
        ProgramId programId = program.getId();
        FlowletId flowletId = programId.flowlet(flowletName);
        ProgramRunId run = programId.run(runId);
        ProgramContext programContext = new BasicProgramContext(run, flowletId);
        if (dsFramework instanceof ProgramContextAware) {
            ((ProgramContextAware) dsFramework).setContext(programContext);
        }
        Class<? extends Flowlet> flowletClass = (Class<? extends Flowlet>) clz;
        // Creates flowlet context
        flowletContext = new BasicFlowletContext(program, options, flowletId, instanceId, instanceCount, flowletDef.getDatasets(), flowletDef.getFlowletSpec(), metricsCollectionService, discoveryServiceClient, txClient, dsFramework, secureStore, secureStoreManager, messageService, cConf);
        // Creates tx related objects
        DataFabricFacade dataFabricFacade = dataFabricFacadeFactory.create(program, flowletContext.getDatasetCache());
        if (dataFabricFacade instanceof ProgramContextAware) {
            ((ProgramContextAware) dataFabricFacade).setContext(programContext);
        }
        // Creates QueueSpecification
        Table<Node, String, Set<QueueSpecification>> queueSpecs = new SimpleQueueSpecificationGenerator(new ApplicationId(program.getNamespaceId(), program.getApplicationId())).create(flowSpec);
        Flowlet flowlet = new InstantiatorFactory(false).get(TypeToken.of(flowletClass)).create();
        TypeToken<? extends Flowlet> flowletType = TypeToken.of(flowletClass);
        // Set the context classloader to the cdap classloader. It is needed for the
        // DatumWriterFactory to be able to load cdap classes
        Thread.currentThread().setContextClassLoader(FlowletProgramRunner.class.getClassLoader());
        // Inject DataSet, OutputEmitter, Metric fields
        ImmutableList.Builder<ProducerSupplier> queueProducerSupplierBuilder = ImmutableList.builder();
        Reflections.visit(flowlet, flowlet.getClass(), new PropertyFieldSetter(flowletDef.getFlowletSpec().getProperties()), new DataSetFieldSetter(flowletContext), new MetricsFieldSetter(flowletContext.getMetrics()), new OutputEmitterFieldSetter(outputEmitterFactory(flowletContext, flowletName, dataFabricFacade, queueProducerSupplierBuilder, queueSpecs)));
        ImmutableList.Builder<ConsumerSupplier<?>> queueConsumerSupplierBuilder = ImmutableList.builder();
        Collection<ProcessSpecification<?>> processSpecs = createProcessSpecification(flowletContext, flowletType, processMethodFactory(flowlet), processSpecificationFactory(flowletContext, dataFabricFacade, queueReaderFactory, flowletName, queueSpecs, queueConsumerSupplierBuilder, createSchemaCache(program)), Lists.<ProcessSpecification<?>>newLinkedList());
        List<ConsumerSupplier<?>> consumerSuppliers = queueConsumerSupplierBuilder.build();
        // Create the flowlet driver
        AtomicReference<FlowletProgramController> controllerRef = new AtomicReference<>();
        Service serviceHook = createServiceHook(flowletName, consumerSuppliers, controllerRef);
        FlowletRuntimeService driver = new FlowletRuntimeService(flowlet, flowletContext, processSpecs, createCallback(flowlet, flowletDef.getFlowletSpec()), dataFabricFacade, serviceHook);
        FlowletProgramController controller = new FlowletProgramController(program.getId(), flowletName, flowletContext, driver, queueProducerSupplierBuilder.build(), consumerSuppliers);
        controllerRef.set(controller);
        LOG.info("Starting flowlet: {}", flowletContext);
        driver.start();
        LOG.info("Flowlet started: {}", flowletContext);
        return controller;
    } catch (Exception e) {
        // On any failure, release the resources that were created as part of the construction
        // of the flowlet context.
        if (flowletContext != null) {
            flowletContext.close();
        }
        throw Throwables.propagate(e);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) Set(java.util.Set) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableList(com.google.common.collect.ImmutableList) Node(co.cask.cdap.app.queue.QueueSpecificationGenerator.Node) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) ProgramContext(co.cask.cdap.data.ProgramContext) InstantiatorFactory(co.cask.cdap.common.lang.InstantiatorFactory) FlowletDefinition(co.cask.cdap.api.flow.FlowletDefinition) FlowSpecification(co.cask.cdap.api.flow.FlowSpecification) MetricsFieldSetter(co.cask.cdap.internal.app.runtime.MetricsFieldSetter) ProgramType(co.cask.cdap.proto.ProgramType) RunId(org.apache.twill.api.RunId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) Flowlet(co.cask.cdap.api.flow.flowlet.Flowlet) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Service(com.google.common.util.concurrent.Service) MessagingService(co.cask.cdap.messaging.MessagingService) AbstractService(com.google.common.util.concurrent.AbstractService) AtomicReference(java.util.concurrent.atomic.AtomicReference) ProgramId(co.cask.cdap.proto.id.ProgramId) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) DataSetFieldSetter(co.cask.cdap.internal.app.runtime.DataSetFieldSetter) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) IOException(java.io.IOException) SimpleQueueSpecificationGenerator(co.cask.cdap.internal.app.queue.SimpleQueueSpecificationGenerator) PropertyFieldSetter(co.cask.cdap.common.lang.PropertyFieldSetter) FlowletId(co.cask.cdap.proto.id.FlowletId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) DataFabricFacade(co.cask.cdap.internal.app.runtime.DataFabricFacade) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
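Example 5 adds a fourth setter, OutputEmitterFieldSetter, so a flowlet receives properties, datasets, metrics, and queue-backed emitters in a single Reflections.visit pass. A user-side flowlet exercising all three injected field kinds might look like the sketch below; CountingFlowlet, the dataset name "counts", and the metric name are hypothetical.

import co.cask.cdap.api.annotation.ProcessInput;
import co.cask.cdap.api.annotation.UseDataSet;
import co.cask.cdap.api.common.Bytes;
import co.cask.cdap.api.dataset.lib.KeyValueTable;
import co.cask.cdap.api.flow.flowlet.AbstractFlowlet;
import co.cask.cdap.api.flow.flowlet.OutputEmitter;
import co.cask.cdap.api.metrics.Metrics;

public class CountingFlowlet extends AbstractFlowlet {

    // Injected by DataSetFieldSetter; "counts" is an assumed dataset name.
    @UseDataSet("counts")
    private KeyValueTable counts;

    // Injected by MetricsFieldSetter.
    private Metrics metrics;

    // Injected by OutputEmitterFieldSetter, wired to the downstream queue.
    private OutputEmitter<String> out;

    @ProcessInput
    public void process(String event) {
        counts.increment(Bytes.toBytes(event), 1L);
        metrics.count("events.processed", 1);
        out.emit(event);
    }
}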

Aggregations

PropertyFieldSetter (co.cask.cdap.common.lang.PropertyFieldSetter): 7 usages
DataSetFieldSetter (co.cask.cdap.internal.app.runtime.DataSetFieldSetter): 7 usages
MetricsFieldSetter (co.cask.cdap.internal.app.runtime.MetricsFieldSetter): 7 usages
IOException (java.io.IOException): 4 usages
ProgramLifecycle (co.cask.cdap.api.ProgramLifecycle): 2 usages
RuntimeContext (co.cask.cdap.api.RuntimeContext): 2 usages
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 2 usages
MetricsCollectionService (co.cask.cdap.api.metrics.MetricsCollectionService): 2 usages
CConfiguration (co.cask.cdap.common.conf.CConfiguration): 2 usages
InstantiatorFactory (co.cask.cdap.common.lang.InstantiatorFactory): 2 usages
WeakReferenceDelegatorClassLoader (co.cask.cdap.common.lang.WeakReferenceDelegatorClassLoader): 2 usages
ProgramContextAware (co.cask.cdap.data.ProgramContextAware): 2 usages
BasicProgramContext (co.cask.cdap.internal.app.runtime.BasicProgramContext): 2 usages
MessagingService (co.cask.cdap.messaging.MessagingService): 2 usages
ProgramType (co.cask.cdap.proto.ProgramType): 2 usages
ProgramId (co.cask.cdap.proto.id.ProgramId): 2 usages
Service (com.google.common.util.concurrent.Service): 2 usages
ArrayList (java.util.ArrayList): 2 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
RunId (org.apache.twill.api.RunId): 2 usages