Search in sources:

Example 26 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

From the class NamespacedEntityIdCodec, method deserializeProgramId:

private ProgramId deserializeProgramId(JsonObject id) {
    // The parent ApplicationId fields live in the same JSON object as the program fields
    ApplicationId app = deserializeApplicationId(id);
    // Normalize case before the enum lookup, since ProgramType constants are upper case
    ProgramType programType = ProgramType.valueOf(id.get("type").getAsString().toUpperCase());
    String programId = id.get("program").getAsString();
    return new ProgramId(app.getNamespace(), app.getApplication(), programType, programId);
}
Also used: ProgramType(co.cask.cdap.proto.ProgramType) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ProgramId(co.cask.cdap.proto.id.ProgramId)
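
A minimal, hypothetical sketch of how such a codec could be exercised: register an equivalent JsonDeserializer with Gson and decode a sample JSON object. The "namespace" and "application" field names are assumptions here, since deserializeApplicationId is not shown in this excerpt.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonObject;
import co.cask.cdap.proto.ProgramType;
import co.cask.cdap.proto.id.ProgramId;

public class ProgramIdDeserializerExample {
    public static void main(String[] args) {
        // Equivalent logic to deserializeProgramId above, written as a standalone deserializer
        JsonDeserializer<ProgramId> deserializer = (json, typeOfT, context) -> {
            JsonObject id = json.getAsJsonObject();
            return new ProgramId(
                // "namespace" and "application" are assumed field names
                id.get("namespace").getAsString(),
                id.get("application").getAsString(),
                ProgramType.valueOf(id.get("type").getAsString().toUpperCase()),
                id.get("program").getAsString());
        };
        Gson gson = new GsonBuilder().registerTypeAdapter(ProgramId.class, deserializer).create();
        ProgramId programId = gson.fromJson(
            "{\"namespace\":\"ns\",\"application\":\"app\",\"type\":\"service\",\"program\":\"s1\"}",
            ProgramId.class);
        System.out.println(programId);
    }
}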

Example 27 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

From the class SparkProgramRunner, method run:

@Override
public ProgramController run(Program program, ProgramOptions options) {
    // Get the RunId first. It is used for the creation of the ClassLoader closing thread.
    Arguments arguments = options.getArguments();
    RunId runId = ProgramRunners.getRunId(options);
    Deque<Closeable> closeables = new LinkedList<>();
    try {
        // Extract and verify parameters
        ApplicationSpecification appSpec = program.getApplicationSpecification();
        Preconditions.checkNotNull(appSpec, "Missing application specification.");
        ProgramType processorType = program.getType();
        Preconditions.checkNotNull(processorType, "Missing processor type.");
        Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");
        SparkSpecification spec = appSpec.getSpark().get(program.getName());
        Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());
        String host = options.getArguments().getOption(ProgramOptionConstants.HOST);
        Preconditions.checkArgument(host != null, "No hostname is provided");
        // Get the WorkflowProgramInfo if it is started by Workflow
        WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
        DatasetFramework programDatasetFramework = workflowInfo == null
            ? datasetFramework
            : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
        // Setup dataset framework context, if required
        if (programDatasetFramework instanceof ProgramContextAware) {
            ProgramId programId = program.getId();
            ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
        }
        PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
        if (pluginInstantiator != null) {
            closeables.addFirst(pluginInstantiator);
        }
        SparkRuntimeContext runtimeContext = new SparkRuntimeContext(
            new Configuration(hConf), program, options, cConf, host, txClient, programDatasetFramework,
            discoveryServiceClient, metricsCollectionService, streamAdmin, workflowInfo, pluginInstantiator,
            secureStore, secureStoreManager, authorizationEnforcer, authenticationContext, messagingService,
            serviceAnnouncer, pluginFinder, locationFactory);
        closeables.addFirst(runtimeContext);
        Spark spark;
        try {
            spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
        } catch (Exception e) {
            LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
            throw Throwables.propagate(e);
        }
        SparkSubmitter submitter = SparkRuntimeContextConfig.isLocal(hConf)
            ? new LocalSparkSubmitter()
            : new DistributedSparkSubmitter(hConf, locationFactory, host, runtimeContext,
                options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE));
        Service sparkRuntimeService = new SparkRuntimeService(cConf, spark, getPluginArchive(options),
            runtimeContext, submitter, locationFactory);
        sparkRuntimeService.addListener(createRuntimeServiceListener(closeables), Threads.SAME_THREAD_EXECUTOR);
        ProgramController controller = new SparkProgramController(sparkRuntimeService, runtimeContext);
        LOG.debug("Starting Spark Job. Context: {}", runtimeContext);
        if (SparkRuntimeContextConfig.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
            sparkRuntimeService.start();
        } else {
            ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), sparkRuntimeService);
        }
        return controller;
    } catch (Throwable t) {
        closeAllQuietly(closeables);
        throw Throwables.propagate(t);
    }
}
Also used: ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) SparkSubmitter(co.cask.cdap.app.runtime.spark.submit.SparkSubmitter) DistributedSparkSubmitter(co.cask.cdap.app.runtime.spark.submit.DistributedSparkSubmitter) LocalSparkSubmitter(co.cask.cdap.app.runtime.spark.submit.LocalSparkSubmitter) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) Closeable(java.io.Closeable) NameMappedDatasetFramework(co.cask.cdap.internal.app.runtime.workflow.NameMappedDatasetFramework) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) InstantiatorFactory(co.cask.cdap.common.lang.InstantiatorFactory) SparkSpecification(co.cask.cdap.api.spark.SparkSpecification) ProgramType(co.cask.cdap.proto.ProgramType) RunId(org.apache.twill.api.RunId) ProgramController(co.cask.cdap.app.runtime.ProgramController) Arguments(co.cask.cdap.app.runtime.Arguments) MessagingService(co.cask.cdap.messaging.MessagingService) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Service(com.google.common.util.concurrent.Service) ProgramId(co.cask.cdap.proto.id.ProgramId) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) LinkedList(java.util.LinkedList) IOException(java.io.IOException) WorkflowProgramInfo(co.cask.cdap.internal.app.runtime.workflow.WorkflowProgramInfo) PluginInstantiator(co.cask.cdap.internal.app.runtime.plugin.PluginInstantiator) Spark(co.cask.cdap.api.spark.Spark) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
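
The closeAllQuietly call in the catch block above is not part of this excerpt. A minimal sketch, assuming it simply closes each Closeable in order while logging and suppressing failures so the original Throwable is the one propagated:

private void closeAllQuietly(Iterable<? extends Closeable> closeables) {
    for (Closeable closeable : closeables) {
        try {
            closeable.close();
        } catch (IOException e) {
            // Suppress: the Throwable caught in run() is the one we want to surface
            LOG.warn("Failed to close {}", closeable, e);
        }
    }
}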

Example 28 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

From the class AbstractAppCreationService, method createAppAndStartProgram:

private void createAppAndStartProgram(ArtifactSummary artifactSummary) throws Exception {
    LOG.info("Creating and Starting {} App with config : {}", appId.getApplication(), appConfig);
    ArtifactId artifactId = artifactSummary.getScope().equals(ArtifactScope.SYSTEM)
        ? NamespaceId.SYSTEM.artifact(artifactSummary.getName(), artifactSummary.getVersion())
        : appId.getNamespaceId().artifact(artifactSummary.getName(), artifactSummary.getVersion());
    applicationLifecycleService.deployApp(appId.getParent(), appId.getApplication(), appId.getVersion(),
        Id.Artifact.fromEntityId(artifactId), appConfig, new DefaultProgramTerminator());
    for (Map.Entry<ProgramId, Map<String, String>> programEntry : programIdMap.entrySet()) {
        try {
            programLifecycleService.start(programEntry.getKey(), programEntry.getValue(), false);
        } catch (IOException ex) {
            // Might happen if the program is being started in parallel through the UI
            LOG.debug("Tried to start {} program but had a conflict. {}", programEntry.getKey(), ex.getMessage());
        }
    }
}
Also used: ArtifactId(co.cask.cdap.proto.id.ArtifactId) IOException(java.io.IOException) ProgramId(co.cask.cdap.proto.id.ProgramId) Map(java.util.Map)
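
The programIdMap iterated above pairs each ProgramId with its runtime arguments and is populated elsewhere in the service. A hypothetical example of building one entry, with illustrative app, service, and argument names:

import java.util.HashMap;
import java.util.Map;
import com.google.common.collect.ImmutableMap;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.ProgramId;

// Hypothetical names; a concrete subclass would supply its own programs and arguments
ApplicationId appId = new ApplicationId("system", "tracker");
Map<ProgramId, Map<String, String>> programIdMap = new HashMap<>();
programIdMap.put(appId.service("TrackerService"), ImmutableMap.of("some.runtime.arg", "value"));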

Example 29 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

From the class ServiceDiscoverableTest, method testProgramId:

@Test
public void testProgramId() throws Exception {
    ProgramId serviceId = new ApplicationId("ns", "app").service("s1");
    String discoverableName = ServiceDiscoverable.getName(serviceId);
    Assert.assertEquals("service.ns.app.s1", discoverableName);
    Assert.assertTrue(ServiceDiscoverable.isUserService(discoverableName));
    Assert.assertFalse(ServiceDiscoverable.isUserService("service1."));
    Assert.assertEquals(serviceId, ServiceDiscoverable.getId(discoverableName));
}
Also used: ProgramId(co.cask.cdap.proto.id.ProgramId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) Test(org.junit.Test)
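
The fluent service(...) call is interchangeable with the four-argument ProgramId constructor seen in Example 26. A quick sketch of that equivalence, assuming the same JUnit dependency as the test above:

import org.junit.Assert;
import co.cask.cdap.proto.ProgramType;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.ProgramId;

// Both construction styles should yield the same entity id
ProgramId fluent = new ApplicationId("ns", "app").service("s1");
ProgramId direct = new ProgramId("ns", "app", ProgramType.SERVICE, "s1");
Assert.assertEquals(fluent, direct);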

Example 30 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

From the class MockLogReader, method generateWorkflowLogs:

/**
 * Generate Workflow logs.
 */
private void generateWorkflowLogs() {
    ProgramId workflowId = SOME_WORKFLOW_APP.workflow(SOME_WORKFLOW);
    long currentTime = TimeUnit.SECONDS.toMillis(10);
    RunId workflowRunId = RunIds.generate();
    setStartAndRunning(workflowId.run(workflowRunId.getId()), currentTime);
    runRecordMap.put(workflowId, store.getRun(workflowId.run(workflowRunId.getId())));
    WorkflowLoggingContext wfLoggingContext = new WorkflowLoggingContext(
        workflowId.getNamespace(), workflowId.getApplication(), workflowId.getProgram(), workflowRunId.getId());
    generateWorkflowRunLogs(wfLoggingContext);
    // Generate logs for MapReduce program started by above Workflow run
    ProgramId mapReduceId = SOME_WORKFLOW_APP.mr(SOME_MAPREDUCE);
    currentTime = TimeUnit.SECONDS.toMillis(20);
    RunId mapReduceRunId = RunIds.generate();
    Map<String, String> systemArgs = ImmutableMap.of(
        ProgramOptionConstants.WORKFLOW_NODE_ID, SOME_MAPREDUCE,
        ProgramOptionConstants.WORKFLOW_NAME, SOME_WORKFLOW,
        ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    setStartAndRunning(mapReduceId.run(mapReduceRunId.getId()), currentTime, new HashMap<>(), systemArgs);
    runRecordMap.put(mapReduceId, store.getRun(mapReduceId.run(mapReduceRunId.getId())));
    WorkflowProgramLoggingContext context = new WorkflowProgramLoggingContext(
        workflowId.getNamespace(), workflowId.getApplication(), workflowId.getProgram(), workflowRunId.getId(),
        ProgramType.MAPREDUCE, SOME_MAPREDUCE, mapReduceRunId.getId());
    generateWorkflowRunLogs(context);
    // Generate logs for Spark program started by Workflow run above
    ProgramId sparkId = SOME_WORKFLOW_APP.spark(SOME_SPARK);
    currentTime = TimeUnit.SECONDS.toMillis(40);
    RunId sparkRunId = RunIds.generate();
    systemArgs = ImmutableMap.of(
        ProgramOptionConstants.WORKFLOW_NODE_ID, SOME_SPARK,
        ProgramOptionConstants.WORKFLOW_NAME, SOME_WORKFLOW,
        ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
    setStartAndRunning(sparkId.run(sparkRunId.getId()), currentTime, new HashMap<>(), systemArgs);
    runRecordMap.put(sparkId, store.getRun(sparkId.run(sparkRunId.getId())));
    context = new WorkflowProgramLoggingContext(
        workflowId.getNamespace(), workflowId.getApplication(), workflowId.getProgram(), workflowRunId.getId(),
        ProgramType.SPARK, SOME_SPARK, sparkRunId.getId());
    generateWorkflowRunLogs(context);
    // Generate some more logs for Workflow
    generateWorkflowRunLogs(wfLoggingContext);
}
Also used: WorkflowProgramLoggingContext(co.cask.cdap.logging.context.WorkflowProgramLoggingContext) WorkflowLoggingContext(co.cask.cdap.logging.context.WorkflowLoggingContext) ProgramId(co.cask.cdap.proto.id.ProgramId) RunId(org.apache.twill.api.RunId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId)
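
The pattern worth isolating from Example 30 is how a ProgramId plus a generated RunId yields the ProgramRunId used as a run-record key. A minimal sketch, assuming CDAP's co.cask.cdap.common.app.RunIds utility (the RunIds import is not shown in the excerpt) and illustrative names:

import org.apache.twill.api.RunId;
import co.cask.cdap.common.app.RunIds;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.ProgramId;
import co.cask.cdap.proto.id.ProgramRunId;

// Derive a run-scoped id from a program id, as the mock reader does for each program
ProgramId workflowId = new ApplicationId("ns", "app").workflow("SomeWorkflow");
RunId runId = RunIds.generate();
ProgramRunId programRunId = workflowId.run(runId.getId());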

Aggregations

ProgramId (co.cask.cdap.proto.id.ProgramId): 259
Test (org.junit.Test): 104
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 96
Path (javax.ws.rs.Path): 62
ProgramRunId (co.cask.cdap.proto.id.ProgramRunId): 45
DatasetId (co.cask.cdap.proto.id.DatasetId): 40
RunId (org.apache.twill.api.RunId): 40
NotFoundException (co.cask.cdap.common.NotFoundException): 35
ProgramType (co.cask.cdap.proto.ProgramType): 35
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 35
StreamId (co.cask.cdap.proto.id.StreamId): 32
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 28
IOException (java.io.IOException): 26
ArrayList (java.util.ArrayList): 26
POST (javax.ws.rs.POST): 24
AuditPolicy (co.cask.cdap.common.security.AuditPolicy): 23
GET (javax.ws.rs.GET): 22
NamespaceNotFoundException (co.cask.cdap.common.NamespaceNotFoundException): 21
HttpResponse (org.apache.http.HttpResponse): 20
HashMap (java.util.HashMap): 19