Usage of co.cask.cdap.proto.id.ProgramId in the project cdap by caskdata:
class NamespacedEntityIdCodec, method deserializeProgramId.
/**
 * Deserializes a {@link ProgramId} from its JSON representation.
 * <p>
 * Expects the object to contain the application fields consumed by
 * {@link #deserializeApplicationId}, plus a "type" field matching a
 * {@link ProgramType} enum constant (case-insensitive) and a "program"
 * field with the program name.
 *
 * @param id the JSON object to deserialize
 * @return the reconstructed {@link ProgramId}
 */
private ProgramId deserializeProgramId(JsonObject id) {
  ApplicationId app = deserializeApplicationId(id);
  // Use Locale.ROOT so enum lookup is locale-independent (avoids the
  // Turkish dotless-i problem with the default locale's toUpperCase()).
  ProgramType programType = ProgramType.valueOf(id.get("type").getAsString().toUpperCase(java.util.Locale.ROOT));
  String programId = id.get("program").getAsString();
  return new ProgramId(app.getNamespace(), app.getApplication(), programType, programId);
}
Usage of co.cask.cdap.proto.id.ProgramId in the project cdap by caskdata:
class SparkProgramRunner, method run.
/**
 * Runs the given Spark program and returns a controller for it.
 * <p>
 * Validates the program type and specification, wires up the dataset
 * framework and runtime context, instantiates the user's Spark class,
 * and starts a {@code SparkRuntimeService} either locally or via a
 * distributed submitter. All intermediate {@link Closeable}s are tracked
 * so they can be closed if startup fails partway through.
 */
@Override
public ProgramController run(Program program, ProgramOptions options) {
// Get the RunId first. It is used for the creation of the ClassLoader closing thread.
Arguments arguments = options.getArguments();
RunId runId = ProgramRunners.getRunId(options);
// Closeables are added to the FRONT of this deque so that on failure they
// are closed in reverse order of creation (most recently created first).
Deque<Closeable> closeables = new LinkedList<>();
try {
// Extract and verify parameters
ApplicationSpecification appSpec = program.getApplicationSpecification();
Preconditions.checkNotNull(appSpec, "Missing application specification.");
ProgramType processorType = program.getType();
Preconditions.checkNotNull(processorType, "Missing processor type.");
Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");
SparkSpecification spec = appSpec.getSpark().get(program.getName());
Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());
String host = options.getArguments().getOption(ProgramOptionConstants.HOST);
Preconditions.checkArgument(host != null, "No hostname is provided");
// Get the WorkflowProgramInfo if it is started by Workflow
WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
// Workflow-launched programs use a name-mapped dataset framework so local
// dataset names are resolved against the workflow run.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
// Setup dataset framework context, if required
if (programDatasetFramework instanceof ProgramContextAware) {
ProgramId programId = program.getId();
((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
}
// May be null when the program uses no plugins.
PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
if (pluginInstantiator != null) {
closeables.addFirst(pluginInstantiator);
}
SparkRuntimeContext runtimeContext = new SparkRuntimeContext(new Configuration(hConf), program, options, cConf, host, txClient, programDatasetFramework, discoveryServiceClient, metricsCollectionService, streamAdmin, workflowInfo, pluginInstantiator, secureStore, secureStoreManager, authorizationEnforcer, authenticationContext, messagingService, serviceAnnouncer, pluginFinder, locationFactory);
closeables.addFirst(runtimeContext);
Spark spark;
try {
// Instantiate the user's Spark class reflectively from the program ClassLoader.
spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
} catch (Exception e) {
LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
throw Throwables.propagate(e);
}
// Local mode submits in-process; distributed mode submits to the cluster
// using the configured scheduler queue.
SparkSubmitter submitter = SparkRuntimeContextConfig.isLocal(hConf) ? new LocalSparkSubmitter() : new DistributedSparkSubmitter(hConf, locationFactory, host, runtimeContext, options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE));
Service sparkRuntimeService = new SparkRuntimeService(cConf, spark, getPluginArchive(options), runtimeContext, submitter, locationFactory);
// The listener takes ownership of the closeables once the service starts.
sparkRuntimeService.addListener(createRuntimeServiceListener(closeables), Threads.SAME_THREAD_EXECUTOR);
ProgramController controller = new SparkProgramController(sparkRuntimeService, runtimeContext);
LOG.debug("Starting Spark Job. Context: {}", runtimeContext);
// In non-local, non-secure clusters, impersonate the configured HDFS user;
// with Kerberos security the current UGI already carries the right identity.
if (SparkRuntimeContextConfig.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
sparkRuntimeService.start();
} else {
ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), sparkRuntimeService);
}
return controller;
} catch (Throwable t) {
// Startup failed: release everything acquired so far before propagating.
closeAllQuietly(closeables);
throw Throwables.propagate(t);
}
}
Usage of co.cask.cdap.proto.id.ProgramId in the project cdap by caskdata:
class AbstractAppCreationService, method createAppAndStartProgram.
/**
 * Deploys the application described by the given artifact summary and then
 * starts every program registered in {@code programIdMap} with its
 * configured runtime arguments.
 *
 * @param artifactSummary summary of the artifact to deploy the app from
 * @throws Exception if the deployment fails
 */
private void createAppAndStartProgram(ArtifactSummary artifactSummary) throws Exception {
  LOG.info("Creating and Starting {} App with config : {}", appId.getApplication(), appConfig);
  // Resolve the artifact either in the system namespace or in the app's own namespace.
  ArtifactId artifactId;
  if (artifactSummary.getScope().equals(ArtifactScope.SYSTEM)) {
    artifactId = NamespaceId.SYSTEM.artifact(artifactSummary.getName(), artifactSummary.getVersion());
  } else {
    artifactId = appId.getNamespaceId().artifact(artifactSummary.getName(), artifactSummary.getVersion());
  }
  applicationLifecycleService.deployApp(appId.getParent(), appId.getApplication(), appId.getVersion(), Id.Artifact.fromEntityId(artifactId), appConfig, new DefaultProgramTerminator());
  for (Map.Entry<ProgramId, Map<String, String>> entry : programIdMap.entrySet()) {
    ProgramId programId = entry.getKey();
    try {
      programLifecycleService.start(programId, entry.getValue(), false);
    } catch (IOException ex) {
      // Might happen if the program is being started in parallel through UI
      LOG.debug("Tried to start {} program but had a conflict. {}", programId, ex.getMessage());
    }
  }
}
Usage of co.cask.cdap.proto.id.ProgramId in the project cdap by caskdata:
class ServiceDiscoverableTest, method testProgramId.
/**
 * Verifies the round trip between a service {@link ProgramId} and its
 * discoverable name, plus the user-service name predicate.
 */
@Test
public void testProgramId() throws Exception {
  // Build a service program id and derive its discoverable name.
  ProgramId service = new ApplicationId("ns", "app").service("s1");
  String name = ServiceDiscoverable.getName(service);
  Assert.assertEquals("service.ns.app.s1", name);
  // A proper user-service name is recognized; a malformed one is not.
  Assert.assertTrue(ServiceDiscoverable.isUserService(name));
  Assert.assertFalse(ServiceDiscoverable.isUserService("service1."));
  // Parsing the discoverable name recovers the original id.
  Assert.assertEquals(service, ServiceDiscoverable.getId(name));
}
Usage of co.cask.cdap.proto.id.ProgramId in the project cdap by caskdata:
class MockLogReader, method generateWorkflowLogs.
/**
 * Generate Workflow logs.
 * <p>
 * Simulates one Workflow run plus a MapReduce and a Spark program launched
 * by that run, recording run records and emitting log entries for each.
 */
private void generateWorkflowLogs() {
  ProgramId workflowId = SOME_WORKFLOW_APP.workflow(SOME_WORKFLOW);
  long currentTime = TimeUnit.SECONDS.toMillis(10);
  RunId workflowRunId = RunIds.generate();
  setStartAndRunning(workflowId.run(workflowRunId.getId()), currentTime);
  runRecordMap.put(workflowId, store.getRun(workflowId.run(workflowRunId.getId())));
  WorkflowLoggingContext wfLoggingContext = new WorkflowLoggingContext(workflowId.getNamespace(), workflowId.getApplication(), workflowId.getProgram(), workflowRunId.getId());
  generateWorkflowRunLogs(wfLoggingContext);
  // Generate logs for MapReduce program started by above Workflow run
  generateInnerProgramLogs(workflowId, workflowRunId, SOME_WORKFLOW_APP.mr(SOME_MAPREDUCE), ProgramType.MAPREDUCE, SOME_MAPREDUCE, TimeUnit.SECONDS.toMillis(20));
  // Generate logs for Spark program started by Workflow run above
  generateInnerProgramLogs(workflowId, workflowRunId, SOME_WORKFLOW_APP.spark(SOME_SPARK), ProgramType.SPARK, SOME_SPARK, TimeUnit.SECONDS.toMillis(40));
  // Generate some more logs for Workflow
  generateWorkflowRunLogs(wfLoggingContext);
}

/**
 * Records a run and emits logs for one program launched by the given Workflow run.
 *
 * @param workflowId   id of the launching Workflow
 * @param workflowRunId run id of the launching Workflow run
 * @param programId    id of the launched program
 * @param programType  type of the launched program (e.g. MAPREDUCE, SPARK)
 * @param programName  name of the launched program, used as the workflow node id
 * @param startTime    simulated start time in milliseconds
 */
private void generateInnerProgramLogs(ProgramId workflowId, RunId workflowRunId, ProgramId programId, ProgramType programType, String programName, long startTime) {
  RunId programRunId = RunIds.generate();
  // System arguments tie the program run back to its parent Workflow run.
  Map<String, String> systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, programName, ProgramOptionConstants.WORKFLOW_NAME, SOME_WORKFLOW, ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId.getId());
  setStartAndRunning(programId.run(programRunId.getId()), startTime, new HashMap<>(), systemArgs);
  runRecordMap.put(programId, store.getRun(programId.run(programRunId.getId())));
  WorkflowProgramLoggingContext context = new WorkflowProgramLoggingContext(workflowId.getNamespace(), workflowId.getApplication(), workflowId.getProgram(), workflowRunId.getId(), programType, programName, programRunId.getId());
  generateWorkflowRunLogs(context);
}
Aggregations