Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class ProgramLifecycleHttpHandler, method doGetSchedules.
protected void doGetSchedules(HttpResponder responder, String namespace, String app, String version,
                              @Nullable String workflow, @Nullable String format)
  throws NotFoundException, BadRequestException {
  boolean asScheduleSpec = returnScheduleAsSpec(format);
  ApplicationId applicationId = new ApplicationId(namespace, app, version);
  ApplicationSpecification appSpec = store.getApplication(applicationId);
  if (appSpec == null) {
    throw new NotFoundException(applicationId);
  }
  List<ProgramSchedule> schedules;
  if (workflow != null) {
    // A workflow was specified: list only that workflow's schedules
    WorkflowId workflowId = applicationId.workflow(workflow);
    if (appSpec.getWorkflows().get(workflow) == null) {
      throw new NotFoundException(workflowId);
    }
    schedules = programScheduler.listSchedules(workflowId);
  } else {
    // No workflow specified: list all schedules of the application
    schedules = programScheduler.listSchedules(applicationId);
  }
  List<ScheduleDetail> details = Schedulers.toScheduleDetails(schedules);
  if (asScheduleSpec) {
    List<ScheduleSpecification> specs = ScheduleDetail.toScheduleSpecs(details);
    responder.sendJson(HttpResponseStatus.OK, specs, Schedulers.SCHEDULE_SPECS_TYPE, GSON_FOR_SCHEDULES);
  } else {
    responder.sendJson(HttpResponseStatus.OK, details, Schedulers.SCHEDULE_DETAILS_TYPE, GSON_FOR_SCHEDULES);
  }
}
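The method relies on a returnScheduleAsSpec(format) helper that is not shown in this excerpt. A minimal sketch of how such a helper might interpret the format parameter follows; the accepted values ("spec", "detail") are assumptions for illustration, not confirmed by the source:

private boolean returnScheduleAsSpec(@Nullable String format) throws BadRequestException {
  if (format == null) {
    return false; // default: respond with ScheduleDetail objects
  }
  // "spec" and "detail" are hypothetical parameter values
  if ("spec".equalsIgnoreCase(format)) {
    return true;
  }
  if ("detail".equalsIgnoreCase(format)) {
    return false;
  }
  throw new BadRequestException("Unsupported format: " + format);
}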
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class SparkProgramRunner, method run.
@Override
public ProgramController run(Program program, ProgramOptions options) {
  // Get the RunId first. It is used for the creation of the ClassLoader closing thread.
  Arguments arguments = options.getArguments();
  RunId runId = ProgramRunners.getRunId(options);
  Deque<Closeable> closeables = new LinkedList<>();
  try {
    // Extract and verify parameters
    ApplicationSpecification appSpec = program.getApplicationSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");
    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");
    SparkSpecification spec = appSpec.getSpark().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());
    String host = options.getArguments().getOption(ProgramOptionConstants.HOST);
    Preconditions.checkArgument(host != null, "No hostname is provided");
    // Get the WorkflowProgramInfo if it is started by Workflow
    WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
    DatasetFramework programDatasetFramework = workflowInfo == null
      ? datasetFramework
      : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
    // Setup dataset framework context, if required
    if (programDatasetFramework instanceof ProgramContextAware) {
      ProgramId programId = program.getId();
      ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
    }
    PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
    if (pluginInstantiator != null) {
      closeables.addFirst(pluginInstantiator);
    }
    SparkRuntimeContext runtimeContext =
      new SparkRuntimeContext(new Configuration(hConf), program, options, cConf, host, txClient,
                              programDatasetFramework, discoveryServiceClient, metricsCollectionService,
                              streamAdmin, workflowInfo, pluginInstantiator, secureStore, secureStoreManager,
                              authorizationEnforcer, authenticationContext, messagingService, serviceAnnouncer,
                              pluginFinder, locationFactory);
    closeables.addFirst(runtimeContext);
    Spark spark;
    try {
      spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
    } catch (Exception e) {
      LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
      throw Throwables.propagate(e);
    }
    SparkSubmitter submitter = SparkRuntimeContextConfig.isLocal(hConf)
      ? new LocalSparkSubmitter()
      : new DistributedSparkSubmitter(hConf, locationFactory, host, runtimeContext,
                                      options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE));
    Service sparkRuntimeService = new SparkRuntimeService(cConf, spark, getPluginArchive(options),
                                                          runtimeContext, submitter, locationFactory);
    sparkRuntimeService.addListener(createRuntimeServiceListener(closeables), Threads.SAME_THREAD_EXECUTOR);
    ProgramController controller = new SparkProgramController(sparkRuntimeService, runtimeContext);
    LOG.debug("Starting Spark Job. Context: {}", runtimeContext);
    if (SparkRuntimeContextConfig.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
      sparkRuntimeService.start();
    } else {
      ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), sparkRuntimeService);
    }
    return controller;
  } catch (Throwable t) {
    closeAllQuietly(closeables);
    throw Throwables.propagate(t);
  }
}
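On any failure the method cleans up with closeAllQuietly(closeables), a helper not shown in this excerpt. A plausible sketch of it follows; this is an assumption about its shape, not the actual CDAP implementation:

// Hypothetical helper: closes each resource and logs instead of rethrowing,
// so a cleanup failure cannot mask the original error being propagated.
private void closeAllQuietly(Iterable<Closeable> closeables) {
  for (Closeable closeable : closeables) {
    try {
      closeable.close();
    } catch (IOException e) {
      LOG.warn("Failed to close {}", closeable, e);
    }
  }
}

Note that closeables is a Deque populated with addFirst, so resources are closed in reverse order of creation.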
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class HBaseQueueDebugger, method scanQueues.
private void scanQueues(List<NamespaceMeta> namespaceMetas) throws Exception {
  final QueueStatistics totalStats = new QueueStatistics();
  for (NamespaceMeta namespaceMeta : namespaceMetas) {
    final NamespaceId namespaceId = new NamespaceId(namespaceMeta.getName());
    final Collection<ApplicationSpecification> apps = store.getAllApplications(namespaceId);
    for (final ApplicationSpecification app : apps) {
      ApplicationId appId = new ApplicationId(namespaceMeta.getName(), app.getName(), app.getAppVersion());
      Collection<FlowSpecification> flows = app.getFlows().values();
      for (final FlowSpecification flow : flows) {
        final ProgramId flowId = appId.program(ProgramType.FLOW, flow.getName());
        // Scan each flow's queues while impersonating the flow's owner
        impersonator.doAs(flowId, new Callable<Void>() {

          @Override
          public Void call() throws Exception {
            SimpleQueueSpecificationGenerator queueSpecGenerator =
              new SimpleQueueSpecificationGenerator(flowId.getParent());
            Table<QueueSpecificationGenerator.Node, String, Set<QueueSpecification>> table =
              queueSpecGenerator.create(flow);
            for (Table.Cell<QueueSpecificationGenerator.Node, String, Set<QueueSpecification>> cell : table.cellSet()) {
              // Only scan queues whose producer is a flowlet
              if (cell.getRowKey().getType() == FlowletConnection.Type.FLOWLET) {
                for (QueueSpecification queue : cell.getValue()) {
                  QueueStatistics queueStats = scanQueue(queue.getQueueName(), null);
                  totalStats.add(queueStats);
                }
              }
            }
            return null;
          }
        });
      }
    }
  }
  System.out.printf("Total results for all queues: %s\n", totalStats.getReport(showTxTimestampOnly()));
}
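QueueStatistics here acts as an accumulator: each per-queue scan result is folded into a running total across every namespace, application, and flow. As a rough illustration of that pattern (the class name and fields below are hypothetical, not the actual HBaseQueueDebugger.QueueStatistics API):

// Hypothetical accumulator sketch
static final class QueueTotals {
  private long unprocessed;
  private long processed;

  void add(QueueTotals other) {
    // Fold another queue's counts into the running totals
    unprocessed += other.unprocessed;
    processed += other.processed;
  }
}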
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class FlowQueuePendingCorrector, method run.
/**
 * Corrects the queue.pending metric for all flowlets in an application.
 */
public void run(ApplicationId appId) throws Exception {
  ApplicationSpecification app = store.getApplication(appId);
  run(appId, app);
}
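Note that store.getApplication returns null when the application does not exist, as the doGetSchedules example above guards against. A defensive variant might fail fast before delegating; a sketch, assuming the Guava Preconditions already used elsewhere in this class:

public void run(ApplicationId appId) throws Exception {
  ApplicationSpecification app = store.getApplication(appId);
  // Fail fast rather than passing a null spec to the overload below
  Preconditions.checkArgument(app != null, "%s not found", appId);
  run(appId, app);
}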
Use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
The class FlowQueuePendingCorrector, method run.
/**
 * Corrects the queue.pending metric for a flowlet.
 */
public void run(FlowId flowId, String producerFlowlet, String consumerFlowlet,
                String flowletQueue) throws Exception {
  ApplicationSpecification app = store.getApplication(flowId.getParent());
  Preconditions.checkArgument(app != null, flowId.getApplication() + " not found");
  Preconditions.checkArgument(app.getFlows().containsKey(flowId.getProgram()), flowId + " not found");
  FlowSpecification flow = app.getFlows().get(flowId.getProgram());
  run(flowId, producerFlowlet, consumerFlowlet, flowletQueue, flow);
}
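A hypothetical invocation, assuming an already-constructed FlowQueuePendingCorrector named corrector; the namespace, application, flow, and flowlet names are illustrative only and do not come from the source:

// Correct the pending count for the queue between two flowlets of one flow
FlowId flowId = new FlowId("default", "PurchaseApp", "PurchaseFlow");
corrector.run(flowId, "reader", "collector", "queue");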