use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class ProgramGenerationStageTest method testProgramGenerationForToyApp.
@Test
public void testProgramGenerationForToyApp() throws Exception {
  cConf.set(Constants.AppFabric.OUTPUT_DIR, "programs");
  LocationFactory lf = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  // have to do this since we are not going through the route of create namespace -> deploy application
  // in real scenarios, the namespace directory would already be created
  Location namespaceLocation = lf.create(DefaultId.APPLICATION.getNamespace());
  Locations.mkdirsIfNotExists(namespaceLocation);
  LocationFactory jarLf = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  Location appArchive = AppJarHelper.createDeploymentJar(jarLf, ToyApp.class);
  ApplicationSpecification appSpec = Specifications.from(new ToyApp());
  ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
  ApplicationSpecification newSpec = adapter.fromJson(adapter.toJson(appSpec));
  ProgramGenerationStage pgmStage = new ProgramGenerationStage();
  // Can do better here - fixed right now to run the test.
  pgmStage.process(new StageContext(Object.class));
  pgmStage.process(new ApplicationDeployable(NamespaceId.DEFAULT.artifact("ToyApp", "1.0"), appArchive,
                                             DefaultId.APPLICATION, newSpec, null, ApplicationDeployScope.USER));
  Assert.assertTrue(true);
}
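The notable step in this test is the JSON round-trip of the ApplicationSpecification through ApplicationSpecificationAdapter before the stage processes it, presumably so the stage only sees state that survives serialization, as it would during a real deployment. A minimal sketch of the same round-trip pattern, using plain Gson on a hypothetical stand-in class rather than CDAP's adapter (SimpleSpec and its fields are illustrative, not CDAP types):

// Illustrative only: plain Gson mirroring adapter.fromJson(adapter.toJson(appSpec)) above.
import com.google.gson.Gson;

public class SpecRoundTripSketch {
  static final class SimpleSpec {
    String name;
    int instances;
    SimpleSpec(String name, int instances) {
      this.name = name;
      this.instances = instances;
    }
  }

  public static void main(String[] args) {
    Gson gson = new Gson();
    SimpleSpec original = new SimpleSpec("ToyApp", 1);
    String json = gson.toJson(original);                              // serialize
    SimpleSpec roundTripped = gson.fromJson(json, SimpleSpec.class);  // deserialize
    System.out.println(roundTripped.name + ":" + roundTripped.instances);  // ToyApp:1
  }
}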
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class ProgramLifecycleHttpHandler method getInstances.
/**
* Returns the number of instances for all program runnables that are passed into the data. The data is an array of
* Json objects where each object must contain the following three elements: appId, programType, and programId
* (flow name, service name). Retrieving instances only applies to flows and user
* services. For flows, another parameter, "runnableId", must be provided; it corresponds to the
* flowlet/runnable for which to retrieve the instances.
* <p>
* Example input:
* <pre><code>
* [{"appId": "App1", "programType": "Service", "programId": "Service1", "runnableId": "Runnable1"},
* {"appId": "App1", "programType": "Mapreduce", "programId": "Mapreduce2"},
* {"appId": "App2", "programType": "Flow", "programId": "Flow1", "runnableId": "Flowlet1"}]
* </code></pre>
* </p><p>
* The response will be an array of JsonObjects, each of which will contain the input parameters
* as well as three additional fields:
* <ul>
* <li>"provisioned" which maps to the number of instances actually provided for the input runnable;</li>
* <li>"requested" which maps to the number of instances the user has requested for the input runnable; and</li>
* <li>"statusCode" which maps to the HTTP status code for the data in that JsonObject (200, 400, 404).</li>
* </ul>
* </p><p>
* If an error occurs in the input (for the example above, Flowlet1 does not exist), then all JsonObjects whose
* parameters are valid will have the provisioned, requested, and statusCode fields, while all JsonObjects whose
* parameters are not valid will have an error message and a statusCode.
* </p><p>
* For example, if there is no Flowlet1 in the above data, then the response could be 200 OK with the following data:
* </p>
* <pre><code>
* [{"appId": "App1", "programType": "Service", "programId": "Service1", "runnableId": "Runnable1",
* "statusCode": 200, "provisioned": 2, "requested": 2},
* {"appId": "App1", "programType": "Mapreduce", "programId": "Mapreduce2", "statusCode": 400,
* "error": "Program type 'Mapreduce' is not a valid program type to get instances"},
* {"appId": "App2", "programType": "Flow", "programId": "Flow1", "runnableId": "Flowlet1", "statusCode": 404,
* "error": "Program: Flowlet1 not found"}]
* </code></pre>
*/
@POST
@Path("/instances")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void getInstances(FullHttpRequest request, HttpResponder responder,
                         @PathParam("namespace-id") String namespaceId) throws IOException, BadRequestException {
  List<BatchRunnable> runnables = validateAndGetBatchInput(request, BATCH_RUNNABLES_TYPE);
  // cache app specs to perform fewer store lookups
  Map<ApplicationId, ApplicationSpecification> appSpecs = new HashMap<>();
  List<BatchRunnableInstances> output = new ArrayList<>(runnables.size());
  for (BatchRunnable runnable : runnables) {
    // can't get instances for things that are not flows, services, or workers
    if (!canHaveInstances(runnable.getProgramType())) {
      output.add(new BatchRunnableInstances(runnable, HttpResponseStatus.BAD_REQUEST.code(),
        String.format("Program type '%s' is not a valid program type to get instances",
                      runnable.getProgramType().getPrettyName())));
      continue;
    }
    ApplicationId appId = new ApplicationId(namespaceId, runnable.getAppId());
    // populate the spec cache if this is the first time we've seen the appId
    if (!appSpecs.containsKey(appId)) {
      appSpecs.put(appId, store.getApplication(appId));
    }
    ApplicationSpecification spec = appSpecs.get(appId);
    if (spec == null) {
      output.add(new BatchRunnableInstances(runnable, HttpResponseStatus.NOT_FOUND.code(),
        String.format("App: %s not found", appId)));
      continue;
    }
    ProgramId programId = appId.program(runnable.getProgramType(), runnable.getProgramId());
    output.add(getProgramInstances(runnable, spec, programId));
  }
  responder.sendJson(HttpResponseStatus.OK, GSON.toJson(output));
}
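A hedged client-side sketch of calling this handler with Java 11's java.net.http client. The full URL depends on the class-level @Path prefix, which is not shown in this excerpt; the /v3/namespaces/{namespace-id} prefix and localhost:11015 below are assumptions based on CDAP's REST conventions, not taken from this code.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class BatchInstancesClientSketch {
  public static void main(String[] args) throws Exception {
    // Same shape as the example input in the Javadoc above.
    String body = "[{\"appId\":\"App1\",\"programType\":\"Service\",\"programId\":\"Service1\",\"runnableId\":\"Runnable1\"},"
        + "{\"appId\":\"App2\",\"programType\":\"Flow\",\"programId\":\"Flow1\",\"runnableId\":\"Flowlet1\"}]";
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:11015/v3/namespaces/default/instances"))  // assumed base path
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(body))
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    // Each element of the returned array echoes its input plus statusCode and, on success,
    // the provisioned and requested counts.
    System.out.println(response.statusCode() + " " + response.body());
  }
}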
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class ProgramLifecycleHttpHandler method getMapReduceInfo.
/**
* Relays job-level and task-level information about a particular MapReduce program run.
*/
@GET
@Path("/apps/{app-id}/mapreduce/{mapreduce-id}/runs/{run-id}/info")
public void getMapReduceInfo(HttpRequest request, HttpResponder responder,
                             @PathParam("namespace-id") String namespaceId, @PathParam("app-id") String appId,
                             @PathParam("mapreduce-id") String mapreduceId,
                             @PathParam("run-id") String runId) throws IOException, NotFoundException {
  ProgramId programId = new ProgramId(namespaceId, appId, ProgramType.MAPREDUCE, mapreduceId);
  ProgramRunId run = programId.run(runId);
  ApplicationSpecification appSpec = store.getApplication(programId.getParent());
  if (appSpec == null) {
    throw new NotFoundException(programId.getApplication());
  }
  if (!appSpec.getMapReduce().containsKey(mapreduceId)) {
    throw new NotFoundException(programId);
  }
  RunRecordMeta runRecordMeta = store.getRun(run);
  if (runRecordMeta == null) {
    throw new NotFoundException(run);
  }
  MRJobInfo mrJobInfo = mrJobInfoFetcher.getMRJobInfo(Id.Run.fromEntityId(run));
  mrJobInfo.setState(runRecordMeta.getStatus().name());
  // Multiply startTs / stopTs by 1000 to be consistent with the task-level start/stop times returned by
  // JobClient in milliseconds; RunRecord returns values in seconds.
  // The start time of the MRJob is when the run record has been marked as STARTED.
  mrJobInfo.setStartTime(TimeUnit.SECONDS.toMillis(runRecordMeta.getStartTs()));
  Long stopTs = runRecordMeta.getStopTs();
  if (stopTs != null) {
    mrJobInfo.setStopTime(TimeUnit.SECONDS.toMillis(stopTs));
  }
  // JobClient (in DistributedMRJobInfoFetcher) can return NaN for some values, and GSON otherwise fails
  Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
  responder.sendJson(HttpResponseStatus.OK, gson.toJson(mrJobInfo, mrJobInfo.getClass()));
}
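The GsonBuilder().serializeSpecialFloatingPointValues() call above matters because strict JSON has no representation for NaN or Infinity, so a default Gson instance refuses to write them, while the lenient configuration emits them as-is. A small standalone sketch of the difference:

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

public class NaNSerializationSketch {
  public static void main(String[] args) {
    Gson lenient = new GsonBuilder().serializeSpecialFloatingPointValues().create();
    System.out.println(lenient.toJson(Double.NaN));  // prints NaN
    try {
      new Gson().toJson(Double.NaN);                 // default Gson rejects NaN
    } catch (IllegalArgumentException e) {
      System.out.println("default Gson: " + e.getMessage());
    }
  }
}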
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class ProgramLifecycleHttpHandler method doGetSchedules.
protected void doGetSchedules(HttpResponder responder, ApplicationId applicationId, @Nullable String workflow,
                              @Nullable String triggerTypeStr, @Nullable String statusStr) throws Exception {
  ApplicationSpecification appSpec = store.getApplication(applicationId);
  if (appSpec == null) {
    throw new NotFoundException(applicationId);
  }
  ProgramScheduleStatus status;
  try {
    status = statusStr == null ? null : ProgramScheduleStatus.valueOf(statusStr);
  } catch (IllegalArgumentException e) {
    throw new BadRequestException(String.format("Invalid schedule status '%s'. Must be one of %s.", statusStr,
                                                Joiner.on(',').join(ProgramScheduleStatus.values())), e);
  }
  ProtoTrigger.Type triggerType;
  try {
    triggerType = triggerTypeStr == null ? null : ProtoTrigger.Type.valueOfCategoryName(triggerTypeStr);
  } catch (IllegalArgumentException e) {
    throw new BadRequestException(e.getMessage(), e);
  }
  Predicate<ProgramScheduleRecord> predicate = record -> true;
  if (status != null) {
    predicate = predicate.and(record -> record.getMeta().getStatus().equals(status));
  }
  if (triggerType != null) {
    predicate = predicate.and(record -> record.getSchedule().getTrigger().getType().equals(triggerType));
  }
  Collection<ProgramScheduleRecord> schedules;
  if (workflow != null) {
    WorkflowId workflowId = applicationId.workflow(workflow);
    if (appSpec.getWorkflows().get(workflow) == null) {
      throw new NotFoundException(workflowId);
    }
    schedules = programScheduleService.list(workflowId, predicate);
  } else {
    schedules = programScheduleService.list(applicationId, predicate);
  }
  List<ScheduleDetail> details = schedules.stream().map(ProgramScheduleRecord::toScheduleDetail).collect(Collectors.toList());
  responder.sendJson(HttpResponseStatus.OK, GSON.toJson(details, Schedulers.SCHEDULE_DETAILS_TYPE));
}
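The schedule filter above is built by composing java.util.function.Predicate instances: start from a match-everything predicate and narrow it with and() only for the query parameters that were supplied. A self-contained sketch of the same pattern on a stand-in record type (Record and its status/trigger fields are illustrative names, not CDAP types):

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;

public class PredicateCompositionSketch {
  static final class Record {
    final String status;
    final String trigger;
    Record(String status, String trigger) {
      this.status = status;
      this.trigger = trigger;
    }
  }

  public static void main(String[] args) {
    List<Record> records = Arrays.asList(
        new Record("SCHEDULED", "TIME"),
        new Record("SUSPENDED", "TIME"),
        new Record("SCHEDULED", "PARTITION"));
    String wantedStatus = "SCHEDULED";   // would be null if the query parameter was absent
    String wantedTrigger = "TIME";
    Predicate<Record> predicate = record -> true;
    if (wantedStatus != null) {
      predicate = predicate.and(record -> record.status.equals(wantedStatus));
    }
    if (wantedTrigger != null) {
      predicate = predicate.and(record -> record.trigger.equals(wantedTrigger));
    }
    List<Record> filtered = records.stream().filter(predicate).collect(Collectors.toList());
    System.out.println(filtered.size());  // 1
  }
}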
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class ProgramLifecycleHttpHandler method getStatus.
/**
* Returns the status of a program or schedule, where the type is one of {flows, workflows, mapreduce, spark, services, schedules}.
*/
@GET
@Path("/apps/{app-id}/versions/{version-id}/{program-type}/{program-id}/status")
public void getStatus(HttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespaceId,
                      @PathParam("app-id") String appId, @PathParam("version-id") String versionId,
                      @PathParam("program-type") String type, @PathParam("program-id") String programId) throws Exception {
  ApplicationId applicationId = new ApplicationId(namespaceId, appId, versionId);
  if (SCHEDULES.equals(type)) {
    JsonObject json = new JsonObject();
    ScheduleId scheduleId = applicationId.schedule(programId);
    ApplicationSpecification appSpec = store.getApplication(applicationId);
    if (appSpec == null) {
      throw new NotFoundException(applicationId);
    }
    json.addProperty("status", programScheduleService.getStatus(scheduleId).toString());
    responder.sendJson(HttpResponseStatus.OK, json.toString());
    return;
  }
  ProgramType programType;
  try {
    programType = ProgramType.valueOfCategoryName(type);
  } catch (IllegalArgumentException e) {
    throw new BadRequestException(e);
  }
  ProgramId program = applicationId.program(programType, programId);
  ProgramStatus programStatus = lifecycleService.getProgramStatus(program);
  Map<String, String> status = ImmutableMap.of("status", programStatus.name());
  responder.sendJson(HttpResponseStatus.OK, GSON.toJson(status));
}
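For the schedules branch, the handler returns a single-field JSON object built with Gson's JsonObject. A tiny sketch of the shape of that response (the status value here is a placeholder; in the handler it comes from programScheduleService.getStatus(scheduleId)):

import com.google.gson.JsonObject;

public class ScheduleStatusResponseSketch {
  public static void main(String[] args) {
    JsonObject json = new JsonObject();
    json.addProperty("status", "SCHEDULED");  // placeholder status value
    System.out.println(json.toString());      // {"status":"SCHEDULED"}
  }
}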