Use of co.cask.cdap.proto.ScheduleDetail in project cdap by caskdata.
From the class ProgramLifecycleHttpHandler, method toScheduleDetail:
/**
 * Converts a {@link ScheduleUpdateDetail} into a {@link ScheduleDetail}, borrowing trigger
 * fields from the existing {@link ProgramSchedule} when the update leaves them unspecified.
 * A null field in the returned detail means "leave that field of the schedule unchanged".
 *
 * @param updateDetail the partial update submitted by the client
 * @param existing the currently stored schedule, used as the source of trigger defaults
 * @return a ScheduleDetail carrying only the fields to be updated
 * @throws IllegalArgumentException if the update defines both a cron (time) trigger and a
 *         stream size trigger, or defines only one half of a stream size trigger while the
 *         existing trigger is not a {@link StreamSizeTrigger} to borrow the other half from
 */
private ScheduleDetail toScheduleDetail(ScheduleUpdateDetail updateDetail, ProgramSchedule existing) {
ScheduleUpdateDetail.Schedule scheduleUpdate = updateDetail.getSchedule();
if (scheduleUpdate == null) {
// Only the properties are being updated; all other fields stay untouched (null = no change).
return new ScheduleDetail(null, null, null, updateDetail.getProperties(), null, null, null);
}
Trigger trigger = null;
// A single update may carry at most one trigger definition: either a cron expression
// (time trigger) or a stream name / data size pair (stream size trigger), never both.
if (scheduleUpdate.getCronExpression() != null && (scheduleUpdate.getStreamName() != null || scheduleUpdate.getDataTriggerMB() != null)) {
throw new IllegalArgumentException(String.format("Cannot define time trigger with cron expression and define stream size trigger with" + " stream name and data trigger configuration in the same schedule update details %s. " + "Schedule update detail must contain only one trigger.", updateDetail));
}
NamespaceId namespaceId = existing.getProgramId().getNamespaceId();
if (scheduleUpdate.getCronExpression() != null) {
// Fix: use the already-extracted local scheduleUpdate instead of re-dereferencing
// updateDetail.getSchedule() (same object, but the indirection was inconsistent with
// every other access in this method).
trigger = new TimeTrigger(scheduleUpdate.getCronExpression());
} else if (existing.getTrigger() instanceof StreamSizeTrigger) {
// if the existing trigger is StreamSizeTrigger, use the field in the existing trigger if the corresponding field
// in schedule update detail is null
StreamSizeTrigger existingTrigger = (StreamSizeTrigger) existing.getTrigger();
String streamName = Objects.firstNonNull(scheduleUpdate.getStreamName(), existingTrigger.getStreamId().getStream());
int dataTriggerMB = Objects.firstNonNull(scheduleUpdate.getDataTriggerMB(), existingTrigger.getTriggerMB());
trigger = new StreamSizeTrigger(namespaceId.stream(streamName), dataTriggerMB);
} else if (scheduleUpdate.getStreamName() != null && scheduleUpdate.getDataTriggerMB() != null) {
// Both halves of a stream size trigger were supplied; build the trigger from scratch.
trigger = new StreamSizeTrigger(namespaceId.stream(scheduleUpdate.getStreamName()), scheduleUpdate.getDataTriggerMB());
} else if (scheduleUpdate.getStreamName() != null || scheduleUpdate.getDataTriggerMB() != null) {
// Exactly one half was supplied and there is no existing stream size trigger to borrow from.
throw new IllegalArgumentException(String.format("Only one of stream name and data trigger MB is defined in schedule update details %s. " + "Must provide both stream name and data trigger MB to update the existing schedule with " + "trigger of type %s to a schedule with stream size trigger.", updateDetail, existing.getTrigger().getClass()));
}
List<Constraint> constraints = toConstraints(scheduleUpdate.getRunConstraints());
return new ScheduleDetail(null, scheduleUpdate.getDescription(), null, updateDetail.getProperties(), trigger, constraints, null);
}
Use of co.cask.cdap.proto.ScheduleDetail in project cdap by caskdata.
From the class ProgramLifecycleHttpHandler, method readScheduleDetailBody:
/**
 * Reads a {@link ScheduleDetail} from the request body, transparently accepting the legacy
 * ScheduleSpecification / ScheduleUpdateDetail wire formats via the supplied conversion function.
 *
 * @param request the HTTP request whose body holds the schedule JSON
 * @param scheduleName the schedule name from the URL path; must match the body's name if present
 * @param isUpdate whether this is an update (affects the error message wording only)
 * @param toScheduleDetail converter applied when the body is in a legacy format
 * @return the parsed schedule detail
 * @throws BadRequestException if the body is not valid JSON, not a JSON object, cannot be
 *         parsed as a schedule, or names a different schedule than the path parameter
 * @throws IOException if the request body cannot be read
 */
private ScheduleDetail readScheduleDetailBody(HttpRequest request, String scheduleName, boolean isUpdate, Function<JsonElement, ScheduleDetail> toScheduleDetail) throws BadRequestException, IOException {
// TODO: remove backward compatibility with ScheduleSpecification, use fromJson(ScheduleDetail.class)
JsonElement parsedBody;
try (Reader bodyReader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()), Charsets.UTF_8)) {
// The schedule spec in the request body does not contain the program information
parsedBody = GSON.fromJson(bodyReader, JsonElement.class);
} catch (IOException e) {
throw new IOException("Error reading request body", e);
} catch (JsonSyntaxException e) {
throw new BadRequestException("Request body is invalid json: " + e.getMessage());
}
if (!parsedBody.isJsonObject()) {
throw new BadRequestException("Expected a json object in the request body but received " + GSON.toJson(parsedBody));
}
// A top-level "schedule" field only exists in the legacy formats.
boolean legacyFormat = ((JsonObject) parsedBody).get("schedule") != null;
ScheduleDetail detail;
if (legacyFormat) {
try {
detail = toScheduleDetail.apply(parsedBody);
} catch (JsonSyntaxException e) {
throw new BadRequestException("Error parsing request body as a schedule " + (isUpdate ? "update details" : "specification") + " (in backward compatibility mode): " + e.getMessage());
} catch (IllegalArgumentException e) {
throw new BadRequestException(e);
}
} else {
try {
detail = GSON.fromJson(parsedBody, ScheduleDetail.class);
} catch (JsonSyntaxException e) {
throw new BadRequestException("Error parsing request body as a schedule specification: " + e.getMessage());
}
}
// If the schedule name is present in the request body, it should match the name in path params
if (detail.getName() != null && !scheduleName.equals(detail.getName())) {
throw new BadRequestException(String.format("Schedule name in the body of the request (%s) does not match the schedule name in the path parameter (%s)", detail.getName(), scheduleName));
}
return detail;
}
Use of co.cask.cdap.proto.ScheduleDetail in project cdap by caskdata.
From the class ProgramLifecycleHttpHandler, method doAddSchedule:
/**
 * Adds a new schedule to the given application from the request body, accepting either the
 * current {@link ScheduleDetail} format or the legacy ScheduleSpecification format.
 * Responds with 200 OK on success.
 *
 * @throws BadRequestException if the body omits the program, program type, program name,
 *         or trigger
 * @throws NotFoundException if the scheduled program does not exist
 */
private void doAddSchedule(HttpRequest request, HttpResponder responder, String namespace, String appName, String appVersion, String scheduleName) throws Exception {
final ApplicationId applicationId = new ApplicationId(namespace, appName, appVersion);
// Read the body; the fallback function converts a legacy ScheduleSpecification if submitted.
ScheduleDetail requested = readScheduleDetailBody(request, scheduleName, false, new Function<JsonElement, ScheduleDetail>() {

@Override
public ScheduleDetail apply(@Nullable JsonElement input) {
ScheduleSpecification scheduleSpec = GSON.fromJson(input, ScheduleSpecification.class);
return toScheduleDetail(applicationId, scheduleSpec);
}
});
// Validate the mandatory pieces. Keep this order: later checks dereference earlier ones,
// and the error reported when several pieces are missing must stay the same.
if (requested.getProgram() == null) {
throw new BadRequestException("No program was specified for the schedule");
}
if (requested.getProgram().getProgramType() == null) {
throw new BadRequestException("No program type was specified for the schedule");
}
if (requested.getProgram().getProgramName() == null) {
throw new BadRequestException("No program name was specified for the schedule");
}
if (requested.getTrigger() == null) {
throw new BadRequestException("No trigger was specified for the schedule");
}
// Resolve the target program and make sure it exists before creating the schedule.
ProgramType programType = ProgramType.valueOfSchedulableType(requested.getProgram().getProgramType());
ProgramId programId = applicationId.program(programType, requested.getProgram().getProgramName());
if (lifecycleService.getProgramSpecification(programId) == null) {
throw new NotFoundException(programId);
}
// Optional fields fall back to their defaults when absent from the request.
long timeoutMillis = Objects.firstNonNull(requested.getTimeoutMillis(), Schedulers.JOB_QUEUE_TIMEOUT_MILLIS);
List<? extends Constraint> constraints = Objects.firstNonNull(requested.getConstraints(), NO_CONSTRAINTS);
Map<String, String> properties = Objects.firstNonNull(requested.getProperties(), EMPTY_PROPERTIES);
String description = Objects.firstNonNull(requested.getDescription(), "");
programScheduler.addSchedule(new ProgramSchedule(scheduleName, description, programId, properties, requested.getTrigger(), constraints, timeoutMillis));
responder.sendStatus(HttpResponseStatus.OK);
}
Use of co.cask.cdap.proto.ScheduleDetail in project cdap by caskdata.
From the class ProgramLifecycleHttpHandler, method doGetSchedules:
/**
 * Lists the schedules of an application — or of a single workflow within it when
 * {@code workflow} is non-null — and writes them as JSON. The {@code format} parameter
 * selects between the current ScheduleDetail format and the legacy ScheduleSpecification
 * format in the response.
 *
 * @throws NotFoundException if the application, or the named workflow, does not exist
 * @throws BadRequestException propagated from format handling
 */
protected void doGetSchedules(HttpResponder responder, String namespace, String app, String version, @Nullable String workflow, @Nullable String format) throws NotFoundException, BadRequestException {
boolean asScheduleSpec = returnScheduleAsSpec(format);
ApplicationId applicationId = new ApplicationId(namespace, app, version);
ApplicationSpecification appSpec = store.getApplication(applicationId);
if (appSpec == null) {
throw new NotFoundException(applicationId);
}
List<ProgramSchedule> schedules;
if (workflow == null) {
// No workflow filter: list every schedule of the application.
schedules = programScheduler.listSchedules(applicationId);
} else {
WorkflowId workflowId = applicationId.workflow(workflow);
if (appSpec.getWorkflows().get(workflow) == null) {
throw new NotFoundException(workflowId);
}
schedules = programScheduler.listSchedules(workflowId);
}
List<ScheduleDetail> details = Schedulers.toScheduleDetails(schedules);
if (asScheduleSpec) {
// Legacy clients asked for the old ScheduleSpecification representation.
List<ScheduleSpecification> specs = ScheduleDetail.toScheduleSpecs(details);
responder.sendJson(HttpResponseStatus.OK, specs, Schedulers.SCHEDULE_SPECS_TYPE, GSON_FOR_SCHEDULES);
return;
}
responder.sendJson(HttpResponseStatus.OK, details, Schedulers.SCHEDULE_DETAILS_TYPE, GSON_FOR_SCHEDULES);
}
Use of co.cask.cdap.proto.ScheduleDetail in project cdap by caskdata.
From the class WorkflowHttpHandlerTest, method testStreamSizeSchedules:
/**
 * Integration test for stream-size-triggered schedules: deploys an app, ingests enough stream
 * data to fire the trigger, then verifies that suspending the schedule stops new runs and
 * resuming it starts them again. Timing-sensitive: relies on sleeps and polling assertions.
 */
@Test
public void testStreamSizeSchedules() throws Exception {
// Steps for the test:
// 1. Deploy the app
// 2. Verify the schedules
// 3. Ingest data in the stream
// 4. Verify the history after waiting a while
// 5. Suspend the schedule
// 6. Ingest data in the stream
// 7. Verify there are no runs after the suspend by looking at the history
// 8. Resume the schedule
// 9. Verify there are runs after the resume by looking at the history
String appName = "AppWithStreamSizeSchedule";
String sampleSchedule1 = "SampleSchedule1";
String sampleSchedule2 = "SampleSchedule2";
String workflowName = "SampleWorkflow";
String streamName = "stream";
Id.Program programId = Id.Program.from(TEST_NAMESPACE2, appName, ProgramType.WORKFLOW, workflowName);
// Build a ~100KB payload; 12 posts of it push the stream past the 1MB notification threshold.
StringBuilder longStringBuilder = new StringBuilder();
for (int i = 0; i < 10000; i++) {
longStringBuilder.append("dddddddddd");
}
String longString = longStringBuilder.toString();
// deploy app with schedule in namespace 2
HttpResponse response = deploy(AppWithStreamSizeSchedule.class, Constants.Gateway.API_VERSION_3_TOKEN, TEST_NAMESPACE2);
Assert.assertEquals(200, response.getStatusLine().getStatusCode());
// Schedules start suspended after deploy — resume both so the triggers are active.
Assert.assertEquals(200, resumeSchedule(TEST_NAMESPACE2, appName, sampleSchedule1));
Assert.assertEquals(200, resumeSchedule(TEST_NAMESPACE2, appName, sampleSchedule2));
// get schedules
List<ScheduleDetail> schedules = getSchedules(TEST_NAMESPACE2, appName, workflowName);
Assert.assertEquals(2, schedules.size());
String scheduleName1 = schedules.get(0).getName();
String scheduleName2 = schedules.get(1).getName();
Assert.assertNotNull(scheduleName1);
Assert.assertFalse(scheduleName1.isEmpty());
// Change notification threshold for stream
// NOTE(review): body uses single-quoted JSON; presumably the streams endpoint parses it
// leniently — confirm before changing.
response = doPut(String.format("/v3/namespaces/%s/streams/%s/properties", TEST_NAMESPACE2, streamName), "{'notification.threshold.mb': 1}");
Assert.assertEquals(200, response.getStatusLine().getStatusCode());
response = doGet(String.format("/v3/namespaces/%s/streams/%s", TEST_NAMESPACE2, streamName));
String json = EntityUtils.toString(response.getEntity());
StreamProperties properties = new Gson().fromJson(json, StreamProperties.class);
Assert.assertEquals(1, properties.getNotificationThresholdMB().intValue());
// Ingest over 1MB of data in stream
for (int i = 0; i < 12; ++i) {
response = doPost(String.format("/v3/namespaces/%s/streams/%s", TEST_NAMESPACE2, streamName), longString);
Assert.assertEquals(200, response.getStatusLine().getStatusCode());
}
// Only schedule 1 should get executed
verifyProgramRuns(programId, "completed");
//Check schedule status
assertSchedule(programId, scheduleName1, true, 30, TimeUnit.SECONDS);
assertSchedule(programId, scheduleName2, true, 30, TimeUnit.SECONDS);
Assert.assertEquals(200, suspendSchedule(TEST_NAMESPACE2, appName, scheduleName1));
Assert.assertEquals(200, suspendSchedule(TEST_NAMESPACE2, appName, scheduleName2));
//check paused state
assertSchedule(programId, scheduleName1, false, 30, TimeUnit.SECONDS);
assertSchedule(programId, scheduleName2, false, 30, TimeUnit.SECONDS);
int workflowRuns = getProgramRuns(programId, "completed").size();
// Should still be one
Assert.assertEquals(1, workflowRuns);
// Sleep for some time and verify there are no more scheduled jobs after the suspend.
for (int i = 0; i < 12; ++i) {
response = doPost(String.format("/v3/namespaces/%s/streams/%s", TEST_NAMESPACE2, streamName), longString);
Assert.assertEquals(200, response.getStatusLine().getStatusCode());
}
TimeUnit.SECONDS.sleep(5);
// Run count must be unchanged: the suspended schedule must not have fired on the new data.
int workflowRunsAfterSuspend = getProgramRuns(programId, "completed").size();
Assert.assertEquals(workflowRuns, workflowRunsAfterSuspend);
Assert.assertEquals(200, resumeSchedule(TEST_NAMESPACE2, appName, scheduleName1));
//check scheduled state
assertSchedule(programId, scheduleName1, true, 30, TimeUnit.SECONDS);
// an additional run should execute and complete after resuming the schedule
assertRunHistory(programId, "completed", 1 + workflowRunsAfterSuspend, 60, TimeUnit.SECONDS);
//Check status of a non existing schedule
try {
assertSchedule(programId, "invalid", true, 2, TimeUnit.SECONDS);
Assert.fail();
} catch (Exception e) {
// expected
}
Assert.assertEquals(200, suspendSchedule(TEST_NAMESPACE2, appName, scheduleName1));
//check paused state
assertSchedule(programId, scheduleName1, false, 30, TimeUnit.SECONDS);
//Schedule operations using invalid namespace
try {
assertSchedule(Id.Program.from(TEST_NAMESPACE1, appName, ProgramType.WORKFLOW, workflowName), scheduleName1, true, 2, TimeUnit.SECONDS);
Assert.fail();
} catch (Exception e) {
// expected
}
// Suspend/resume against the wrong namespace must 404.
Assert.assertEquals(404, suspendSchedule(TEST_NAMESPACE1, appName, scheduleName1));
Assert.assertEquals(404, resumeSchedule(TEST_NAMESPACE1, appName, scheduleName1));
// Wait until any running jobs just before suspend call completes.
TimeUnit.SECONDS.sleep(2);
}
Aggregations