Usage of co.cask.cdap.internal.schedule.trigger.Trigger in project cdap by caskdata.
From class ProgramLifecycleHttpHandler, method toScheduleDetail.
/**
 * Converts a {@link ScheduleUpdateDetail} into a {@link ScheduleDetail}, filling trigger fields
 * from the existing {@link ProgramSchedule} when the update leaves them unspecified.
 *
 * @param updateDetail the partial update requested by the caller
 * @param existing the schedule currently stored, used to supply defaults for a stream size trigger
 * @return a {@link ScheduleDetail} carrying only the fields that should change (others are null)
 * @throws IllegalArgumentException if the update defines both a time trigger and a stream size
 *         trigger, or defines only one half of a stream size trigger when the existing trigger
 *         is not a {@link StreamSizeTrigger}
 */
private ScheduleDetail toScheduleDetail(ScheduleUpdateDetail updateDetail, ProgramSchedule existing) {
  ScheduleUpdateDetail.Schedule scheduleUpdate = updateDetail.getSchedule();
  if (scheduleUpdate == null) {
    // Only properties are being updated; leave every other field null (unchanged).
    return new ScheduleDetail(null, null, null, updateDetail.getProperties(), null, null, null);
  }
  if (scheduleUpdate.getCronExpression() != null
      && (scheduleUpdate.getStreamName() != null || scheduleUpdate.getDataTriggerMB() != null)) {
    throw new IllegalArgumentException(String.format(
        "Cannot define time trigger with cron expression and define stream size trigger with"
            + " stream name and data trigger configuration in the same schedule update details %s. "
            + "Schedule update detail must contain only one trigger.", updateDetail));
  }
  Trigger trigger = null;
  NamespaceId namespaceId = existing.getProgramId().getNamespaceId();
  if (scheduleUpdate.getCronExpression() != null) {
    // Fix: read the cron expression from the local scheduleUpdate rather than redundantly
    // re-fetching it via updateDetail.getSchedule() (same object, needless extra call).
    trigger = new TimeTrigger(scheduleUpdate.getCronExpression());
  } else if (existing.getTrigger() instanceof StreamSizeTrigger) {
    // If the existing trigger is a StreamSizeTrigger, fall back to its fields for any
    // corresponding field left null in the schedule update detail.
    StreamSizeTrigger existingTrigger = (StreamSizeTrigger) existing.getTrigger();
    String streamName =
        Objects.firstNonNull(scheduleUpdate.getStreamName(), existingTrigger.getStreamId().getStream());
    int dataTriggerMB =
        Objects.firstNonNull(scheduleUpdate.getDataTriggerMB(), existingTrigger.getTriggerMB());
    trigger = new StreamSizeTrigger(namespaceId.stream(streamName), dataTriggerMB);
  } else if (scheduleUpdate.getStreamName() != null && scheduleUpdate.getDataTriggerMB() != null) {
    // No existing stream size trigger to fill in defaults from, so both fields must be given.
    trigger = new StreamSizeTrigger(namespaceId.stream(scheduleUpdate.getStreamName()),
                                    scheduleUpdate.getDataTriggerMB());
  } else if (scheduleUpdate.getStreamName() != null || scheduleUpdate.getDataTriggerMB() != null) {
    throw new IllegalArgumentException(String.format(
        "Only one of stream name and data trigger MB is defined in schedule update details %s. "
            + "Must provide both stream name and data trigger MB to update the existing schedule with "
            + "trigger of type %s to a schedule with stream size trigger.",
        updateDetail, existing.getTrigger().getClass()));
  }
  List<Constraint> constraints = toConstraints(scheduleUpdate.getRunConstraints());
  return new ScheduleDetail(null, scheduleUpdate.getDescription(), null,
                            updateDetail.getProperties(), trigger, constraints, null);
}
Usage of co.cask.cdap.internal.schedule.trigger.Trigger in project cdap by caskdata.
From class ProgramLifecycleHttpHandler, method combineForUpdate.
/**
 * Merges a partial {@link ScheduleDetail} update onto an existing {@link ProgramSchedule}:
 * for each field, the update's value wins when non-null, otherwise the existing value is kept.
 *
 * @param scheduleDetail the partial update; null fields mean "keep the existing value"
 * @param existing the schedule currently stored
 * @return a new {@link ProgramSchedule} combining the update with the existing schedule
 * @throws BadRequestException if the update attempts to change the schedule's program
 */
private ProgramSchedule combineForUpdate(ScheduleDetail scheduleDetail, ProgramSchedule existing)
  throws BadRequestException {
  String description = Objects.firstNonNull(scheduleDetail.getDescription(), existing.getDescription());
  // Resolve the program id referenced by the update, defaulting each component (type, name)
  // to the existing schedule's program when the update leaves it unspecified.
  // (Decomposed from a single nested ternary expression for readability; behavior unchanged.)
  ProgramId programId;
  if (scheduleDetail.getProgram() == null) {
    programId = existing.getProgramId();
  } else {
    ProgramType programType = scheduleDetail.getProgram().getProgramType() == null
      ? existing.getProgramId().getType()
      : ProgramType.valueOfSchedulableType(scheduleDetail.getProgram().getProgramType());
    String programName = Objects.firstNonNull(scheduleDetail.getProgram().getProgramName(),
                                              existing.getProgramId().getProgram());
    programId = existing.getProgramId().getParent().program(programType, programName);
  }
  if (!programId.equals(existing.getProgramId())) {
    // A schedule is permanently bound to its program; changing it requires delete + recreate.
    throw new BadRequestException(String.format(
      "Must update the schedule '%s' with the same program as '%s'. "
        + "To change the program in a schedule, please delete the schedule and create a new one.",
      existing.getName(), existing.getProgramId().toString()));
  }
  Map<String, String> properties = Objects.firstNonNull(scheduleDetail.getProperties(), existing.getProperties());
  Trigger trigger = Objects.firstNonNull(scheduleDetail.getTrigger(), existing.getTrigger());
  List<? extends Constraint> constraints =
    Objects.firstNonNull(scheduleDetail.getConstraints(), existing.getConstraints());
  Long timeoutMillis = Objects.firstNonNull(scheduleDetail.getTimeoutMillis(), existing.getTimeoutMillis());
  return new ProgramSchedule(existing.getName(), description, programId, properties, trigger,
                             constraints, timeoutMillis);
}
Usage of co.cask.cdap.internal.schedule.trigger.Trigger in project cdap by caskdata.
From class DeleteAndCreateSchedulesStage, method toProgramSchedule.
/**
 * Builds a {@link ProgramSchedule} for the given application from a {@link ScheduleCreationSpec}.
 * The scheduled program is always a workflow of the application.
 *
 * @param appId the application owning the schedule
 * @param scheduleCreationSpec the specification describing the schedule to create
 * @return the corresponding {@link ProgramSchedule}
 */
private ProgramSchedule toProgramSchedule(ApplicationId appId, ScheduleCreationSpec scheduleCreationSpec) {
  ProgramId workflowId = appId.workflow(scheduleCreationSpec.getProgramName());
  return new ProgramSchedule(scheduleCreationSpec.getName(),
                             scheduleCreationSpec.getDescription(),
                             workflowId,
                             scheduleCreationSpec.getProperties(),
                             scheduleCreationSpec.getTrigger(),
                             scheduleCreationSpec.getConstraints(),
                             scheduleCreationSpec.getTimeoutMillis());
}
Usage of co.cask.cdap.internal.schedule.trigger.Trigger in project cdap by caskdata.
From class ProgramLifecycleHttpHandler, method toScheduleDetail.
/**
 * Converts a legacy {@link ScheduleSpecification} into a {@link ScheduleDetail}, mapping a
 * {@link TimeSchedule} to a {@link TimeTrigger} and a {@link StreamSizeSchedule} to a
 * {@link StreamSizeTrigger}.
 *
 * @param appId the application the schedule belongs to (provides the namespace for stream ids)
 * @param scheduleSpec the specification to convert; must contain a schedule
 * @return the equivalent {@link ScheduleDetail}
 * @throws IllegalArgumentException if the specification contains no schedule
 */
private ScheduleDetail toScheduleDetail(ApplicationId appId, ScheduleSpecification scheduleSpec) {
  if (scheduleSpec.getSchedule() == null) {
    throw new IllegalArgumentException("Schedule specification must contain schedule");
  }
  final Trigger trigger;
  if (scheduleSpec.getSchedule() instanceof TimeSchedule) {
    TimeSchedule timeSchedule = (TimeSchedule) scheduleSpec.getSchedule();
    trigger = new TimeTrigger(timeSchedule.getCronEntry());
  } else {
    // Anything that is not a TimeSchedule is expected to be a StreamSizeSchedule.
    StreamSizeSchedule streamSizeSchedule = (StreamSizeSchedule) scheduleSpec.getSchedule();
    StreamId streamId = appId.getParent().stream(streamSizeSchedule.getStreamName());
    trigger = new StreamSizeTrigger(streamId, streamSizeSchedule.getDataTriggerMB());
  }
  List<Constraint> runConstraints = toConstraints(scheduleSpec.getSchedule().getRunConstraints());
  return new ScheduleDetail(scheduleSpec.getSchedule().getName(),
                            scheduleSpec.getSchedule().getDescription(),
                            scheduleSpec.getProgram(),
                            scheduleSpec.getProperties(),
                            trigger,
                            runConstraints,
                            null);
}
Usage of co.cask.cdap.internal.schedule.trigger.Trigger in project cdap by caskdata.
From class ApplicationVerificationStage, method verifyPrograms.
/**
 * Verifies every program of the application specification, then validates each program schedule:
 * the scheduled workflow must exist in the application, and a stream size trigger must reference
 * a stream that is either declared by the application or already present in the store.
 *
 * @param appId the application being verified
 * @param specification the application specification to verify
 * @throws RuntimeException if a program fails verification, a schedule references a missing
 *         workflow, or a stream size trigger references a non-existent stream
 */
protected void verifyPrograms(ApplicationId appId, ApplicationSpecification specification) {
  Iterable<ProgramSpecification> programSpecs = Iterables.concat(specification.getFlows().values(),
                                                                 specification.getMapReduce().values(),
                                                                 specification.getWorkflows().values());
  for (ProgramSpecification programSpec : programSpecs) {
    VerifyResult result = getVerifier(programSpec.getClass()).verify(appId, programSpec);
    if (!result.isSuccess()) {
      throw new RuntimeException(result.getMessage());
    }
  }
  // Map keys were unused in the original loops; iterate values directly.
  for (WorkflowSpecification workflowSpec : specification.getWorkflows().values()) {
    verifyWorkflowSpecifications(specification, workflowSpec);
  }
  for (ScheduleCreationSpec scheduleSpec : specification.getProgramSchedules().values()) {
    String programName = scheduleSpec.getProgramName();
    if (!specification.getWorkflows().containsKey(programName)) {
      throw new RuntimeException(String.format(
        "Schedule '%s' is invalid: Workflow '%s' is not configured " + "in application '%s'",
        scheduleSpec.getName(), programName, specification.getName()));
    }
    // TODO StreamSizeSchedules should be resilient to stream inexistence [CDAP-1446]
    Trigger trigger = scheduleSpec.getTrigger();
    if (trigger instanceof StreamSizeTrigger) {
      StreamId streamId = ((StreamSizeTrigger) trigger).getStreamId();
      if (!specification.getStreams().containsKey(streamId.getStream())
          && store.getStream(streamId.getParent(), streamId.getStream()) == null) {
        // Fixed typo in the error message: "does not exit" -> "does not exist".
        throw new RuntimeException(String.format("Schedule '%s' uses a Stream '%s' that does not exist",
                                                 scheduleSpec.getName(), streamId));
      }
    }
  }
}
Aggregations