Use of co.cask.cdap.internal.schedule.ScheduleCreationSpec in project cdap by caskdata.
The addSchedule method of the class DefaultAppConfigurer:
@Override
public void addSchedule(Schedule schedule, SchedulableProgramType programType, String programName,
                        Map<String, String> properties) {
  Preconditions.checkNotNull(schedule, "Schedule cannot be null.");
  Preconditions.checkNotNull(schedule.getName(), "Schedule name cannot be null.");
  Preconditions.checkArgument(!schedule.getName().isEmpty(), "Schedule name cannot be empty.");
  Preconditions.checkNotNull(programName, "Program name cannot be null.");
  Preconditions.checkArgument(!programName.isEmpty(), "Program name cannot be empty.");
  Preconditions.checkArgument(!schedules.containsKey(schedule.getName()),
                              "Schedule with the name '" + schedule.getName() + "' already exists.");
  if (schedule instanceof StreamSizeSchedule) {
    Preconditions.checkArgument(((StreamSizeSchedule) schedule).getDataTriggerMB() > 0,
                                "Schedule data trigger must be greater than 0.");
  }
  // TODO: [CDAP-11575] Temporary solution before REST API is merged. ScheduleSpecification will be removed and
  // the block of code below will be refactored
  ScheduleSpecification spec =
    new ScheduleSpecification(schedule, new ScheduleProgramInfo(programType, programName), properties);
  schedules.put(schedule.getName(), spec);
  ScheduleCreationSpec creationSpec =
    Schedulers.toScheduleCreationSpec(deployNamespace.toEntityId(), schedule, programName, properties);
  doAddSchedule(creationSpec);
}
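For context, a minimal sketch of how application code typically reaches addSchedule. It assumes the Schedules builder and the scheduleWorkflow convenience on AbstractApplication from the same CDAP generation; the application class, workflow name, and cron entry are hypothetical placeholders.

import co.cask.cdap.api.app.AbstractApplication;
import co.cask.cdap.api.schedule.Schedules;

public class ReportApp extends AbstractApplication {
  @Override
  public void configure() {
    setName("ReportApp");
    addWorkflow(new ReportWorkflow()); // hypothetical workflow class
    // scheduleWorkflow ultimately lands in DefaultAppConfigurer.addSchedule with
    // SchedulableProgramType.WORKFLOW and the workflow's name.
    scheduleWorkflow(Schedules.builder("hourly")
                       .setDescription("Run the report workflow every hour")
                       .createTimeSchedule("0 * * * *"),
                     "ReportWorkflow");
  }
}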
Use of co.cask.cdap.internal.schedule.ScheduleCreationSpec in project cdap by caskdata.
The deserialize method of the class ApplicationSpecificationCodec:
@Override
public ApplicationSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
  throws JsonParseException {
  JsonObject jsonObj = json.getAsJsonObject();
  String name = jsonObj.get("name").getAsString();
  String appVersion = ApplicationId.DEFAULT_VERSION;
  if (jsonObj.has("appVersion")) {
    appVersion = jsonObj.get("appVersion").getAsString();
  }
  String description = jsonObj.get("description").getAsString();
  String configuration = null;
  if (jsonObj.has("configuration")) {
    configuration = jsonObj.get("configuration").getAsString();
  }
  ArtifactId artifactId = context.deserialize(jsonObj.get("artifactId"), ArtifactId.class);
  Map<String, StreamSpecification> streams =
    deserializeMap(jsonObj.get("streams"), context, StreamSpecification.class);
  Map<String, String> datasetModules = deserializeMap(jsonObj.get("datasetModules"), context, String.class);
  Map<String, DatasetCreationSpec> datasetInstances =
    deserializeMap(jsonObj.get("datasetInstances"), context, DatasetCreationSpec.class);
  Map<String, FlowSpecification> flows = deserializeMap(jsonObj.get("flows"), context, FlowSpecification.class);
  Map<String, MapReduceSpecification> mapReduces =
    deserializeMap(jsonObj.get("mapReduces"), context, MapReduceSpecification.class);
  Map<String, SparkSpecification> sparks = deserializeMap(jsonObj.get("sparks"), context, SparkSpecification.class);
  Map<String, WorkflowSpecification> workflows =
    deserializeMap(jsonObj.get("workflows"), context, WorkflowSpecification.class);
  Map<String, ServiceSpecification> services =
    deserializeMap(jsonObj.get("services"), context, ServiceSpecification.class);
  Map<String, ScheduleSpecification> schedules =
    deserializeMap(jsonObj.get("schedules"), context, ScheduleSpecification.class);
  Map<String, ScheduleCreationSpec> programSchedules =
    deserializeMap(jsonObj.get("programSchedules"), context, ScheduleCreationSpec.class);
  Map<String, WorkerSpecification> workers =
    deserializeMap(jsonObj.get("workers"), context, WorkerSpecification.class);
  Map<String, Plugin> plugins = deserializeMap(jsonObj.get("plugins"), context, Plugin.class);
  return new DefaultApplicationSpecification(name, appVersion, description, configuration, artifactId,
                                             streams, datasetModules, datasetInstances, flows, mapReduces,
                                             sparks, workflows, services, schedules, programSchedules,
                                             workers, plugins);
}
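All the repeated map fields above go through the deserializeMap helper inherited from the codec's base class, so wiring the codec into Gson is the only remaining step. A minimal sketch, assuming a no-arg codec constructor and a storedJson string holding a previously serialized spec; the real codebase wires these codecs up through its own adapter plumbing rather than by hand.

Gson gson = new GsonBuilder()
  .registerTypeAdapter(ApplicationSpecification.class, new ApplicationSpecificationCodec())
  .create();
// Optional fields ("appVersion", "configuration") fall back to defaults,
// exactly as the deserialize method above shows.
ApplicationSpecification spec = gson.fromJson(storedJson, ApplicationSpecification.class);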
Use of co.cask.cdap.internal.schedule.ScheduleCreationSpec in project cdap by caskdata.
The verifyPrograms method of the class ApplicationVerificationStage:
protected void verifyPrograms(ApplicationId appId, ApplicationSpecification specification) {
  Iterable<ProgramSpecification> programSpecs =
    Iterables.concat(specification.getFlows().values(), specification.getMapReduce().values(),
                     specification.getWorkflows().values());
  VerifyResult result;
  for (ProgramSpecification programSpec : programSpecs) {
    result = getVerifier(programSpec.getClass()).verify(appId, programSpec);
    if (!result.isSuccess()) {
      throw new RuntimeException(result.getMessage());
    }
  }
  for (Map.Entry<String, WorkflowSpecification> entry : specification.getWorkflows().entrySet()) {
    verifyWorkflowSpecifications(specification, entry.getValue());
  }
  for (Map.Entry<String, ScheduleCreationSpec> entry : specification.getProgramSchedules().entrySet()) {
    String programName = entry.getValue().getProgramName();
    if (!specification.getWorkflows().containsKey(programName)) {
      throw new RuntimeException(String.format("Schedule '%s' is invalid: Workflow '%s' is not configured "
                                                 + "in application '%s'",
                                               entry.getValue().getName(), programName, specification.getName()));
    }
    // TODO: StreamSizeSchedules should be resilient to stream non-existence [CDAP-1446]
    Trigger trigger = entry.getValue().getTrigger();
    if (trigger instanceof StreamSizeTrigger) {
      StreamId streamId = ((StreamSizeTrigger) trigger).getStreamId();
      if (!specification.getStreams().containsKey(streamId.getStream())
          && store.getStream(streamId.getParent(), streamId.getStream()) == null) {
        throw new RuntimeException(String.format("Schedule '%s' uses a Stream '%s' that does not exist",
                                                 entry.getValue().getName(), streamId));
      }
    }
  }
}
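Restated over plain collections, the per-schedule checks reduce to two lookups. The sketch below is an illustrative stand-in, not CDAP API, and it ignores the store fallback that lets a stream created outside the application spec still pass.

// Throws when a schedule points at a workflow the application does not define,
// or (for stream-size triggers) at a stream that cannot be found.
static void validateSchedule(String scheduleName, String programName, String triggerStream,
                             Set<String> workflowNames, Set<String> streamNames) {
  if (!workflowNames.contains(programName)) {
    throw new RuntimeException(String.format(
      "Schedule '%s' is invalid: Workflow '%s' is not configured", scheduleName, programName));
  }
  if (triggerStream != null && !streamNames.contains(triggerStream)) {
    throw new RuntimeException(String.format(
      "Schedule '%s' uses a Stream '%s' that does not exist", scheduleName, triggerStream));
  }
}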
Use of co.cask.cdap.internal.schedule.ScheduleCreationSpec in project cdap by caskdata.
The toScheduleCreationSpec method of the class Schedulers:
public static ScheduleCreationSpec toScheduleCreationSpec(NamespaceId deployNamespace, Schedule schedule,
                                                          String programName, Map<String, String> properties) {
  Trigger trigger;
  if (schedule instanceof TimeSchedule) {
    trigger = new TimeTrigger(((TimeSchedule) schedule).getCronEntry());
  } else {
    StreamSizeSchedule streamSizeSchedule = (StreamSizeSchedule) schedule;
    trigger = new StreamSizeTrigger(deployNamespace.stream(streamSizeSchedule.getStreamName()),
                                    streamSizeSchedule.getDataTriggerMB());
  }
  Integer maxConcurrentRuns = schedule.getRunConstraints().getMaxConcurrentRuns();
  List<Constraint> constraints = maxConcurrentRuns == null
    ? ImmutableList.<Constraint>of()
    : ImmutableList.<Constraint>of(new ConcurrencyConstraint(maxConcurrentRuns));
  return new ScheduleCreationSpec(schedule.getName(), schedule.getDescription(), programName, properties,
                                  trigger, constraints, Schedulers.JOB_QUEUE_TIMEOUT_MILLIS);
}
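A hedged usage sketch exercising both branches of the conversion; the namespace, schedule names, stream name, cron entry, and 10 MB trigger are placeholders, and the Schedules builder calls assume the CDAP API of the same era.

NamespaceId ns = new NamespaceId("default");
// A time schedule becomes a TimeTrigger carrying the cron entry.
ScheduleCreationSpec timeSpec = Schedulers.toScheduleCreationSpec(
  ns, Schedules.builder("hourly").createTimeSchedule("0 * * * *"),
  "ReportWorkflow", Collections.<String, String>emptyMap());
// A stream-size schedule becomes a StreamSizeTrigger on ns.stream("events"),
// firing once 10 MB of new data arrives.
ScheduleCreationSpec sizeSpec = Schedulers.toScheduleCreationSpec(
  ns, Schedules.builder("onTenMB").createDataSchedule(Schedules.Source.STREAM, "events", 10),
  "ReportWorkflow", Collections.<String, String>emptyMap());

Assuming the builder's default run constraints leave maxConcurrentRuns unset, both results carry an empty constraints list, matching the null branch in the method above.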