Use of io.cdap.cdap.internal.schedule.ScheduleCreationSpec in project cdap by caskdata.
In the class DefaultAppConfigurer, the createSpecification method:
public ApplicationSpecification createSpecification(@Nullable String applicationName,
                                                    @Nullable String applicationVersion) {
  // applicationName can be null only for apps before 3.2 that were not upgraded
  ArtifactScope scope = artifactId.getNamespace().equals(Id.Namespace.SYSTEM)
    ? ArtifactScope.SYSTEM : ArtifactScope.USER;
  ArtifactId artifactId = new ArtifactId(this.artifactId.getName(), this.artifactId.getVersion(), scope);
  String namespace = deployNamespace.toEntityId().getNamespace();
  String appName = applicationName == null ? name : applicationName;
  String appVersion = applicationVersion == null ? ApplicationId.DEFAULT_VERSION : applicationVersion;
  Map<String, ScheduleCreationSpec> builtScheduleSpecs = new HashMap<>();
  for (Map.Entry<String, ScheduleCreationSpec> entry : scheduleSpecs.entrySet()) {
    // If the ScheduleCreationSpec is really a builder, then build the ScheduleCreationSpec
    if (entry.getValue() instanceof DefaultScheduleBuilder.ScheduleCreationBuilder) {
      DefaultScheduleBuilder.ScheduleCreationBuilder builder =
        (DefaultScheduleBuilder.ScheduleCreationBuilder) entry.getValue();
      builtScheduleSpecs.put(entry.getKey(), builder.build(namespace, appName, appVersion));
    } else {
      builtScheduleSpecs.put(entry.getKey(), entry.getValue());
    }
  }
  return new DefaultApplicationSpecification(appName, appVersion, ProjectInfo.getVersion().toString(),
                                             description, configuration, artifactId, getDatasetModules(),
                                             getDatasetSpecs(), mapReduces, sparks, workflows, services,
                                             builtScheduleSpecs, workers, getPlugins());
}
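The instanceof branch is the interesting part: some map values are placeholders (builders) that cannot be finalized until the namespace, application name, and version are known at specification time. The following is a minimal sketch of the same deferred-construction pattern; the Spec and SpecBuilder types and their fields are hypothetical stand-ins, not CDAP classes.

import java.util.HashMap;
import java.util.Map;

// Hypothetical value type; in the snippet above this role is played by ScheduleCreationSpec.
class Spec {
  final String name;
  final String namespace;
  Spec(String name, String namespace) {
    this.name = name;
    this.namespace = namespace;
  }
}

// A builder that masquerades as a Spec so it can live in the same map
// until the deploy-time context (here just the namespace) becomes available.
class SpecBuilder extends Spec {
  SpecBuilder(String name) {
    super(name, null);
  }
  Spec build(String namespace) {
    return new Spec(name, namespace);
  }
}

class DeferredBuild {
  // Walk the map and finish any entries that are still builders,
  // mirroring the loop over scheduleSpecs in createSpecification.
  static Map<String, Spec> resolve(Map<String, Spec> specs, String namespace) {
    Map<String, Spec> built = new HashMap<>();
    for (Map.Entry<String, Spec> entry : specs.entrySet()) {
      if (entry.getValue() instanceof SpecBuilder) {
        built.put(entry.getKey(), ((SpecBuilder) entry.getValue()).build(namespace));
      } else {
        built.put(entry.getKey(), entry.getValue());
      }
    }
    return built;
  }
}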
Use of io.cdap.cdap.internal.schedule.ScheduleCreationSpec in project cdap by caskdata.
In the class ApplicationVerificationStage, the verifyPrograms method:
protected void verifyPrograms(ApplicationId appId, ApplicationSpecification specification) {
  Iterable<ProgramSpecification> programSpecs =
    Iterables.concat(specification.getMapReduce().values(), specification.getWorkflows().values());
  VerifyResult result;
  for (ProgramSpecification programSpec : programSpecs) {
    Verifier<ProgramSpecification> verifier = getVerifier(programSpec.getClass());
    result = verifier.verify(appId, programSpec);
    if (!result.isSuccess()) {
      throw new RuntimeException(result.getMessage());
    }
  }
  for (Map.Entry<String, WorkflowSpecification> entry : specification.getWorkflows().entrySet()) {
    verifyWorkflowSpecifications(specification, entry.getValue());
  }
  for (Map.Entry<String, ScheduleCreationSpec> entry : specification.getProgramSchedules().entrySet()) {
    String programName = entry.getValue().getProgramName();
    if (!specification.getWorkflows().containsKey(programName)) {
      throw new RuntimeException(String.format(
        "Schedule '%s' is invalid: Workflow '%s' is not configured in application '%s'",
        entry.getValue().getName(), programName, specification.getName()));
    }
  }
}
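The last loop is a referential-integrity check: every ScheduleCreationSpec must point at a workflow that the application actually declares. Below is a minimal, self-contained sketch of the same check using plain maps and hypothetical names rather than CDAP's specification classes.

import java.util.Map;
import java.util.Set;

class ScheduleCheck {
  // scheduleToWorkflow: schedule name -> target workflow name (hypothetical shape)
  // workflowNames: workflows actually declared by the application
  static void verifySchedules(String appName,
                              Map<String, String> scheduleToWorkflow,
                              Set<String> workflowNames) {
    for (Map.Entry<String, String> entry : scheduleToWorkflow.entrySet()) {
      String workflow = entry.getValue();
      if (!workflowNames.contains(workflow)) {
        // Fail fast with the same kind of message the snippet above produces.
        throw new IllegalArgumentException(String.format(
          "Schedule '%s' is invalid: Workflow '%s' is not configured in application '%s'",
          entry.getKey(), workflow, appName));
      }
    }
  }
}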
Use of io.cdap.cdap.internal.schedule.ScheduleCreationSpec in project cdap by caskdata.
In the class ApplicationSpecificationCodec, the deserialize method:
@Override
public ApplicationSpecification deserialize(JsonElement json, Type typeOfT,
                                            JsonDeserializationContext context) throws JsonParseException {
  JsonObject jsonObj = json.getAsJsonObject();
  String name = jsonObj.get("name").getAsString();
  String appVersion = ApplicationId.DEFAULT_VERSION;
  if (jsonObj.has("appVersion")) {
    appVersion = jsonObj.get("appVersion").getAsString();
  }
  String appCDAPVersion = jsonObj.has("appCDAPVersion") ? jsonObj.get("appCDAPVersion").getAsString() : null;
  String description = jsonObj.get("description").getAsString();
  String configuration = null;
  if (jsonObj.has("configuration")) {
    configuration = jsonObj.get("configuration").getAsString();
  }
  ArtifactId artifactId = context.deserialize(jsonObj.get("artifactId"), ArtifactId.class);
  Map<String, String> datasetModules = deserializeMap(jsonObj.get("datasetModules"), context, String.class);
  Map<String, DatasetCreationSpec> datasetInstances =
    deserializeMap(jsonObj.get("datasetInstances"), context, DatasetCreationSpec.class);
  Map<String, MapReduceSpecification> mapReduces =
    deserializeMap(jsonObj.get("mapReduces"), context, MapReduceSpecification.class);
  Map<String, SparkSpecification> sparks = deserializeMap(jsonObj.get("sparks"), context, SparkSpecification.class);
  Map<String, WorkflowSpecification> workflows =
    deserializeMap(jsonObj.get("workflows"), context, WorkflowSpecification.class);
  Map<String, ServiceSpecification> services =
    deserializeMap(jsonObj.get("services"), context, ServiceSpecification.class);
  Map<String, ScheduleCreationSpec> programSchedules =
    deserializeMap(jsonObj.get("programSchedules"), context, ScheduleCreationSpec.class);
  Map<String, WorkerSpecification> workers =
    deserializeMap(jsonObj.get("workers"), context, WorkerSpecification.class);
  Map<String, Plugin> plugins = deserializeMap(jsonObj.get("plugins"), context, Plugin.class);
  return new DefaultApplicationSpecification(name, appVersion, appCDAPVersion, description, configuration,
                                             artifactId, datasetModules, datasetInstances, mapReduces, sparks,
                                             workflows, services, programSchedules, workers, plugins);
}
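Here deserializeMap is a helper inherited from the codec's base class that reads a JSON object into a Map<String, V> through the deserialization context. A minimal sketch of how such a helper can be written with plain Gson follows; the class name and wiring are illustrative, not CDAP's actual base class.

import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.Collections;
import java.util.Map;

final class CodecSupport {
  // Deserialize a JSON object into a Map<String, V> using the supplied context,
  // mirroring the role of deserializeMap in the snippet above.
  static <V> Map<String, V> deserializeMap(JsonElement json, JsonDeserializationContext context,
                                           Class<V> valueClass) {
    if (json == null || json.isJsonNull()) {
      return Collections.emptyMap();
    }
    Type mapType = TypeToken.getParameterized(Map.class, String.class, valueClass).getType();
    return context.deserialize(json, mapType);
  }
}

A codec like the one above is typically registered with new GsonBuilder().registerTypeAdapter(ApplicationSpecification.class, new ApplicationSpecificationCodec()).create(), so that Gson delegates to it whenever it encounters that type.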