Use of co.cask.cdap.internal.app.DefaultApplicationSpecification in project cdap by caskdata.
In the class ApplicationRegistrationStage, the method getApplicationSpecificationWithExistingSchedules:
/**
 * Reuses the schedules from the existing app spec; however, if workflows were deleted in the
 * new app spec, the schedules assigned to those deleted workflows are removed as well.
 * Constructs and returns an app spec based on the filtered schedules and the programs from
 * the new app spec.
 *
 * @param input the deployed application, carrying the new specification and (possibly null)
 *              existing specification
 * @return {@link ApplicationSpecification} updated spec.
 */
private ApplicationSpecification getApplicationSpecificationWithExistingSchedules(ApplicationWithPrograms input) {
  Map<String, ScheduleSpecification> filteredExistingSchedules = new HashMap<>();
  if (input.getExistingAppSpec() != null) {
    // Workflows present in the existing spec but absent from the new spec, i.e. deleted.
    Set<String> deletedWorkflows = Maps.difference(input.getExistingAppSpec().getWorkflows(),
                                                   input.getSpecification().getWorkflows())
      .entriesOnlyOnLeft().keySet();
    // Materialize the filtered schedules into the HashMap rather than keeping the lazy
    // Maps.filterEntries view: a view re-evaluates the predicate on every access and keeps
    // a live reference to the existing app spec's schedule map.
    for (Map.Entry<String, ScheduleSpecification> entry
      : input.getExistingAppSpec().getSchedules().entrySet()) {
      // Keep a schedule only if the workflow it targets still exists in the new spec.
      if (!deletedWorkflows.contains(entry.getValue().getProgram().getProgramName())) {
        filteredExistingSchedules.put(entry.getKey(), entry.getValue());
      }
    }
  }
  ApplicationSpecification newSpecification = input.getSpecification();
  return new DefaultApplicationSpecification(newSpecification.getName(), newSpecification.getAppVersion(),
                                             newSpecification.getDescription(),
                                             newSpecification.getConfiguration(),
                                             newSpecification.getArtifactId(), newSpecification.getStreams(),
                                             newSpecification.getDatasetModules(),
                                             newSpecification.getDatasets(), newSpecification.getFlows(),
                                             newSpecification.getMapReduce(), newSpecification.getSpark(),
                                             newSpecification.getWorkflows(), newSpecification.getServices(),
                                             filteredExistingSchedules,
                                             newSpecification.getProgramSchedules(),
                                             newSpecification.getWorkers(), newSpecification.getPlugins());
}
Use of co.cask.cdap.internal.app.DefaultApplicationSpecification in project cdap by caskdata.
In the class DefaultAppConfigurer, the method createSpecification:
/**
 * Builds the {@link ApplicationSpecification} for this configurer, resolving the application
 * name/version defaults and building any schedule builders into concrete creation specs.
 *
 * @param applicationName application name; may be null only for apps before 3.2 that were
 *                        not upgraded, in which case the configurer's own name is used
 * @param applicationVersion application version; may be null, in which case
 *                           {@link ApplicationId#DEFAULT_VERSION} is used
 * @return the built application specification
 */
public ApplicationSpecification createSpecification(@Nullable String applicationName, @Nullable String applicationVersion) {
  // applicationName can be null only for apps before 3.2 that were not upgraded
  ArtifactScope scope = this.artifactId.getNamespace().equals(Id.Namespace.SYSTEM)
    ? ArtifactScope.SYSTEM : ArtifactScope.USER;
  // Named specArtifactId (instead of artifactId) to avoid shadowing the field this.artifactId
  // it is derived from.
  ArtifactId specArtifactId = new ArtifactId(this.artifactId.getName(), this.artifactId.getVersion(), scope);
  String namespace = deployNamespace.toEntityId().getNamespace();
  String appName = applicationName == null ? name : applicationName;
  String appVersion = applicationVersion == null ? ApplicationId.DEFAULT_VERSION : applicationVersion;
  Map<String, ScheduleCreationSpec> builtScheduleSpecs = new HashMap<>();
  for (Map.Entry<String, ScheduleCreationSpec> entry : scheduleSpecs.entrySet()) {
    // If the ScheduleCreationSpec is really a builder, then build the ScheduleCreationSpec
    if (entry.getValue() instanceof DefaultScheduleBuilder.ScheduleCreationBuilder) {
      DefaultScheduleBuilder.ScheduleCreationBuilder builder =
        (DefaultScheduleBuilder.ScheduleCreationBuilder) entry.getValue();
      builtScheduleSpecs.put(entry.getKey(), builder.build(namespace, appName, appVersion));
    } else {
      builtScheduleSpecs.put(entry.getKey(), entry.getValue());
    }
  }
  return new DefaultApplicationSpecification(appName, appVersion, description, configuration,
                                             specArtifactId, getStreams(), getDatasetModules(),
                                             getDatasetSpecs(), flows, mapReduces, sparks, workflows,
                                             services, builtScheduleSpecs, workers, getPlugins());
}
Use of co.cask.cdap.internal.app.DefaultApplicationSpecification in project cdap by caskdata.
In the class MapReduceContextConfigTest, the method testManyMacrosInAppSpec:
@Test
public void testManyMacrosInAppSpec() {
  Configuration hadoopConf = new Configuration();
  MapReduceContextConfig contextConfig = new MapReduceContextConfig(hadoopConf);
  // Build an app configuration string of 100 macros ${0}${1}...${99}, registering each
  // key as an int property in the Hadoop configuration.
  StringBuilder macroConfig = new StringBuilder();
  for (int key = 0; key < 100; key++) {
    macroConfig.append("${").append(key).append("}");
    hadoopConf.setInt(String.valueOf(key), key);
  }
  ApplicationSpecification appSpec =
    new DefaultApplicationSpecification("name", "desc", macroConfig.toString(),
                                        new ArtifactId("artifact", new ArtifactVersion("1.0.0"),
                                                       ArtifactScope.USER),
                                        Collections.<String, StreamSpecification>emptyMap(),
                                        Collections.<String, String>emptyMap(),
                                        Collections.<String, DatasetCreationSpec>emptyMap(),
                                        Collections.<String, FlowSpecification>emptyMap(),
                                        Collections.<String, MapReduceSpecification>emptyMap(),
                                        Collections.<String, SparkSpecification>emptyMap(),
                                        Collections.<String, WorkflowSpecification>emptyMap(),
                                        Collections.<String, ServiceSpecification>emptyMap(),
                                        Collections.<String, ScheduleCreationSpec>emptyMap(),
                                        Collections.<String, WorkerSpecification>emptyMap(),
                                        Collections.<String, Plugin>emptyMap());
  // Round-trip the spec through the context config; the macro-laden configuration string
  // must survive unchanged.
  contextConfig.setApplicationSpecification(appSpec);
  Assert.assertEquals(appSpec.getConfiguration(),
                      contextConfig.getApplicationSpecification().getConfiguration());
}
Aggregations