Use of com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails in project cloudbreak by hortonworks.
The class StackToBlueprintPreparationObjectConverter, method convert:
@Override
public BlueprintPreparationObject convert(Stack source) {
    try {
        Optional<SmartSenseSubscription> aDefault = smartSenseSubscriptionService.getDefault();
        Cluster cluster = clusterService.getById(source.getCluster().getId());
        FileSystem fileSystem = cluster.getFileSystem();
        LdapConfig ldapConfig = cluster.getLdapConfig();
        StackRepoDetails hdpRepo = clusterComponentConfigProvider.getHDPRepo(cluster.getId());
        String stackRepoDetailsHdpVersion = hdpRepo != null ? hdpRepo.getHdpVersion() : null;
        Map<String, List<InstanceMetaData>> groupInstances = instanceGroupMetadataCollector.collectMetadata(source);
        HdfConfigs hdfConfigs = hdfConfigProvider.createHdfConfig(cluster.getHostGroups(), groupInstances, cluster.getBlueprint().getBlueprintText());
        BlueprintStackInfo blueprintStackInfo = stackInfoService.blueprintStackInfo(cluster.getBlueprint().getBlueprintText());
        FileSystemConfigurationView fileSystemConfigurationView = null;
        if (source.getCluster().getFileSystem() != null) {
            fileSystemConfigurationView = new FileSystemConfigurationView(
                    fileSystemConfigurationProvider.fileSystemConfiguration(fileSystem, source),
                    fileSystem == null ? false : fileSystem.isDefaultFs());
        }
        IdentityUser identityUser = userDetailsService.getDetails(cluster.getOwner(), UserFilterField.USERID);
        return BlueprintPreparationObject.Builder.builder()
                .withFlexSubscription(source.getFlexSubscription())
                .withRdsConfigs(postgresConfigService.createRdsConfigIfNeeded(source, cluster))
                .withHostgroups(hostGroupService.getByCluster(cluster.getId()))
                .withGateway(cluster.getGateway())
                .withBlueprintView(new BlueprintView(cluster, blueprintStackInfo))
                .withStackRepoDetailsHdpVersion(stackRepoDetailsHdpVersion)
                .withFileSystemConfigurationView(fileSystemConfigurationView)
                .withGeneralClusterConfigs(generalClusterConfigsProvider.generalClusterConfigs(source, cluster, identityUser))
                .withSmartSenseSubscriptionId(aDefault.isPresent() ? aDefault.get().getSubscriptionId() : null)
                .withLdapConfig(ldapConfig)
                .withHdfConfigs(hdfConfigs)
                .withKerberosConfig(cluster.isSecure() ? cluster.getKerberosConfig() : null)
                .build();
    } catch (BlueprintProcessingException e) {
        throw new CloudbreakServiceException(e.getMessage(), e);
    } catch (IOException e) {
        throw new CloudbreakServiceException(e.getMessage(), e);
    }
}
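The HDP version passed to the builder comes from the null check on getHDPRepo above. As a minimal sketch, the same lookup can be phrased with Optional; the helper class below is hypothetical and relies only on StackRepoDetails.getHdpVersion as shown in the snippet.

import java.util.Optional;

import com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails;

// Hypothetical helper: mirrors "hdpRepo != null ? hdpRepo.getHdpVersion() : null" with Optional.
public final class StackRepoVersions {

    private StackRepoVersions() {
    }

    public static String hdpVersionOrNull(StackRepoDetails hdpRepo) {
        return Optional.ofNullable(hdpRepo)
                .map(StackRepoDetails::getHdpVersion)
                .orElse(null);
    }
}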
The class StackToStackResponseConverter, method convertComponentConfig:
private void convertComponentConfig(StackResponse stackJson, Stack source) {
    try {
        if (source.getCluster() != null) {
            StackRepoDetails stackRepoDetails = clusterComponentConfigProvider.getHDPRepo(source.getCluster().getId());
            if (stackRepoDetails != null && stackRepoDetails.getStack() != null) {
                String repositoryVersion = stackRepoDetails.getStack().get(StackRepoDetails.REPOSITORY_VERSION);
                if (!StringUtils.isEmpty(repositoryVersion)) {
                    stackJson.setHdpVersion(repositoryVersion);
                } else {
                    stackJson.setHdpVersion(stackRepoDetails.getHdpVersion());
                }
            }
            AmbariRepo ambariRepo = clusterComponentConfigProvider.getAmbariRepo(source.getCluster().getId());
            if (ambariRepo != null) {
                stackJson.setAmbariVersion(ambariRepo.getVersion());
            }
        }
        CloudbreakDetails cloudbreakDetails = componentConfigProvider.getCloudbreakDetails(source.getId());
        if (cloudbreakDetails != null) {
            stackJson.setCloudbreakDetails(getConversionService().convert(cloudbreakDetails, CloudbreakDetailsJson.class));
        }
    } catch (RuntimeException e) {
        LOGGER.error("Failed to convert dynamic component.", e);
    }
}
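The version resolution above prefers the repository_version entry of the stack map and falls back to getHdpVersion. A minimal, stand-alone sketch of that ordering follows; the HdpVersions helper is hypothetical, and a plain null/empty check stands in for the StringUtils call so no extra dependency is assumed.

import com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails;

// Hypothetical helper: prefers stack.get(REPOSITORY_VERSION), falls back to getHdpVersion().
public final class HdpVersions {

    private HdpVersions() {
    }

    public static String resolve(StackRepoDetails stackRepoDetails) {
        if (stackRepoDetails == null || stackRepoDetails.getStack() == null) {
            return null;
        }
        String repositoryVersion = stackRepoDetails.getStack().get(StackRepoDetails.REPOSITORY_VERSION);
        if (repositoryVersion == null || repositoryVersion.isEmpty()) {
            return stackRepoDetails.getHdpVersion();
        }
        return repositoryVersion;
    }
}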
The class ImageService, method createHDPRepo:
private StackRepoDetails createHDPRepo(StackDetails hdpStack) {
StackRepoDetails repo = new StackRepoDetails();
repo.setHdpVersion(hdpStack.getVersion());
repo.setStack(hdpStack.getRepo().getStack());
repo.setUtil(hdpStack.getRepo().getUtil());
return repo;
}
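createHDPRepo simply copies the version and the stack/util repository maps from the catalog entry. For illustration, a StackRepoDetails can be populated by hand the same way; this sketch assumes setStack and setUtil take Map<String, String> (as implied by the getter usage above), and the version string and map values are made-up sample data, not values from the Cloudbreak image catalog.

import java.util.HashMap;
import java.util.Map;

import com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails;

public final class SampleHdpRepo {

    private SampleHdpRepo() {
    }

    // Sample data only: the version strings below are illustrative assumptions.
    public static StackRepoDetails sample() {
        Map<String, String> stack = new HashMap<>();
        stack.put(StackRepoDetails.REPOSITORY_VERSION, "2.6.5.0-292");

        StackRepoDetails repo = new StackRepoDetails();
        repo.setHdpVersion("2.6");
        repo.setStack(stack);
        repo.setUtil(new HashMap<>());
        return repo;
    }
}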
use of com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails in project cloudbreak by hortonworks.
The class ImageService, method getComponents:
private List<Component> getComponents(Stack stack, Map<InstanceGroupType, String> userData,
        com.sequenceiq.cloudbreak.cloud.model.catalog.Image imgFromCatalog, String imageName, String imageCatalogUrl,
        String imageCatalogName, String imageId) throws JsonProcessingException, CloudbreakImageCatalogException {
    List<Component> components = new ArrayList<>();
    Image image = new Image(imageName, userData, imgFromCatalog.getOsType(), imageCatalogUrl, imageCatalogName, imageId);
    Component imageComponent = new Component(ComponentType.IMAGE, ComponentType.IMAGE.name(), new Json(image), stack);
    components.add(imageComponent);
    if (imgFromCatalog.getStackDetails() != null) {
        components.add(getAmbariComponent(stack, imgFromCatalog));
        StackDetails stackDetails = imgFromCatalog.getStackDetails();
        Component stackRepoComponent;
        if (!imgFromCatalog.getStackDetails().getRepo().getKnox().isEmpty()) {
            StackRepoDetails hdfRepo = createHDFRepo(stackDetails);
            stackRepoComponent = new Component(ComponentType.HDF_REPO_DETAILS, ComponentType.HDF_REPO_DETAILS.name(), new Json(hdfRepo), stack);
        } else {
            StackRepoDetails repo = createHDPRepo(stackDetails);
            stackRepoComponent = new Component(ComponentType.HDP_REPO_DETAILS, ComponentType.HDP_REPO_DETAILS.name(), new Json(repo), stack);
        }
        components.add(stackRepoComponent);
    }
    return components;
}
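The branch above stores the repo details under HDF_REPO_DETAILS when the catalog entry ships Knox repository information and under HDP_REPO_DETAILS otherwise. A minimal sketch of that decision, reduced to a boolean because the exact type returned by getKnox() is not shown in the snippet; the selector class and returned strings are hypothetical.

// Hypothetical selector mirroring the HDF/HDP branch in getComponents.
public final class RepoComponentTypeSelector {

    private RepoComponentTypeSelector() {
    }

    public static String repoComponentTypeName(boolean knoxRepoPresent) {
        return knoxRepoPresent ? "HDF_REPO_DETAILS" : "HDP_REPO_DETAILS";
    }
}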
The class ClusterCommonService, method recreateCluster:
private void recreateCluster(Long stackId, UpdateClusterJson updateJson) {
    IdentityUser user = authenticatedUserService.getCbUser();
    Set<HostGroup> hostGroups = new HashSet<>();
    for (HostGroupRequest json : updateJson.getHostgroups()) {
        HostGroup hostGroup = conversionService.convert(json, HostGroup.class);
        hostGroup = hostGroupDecorator.decorate(hostGroup, json, user, stackId, false, false);
        hostGroups.add(hostGroup);
    }
    AmbariStackDetailsJson stackDetails = updateJson.getAmbariStackDetails();
    StackRepoDetails stackRepoDetails = null;
    if (stackDetails != null) {
        stackRepoDetails = conversionService.convert(stackDetails, StackRepoDetails.class);
    }
    clusterService.recreate(stackId, updateJson.getBlueprintId(), hostGroups, updateJson.getValidateBlueprint(),
            stackRepoDetails, updateJson.getKerberosPassword(), updateJson.getKerberosPrincipal());
}
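The AmbariStackDetailsJson-to-StackRepoDetails mapping is delegated to a ConversionService (presumably Spring's) and guarded against a missing request part. A hedged sketch of that guard as a reusable component; the wrapper class is hypothetical, and the source parameter is left generic because the fields of AmbariStackDetailsJson are not shown here.

import org.springframework.core.convert.ConversionService;

import com.sequenceiq.cloudbreak.cloud.model.component.StackRepoDetails;

// Hypothetical wrapper around the null-guarded conversion used in recreateCluster.
public class StackRepoDetailsConverterSketch {

    private final ConversionService conversionService;

    public StackRepoDetailsConverterSketch(ConversionService conversionService) {
        this.conversionService = conversionService;
    }

    // Returns null when no Ambari stack details were supplied in the update request.
    public StackRepoDetails convertOrNull(Object ambariStackDetails) {
        return ambariStackDetails == null ? null : conversionService.convert(ambariStackDetails, StackRepoDetails.class);
    }
}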