Use of com.epam.pipeline.entity.pipeline.ResolvedConfiguration in project cloud-pipeline by epam.
Class ParameterMapper, method resolveParameters:
/**
 * Gets configurations for a list of {@link AbstractRunConfigurationEntry}, resolving
 * template parameters from the given metadata entity and project metadata.
 * @param entity {@link MetadataEntity} to use for parameter template mapping
 * @param entries entries to resolve
 * @param projectData metadata of the associated project {@link com.epam.pipeline.entity.pipeline.Folder}
 * @return configuration for all input entries
 */
public ResolvedConfiguration resolveParameters(MetadataEntity entity,
                                               List<? extends AbstractRunConfigurationEntry> entries,
                                               Map<String, PipeConfValue> projectData) {
    if (CollectionUtils.isEmpty(entries)) {
        return new ResolvedConfiguration(entity, Collections.emptyMap());
    }
    if (entity == null) {
        return new ResolvedConfiguration(null, entries.stream()
                .collect(Collectors.toMap(AbstractRunConfigurationEntry::getName,
                        this::getEntryConfiguration)));
    }
    Map<MetadataKey, MetadataEntity> entityReferences = loadReferences(entity);
    Map<String, PipelineConfiguration> resolved = new HashMap<>();
    entries.forEach(entry -> {
        checkClassIdMatch(entity, entry.getRootEntityId());
        PipelineConfiguration configuration = getEntryConfiguration(entry);
        if (MapUtils.isNotEmpty(configuration.getParameters())) {
            configuration.setParameters(
                    mapParameters(entity, projectData, configuration.getParameters(), entityReferences));
        }
        resolved.put(entry.getName(), configuration);
    });
    return new ResolvedConfiguration(entity, resolved);
}
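Usage note: the sketch below is a minimal, hypothetical caller (not taken from the project sources) showing how the returned ResolvedConfiguration can be consumed; the injected parameterMapper field is an assumption, and only resolveParameters and getConfiguration, which appear elsewhere in this listing, are relied on.

// Hypothetical caller sketch: resolve entries against one metadata entity and read back
// the per-entry PipelineConfiguration by entry name (configurations are stored under
// entry.getName() in the method above).
public PipelineConfiguration resolveForEntity(final MetadataEntity entity,
                                              final List<RunConfigurationEntry> entries,
                                              final Map<String, PipeConfValue> projectData) {
    // parameterMapper is assumed to be an injected ParameterMapper instance
    final ResolvedConfiguration resolved = parameterMapper.resolveParameters(entity, entries, projectData);
    return resolved.getConfiguration(entries.get(0).getName());
}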
Use of com.epam.pipeline.entity.pipeline.ResolvedConfiguration in project cloud-pipeline by epam.
Class ParameterMapper, method resolveConfigurations:
public List<ResolvedConfiguration> resolveConfigurations(
        AnalysisConfiguration<? extends AbstractRunConfigurationEntry> configuration) {
    FolderWithMetadata project = folderManager.getProject(
            configuration.getConfigurationId(), AclClass.CONFIGURATION);
    Map<String, PipeConfValue> projectData = project == null ? new HashMap<>() : project.getData();
    List<? extends AbstractRunConfigurationEntry> entries = configuration.getEntries();
    if (CollectionUtils.isEmpty(configuration.getEntitiesIds())) {
        return Collections.singletonList(resolveParameters(entries, projectData));
    }
    // In case of array references one entity may be expanded to a list of referenced
    // entities, e.g. a SampleSet is expanded to a list of Sample entities
    // TODO: The only reason to store it as a map is to add an association from the run
    // TODO: to the initial entity the link comes from. Find a better solution.
    Map<Long, List<MetadataEntity>> targetEntities = fetchAndExpandInputEntities(configuration);
    // resolve all parameter references in configurations
    Map<Long, ResolvedConfiguration> resolvedConfigurations = targetEntities.values().stream()
            .flatMap(Collection::stream)
            .collect(Collectors.toMap(BaseEntity::getId,
                    entity -> resolveParameters(entity, entries, projectData)));
    return targetEntities.entrySet().stream()
            .map(idToEntities -> idToEntities.getValue().stream()
                    .map(entity -> {
                        ResolvedConfiguration currentConfiguration =
                                resolvedConfigurations.get(entity.getId());
                        currentConfiguration.getAssociatedEntityIds().add(idToEntities.getKey());
                        return currentConfiguration;
                    })
                    .collect(Collectors.toList()))
            .flatMap(Collection::stream)
            .collect(Collectors.toList());
}
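Usage note: a hedged illustration of the expansion step above; the parameterMapper and log fields and the analysisConfiguration variable are hypothetical, while resolveConfigurations and getAssociatedEntityIds appear in this listing.

// Hypothetical illustration: if the configuration's entity ids contain a single SampleSet id
// that fetchAndExpandInputEntities expands into N Sample entities, the returned list holds
// N ResolvedConfiguration objects, each carrying the SampleSet id in getAssociatedEntityIds().
List<ResolvedConfiguration> perSample = parameterMapper.resolveConfigurations(analysisConfiguration);
perSample.forEach(conf ->
        log.debug("Resolved configuration associated with entity ids: {}", conf.getAssociatedEntityIds()));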
Use of com.epam.pipeline.entity.pipeline.ResolvedConfiguration in project cloud-pipeline by epam.
Class CloudPlatformRunner, method runConfiguration:
private List<PipelineRun> runConfiguration(Long configurationId,
                                           List<RunConfigurationEntry> entries,
                                           ResolvedConfiguration resolvedConfigurations) {
    SplitConfig splitConfig = new SplitConfig(entries);
    RunConfigurationEntry mainEntry = splitConfig.getMain();
    List<RunConfigurationEntry> childEntries = splitConfig.getChildEntries();
    boolean isMasterNFSServer = pipelineConfigurationManager.hasNFSParameter(mainEntry.getConfiguration())
            || childEntries.stream()
                    .noneMatch(entry -> pipelineConfigurationManager.hasNFSParameter(entry.getConfiguration()));
    boolean nfsStarted = isMasterNFSServer;
    PipelineConfiguration mainConfiguration = resolvedConfigurations.getConfiguration(mainEntry.getName());
    List<PipelineConfiguration> childConfigurations = childEntries.stream()
            .map(entry -> resolvedConfigurations.getConfiguration(entry.getName()))
            .collect(Collectors.toList());
    int masterNodeCount = getNodeCount(mainConfiguration.getNodeCount(), 0);
    int totalNodes = childConfigurations.stream()
            .map(PipelineConfiguration::getNodeCount)
            .mapToInt(nodeCount -> getNodeCount(nodeCount, 1))
            .sum();
    totalNodes += masterNodeCount;
    log.debug("Running total {} nodes", totalNodes + 1);
    mainConfiguration.setNodeCount(totalNodes);
    // create master run
    List<PipelineRun> masterRun = runConfigurationEntry(mainEntry, mainConfiguration, 1, null,
            isMasterNFSServer, resolvedConfigurations.getAllAssociatedIds(), configurationId);
    List<PipelineRun> launched = new ArrayList<>(masterRun);
    String clusterId = String.valueOf(masterRun.get(0).getId());
    // create master workers
    if (masterNodeCount > 0) {
        mainEntry.getConfiguration().setWorkerCmd(WORKER_CMD_TEMPLATE);
        launched.addAll(runConfigurationEntry(mainEntry, mainConfiguration, masterNodeCount, clusterId,
                false, resolvedConfigurations.getAllAssociatedIds(), configurationId));
    }
    // create all other workers
    for (int i = 0; i < childConfigurations.size(); i++) {
        PipelineConfiguration childConfig = childConfigurations.get(i);
        boolean startNFS = !nfsStarted && pipelineConfigurationManager.hasNFSParameter(childConfig);
        nfsStarted = nfsStarted || startNFS;
        int copies = getNodeCount(childConfig.getNodeCount(), 1);
        launched.addAll(runConfigurationEntry(childEntries.get(i), childConfig, copies, clusterId,
                startNFS, resolvedConfigurations.getAllAssociatedIds(), configurationId));
    }
    return launched;
}
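Usage note: a minimal sketch of how the resolved configurations could be fed into this runner, one run per expanded input entity; runAnalysis and its parameters are hypothetical names, while runConfiguration's signature is taken from the snippet above.

// Hypothetical glue code: launch the configuration entries once per ResolvedConfiguration,
// i.e. once per expanded input entity returned by ParameterMapper#resolveConfigurations.
private List<PipelineRun> runAnalysis(final Long configurationId,
                                      final List<RunConfigurationEntry> entries,
                                      final List<ResolvedConfiguration> resolvedConfigurations) {
    return resolvedConfigurations.stream()
            .map(resolved -> runConfiguration(configurationId, entries, resolved))
            .flatMap(Collection::stream)
            .collect(Collectors.toList());
}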