Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.
From the Builder class: the installStage method.
private Set<Feature> installStage(Profile installedProfile, Set<Feature> allBootFeatures, FeaturesProcessor processor) throws Exception {
    LOGGER.info("Install stage");
    //
    // Handle installed profiles
    //
    Profile installedOverlay = Profiles.getOverlay(installedProfile, allProfiles, environment);
    Profile installedEffective = Profiles.getEffective(installedOverlay, false);
    Downloader downloader = manager.createDownloader();
    // Load installed repositories
    LOGGER.info(" Loading installed repositories");
    Map<String, Features> installedRepositories = loadRepositories(manager, installedEffective.getRepositories(), true, processor);
    // Compute installed feature dependencies
    Set<Feature> allInstalledFeatures = new HashSet<>();
    for (Features repo : installedRepositories.values()) {
        allInstalledFeatures.addAll(repo.getFeature());
    }
    // Add boot features for search
    allInstalledFeatures.addAll(allBootFeatures);
    FeatureSelector selector = new FeatureSelector(allInstalledFeatures);
    Set<Feature> installedFeatures = selector.getMatching(installedEffective.getFeatures());
    ArtifactInstaller installer = new ArtifactInstaller(systemDirectory, downloader, blacklist);
    for (Feature feature : installedFeatures) {
        if (feature.isBlacklisted()) {
            LOGGER.info(" Feature " + feature.getId() + " is blacklisted, ignoring");
            continue;
        }
        LOGGER.info(" Feature {} is defined as an installed feature", feature.getId());
        for (Bundle bundle : feature.getBundle()) {
            if (!ignoreDependencyFlag || !bundle.isDependency()) {
                installer.installArtifact(bundle);
            }
        }
        // Install config files
        for (ConfigFile configFile : feature.getConfigfile()) {
            installer.installArtifact(configFile.getLocation().trim());
        }
        for (Conditional cond : feature.getConditional()) {
            if (cond.isBlacklisted()) {
                LOGGER.info(" Conditional " + cond.getConditionId() + " is blacklisted, ignoring");
            }
            for (Bundle bundle : cond.getBundle()) {
                if (!ignoreDependencyFlag || !bundle.isDependency()) {
                    installer.installArtifact(bundle);
                }
            }
        }
    }
    for (String location : installedEffective.getBundles()) {
        installer.installArtifact(location);
    }
    downloader.await();
    return allInstalledFeatures;
}
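Every snippet on this page follows the same asynchronous pattern: create a Downloader from the DownloadManager, schedule downloads (optionally with a callback that runs once the artifact is available locally), then block on await() until all scheduled downloads have finished. A minimal sketch of that pattern follows; the Maven location and the callback body are placeholders, and manager stands for whatever DownloadManager is in scope, so this is not code taken from Karaf itself.

// Minimal sketch of the Downloader usage pattern shared by the methods on this page.
// "manager" is assumed to be an org.apache.karaf.features.internal.download.DownloadManager;
// the artifact location is a made-up example.
Downloader downloader = manager.createDownloader();
// Schedule an asynchronous download; the callback runs once the file is available locally.
downloader.download("mvn:org.example/some-bundle/1.0.0", provider -> {
    // provider.getFile() points at the downloaded artifact on disk
    System.out.println("Downloaded to " + provider.getFile());
});
// Block until every scheduled download (including ones scheduled from callbacks) has completed.
downloader.await();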
Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.
From the Builder class: the bootStage method.
private Set<Feature> bootStage(Profile bootProfile, Profile startupEffective, FeaturesProcessor processor) throws Exception {
    LOGGER.info("Boot stage");
    //
    // Handle boot profiles
    //
    Profile bootOverlay = Profiles.getOverlay(bootProfile, allProfiles, environment);
    Profile bootEffective = Profiles.getEffective(bootOverlay, false);
    // Load boot repositories
    LOGGER.info(" Loading boot repositories");
    Map<String, Features> bootRepositories = loadRepositories(manager, bootEffective.getRepositories(), true, processor);
    // Compute boot feature dependencies
    Set<Feature> allBootFeatures = new HashSet<>();
    for (Features repo : bootRepositories.values()) {
        allBootFeatures.addAll(repo.getFeature());
    }
    // Generate a global feature
    Map<String, Dependency> generatedDep = new HashMap<>();
    generatedBootFeatureName = UUID.randomUUID().toString();
    Feature generated = new Feature();
    generated.setName(generatedBootFeatureName);
    // Add feature dependencies
    for (String nameOrPattern : bootEffective.getFeatures()) {
        // KARAF-5273: feature may be a pattern
        for (String dependency : FeatureSelector.getMatchingFeatures(nameOrPattern, bootRepositories.values())) {
            Dependency dep = generatedDep.get(dependency);
            if (dep == null) {
                dep = createDependency(dependency);
                generated.getFeature().add(dep);
                generatedDep.put(dep.getName(), dep);
            }
            dep.setDependency(false);
        }
    }
    // Add bundles
    for (String location : bootEffective.getBundles()) {
        location = location.replace("profile:", "file:etc/");
        int intLevel = -100;
        if (location.contains(START_LEVEL)) {
            // extract start-level for this bundle
            String level = location.substring(location.indexOf(START_LEVEL));
            level = level.substring(START_LEVEL.length() + 1);
            if (level.startsWith("\"")) {
                level = level.substring(1, level.length() - 1);
            }
            intLevel = Integer.parseInt(level);
            LOGGER.debug("bundle start-level: " + level);
            location = location.substring(0, location.indexOf(START_LEVEL) - 1);
            LOGGER.debug("new bundle location after strip start-level: " + location);
        }
        Bundle bun = new Bundle();
        if (intLevel > 0) {
            bun.setStartLevel(intLevel);
        }
        bun.setLocation(location);
        generated.getBundle().add(bun);
    }
    Features rep = new Features();
    rep.setName(UUID.randomUUID().toString());
    rep.getRepository().addAll(bootEffective.getRepositories());
    rep.getFeature().add(generated);
    allBootFeatures.add(generated);
    Downloader downloader = manager.createDownloader();
    // Compute boot feature dependencies
    FeatureSelector selector = new FeatureSelector(allBootFeatures);
    Set<Feature> bootFeatures = selector.getMatching(singletonList(generated.getName()));
    for (Feature feature : bootFeatures) {
        if (feature.isBlacklisted()) {
            LOGGER.info(" Feature " + feature.getId() + " is blacklisted, ignoring");
            continue;
        }
        LOGGER.info(" Feature " + feature.getId() + " is defined as a boot feature");
        // add the feature in the system folder
        Set<BundleInfo> bundleInfos = new HashSet<>();
        for (Bundle bundle : feature.getBundle()) {
            if (!ignoreDependencyFlag || !bundle.isDependency()) {
                bundleInfos.add(bundle);
            }
        }
        for (Conditional cond : feature.getConditional()) {
            if (cond.isBlacklisted()) {
                LOGGER.info(" Conditional " + cond.getConditionId() + " is blacklisted, ignoring");
            }
            for (Bundle bundle : cond.getBundle()) {
                if (!ignoreDependencyFlag || !bundle.isDependency()) {
                    bundleInfos.add(bundle);
                }
            }
        }
        // Build optional features and known prerequisites
        Map<String, List<String>> prereqs = new HashMap<>();
        prereqs.put("blueprint:", Arrays.asList("deployer", "aries-blueprint"));
        prereqs.put("spring:", Arrays.asList("deployer", "spring"));
        prereqs.put("wrap:", Collections.singletonList("wrap"));
        prereqs.put("war:", Collections.singletonList("war"));
        ArtifactInstaller installer = new ArtifactInstaller(systemDirectory, downloader, blacklist);
        for (BundleInfo bundleInfo : bundleInfos) {
            installer.installArtifact(bundleInfo);
            for (Map.Entry<String, List<String>> entry : prereqs.entrySet()) {
                if (bundleInfo.getLocation().trim().startsWith(entry.getKey())) {
                    for (String prereq : entry.getValue()) {
                        Dependency dep = generatedDep.get(prereq);
                        if (dep == null) {
                            dep = new Dependency();
                            dep.setName(prereq);
                            generated.getFeature().add(dep);
                            generatedDep.put(dep.getName(), dep);
                        }
                        dep.setPrerequisite(true);
                    }
                }
            }
        }
        new ConfigInstaller(etcDirectory, pidsToExtract).installConfigs(feature, downloader, installer);
        // Install libraries
        List<String> libraries = new ArrayList<>();
        for (Library library : feature.getLibraries()) {
            String lib = library.getLocation() + ";type:=" + library.getType() + ";export:=" + library.isExport() + ";delegate:=" + library.isDelegate();
            libraries.add(lib);
        }
        Path configPropertiesPath = etcDirectory.resolve("config.properties");
        Properties configProperties = new Properties(configPropertiesPath.toFile());
        downloadLibraries(downloader, configProperties, libraries, " ");
        downloader.await();
        // Reformat clauses
        reformatClauses(configProperties, Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA);
        reformatClauses(configProperties, Constants.FRAMEWORK_BOOTDELEGATION);
        configProperties.save();
    }
    // If there are bundles to install, we can't use the boot features only
    // so keep the generated feature
    Path featuresCfgFile = etcDirectory.resolve("org.apache.karaf.features.cfg");
    if (!generated.getBundle().isEmpty()) {
        File output = etcDirectory.resolve(rep.getName() + ".xml").toFile();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        JaxbUtil.marshal(rep, baos);
        ByteArrayInputStream bais;
        String repoUrl;
        if (karafVersion == KarafVersion.v24) {
            String str = baos.toString();
            str = str.replace("http://karaf.apache.org/xmlns/features/v1.3.0", "http://karaf.apache.org/xmlns/features/v1.2.0");
            str = str.replaceAll(" dependency=\".*?\"", "");
            str = str.replaceAll(" prerequisite=\".*?\"", "");
            for (Feature f : rep.getFeature()) {
                for (Dependency d : f.getFeature()) {
                    if (d.isPrerequisite()) {
                        if (!startupEffective.getFeatures().contains(d.getName())) {
                            LOGGER.warn("Feature " + d.getName() + " is a prerequisite and should be installed as a startup feature.");
                        }
                    }
                }
            }
            bais = new ByteArrayInputStream(str.getBytes());
            repoUrl = "file:etc/" + output.getName();
        } else {
            bais = new ByteArrayInputStream(baos.toByteArray());
            repoUrl = "file:${karaf.etc}/" + output.getName();
        }
        Files.copy(bais, output.toPath());
        Properties featuresProperties = new Properties(featuresCfgFile.toFile());
        featuresProperties.put(FEATURES_REPOSITORIES, repoUrl);
        featuresProperties.put(FEATURES_BOOT, generated.getName());
        featuresProperties.save();
    } else {
        String repos = getRepos(rep);
        String boot = getBootFeatures(generatedDep);
        Properties featuresProperties = new Properties(featuresCfgFile.toFile());
        featuresProperties.put(FEATURES_REPOSITORIES, repos);
        featuresProperties.put(FEATURES_BOOT, boot);
        reformatClauses(featuresProperties, FEATURES_REPOSITORIES);
        reformatClauses(featuresProperties, FEATURES_BOOT);
        featuresProperties.save();
    }
    downloader.await();
    return allBootFeatures;
}
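The start-level handling in the bundle loop above is plain string slicing: everything after the start-level marker is parsed as an integer (stripping optional surrounding quotes), and the marker is then cut off the location. A standalone sketch of that parsing, assuming START_LEVEL holds the literal "start-level" and using a made-up mvn URL:

// Standalone sketch of the start-level extraction performed in bootStage.
// The input location is a made-up example; START_LEVEL is assumed to be the literal "start-level".
String START_LEVEL = "start-level";
String location = "mvn:org.example/some-bundle/1.0.0;start-level=\"30\"";
int intLevel = -100;
if (location.contains(START_LEVEL)) {
    String level = location.substring(location.indexOf(START_LEVEL));
    level = level.substring(START_LEVEL.length() + 1);                   // drop "start-level="
    if (level.startsWith("\"")) {
        level = level.substring(1, level.length() - 1);                  // drop surrounding quotes
    }
    intLevel = Integer.parseInt(level);                                  // 30
    location = location.substring(0, location.indexOf(START_LEVEL) - 1); // "mvn:org.example/some-bundle/1.0.0"
}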
Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.
From the AssemblyDeployCallback class: the installConfigs method.
@Override
public void installConfigs(org.apache.karaf.features.Feature feature) throws IOException {
    assertNotBlacklisted(feature);
    // Install
    Downloader downloader = manager.createDownloader();
    for (Config config : ((Feature) feature).getConfig()) {
        Path configFile = etcDirectory.resolve(config.getName() + ".cfg");
        if (Files.exists(configFile) && !config.isAppend()) {
            LOGGER.info(" not changing existing config file: {}", homeDirectory.relativize(configFile));
            continue;
        }
        if (config.isExternal()) {
            downloader.download(config.getValue().trim(), provider -> {
                Path input = provider.getFile().toPath();
                byte[] data = Files.readAllBytes(input);
                if (config.isAppend()) {
                    LOGGER.info(" appending to config file: {}", homeDirectory.relativize(configFile));
                    Files.write(configFile, data, StandardOpenOption.APPEND);
                } else {
                    LOGGER.info(" adding config file: {}", homeDirectory.relativize(configFile));
                    Files.write(configFile, data);
                }
            });
        } else {
            byte[] data = config.getValue().getBytes();
            if (config.isAppend()) {
                LOGGER.info(" appending to config file: {}", homeDirectory.relativize(configFile));
                Files.write(configFile, data, StandardOpenOption.APPEND);
            } else {
                LOGGER.info(" adding config file: {}", homeDirectory.relativize(configFile));
                Files.write(configFile, data);
            }
        }
    }
    for (final ConfigFile configFile : ((Feature) feature).getConfigfile()) {
        String path = configFile.getFinalname();
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        path = substFinalName(path);
        final Path output = homeDirectory.resolve(path);
        final String finalPath = path;
        if (configFile.isOverride() || !Files.exists(output)) {
            downloader.download(configFile.getLocation(), provider -> {
                Path input = provider.getFile().toPath();
                if (configFile.isOverride()) {
                    LOGGER.info(" overwriting config file: {}", finalPath);
                } else {
                    LOGGER.info(" adding config file: {}", finalPath);
                }
                Files.copy(input, output, StandardCopyOption.REPLACE_EXISTING);
            });
        }
    }
}
Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.
From the AssemblyDeployCallback class: the installLibraries method.
@Override
public void installLibraries(org.apache.karaf.features.Feature feature) throws IOException {
    assertNotBlacklisted(feature);
    Downloader downloader = manager.createDownloader();
    List<String> libraries = new ArrayList<>();
    for (Library library : ((Feature) feature).getLibraries()) {
        String lib = library.getLocation() + ";type:=" + library.getType() + ";export:=" + library.isExport() + ";delegate:=" + library.isDelegate();
        libraries.add(lib);
    }
    if (!libraries.isEmpty()) {
        Path configPropertiesPath = etcDirectory.resolve("config.properties");
        Properties configProperties = new Properties(configPropertiesPath.toFile());
        builder.downloadLibraries(downloader, configProperties, libraries, " ");
    }
    try {
        downloader.await();
    } catch (Exception e) {
        throw new IOException("Error downloading libraries", e);
    }
}
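Note that the config.properties handling here and in Builder goes through org.apache.felix.utils.properties.Properties rather than java.util.Properties, so comments and layout in etc/config.properties survive the rewrite. A minimal sketch of that load-modify-save cycle, with a made-up property key:

// Minimal sketch of the Felix utils Properties round trip used above (org.apache.felix.utils.properties.Properties).
// The file path is the usual Karaf location; the property key and value are made-up examples.
File configPropertiesFile = new File("etc/config.properties");
Properties configProperties = new Properties(configPropertiesFile); // loads the file, preserving comments and order
configProperties.put("org.example.custom", "value");                // add or replace a property
configProperties.save();                                            // writes the file back in place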
Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.
From the Subsystem class: the downloadBundles method.
/**
 * Downloads bundles for all the features in the current and child subsystems, and also collects the bundles
 * as {@link DependencyInfo}.
 *
 * @param manager The {@link DownloadManager} to use.
 * @param featureResolutionRange The feature resolution range to use.
 * @param serviceRequirements The {@link FeaturesService.ServiceRequirementsBehavior} behavior to use.
 * @param repos The {@link RepositoryManager} to use.
 * @param callback The {@link SubsystemResolverCallback} to use.
 */
@SuppressWarnings("InfiniteLoopStatement")
public void downloadBundles(DownloadManager manager, String featureResolutionRange, final FeaturesService.ServiceRequirementsBehavior serviceRequirements, RepositoryManager repos, SubsystemResolverCallback callback) throws Exception {
    for (Subsystem child : children) {
        child.downloadBundles(manager, featureResolutionRange, serviceRequirements, repos, callback);
    }
    // collect BundleInfos for given feature - both direct <feature>/<bundle>s and <feature>/<conditional>/<bundle>s
    final Map<BundleInfo, Conditional> infos = new HashMap<>();
    final Downloader downloader = manager.createDownloader();
    if (feature != null) {
        for (Conditional cond : feature.getConditional()) {
            if (!cond.isBlacklisted()) {
                for (final BundleInfo bi : cond.getBundles()) {
                    // bundles from conditional features will be added as non-mandatory requirements
                    infos.put(bi, cond);
                }
            }
        }
        for (BundleInfo bi : feature.getBundles()) {
            infos.put(bi, null);
        }
    }
    // infos.keySet().removeIf(Blacklisting::isBlacklisted);
    for (Iterator<BundleInfo> iterator = infos.keySet().iterator(); iterator.hasNext(); ) {
        BundleInfo bi = iterator.next();
        if (bi.isBlacklisted()) {
            iterator.remove();
            if (callback != null) {
                callback.bundleBlacklisted(bi);
            }
        }
    }
    // all downloaded bundles
    final Map<String, ResourceImpl> bundles = new ConcurrentHashMap<>();
    // resources for locations that were overridden in OSGi mode - to check whether the override should actually
    // take place, by checking resource's headers
    final Map<String, ResourceImpl> overrides = new ConcurrentHashMap<>();
    boolean removeServiceRequirements = serviceRequirementsBehavior(feature, serviceRequirements);
    // download collected BundleInfo locations
    for (Map.Entry<BundleInfo, Conditional> entry : infos.entrySet()) {
        final BundleInfo bi = entry.getKey();
        final String loc = bi.getLocation();
        downloader.download(loc, provider -> {
            // always download location (could be overridden)
            ResourceImpl resource = createResource(loc, getMetadata(provider), removeServiceRequirements);
            bundles.put(loc, resource);
            if (bi.isOverriden() == BundleInfo.BundleOverrideMode.OSGI) {
                // also download original from original bundle URI to check if we should override by comparing
                // symbolic name - requires MANIFEST.MF header access. If there should be no override, we'll get
                // back to original URI
                downloader.download(bi.getOriginalLocation(), provider2 -> {
                    ResourceImpl originalResource = createResource(bi.getOriginalLocation(), getMetadata(provider2), removeServiceRequirements);
                    bundles.put(bi.getOriginalLocation(), originalResource);
                    // an entry in overrides map means that given location was overridden
                    overrides.put(loc, originalResource);
                });
            }
        });
    }
    // download direct bundle: requirements - without consulting overrides
    for (Clause bundle : Parser.parseClauses(this.bundles.toArray(new String[this.bundles.size()]))) {
        final String loc = bundle.getName();
        downloader.download(loc, provider -> bundles.put(loc, createResource(loc, getMetadata(provider), removeServiceRequirements)));
    }
    // resolution process
    if (feature != null) {
        for (Library library : feature.getLibraries()) {
            if (library.isExport()) {
                final String loc = library.getLocation();
                downloader.download(loc, provider -> bundles.put(loc, createResource(loc, getMetadata(provider), removeServiceRequirements)));
            }
        }
    }
    downloader.await();
    // opposite to what we had before. Currently bundles are already overridden at model level, but
    // as we finally have access to headers, we can compare symbolic names and if override mode is OSGi, then
    // we can restore original resource if there should be no override.
    Overrides.override(bundles, overrides);
    if (feature != null) {
        // Add conditionals
        Map<Conditional, Resource> resConds = new HashMap<>();
        for (Conditional cond : feature.getConditional()) {
            if (cond.isBlacklisted()) {
                continue;
            }
            FeatureResource resCond = FeatureResource.build(feature, cond, featureResolutionRange, bundles);
            // feature's subsystem will optionally require conditional feature resource
            addIdentityRequirement(this, resCond, false);
            // but it's a mandatory requirement in other way
            addIdentityRequirement(resCond, this, true);
            installable.add(resCond);
            resConds.put(cond, resCond);
        }
        // Add features and make it require given subsystem that represents logical feature requirement
        FeatureResource resFeature = FeatureResource.build(feature, featureResolutionRange, bundles);
        addIdentityRequirement(resFeature, this);
        installable.add(resFeature);
        // Add dependencies
        for (Map.Entry<BundleInfo, Conditional> entry : infos.entrySet()) {
            final BundleInfo bi = entry.getKey();
            final String loc = bi.getLocation();
            final Conditional cond = entry.getValue();
            ResourceImpl res = bundles.get(loc);
            int sl = bi.getStartLevel() <= 0 ? feature.getStartLevel() : bi.getStartLevel();
            if (cond != null) {
                // bundle of conditional feature will have mandatory requirement on it
                addIdentityRequirement(res, resConds.get(cond), true);
            }
            boolean mandatory = !bi.isDependency() && cond == null;
            if (bi.isDependency()) {
                addDependency(res, mandatory, bi.isStart(), sl, bi.isBlacklisted());
            } else {
                doAddDependency(res, mandatory, bi.isStart(), sl, bi.isBlacklisted());
            }
        }
        for (Library library : feature.getLibraries()) {
            if (library.isExport()) {
                final String loc = library.getLocation();
                ResourceImpl res = bundles.get(loc);
                addDependency(res, false, false, 0, false);
            }
        }
        for (String uri : feature.getResourceRepositories()) {
            BaseRepository repo = repos.getRepository(feature.getRepositoryUrl(), uri);
            for (Resource resource : repo.getResources()) {
                ResourceImpl res = cloneResource(resource);
                addDependency(res, false, true, 0, false);
            }
        }
    }
    for (Clause bundle : Parser.parseClauses(this.bundles.toArray(new String[this.bundles.size()]))) {
        final String loc = bundle.getName();
        boolean dependency = Boolean.parseBoolean(bundle.getAttribute("dependency"));
        boolean start = bundle.getAttribute("start") == null || Boolean.parseBoolean(bundle.getAttribute("start"));
        boolean blacklisted = bundle.getAttribute("blacklisted") != null && Boolean.parseBoolean(bundle.getAttribute("blacklisted"));
        int startLevel = 0;
        try {
            startLevel = Integer.parseInt(bundle.getAttribute("start-level"));
        } catch (NumberFormatException e) {
            // Ignore
        }
        if (dependency) {
            addDependency(bundles.get(loc), false, start, startLevel, blacklisted);
        } else {
            doAddDependency(bundles.get(loc), true, start, startLevel, blacklisted);
            // non dependency bundle will be added as osgi.identity req on type=osgi.bundle
            addIdentityRequirement(this, bundles.get(loc));
        }
    }
    // Compute dependencies
    for (DependencyInfo info : dependencies.values()) {
        installable.add(info.resource);
        // bundle resource will have a requirement on its feature's subsystem too
        // when bundle is declared with dependency="true", it will have a requirement on its region's subsystem
        addIdentityRequirement(info.resource, this, info.mandatory);
    }
}
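The last loop relies on the Felix utils manifest parser: each entry in the subsystem's bundles set is treated as an OSGi-style clause whose name is the bundle location and whose attributes carry the dependency, start, start-level and blacklisted flags. A small sketch of decoding such a clause, using a made-up location and attribute values:

// Sketch of decoding a bundle clause with org.apache.felix.utils.manifest.Parser.
// The location and attribute values are made-up examples.
String[] raw = { "mvn:org.example/some-bundle/1.0.0;start-level=30;dependency=true" };
for (Clause clause : Parser.parseClauses(raw)) {
    String location = clause.getName();                                           // "mvn:org.example/some-bundle/1.0.0"
    boolean dependency = Boolean.parseBoolean(clause.getAttribute("dependency")); // true
    boolean start = clause.getAttribute("start") == null
            || Boolean.parseBoolean(clause.getAttribute("start"));                // defaults to true when absent
    int startLevel = 0;
    try {
        startLevel = Integer.parseInt(clause.getAttribute("start-level"));        // 30
    } catch (NumberFormatException e) {
        // missing or non-numeric start-level attribute, keep the default of 0
    }
    System.out.println(location + " dependency=" + dependency + " start=" + start + " startLevel=" + startLevel);
}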