Use of org.apache.karaf.features.internal.model.Features in the Apache Karaf project.
Example from class FeaturesServiceImpl, method getFeatureXml:
@Override
public String getFeatureXml(Feature feature) {
    try {
        // Marshal the single feature wrapped in a <features> root element.
        StringWriter writer = new StringWriter();
        Features root = new Features();
        root.getFeature().add((org.apache.karaf.features.internal.model.Feature) feature);
        JaxbUtil.marshal(root, writer);
        // Strip the wrapper: drop the first two lines (XML declaration and the
        // opening <features> tag) and the last line (closing </features> tag),
        // keeping only the <feature> element itself.
        String[] lines = writer.toString().split("\n");
        StringBuilder xml = new StringBuilder();
        for (int i = 2; i < lines.length - 1; i++) {
            if (xml.length() > 0) {
                xml.append('\n');
            }
            xml.append(lines[i]);
        }
        return xml.toString();
    } catch (JAXBException e) {
        // NOTE(review): marshalling failures are deliberately mapped to null;
        // callers must be prepared to handle a null return.
        return null;
    }
}
Use of org.apache.karaf.features.internal.model.Features in the Apache Karaf project.
Example from class FeaturesServiceImpl, method installFeatures:
/**
 * Registers the given features as requirements in the target region and triggers
 * provisioning. Blacklisted features are reported (when verbose) and skipped.
 *
 * @param featuresIn feature identifiers, each optionally with a version range
 * @param region     target region; null or empty falls back to the root region
 * @param options    installation options (Verbose, Upgrade, ...)
 * @throws Exception if provisioning fails
 */
@Override
public void installFeatures(Set<String> featuresIn, String region, EnumSet<Option> options) throws Exception {
    Set<FeatureReq> toInstall = map(featuresIn, FeatureReq::parseNameAndRange);
    // Work on a copy of the current state so provisioning sees a consistent snapshot.
    State state = copyState();
    Map<String, Set<String>> requires = copy(state.requirements);
    if (region == null || region.isEmpty()) {
        region = ROOT_REGION;
    }
    Set<String> requirements = requires.computeIfAbsent(region, k -> new HashSet<>());
    Set<FeatureReq> existingFeatures = map(requirements, FeatureReq::parseRequirement);
    Set<FeatureReq> toAdd = computeFeaturesToAdd(options, toInstall);
    // Blacklisted features are never added as requirements.
    toAdd.forEach(f -> {
        if (f.isBlacklisted()) {
            print("Skipping blacklisted feature: " + f, options.contains(Option.Verbose));
        } else {
            requirements.add(f.toRequirement());
        }
    });
    List<FeatureReq> notBlacklisted = toAdd.stream().filter(fr -> !fr.isBlacklisted()).collect(Collectors.toList());
    // idiom fix: use isEmpty() instead of size() > 0, consistent with the toRemove check below
    if (!notBlacklisted.isEmpty()) {
        print("Adding features: " + join(notBlacklisted), options.contains(Option.Verbose));
    }
    if (options.contains(Option.Upgrade)) {
        // On upgrade, drop existing requirements superseded by the newly added features.
        Set<FeatureReq> toRemove = computeFeaturesToRemoveOnUpdate(toAdd, existingFeatures);
        toRemove.forEach(f -> requirements.remove(f.toRequirement()));
        if (!toRemove.isEmpty()) {
            print("Removing features: " + join(toRemove), options.contains(Option.Verbose));
        }
    }
    doProvisionInThread(requires, emptyMap(), state, getFeaturesById(), options);
}
Use of org.apache.karaf.features.internal.model.Features in the Apache Karaf project.
Example from class Builder, method loadRepositories:
/**
 * Downloads the given feature repositories, recursively following inner
 * repository references, and returns the unmarshalled models keyed by URL.
 * Blacklisted repositories are skipped entirely.
 *
 * @param manager      download manager used to fetch repository XML files
 * @param repositories initial set of repository URLs to load
 * @param install      when true, also copy each repository file into the system directory
 * @param processor    features processor applied to each loaded model (blacklisting, overrides)
 * @return map of repository URL to its processed Features model
 * @throws Exception on download or unmarshalling failure
 */
private Map<String, Features> loadRepositories(DownloadManager manager, Collection<String> repositories, final boolean install, FeaturesProcessor processor) throws Exception {
    final Map<String, Features> loaded = new HashMap<>();
    final Downloader downloader = manager.createDownloader();
    for (String repository : repositories) {
        downloader.download(repository, new DownloadCallback() {
            @Override
            public void downloaded(final StreamProvider provider) throws Exception {
                String url = provider.getUrl();
                if (processor.isRepositoryBlacklisted(url)) {
                    LOGGER.info(" feature repository " + url + " is blacklisted");
                    return;
                }
                // Download callbacks may run concurrently; guard the shared result map.
                synchronized (loaded) {
                    if (!loaded.containsKey(provider.getUrl())) {
                        if (install) {
                            // Copy the repository file into the system directory so the
                            // assembly can resolve it without re-downloading.
                            synchronized (provider) {
                                Path path = ArtifactInstaller.pathFromProviderUrl(systemDirectory, url);
                                Files.createDirectories(path.getParent());
                                LOGGER.info(" adding feature repository: " + url);
                                Files.copy(provider.getFile().toPath(), path, StandardCopyOption.REPLACE_EXISTING);
                            }
                        }
                        try (InputStream is = provider.open()) {
                            Features featuresModel = JaxbUtil.unmarshal(url, is, false);
                            // always process according to processor configuration
                            featuresModel.setBlacklisted(processor.isRepositoryBlacklisted(url));
                            processor.process(featuresModel);
                            loaded.put(provider.getUrl(), featuresModel);
                            // Recursively load repositories referenced by this one,
                            // reusing this callback instance.
                            for (String innerRepository : featuresModel.getRepository()) {
                                downloader.download(innerRepository, this);
                            }
                        }
                    }
                }
            }
        });
    }
    // Wait for the whole (recursive) download graph to complete.
    downloader.await();
    return loaded;
}
Use of org.apache.karaf.features.internal.model.Features in the Apache Karaf project.
Example from class Builder, method doGenerateAssembly:
/**
 * Generates the Karaf assembly: unpacks KARs, loads profiles and feature
 * repositories (applying blacklisting/override processing), writes the etc/
 * configuration files, and provisions the startup, boot and installed stages
 * into the assembly directory.
 *
 * @throws Exception on download, resolution or I/O failure
 */
private void doGenerateAssembly() throws Exception {
    LOGGER.info("Generating Karaf assembly: " + homeDirectory);
    //
    // Create download manager - combination of pax-url-aether and a resolver wrapper that may
    // alter the way pax-url-aether resolver works
    //
    MavenResolver resolver = createMavenResolver();
    manager = new CustomDownloadManager(resolver, executor, null, translatedUrls);
    this.resolver = new ResolverImpl(new Slf4jResolverLog(LOGGER));
    //
    // Unzip KARs
    //
    LOGGER.info("Unzipping kars");
    Downloader downloader = manager.createDownloader();
    for (String kar : kars.keySet()) {
        downloader.download(kar, null);
    }
    downloader.await();
    // stage as the KAR and with the same "add all" flag as the KAR itself
    for (String karUri : kars.keySet()) {
        LOGGER.info(" processing KAR: " + karUri);
        Kar kar = new Kar(manager.getProviders().get(karUri).getFile().toURI());
        kar.extract(systemDirectory.toFile(), homeDirectory.toFile());
        RepositoryInfo info = kars.get(karUri);
        for (URI repositoryUri : kar.getFeatureRepos()) {
            LOGGER.info(" found repository: " + repositoryUri);
            repositories.put(repositoryUri.toString(), info);
        }
    }
    //
    // Load profiles
    //
    LOGGER.info("Loading profiles from:");
    profilesUris.forEach(p -> LOGGER.info(" " + p));
    allProfiles = loadExternalProfiles(profilesUris);
    // fix: removed unused local "StringBuilder sb" (dead code); use isEmpty() idiom
    if (!allProfiles.isEmpty()) {
        LOGGER.info(" Found profiles: " + allProfiles.keySet().stream().collect(Collectors.joining(", ")));
    }
    // Generate initial profile to collect overrides and blacklisting instructions
    Profile initialProfile = ProfileBuilder.Factory.create("initial").setParents(new ArrayList<>(profiles.keySet())).getProfile();
    Profile initialOverlay = Profiles.getOverlay(initialProfile, allProfiles, environment);
    Profile initialEffective = Profiles.getEffective(initialOverlay, false);
    //
    // Handle blacklist - we'll use SINGLE instance of Blacklist for all further downloads
    //
    blacklist = processBlacklist(initialEffective);
    //
    // Configure blacklisting and overriding features processor
    //
    boolean needFeaturesProcessorFileCopy = false;
    String existingProcessorDefinitionURI = null;
    Path existingProcessorDefinition = etcDirectory.resolve("org.apache.karaf.features.xml");
    if (existingProcessorDefinition.toFile().isFile()) {
        existingProcessorDefinitionURI = existingProcessorDefinition.toFile().toURI().toString();
        LOGGER.info("Found existing features processor configuration: {}", homeDirectory.relativize(existingProcessorDefinition));
    }
    if (featuresProcessingLocation != null && featuresProcessingLocation.toFile().isFile() && !featuresProcessingLocation.equals(existingProcessorDefinition)) {
        if (existingProcessorDefinitionURI != null) {
            LOGGER.warn("Explicitly configured {} will be used for features processor configuration.", homeDirectory.relativize(featuresProcessingLocation));
        } else {
            LOGGER.info("Found features processor configuration: {}", homeDirectory.relativize(featuresProcessingLocation));
        }
        existingProcessorDefinitionURI = featuresProcessingLocation.toFile().toURI().toString();
        // when there are no other (configured via Maven for example) processing instructions (e.g., blacklisting)
        // we don't have to generate this file and may take original content
        needFeaturesProcessorFileCopy = true;
    }
    // now we can configure blacklisting features processor which may have already defined (in XML)
    // configuration for bundle replacements or feature overrides.
    FeaturesProcessorImpl processor = new FeaturesProcessorImpl(existingProcessorDefinitionURI, null, blacklist, new HashSet<>());
    // add overrides from initialProfile
    Set<String> overrides = processOverrides(initialEffective.getOverrides());
    processor.addOverrides(overrides);
    //
    // Propagate feature installation from repositories
    //
    LOGGER.info("Loading repositories");
    Map<String, Features> karRepositories = loadRepositories(manager, repositories.keySet(), false, processor);
    for (String repo : repositories.keySet()) {
        RepositoryInfo info = repositories.get(repo);
        if (info.addAll) {
            LOGGER.info(" adding all non-blacklisted features from repository: " + repo + " (stage: " + info.stage + ")");
            for (Feature feature : karRepositories.get(repo).getFeature()) {
                if (feature.isBlacklisted()) {
                    LOGGER.info(" feature {}/{} is blacklisted - skipping.", feature.getId(), feature.getVersion());
                } else {
                    features.put(feature.getId(), info.stage);
                }
            }
        }
    }
    if (generateConsistencyReport != null) {
        File directory = new File(generateConsistencyReport);
        if (directory.isDirectory()) {
            LOGGER.info("Writing bundle report");
            generateConsistencyReport(karRepositories, new File(directory, "bundle-report-full.xml"), true);
            generateConsistencyReport(karRepositories, new File(directory, "bundle-report.xml"), false);
            Files.copy(getClass().getResourceAsStream("/bundle-report.xslt"), directory.toPath().resolve("bundle-report.xslt"), StandardCopyOption.REPLACE_EXISTING);
        }
    }
    //
    // Generate profiles. If user has configured additional profiles, they'll be used as parents
    // of the generated ones.
    //
    Profile startupProfile = generateProfile(Stage.Startup, profiles, repositories, features, bundles);
    allProfiles.put(startupProfile.getId(), startupProfile);
    // generated startup profile should be used (together with configured startup and boot profiles) as parent
    // of the generated boot profile - similar visibility rule (boot stage requires startup stage) is applied
    // for repositories and features
    profiles.put(startupProfile.getId(), Stage.Boot);
    Profile bootProfile = generateProfile(Stage.Boot, profiles, repositories, features, bundles);
    allProfiles.put(bootProfile.getId(), bootProfile);
    Profile installedProfile = generateProfile(Stage.Installed, profiles, repositories, features, bundles);
    allProfiles.put(installedProfile.getId(), installedProfile);
    //
    // Compute "overlay" profile - a single profile with all parent profiles included (when there's the same
    // file in both profiles, parent profile's version has lower priority)
    //
    ProfileBuilder builder = ProfileBuilder.Factory.create(UUID.randomUUID().toString()).setParents(Arrays.asList(startupProfile.getId(), bootProfile.getId(), installedProfile.getId()));
    config.forEach((k, v) -> builder.addConfiguration(Profile.INTERNAL_PID, Profile.CONFIG_PREFIX + k, v));
    system.forEach((k, v) -> builder.addConfiguration(Profile.INTERNAL_PID, Profile.SYSTEM_PREFIX + k, v));
    // profile with all the parents configured and stage-agnostic blacklisting configuration added
    blacklistedRepositoryURIs.forEach(builder::addBlacklistedRepository);
    blacklistedFeatureIdentifiers.forEach(builder::addBlacklistedFeature);
    blacklistedBundleURIs.forEach(builder::addBlacklistedBundle);
    // final profile
    Profile overallProfile = builder.getProfile();
    // profile with parents included and "flattened" using inheritance rules (child files overwrite parent
    // files and child PIDs are merged with parent PIDs and same properties are taken from child profiles)
    Profile overallOverlay = Profiles.getOverlay(overallProfile, allProfiles, environment);
    // profile with property placeholders resolved or left unchanged (if there's no property value available,
    // so property placeholders are preserved - like ${karaf.base})
    Profile overallEffective = Profiles.getEffective(overallOverlay, false);
    if (writeProfiles) {
        Path profiles = etcDirectory.resolve("profiles");
        LOGGER.info("Adding profiles to {}", homeDirectory.relativize(profiles));
        allProfiles.forEach((id, profile) -> {
            try {
                Profiles.writeProfile(profiles, profile);
            } catch (IOException e) {
                LOGGER.warn("Problem writing profile {}: {}", id, e.getMessage());
            }
        });
    }
    manager = new CustomDownloadManager(resolver, executor, overallEffective, translatedUrls);
    // Hashtable<String, String> profileProps = new Hashtable<>(overallEffective.getConfiguration(ORG_OPS4J_PAX_URL_MVN_PID));
    // final Map<String, String> properties = new HashMap<>();
    // properties.put("karaf.default.repository", "system");
    // InterpolationHelper.performSubstitution(profileProps, properties::get, false, false, true);
    //
    // Write config and system properties
    //
    LOGGER.info("Configuring etc/config.properties and etc/system.properties");
    Path configPropertiesPath = etcDirectory.resolve("config.properties");
    Properties configProperties = new Properties(configPropertiesPath.toFile());
    configProperties.putAll(overallEffective.getConfig());
    configProperties.save();
    Path systemPropertiesPath = etcDirectory.resolve("system.properties");
    Properties systemProperties = new Properties(systemPropertiesPath.toFile());
    systemProperties.putAll(overallEffective.getSystem());
    systemProperties.save();
    //
    // Download libraries
    //
    // TODO: handle karaf 2.x and 3.x libraries
    downloader = manager.createDownloader();
    LOGGER.info("Downloading libraries for generated profiles");
    downloadLibraries(downloader, configProperties, overallEffective.getLibraries(), "");
    LOGGER.info("Downloading additional libraries");
    downloadLibraries(downloader, configProperties, libraries, "");
    downloader.await();
    // Reformat clauses
    reformatClauses(configProperties, Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA);
    reformatClauses(configProperties, Constants.FRAMEWORK_BOOTDELEGATION);
    configProperties.save();
    //
    // Write all configuration files
    //
    LOGGER.info("Writing configurations");
    for (Map.Entry<String, byte[]> config : overallEffective.getFileConfigurations().entrySet()) {
        Path configFile = etcDirectory.resolve(config.getKey());
        if (Files.exists(configFile)) {
            LOGGER.info(" not changing existing config file: {}", homeDirectory.relativize(configFile));
        } else {
            LOGGER.info(" adding config file: {}", homeDirectory.relativize(configFile));
            Files.createDirectories(configFile.getParent());
            Files.write(configFile, config.getValue());
        }
    }
    // 'improve' configuration files.
    if (propertyEdits != null) {
        KarafPropertiesEditor editor = new KarafPropertiesEditor();
        editor.setInputEtc(etcDirectory.toFile()).setOutputEtc(etcDirectory.toFile()).setEdits(propertyEdits);
        editor.run();
    }
    if (processor.hasInstructions()) {
        Path featuresProcessingXml = etcDirectory.resolve("org.apache.karaf.features.xml");
        // idiom fix: isEmpty() instead of size() > 0
        if (hasOwnInstructions() || !overrides.isEmpty()) {
            // just generate new etc/org.apache.karaf.features.xml file (with external config + builder config)
            try (FileOutputStream fos = new FileOutputStream(featuresProcessingXml.toFile())) {
                LOGGER.info("Generating features processor configuration: {}", homeDirectory.relativize(featuresProcessingXml));
                processor.writeInstructions(fos);
            }
        } else if (needFeaturesProcessorFileCopy) {
            // we may simply copy configured features processor XML configuration
            LOGGER.info("Copying features processor configuration: {} -> {}", homeDirectory.relativize(featuresProcessingLocation), homeDirectory.relativize(featuresProcessingXml));
            Files.copy(featuresProcessingLocation, featuresProcessingXml, StandardCopyOption.REPLACE_EXISTING);
        }
    }
    //
    // Startup stage
    //
    Profile startupEffective = startupStage(startupProfile, processor);
    //
    // Boot stage
    //
    Set<Feature> allBootFeatures = bootStage(bootProfile, startupEffective, processor);
    //
    // Installed stage
    //
    installStage(installedProfile, allBootFeatures, processor);
}
Use of org.apache.karaf.features.internal.model.Features in the Apache Karaf project.
Example from class RepositoryCacheImpl, method create:
@Override
public Repository create(URI uri, boolean validate) {
    if (featuresProcessor != null && featuresProcessor.isRepositoryBlacklisted(uri.toString())) {
        // Entire repository is blacklisted: return an empty, blacklisted placeholder
        // instead of parsing the real content.
        return new RepositoryImpl(uri, new Features(), true);
    }
    RepositoryImpl repo = new RepositoryImpl(uri, validate);
    if (featuresProcessor != null) {
        // maybe it could be done better - first we have to set if entire repo is blacklisted
        repo.setBlacklisted(featuresProcessor.isRepositoryBlacklisted(uri.toString()));
        // processing features will take the above flag into account to blacklist (if needed) the features
        repo.processFeatures(featuresProcessor);
    }
    return repo;
}
End of aggregated usage examples.