Search in sources:

Example 1 with Downloader

Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.

The class Builder, method resolve.

/**
 * Resolves the requested features and bundles against the given repositories and
 * returns the startup bundles computed by the assembly deployment callback.
 *
 * @param manager      download manager used to fetch artifacts
 * @param resolver     OSGi resolver handed to the deployer
 * @param repositories features repositories to resolve against
 * @param features     feature requirements installed into the root region
 * @param bundles      bundle locations installed into the root region (prefixed "bundle:")
 * @param overrides    override definitions added to the deployment request
 * @param optionals    URLs of optional resources exposed through the global repository
 * @return map of bundle location to start level for the startup stage
 * @throws Exception if a download or the deployment fails, or if the deployer keeps
 *                   reporting the same missing prerequisites (a loop)
 */
private Map<String, Integer> resolve(DownloadManager manager, Resolver resolver, Collection<Features> repositories, Collection<String> features, Collection<String> bundles, Collection<String> overrides, Collection<String> optionals) throws Exception {
    BundleRevision systemBundle = getSystemBundle();
    AssemblyDeployCallback callback = new AssemblyDeployCallback(manager, this, systemBundle, repositories);
    Deployer deployer = new Deployer(manager, resolver, callback, callback);
    // Install framework
    Deployer.DeploymentRequest request = createDeploymentRequest();
    // Add overrides
    request.overrides.addAll(overrides);
    // Add optional resources; download callbacks may run on downloader threads,
    // hence the synchronized accumulation into the shared list
    final List<Resource> resources = new ArrayList<>();
    Downloader downloader = manager.createDownloader();
    for (String optional : optionals) {
        downloader.download(optional, provider -> {
            Resource resource = ResourceBuilder.build(provider.getUrl(), getHeaders(provider));
            synchronized (resources) {
                resources.add(resource);
            }
        });
    }
    downloader.await();
    request.globalRepository = new BaseRepository(resources);
    // Install features
    for (String feature : features) {
        MapUtils.addToMapSet(request.requirements, FeaturesService.ROOT_REGION, feature);
    }
    for (String bundle : bundles) {
        MapUtils.addToMapSet(request.requirements, FeaturesService.ROOT_REGION, "bundle:" + bundle);
    }
    // Retry the deployment while new missing prerequisites are reported; abort once
    // no new prerequisite shows up (the same set would be reported forever)
    Set<String> prereqs = new HashSet<>();
    while (true) {
        try {
            deployer.deploy(callback.getDeploymentState(), request);
            break;
        } catch (Deployer.PartialDeploymentException e) {
            if (!prereqs.containsAll(e.getMissing())) {
                prereqs.addAll(e.getMissing());
            } else {
                // Specific unchecked type instead of raw Exception (still compatible with
                // "throws Exception" callers) and the triggering exception kept as cause
                throw new IllegalStateException("Deployment aborted due to loop in missing prerequisites: " + e.getMissing(), e);
            }
        }
    }
    return callback.getStartupBundles();
}
Also used : ArrayList(java.util.ArrayList) Resource(org.osgi.resource.Resource) BaseRepository(org.apache.karaf.features.internal.repository.BaseRepository) Downloader(org.apache.karaf.features.internal.download.Downloader) MalformedURLException(java.net.MalformedURLException) IOException(java.io.IOException) BundleRevision(org.osgi.framework.wiring.BundleRevision) Deployer(org.apache.karaf.features.internal.service.Deployer) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet)

Example 2 with Downloader

Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.

The class SubsystemResolver, method resolve.

/**
 * Resolves the subsystem tree rooted at {@code root} and returns the resulting wiring
 * (resource -> wires). Returns an empty map when no root subsystem has been built.
 * As a side effect this populates the {@code digraph} and {@code wiring} fields.
 *
 * @param overrides              bundle override definitions passed to the bundle download
 * @param featureResolutionRange version range policy for feature resolution
 * @param serviceRequirements    service-requirement handling mode
 * @param globalRepository       optional extra repository consulted during resolution
 * @param outputFile             when non-null, a JSON resolution report (inputs, outcome,
 *                               wiring or failure) is written to this path
 * @throws Exception if download or resolution fails; on failure with a non-null
 *                   outputFile the report is still written before rethrowing
 */
public Map<Resource, List<Wire>> resolve(Set<String> overrides, String featureResolutionRange, String serviceRequirements, final Repository globalRepository, String outputFile) throws Exception {
    if (root == null) {
        return Collections.emptyMap();
    }
    // Download bundles
    RepositoryManager repos = new RepositoryManager();
    root.downloadBundles(manager, overrides, featureResolutionRange, serviceRequirements, repos);
    // Populate digraph and resolve
    digraph = new StandardRegionDigraph(null, null);
    populateDigraph(digraph, root);
    Downloader downloader = manager.createDownloader();
    SubsystemResolveContext context = new SubsystemResolveContext(root, digraph, globalRepository, downloader, serviceRequirements);
    if (outputFile != null) {
        // Diagnostic mode: capture repositories and the resolution outcome as JSON
        Map<String, Object> json = new HashMap<>();
        if (globalRepository != null) {
            json.put("globalRepository", toJson(globalRepository));
        }
        json.put("repository", toJson(context.getRepository()));
        try {
            wiring = resolver.resolve(context);
            json.put("success", "true");
            json.put("wiring", toJson(wiring));
        } catch (Exception e) {
            // Record the failure in the report, then propagate it unchanged
            json.put("success", "false");
            json.put("exception", e.toString());
            throw e;
        } finally {
            // The report is written whether resolution succeeded or failed
            try (Writer writer = Files.newBufferedWriter(Paths.get(outputFile), StandardCharsets.UTF_8, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
                JsonWriter.write(writer, json, true);
            }
        }
    } else {
        wiring = resolver.resolve(context);
    }
    downloader.await();
    // Remove wiring to the fake environment resource
    if (environmentResource != null) {
        for (List<Wire> wires : wiring.values()) {
            wires.removeIf(wire -> wire.getProvider() == environmentResource);
        }
    }
    // Fragments are always wired to their host only, so create fake wiring to
    // the subsystem the host is wired to
    associateFragments();
    return wiring;
}
Also used : Downloader(org.apache.karaf.features.internal.download.Downloader) Wire(org.osgi.resource.Wire) BundleException(org.osgi.framework.BundleException) InvalidSyntaxException(org.osgi.framework.InvalidSyntaxException) JsonWriter(org.apache.karaf.util.json.JsonWriter) Writer(java.io.Writer) StandardRegionDigraph(org.eclipse.equinox.internal.region.StandardRegionDigraph)

Example 3 with Downloader

Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.

The class Builder, method loadExternalProfiles.

/**
 * Loads all profiles declared in profile URIs. These will be used in addition to generated
 * <em>startup</em>, <em>boot</em> and <em>installed</em> profiles.
 *
 * Non-file URIs are downloaded first so profiles are read from a local copy. Profiles whose
 * names match a blacklist pattern are either replaced with an empty profile
 * (BlacklistPolicy.Discard) or dropped entirely.
 *
 * @param profilesUris URIs (possibly jar: URIs) pointing at profile locations
 * @return profile name to profile map with blacklisting already applied
 */
private Map<String, Profile> loadExternalProfiles(List<String> profilesUris) throws IOException, MultiException, InterruptedException {
    Map<String, Profile> profiles = new LinkedHashMap<>();
    Map<String, Profile> filteredProfiles = new LinkedHashMap<>();
    // The blacklist patterns are loop-invariant: compile them once up front instead
    // of rebuilding the same list for every profile URI
    List<ProfileNamePattern> blacklistedProfilePatterns = blacklistedProfileNames.stream().map(ProfileNamePattern::new).collect(Collectors.toList());
    for (String profilesUri : profilesUris) {
        String uri = profilesUri;
        if (uri.startsWith("jar:") && uri.contains("!/")) {
            // Strip the jar: wrapper to get the URL of the archive itself
            uri = uri.substring("jar:".length(), uri.indexOf("!/"));
        }
        if (!uri.startsWith("file:")) {
            // Remote location: download it, then rewrite the original URI to point
            // at the locally cached file
            Downloader downloader = manager.createDownloader();
            downloader.download(uri, null);
            downloader.await();
            StreamProvider provider = manager.getProviders().get(uri);
            profilesUri = profilesUri.replace(uri, provider.getFile().toURI().toString());
        }
        URI profileURI = URI.create(profilesUri);
        Path profilePath;
        try {
            profilePath = Paths.get(profileURI);
        } catch (FileSystemNotFoundException e) {
            // file system does not exist, try to create it (e.g. for jar: URIs);
            // intentionally left open since profilePath is read below
            FileSystem fs = FileSystems.newFileSystem(profileURI, new HashMap<>(), Builder.class.getClassLoader());
            profilePath = fs.provider().getPath(profileURI);
        }
        profiles.putAll(Profiles.loadProfiles(profilePath));
        // Handle blacklisted profiles
        for (String profileName : profiles.keySet()) {
            boolean blacklisted = false;
            for (ProfileNamePattern pattern : blacklistedProfilePatterns) {
                if (pattern.matches(profileName)) {
                    LOGGER.info("   blacklisting profile {} from {}", profileName, profilePath);
                    // TODO review blacklist policy options
                    if (blacklistPolicy == BlacklistPolicy.Discard) {
                        // Override blacklisted profiles with empty one
                        filteredProfiles.put(profileName, ProfileBuilder.Factory.create(profileName).getProfile());
                    } else {
                    // Remove profile completely
                    }
                    // no need to check other patterns
                    blacklisted = true;
                    break;
                }
            }
            if (!blacklisted) {
                filteredProfiles.put(profileName, profiles.get(profileName));
            }
        }
    }
    return filteredProfiles;
}
Also used : Path(java.nio.file.Path) StreamProvider(org.apache.karaf.features.internal.download.StreamProvider) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Downloader(org.apache.karaf.features.internal.download.Downloader) URI(java.net.URI) Profile(org.apache.karaf.profile.Profile) LinkedHashMap(java.util.LinkedHashMap) FileSystemNotFoundException(java.nio.file.FileSystemNotFoundException) FileSystem(java.nio.file.FileSystem)

Example 4 with Downloader

Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.

The class Builder, method loadRepositories.

/**
 * Downloads and parses the given feature repositories (and, recursively, any inner
 * repositories they reference), applying the features processor to each model.
 * Blacklisted repository URLs are skipped. When {@code install} is true, each
 * repository file is also copied into the system directory.
 *
 * Note: the callback may run concurrently on downloader threads; the {@code loaded}
 * map doubles as the lock guarding both the map and the parse/copy work.
 *
 * @param manager      download manager used to fetch repository files
 * @param repositories repository URLs to load
 * @param install      whether to copy repository files into the system directory
 * @param processor    processor applied to every parsed features model
 * @return map of repository URL to parsed (and processed) Features model
 */
private Map<String, Features> loadRepositories(DownloadManager manager, Collection<String> repositories, final boolean install, FeaturesProcessor processor) throws Exception {
    final Map<String, Features> loaded = new HashMap<>();
    final Downloader downloader = manager.createDownloader();
    for (String repository : repositories) {
        downloader.download(repository, new DownloadCallback() {

            @Override
            public void downloaded(final StreamProvider provider) throws Exception {
                String url = provider.getUrl();
                if (processor.isRepositoryBlacklisted(url)) {
                    LOGGER.info("   feature repository " + url + " is blacklisted");
                    return;
                }
                // Guard against concurrent callbacks racing on the shared map
                synchronized (loaded) {
                    if (!loaded.containsKey(provider.getUrl())) {
                        if (install) {
                            synchronized (provider) {
                                Path path = ArtifactInstaller.pathFromProviderUrl(systemDirectory, url);
                                Files.createDirectories(path.getParent());
                                LOGGER.info("      adding feature repository: " + url);
                                Files.copy(provider.getFile().toPath(), path, StandardCopyOption.REPLACE_EXISTING);
                            }
                        }
                        try (InputStream is = provider.open()) {
                            Features featuresModel = JaxbUtil.unmarshal(url, is, false);
                            // always process according to processor configuration
                            featuresModel.setBlacklisted(processor.isRepositoryBlacklisted(url));
                            processor.process(featuresModel);
                            loaded.put(provider.getUrl(), featuresModel);
                            // Recursively schedule any repositories referenced by this one,
                            // reusing this callback; await() below waits for them all
                            for (String innerRepository : featuresModel.getRepository()) {
                                downloader.download(innerRepository, this);
                            }
                        }
                    }
                }
            }
        });
    }
    downloader.await();
    return loaded;
}
Also used : Path(java.nio.file.Path) StreamProvider(org.apache.karaf.features.internal.download.StreamProvider) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) DownloadCallback(org.apache.karaf.features.internal.download.DownloadCallback) ByteArrayInputStream(java.io.ByteArrayInputStream) ZipInputStream(java.util.zip.ZipInputStream) InputStream(java.io.InputStream) Downloader(org.apache.karaf.features.internal.download.Downloader) Features(org.apache.karaf.features.internal.model.Features) MultiException(org.apache.karaf.features.internal.util.MultiException) MalformedURLException(java.net.MalformedURLException) FileSystemNotFoundException(java.nio.file.FileSystemNotFoundException) IOException(java.io.IOException)

Example 5 with Downloader

Use of org.apache.karaf.features.internal.download.Downloader in the Apache Karaf project.

The class Builder, method doGenerateAssembly.

/**
 * Generates the Karaf assembly: unzips KARs, loads and blacklist-filters profiles,
 * configures the features processor, writes etc/ configuration files, downloads
 * libraries, and finally runs the startup, boot and installed stages.
 * Operates entirely on the builder's configured fields.
 */
private void doGenerateAssembly() throws Exception {
    LOGGER.info("Generating Karaf assembly: " + homeDirectory);
    // 
    // Create download manager - combination of pax-url-aether and a resolver wrapper that may
    // alter the way pax-url-aether resolver works
    // 
    MavenResolver resolver = createMavenResolver();
    manager = new CustomDownloadManager(resolver, executor, null, translatedUrls);
    this.resolver = new ResolverImpl(new Slf4jResolverLog(LOGGER));
    // 
    // Unzip KARs
    // 
    LOGGER.info("Unzipping kars");
    Downloader downloader = manager.createDownloader();
    for (String kar : kars.keySet()) {
        downloader.download(kar, null);
    }
    downloader.await();
    // stage as the KAR and with the same "add all" flag as the KAR itself
    for (String karUri : kars.keySet()) {
        LOGGER.info("   processing KAR: " + karUri);
        Kar kar = new Kar(manager.getProviders().get(karUri).getFile().toURI());
        kar.extract(systemDirectory.toFile(), homeDirectory.toFile());
        RepositoryInfo info = kars.get(karUri);
        for (URI repositoryUri : kar.getFeatureRepos()) {
            LOGGER.info("      found repository: " + repositoryUri);
            repositories.put(repositoryUri.toString(), info);
        }
    }
    // 
    // Load profiles
    // 
    LOGGER.info("Loading profiles from:");
    profilesUris.forEach(p -> LOGGER.info("   " + p));
    allProfiles = loadExternalProfiles(profilesUris);
    if (!allProfiles.isEmpty()) {
        LOGGER.info("   Found profiles: " + String.join(", ", allProfiles.keySet()));
    }
    // Generate initial profile to collect overrides and blacklisting instructions
    Profile initialProfile = ProfileBuilder.Factory.create("initial").setParents(new ArrayList<>(profiles.keySet())).getProfile();
    Profile initialOverlay = Profiles.getOverlay(initialProfile, allProfiles, environment);
    Profile initialEffective = Profiles.getEffective(initialOverlay, false);
    // 
    // Handle blacklist - we'll use SINGLE instance of Blacklist for all further downloads
    // 
    blacklist = processBlacklist(initialEffective);
    // 
    // Configure blacklisting and overriding features processor
    // 
    boolean needFeaturesProcessorFileCopy = false;
    String existingProcessorDefinitionURI = null;
    Path existingProcessorDefinition = etcDirectory.resolve("org.apache.karaf.features.xml");
    if (existingProcessorDefinition.toFile().isFile()) {
        existingProcessorDefinitionURI = existingProcessorDefinition.toFile().toURI().toString();
        LOGGER.info("Found existing features processor configuration: {}", homeDirectory.relativize(existingProcessorDefinition));
    }
    if (featuresProcessingLocation != null && featuresProcessingLocation.toFile().isFile() && !featuresProcessingLocation.equals(existingProcessorDefinition)) {
        if (existingProcessorDefinitionURI != null) {
            LOGGER.warn("Explicitly configured {} will be used for features processor configuration.", homeDirectory.relativize(featuresProcessingLocation));
        } else {
            LOGGER.info("Found features processor configuration: {}", homeDirectory.relativize(featuresProcessingLocation));
        }
        existingProcessorDefinitionURI = featuresProcessingLocation.toFile().toURI().toString();
        // when there are no other (configured via Maven for example) processing instructions (e.g., blacklisting)
        // we don't have to generate this file and may take original content
        needFeaturesProcessorFileCopy = true;
    }
    // now we can configure blacklisting features processor which may have already defined (in XML)
    // configuration for bundle replacements or feature overrides.
    FeaturesProcessorImpl processor = new FeaturesProcessorImpl(existingProcessorDefinitionURI, null, blacklist, new HashSet<>());
    // add overrides from initialProfile
    Set<String> overrides = processOverrides(initialEffective.getOverrides());
    processor.addOverrides(overrides);
    // 
    // Propagate feature installation from repositories
    // 
    LOGGER.info("Loading repositories");
    Map<String, Features> karRepositories = loadRepositories(manager, repositories.keySet(), false, processor);
    for (String repo : repositories.keySet()) {
        RepositoryInfo info = repositories.get(repo);
        if (info.addAll) {
            LOGGER.info("   adding all non-blacklisted features from repository: " + repo + " (stage: " + info.stage + ")");
            for (Feature feature : karRepositories.get(repo).getFeature()) {
                if (feature.isBlacklisted()) {
                    LOGGER.info("      feature {}/{} is blacklisted - skipping.", feature.getId(), feature.getVersion());
                } else {
                    features.put(feature.getId(), info.stage);
                }
            }
        }
    }
    if (generateConsistencyReport != null) {
        File directory = new File(generateConsistencyReport);
        if (directory.isDirectory()) {
            LOGGER.info("Writing bundle report");
            generateConsistencyReport(karRepositories, new File(directory, "bundle-report-full.xml"), true);
            generateConsistencyReport(karRepositories, new File(directory, "bundle-report.xml"), false);
            Files.copy(getClass().getResourceAsStream("/bundle-report.xslt"), directory.toPath().resolve("bundle-report.xslt"), StandardCopyOption.REPLACE_EXISTING);
        }
    }
    // 
    // Generate profiles. If user has configured additional profiles, they'll be used as parents
    // of the generated ones.
    // 
    Profile startupProfile = generateProfile(Stage.Startup, profiles, repositories, features, bundles);
    allProfiles.put(startupProfile.getId(), startupProfile);
    // generated startup profile should be used (together with configured startup and boot profiles) as parent
    // of the generated boot profile - similar visibility rule (boot stage requires startup stage) is applied
    // for repositories and features
    profiles.put(startupProfile.getId(), Stage.Boot);
    Profile bootProfile = generateProfile(Stage.Boot, profiles, repositories, features, bundles);
    allProfiles.put(bootProfile.getId(), bootProfile);
    Profile installedProfile = generateProfile(Stage.Installed, profiles, repositories, features, bundles);
    allProfiles.put(installedProfile.getId(), installedProfile);
    // 
    // Compute "overlay" profile - a single profile with all parent profiles included (when there's the same
    // file in both profiles, parent profile's version has lower priority)
    // 
    ProfileBuilder builder = ProfileBuilder.Factory.create(UUID.randomUUID().toString()).setParents(Arrays.asList(startupProfile.getId(), bootProfile.getId(), installedProfile.getId()));
    config.forEach((k, v) -> builder.addConfiguration(Profile.INTERNAL_PID, Profile.CONFIG_PREFIX + k, v));
    system.forEach((k, v) -> builder.addConfiguration(Profile.INTERNAL_PID, Profile.SYSTEM_PREFIX + k, v));
    // profile with all the parents configured and stage-agnostic blacklisting configuration added
    blacklistedRepositoryURIs.forEach(builder::addBlacklistedRepository);
    blacklistedFeatureIdentifiers.forEach(builder::addBlacklistedFeature);
    blacklistedBundleURIs.forEach(builder::addBlacklistedBundle);
    // final profile
    Profile overallProfile = builder.getProfile();
    // profile with parents included and "flattened" using inheritance rules (child files overwrite parent
    // files and child PIDs are merged with parent PIDs and same properties are taken from child profiles)
    Profile overallOverlay = Profiles.getOverlay(overallProfile, allProfiles, environment);
    // profile with property placeholders resolved or left unchanged (if there's no property value available,
    // so property placeholders are preserved - like ${karaf.base})
    Profile overallEffective = Profiles.getEffective(overallOverlay, false);
    if (writeProfiles) {
        Path profiles = etcDirectory.resolve("profiles");
        LOGGER.info("Adding profiles to {}", homeDirectory.relativize(profiles));
        allProfiles.forEach((id, profile) -> {
            try {
                Profiles.writeProfile(profiles, profile);
            } catch (IOException e) {
                LOGGER.warn("Problem writing profile {}: {}", id, e.getMessage());
            }
        });
    }
    manager = new CustomDownloadManager(resolver, executor, overallEffective, translatedUrls);
    // Hashtable<String, String> profileProps = new Hashtable<>(overallEffective.getConfiguration(ORG_OPS4J_PAX_URL_MVN_PID));
    // final Map<String, String> properties = new HashMap<>();
    // properties.put("karaf.default.repository", "system");
    // InterpolationHelper.performSubstitution(profileProps, properties::get, false, false, true);
    // 
    // Write config and system properties
    // 
    LOGGER.info("Configuring etc/config.properties and etc/system.properties");
    Path configPropertiesPath = etcDirectory.resolve("config.properties");
    Properties configProperties = new Properties(configPropertiesPath.toFile());
    configProperties.putAll(overallEffective.getConfig());
    configProperties.save();
    Path systemPropertiesPath = etcDirectory.resolve("system.properties");
    Properties systemProperties = new Properties(systemPropertiesPath.toFile());
    systemProperties.putAll(overallEffective.getSystem());
    systemProperties.save();
    // 
    // Download libraries
    // 
    // TODO: handle karaf 2.x and 3.x libraries
    downloader = manager.createDownloader();
    LOGGER.info("Downloading libraries for generated profiles");
    downloadLibraries(downloader, configProperties, overallEffective.getLibraries(), "");
    LOGGER.info("Downloading additional libraries");
    downloadLibraries(downloader, configProperties, libraries, "");
    downloader.await();
    // Reformat clauses
    reformatClauses(configProperties, Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA);
    reformatClauses(configProperties, Constants.FRAMEWORK_BOOTDELEGATION);
    configProperties.save();
    // 
    // Write all configuration files
    // 
    LOGGER.info("Writing configurations");
    for (Map.Entry<String, byte[]> config : overallEffective.getFileConfigurations().entrySet()) {
        Path configFile = etcDirectory.resolve(config.getKey());
        if (Files.exists(configFile)) {
            LOGGER.info("   not changing existing config file: {}", homeDirectory.relativize(configFile));
        } else {
            LOGGER.info("   adding config file: {}", homeDirectory.relativize(configFile));
            Files.createDirectories(configFile.getParent());
            Files.write(configFile, config.getValue());
        }
    }
    // 'improve' configuration files.
    if (propertyEdits != null) {
        KarafPropertiesEditor editor = new KarafPropertiesEditor();
        editor.setInputEtc(etcDirectory.toFile()).setOutputEtc(etcDirectory.toFile()).setEdits(propertyEdits);
        editor.run();
    }
    if (processor.hasInstructions()) {
        Path featuresProcessingXml = etcDirectory.resolve("org.apache.karaf.features.xml");
        if (hasOwnInstructions() || overrides.size() > 0) {
            // just generate new etc/org.apache.karaf.features.xml file (with external config + builder config)
            try (FileOutputStream fos = new FileOutputStream(featuresProcessingXml.toFile())) {
                LOGGER.info("Generating features processor configuration: {}", homeDirectory.relativize(featuresProcessingXml));
                processor.writeInstructions(fos);
            }
        } else if (needFeaturesProcessorFileCopy) {
            // we may simply copy configured features processor XML configuration
            LOGGER.info("Copying features processor configuration: {} -> {}", homeDirectory.relativize(featuresProcessingLocation), homeDirectory.relativize(featuresProcessingXml));
            Files.copy(featuresProcessingLocation, featuresProcessingXml, StandardCopyOption.REPLACE_EXISTING);
        }
    }
    // 
    // Startup stage
    // 
    Profile startupEffective = startupStage(startupProfile, processor);
    // 
    // Boot stage
    // 
    Set<Feature> allBootFeatures = bootStage(bootProfile, startupEffective, processor);
    // 
    // Installed stage
    // 
    installStage(installedProfile, allBootFeatures, processor);
}
Also used : ArrayList(java.util.ArrayList) Downloader(org.apache.karaf.features.internal.download.Downloader) ResolverImpl(org.apache.felix.resolver.ResolverImpl) Properties(org.apache.felix.utils.properties.Properties) URI(java.net.URI) Feature(org.apache.karaf.features.internal.model.Feature) ProfileBuilder(org.apache.karaf.profile.ProfileBuilder) Profile(org.apache.karaf.profile.Profile) FeaturesProcessorImpl(org.apache.karaf.features.internal.service.FeaturesProcessorImpl) MavenResolver(org.ops4j.pax.url.mvn.MavenResolver) Features(org.apache.karaf.features.internal.model.Features) Path(java.nio.file.Path) IOException(java.io.IOException) FileOutputStream(java.io.FileOutputStream) KarafPropertiesEditor(org.apache.karaf.tools.utils.KarafPropertiesEditor) Kar(org.apache.karaf.kar.internal.Kar) ConfigFile(org.apache.karaf.features.internal.model.ConfigFile) File(java.io.File) Map(java.util.Map) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) TreeMap(java.util.TreeMap)

Aggregations

Downloader (org.apache.karaf.features.internal.download.Downloader)17 HashMap (java.util.HashMap)9 Features (org.apache.karaf.features.internal.model.Features)8 ArrayList (java.util.ArrayList)7 LinkedHashMap (java.util.LinkedHashMap)7 Feature (org.apache.karaf.features.internal.model.Feature)7 IOException (java.io.IOException)6 Path (java.nio.file.Path)6 ConfigFile (org.apache.karaf.features.internal.model.ConfigFile)6 Profile (org.apache.karaf.profile.Profile)6 HashSet (java.util.HashSet)5 LinkedHashSet (java.util.LinkedHashSet)5 Map (java.util.Map)5 ByteArrayInputStream (java.io.ByteArrayInputStream)4 Properties (org.apache.felix.utils.properties.Properties)4 Library (org.apache.karaf.features.Library)4 StreamProvider (org.apache.karaf.features.internal.download.StreamProvider)4 Bundle (org.apache.karaf.features.internal.model.Bundle)4 Conditional (org.apache.karaf.features.internal.model.Conditional)4 BaseRepository (org.apache.karaf.features.internal.repository.BaseRepository)4