
Example 21 with Configuration

use of io.fabric8.annotations.Configuration in project fabric8 by jboss-fuse.

the class OpenShiftPomDeployerTest method doTest.

protected void doTest(String folder, String[] artifactUrls, String[] repoUrls, String expectedCamelDependencyScope, String expectedHawtioDependencyScope) throws Exception {
    File sourceDir = new File(baseDir, "src/test/resources/" + folder);
    assertDirectoryExists(sourceDir);
    File pomSource = new File(sourceDir, "pom.xml");
    assertFileExists(pomSource);
    File outputDir = new File(baseDir, "target/" + getClass().getName() + "/" + folder);
    outputDir.mkdirs();
    assertDirectoryExists(outputDir);
    File pom = new File(outputDir, "pom.xml");
    Files.copy(pomSource, pom);
    assertFileExists(pom);
    git = Git.init().setDirectory(outputDir).setGitDir(new File(outputDir, ".git")).call();
    assertDirectoryExists(new File(outputDir, ".git"));
    git.add().addFilepattern("pom.xml").call();
    git.commit().setMessage("Initial import").call();
    // now that the git repo is set up, let's run the update
    OpenShiftPomDeployer deployer = new OpenShiftPomDeployer(git, outputDir, deployDir, webAppDir);
    System.out.println("About to update the pom " + pom + " with artifacts: " + Arrays.asList(artifactUrls));
    List<Parser> artifacts = new ArrayList<Parser>();
    for (String artifactUrl : artifactUrls) {
        artifacts.add(Parser.parsePathWithSchemePrefix(artifactUrl));
    }
    List<MavenRepositoryURL> repos = new ArrayList<MavenRepositoryURL>();
    for (String repoUrl : repoUrls) {
        repos.add(new MavenRepositoryURL(repoUrl));
    }
    deployer.update(artifacts, repos);
    System.out.println("Completed the new pom is: ");
    System.out.println(Files.toString(pom));
    Document xml = XmlUtils.parseDoc(pom);
    Element plugins = assertXPathElement(xml, "project/profiles/profile[id = 'openshift']/build/plugins");
    Element cleanExecution = assertXPathElement(plugins, "plugin[artifactId = 'maven-clean-plugin']/executions/execution[id = 'fuse-fabric-clean']");
    Element dependencySharedExecution = assertXPathElement(plugins, "plugin[artifactId = 'maven-dependency-plugin']/executions/execution[id = 'fuse-fabric-deploy-shared']");
    Element dependencyWebAppsExecution = assertXPathElement(plugins, "plugin[artifactId = 'maven-dependency-plugin']/executions/execution[id = 'fuse-fabric-deploy-webapps']");
    Element warPluginWarName = xpath("plugin[artifactId = 'maven-war-plugin']/configuration/warName").element(plugins);
    if (warPluginWarName != null) {
        String warName = warPluginWarName.getTextContent();
        System.out.println("WarName is now:  " + warName);
        assertTrue("Should not have ROOT war name", !"ROOT".equals(warName));
    }
    Element dependencies = assertXPathElement(xml, "project/dependencies");
    Element repositories = assertXPathElement(xml, "project/repositories");
    for (Parser artifact : artifacts) {
        // let's check that there's only one dependency for the group & artifact and that it has the right version
        String group = groupId(artifact);
        String artifactId = artifact.getArtifact();
        Element dependency = assertSingleDependencyForGroupAndArtifact(dependencies, group, artifactId);
        Element version = assertXPathElement(dependency, "version");
        assertEquals("Version", artifact.getVersion(), version.getTextContent());
    }
    // let's check that we either preserve the existing scope, add 'provided', or add no scope if none is present in the underlying pom
    assertDependencyScope(dependencies, "org.apache.camel", "camel-core", expectedCamelDependencyScope);
    assertDependencyScope(dependencies, "org.drools", "drools-wb-distribution-wars", "provided");
    assertDependencyScope(dependencies, "io.hawt", "hawtio-web", expectedHawtioDependencyScope);
    assertRepositoryUrl(repositories, "https://maven.repository.redhat.com/ga/");
    assertRepositoryUrl(repositories, "https://repo.fusesource.com/nexus/content/groups/ea/");
}
Also used : OpenShiftPomDeployer(io.fabric8.openshift.agent.OpenShiftPomDeployer) Element(org.w3c.dom.Element) ArrayList(java.util.ArrayList) MavenRepositoryURL(io.fabric8.maven.util.MavenRepositoryURL) Document(org.w3c.dom.Document) File(java.io.File) Parser(io.fabric8.maven.util.Parser)
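
A hedged sketch of how this helper might be invoked from a concrete test. The folder name, artifact URLs, repository URLs and expected scopes below are illustrative assumptions rather than values taken from OpenShiftPomDeployerTest; the mvn-style artifact URL form and the pax-url style repository URL form are likewise assumed to be what Parser.parsePathWithSchemePrefix and MavenRepositoryURL accept.

@Test
public void testUpdatePom() throws Exception {
    // hypothetical fixture folder under src/test/resources
    doTest("update",
            new String[] {
                    // assumed mvn-style artifact URLs
                    "mvn:org.apache.camel/camel-core/2.17.0",
                    "mvn:io.hawt/hawtio-web/1.4.0/war"
            },
            new String[] {
                    // assumed pax-url style repository URLs (repo URL followed by @id=<name>)
                    "https://maven.repository.redhat.com/ga/@id=redhat-ga",
                    "https://repo.fusesource.com/nexus/content/groups/ea/@id=fusesource-ea"
            },
            "provided", "provided");
}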

Example 22 with Configuration

use of io.fabric8.annotations.Configuration in project fabric8 by jboss-fuse.

the class Deployer method deploy.

/**
 * Performs the deployment described by the request against the given deployment state.
 *
 * @param dstate  deployment state
 * @param request deployment request
 * @throws Exception if the deployment fails; a PartialDeploymentException is thrown when prerequisite features had to be deployed first
 */
public void deploy(DeploymentState dstate, final DeploymentRequest request) throws Exception {
    String threadFactoryName = deploymentAgentId != null ? String.format("%s-deployer", deploymentAgentId) : "deployer";
    ExecutorService deploymentsExecutor = Executors.newSingleThreadExecutor(new NamedThreadFactory(threadFactoryName));
    boolean noRefreshUnmanaged = request.options.contains(Constants.Option.NoAutoRefreshUnmanagedBundles);
    boolean noRefreshManaged = request.options.contains(Constants.Option.NoAutoRefreshManagedBundles);
    boolean noRefresh = request.options.contains(Constants.Option.NoAutoRefreshBundles);
    boolean noStart = request.options.contains(Constants.Option.NoAutoStartBundles);
    boolean verbose = request.options.contains(Constants.Option.Verbose);
    boolean silent = request.options.contains(Constants.Option.Silent);
    boolean simulate = request.options.contains(Constants.Option.Simulate);
    boolean noManageBundles = request.options.contains(Constants.Option.NoAutoManageBundles);
    int display = silent ? 0 : verbose ? DISPLAY_LOG | DISPLAY_STDOUT : DISPLAY_LOG;
    // TODO: add an option to unmanage bundles instead of uninstalling them
    Map<String, Set<Long>> managedBundles = copy(dstate.state.managedBundles);
    Map<String, Set<Bundle>> unmanagedBundles = apply(diff(dstate.bundlesPerRegion, dstate.state.managedBundles), map(dstate.bundles));
    // Resolve
    SubsystemResolver resolver = new SubsystemResolver(manager);
    resolver.prepare(dstate.features.values(), request.requirements, apply(unmanagedBundles, adapt(BundleRevision.class)));
    Set<String> prereqs = resolver.collectPrerequisites();
    if (!prereqs.isEmpty()) {
        for (Iterator<String> iterator = prereqs.iterator(); iterator.hasNext(); ) {
            String prereq = iterator.next();
            String[] parts = prereq.split("/");
            VersionRange range;
            if (parts[1].equals("0.0.0")) {
                range = VersionRange.ANY_VERSION;
            } else if (!parts[1].startsWith("[") && !parts[1].startsWith("(")) {
                range = new VersionRange(Macro.transform(request.featureResolutionRange, parts[1]));
            } else {
                range = new VersionRange(parts[1]);
            }
            boolean found = false;
            for (Set<String> featureSet : dstate.state.installedFeatures.values()) {
                for (String feature : featureSet) {
                    String[] p = feature.split("/");
                    found = parts[0].equals(p[0]) && range.contains(VersionTable.getVersion(p[1]));
                    if (found)
                        break;
                }
                if (found)
                    break;
            }
            if (found) {
                iterator.remove();
            }
        }
    }
    if (!prereqs.isEmpty()) {
        DeploymentRequest newRequest = new DeploymentRequest();
        newRequest.bundleUpdateRange = request.bundleUpdateRange;
        newRequest.featureResolutionRange = request.featureResolutionRange;
        newRequest.globalRepository = request.globalRepository;
        newRequest.options = request.options;
        newRequest.overrides = request.overrides;
        newRequest.requirements = copy(dstate.state.requirements);
        for (String prereq : prereqs) {
            addToMapSet(newRequest.requirements, Constants.ROOT_REGION, prereq);
        }
        newRequest.stateChanges = Collections.emptyMap();
        newRequest.updateSnaphots = request.updateSnaphots;
        deploy(dstate, newRequest);
        throw new PartialDeploymentException(prereqs);
    }
    callback.phase("resolving");
    resolver.resolve(new MetadataBuilder(request.metadata), request.overrides, request.featureResolutionRange, request.globalRepository);
    Map<String, StreamProvider> providers = resolver.getProviders();
    Map<String, Set<Resource>> featuresPerRegion = resolver.getFeaturesPerRegions();
    Map<String, Set<String>> installedFeatures = apply(featuresPerRegion, featureId());
    Map<String, Set<String>> newFeatures = diff(installedFeatures, dstate.state.installedFeatures);
    Map<String, Set<String>> delFeatures = diff(dstate.state.installedFeatures, installedFeatures);
    // 
    // Compute requested features state
    // 
    Map<String, Map<String, String>> stateFeatures = copy(dstate.state.stateFeatures);
    for (Map.Entry<String, Set<String>> entry : delFeatures.entrySet()) {
        Map<String, String> map = stateFeatures.get(entry.getKey());
        if (map != null) {
            map.keySet().removeAll(entry.getValue());
            if (map.isEmpty()) {
                stateFeatures.remove(entry.getKey());
            }
        }
    }
    for (Map.Entry<String, Map<String, Constants.RequestedState>> entry1 : request.stateChanges.entrySet()) {
        String region = entry1.getKey();
        Map<String, String> regionStates = stateFeatures.get(region);
        if (regionStates != null) {
            for (Map.Entry<String, Constants.RequestedState> entry2 : entry1.getValue().entrySet()) {
                String feature = entry2.getKey();
                if (regionStates.containsKey(feature)) {
                    regionStates.put(feature, entry2.getValue().name());
                }
            }
        }
    }
    for (Map.Entry<String, Set<String>> entry : newFeatures.entrySet()) {
        for (String feature : entry.getValue()) {
            Map<String, String> map = stateFeatures.get(entry.getKey());
            if (map == null) {
                map = new HashMap<>();
                stateFeatures.put(entry.getKey(), map);
            }
            map.put(feature, noStart ? Constants.RequestedState.Installed.name() : Constants.RequestedState.Started.name());
        }
    }
    // Compute information for each bundle
    Map<String, Map<String, BundleInfo>> bundleInfos = resolver.getBundleInfos();
    // 
    // Compute deployment
    // 
    Deployer.Deployment deployment = computeDeployment(dstate, request, resolver);
    // 
    // Compute the set of bundles to refresh
    // 
    // sort is only used for display
    Map<Bundle, String> toRefresh = new TreeMap<>(new BundleComparator());
    for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
        for (Bundle b : regionDeployment.toDelete) {
            toRefresh.put(b, "Bundle will be uninstalled");
        }
        for (Bundle b : regionDeployment.toUpdate.keySet()) {
            toRefresh.put(b, "Bundle will be updated");
        }
    }
    if (!noRefreshManaged) {
        computeBundlesToRefresh(toRefresh, dstate.bundles.values(), deployment.resToBnd, resolver.getWiring());
    }
    if (noRefreshUnmanaged) {
        toRefresh.keySet().removeAll(flatten(unmanagedBundles));
    }
    // Automatically turn unmanaged bundles into managed bundles
    // if they are required by a feature and no other unmanaged
    // bundles have a requirement on them
    // sort is only used for display
    Set<Bundle> toManage = new TreeSet<>(new BundleComparator());
    if (!noManageBundles) {
        Set<Resource> features = resolver.getFeatures().keySet();
        Set<? extends Resource> unmanaged = apply(flatten(unmanagedBundles), adapt(BundleRevision.class));
        Set<Resource> requested = new HashSet<>();
        // Gather bundles required by a feature
        if (resolver.getWiring() != null) {
            for (List<Wire> wires : resolver.getWiring().values()) {
                for (Wire wire : wires) {
                    if (features.contains(wire.getRequirer()) && unmanaged.contains(wire.getProvider())) {
                        requested.add(wire.getProvider());
                    }
                }
            }
        }
        // Now, we know which bundles are completely unmanaged
        unmanaged.removeAll(requested);
        // Check if bundles have wires from really unmanaged bundles
        if (resolver.getWiring() != null) {
            for (List<Wire> wires : resolver.getWiring().values()) {
                for (Wire wire : wires) {
                    if (requested.contains(wire.getProvider()) && unmanaged.contains(wire.getRequirer())) {
                        requested.remove(wire.getProvider());
                    }
                }
            }
        }
        if (!requested.isEmpty()) {
            Map<Long, String> bundleToRegion = new HashMap<>();
            for (Map.Entry<String, Set<Long>> entry : dstate.bundlesPerRegion.entrySet()) {
                for (long id : entry.getValue()) {
                    bundleToRegion.put(id, entry.getKey());
                }
            }
            for (Resource rev : requested) {
                Bundle bundle = ((BundleRevision) rev).getBundle();
                long id = bundle.getBundleId();
                addToMapSet(managedBundles, bundleToRegion.get(id), id);
                toManage.add(bundle);
            }
        }
    }
    Set<Bundle> toStart = new HashSet<>();
    Set<Bundle> toResolve = new HashSet<>();
    Set<Bundle> toStop = new HashSet<>();
    // 
    // Compute bundle states
    // 
    Map<Resource, Constants.RequestedState> states = new HashMap<>();
    // Find all features state
    Map<Resource, Constants.RequestedState> featuresState = new HashMap<>();
    Map<Resource, Set<Resource>> conditionals = new HashMap<>();
    if (resolver.getFeaturesPerRegions() != null) {
        for (Map.Entry<String, Set<Resource>> entry : resolver.getFeaturesPerRegions().entrySet()) {
            String region = entry.getKey();
            Map<String, String> fss = stateFeatures.get(region);
            for (Resource feature : entry.getValue()) {
                Set<Resource> conditions = new HashSet<>();
                for (Wire wire : resolver.getWiring().get(feature)) {
                    if (IDENTITY_NAMESPACE.equals(wire.getRequirement().getNamespace()) && FeatureResource.CONDITIONAL_TRUE.equals(wire.getRequirement().getDirectives().get(FeatureResource.REQUIREMENT_CONDITIONAL_DIRECTIVE))) {
                        conditions.add(wire.getProvider());
                    }
                }
                if (conditions.isEmpty()) {
                    String fs = fss.get(getFeatureId(feature));
                    featuresState.put(feature, Constants.RequestedState.valueOf(fs));
                } else {
                    conditionals.put(feature, conditions);
                }
            }
        }
    }
    // Compute conditional features state
    for (Resource feature : conditionals.keySet()) {
        Constants.RequestedState state = null;
        for (Resource cond : conditionals.get(feature)) {
            Constants.RequestedState s = featuresState.get(cond);
            if (state == null) {
                state = s;
            } else if (state == Constants.RequestedState.Started && s == Constants.RequestedState.Resolved) {
                state = Constants.RequestedState.Resolved;
            }
        }
        featuresState.put(feature, state);
    }
    // Propagate Resolved state
    for (Resource feature : featuresState.keySet()) {
        if (featuresState.get(feature) == Constants.RequestedState.Resolved) {
            propagateState(states, feature, Constants.RequestedState.Resolved, resolver);
        }
    }
    // Propagate Started state
    for (Resource feature : featuresState.keySet()) {
        if (featuresState.get(feature) == Constants.RequestedState.Started) {
            propagateState(states, feature, Constants.RequestedState.Started, resolver);
        }
    }
    // Put default Started state for other bundles
    for (Resource resource : resolver.getBundles().keySet()) {
        if (!states.containsKey(resource)) {
            states.put(resource, Constants.RequestedState.Started);
        }
    }
    // Override explicit disabled state
    for (Resource resource : featuresState.keySet()) {
        if (resource instanceof FeatureResource) {
            FeatureResource featureResource = (FeatureResource) resource;
            List<BundleInfo> bundles = featureResource.getFeature().getBundles();
            for (BundleInfo bundleInfo : bundles) {
                if (!bundleInfo.isStart()) {
                    Set<String> candidates = new HashSet<>();
                    String fullLocation = bundleInfo.getLocation();
                    int protocolMarker = fullLocation.lastIndexOf(":");
                    fullLocation = fullLocation.substring(protocolMarker + 1);
                    String[] split = fullLocation.split("/");
                    if (split.length >= 3) {
                        String fullLocationKey = split[0] + "." + split[1] + "/" + split[2];
                        candidates.add(fullLocationKey);
                    }
                    String location = bundleInfo.getLocation();
                    Parser parser = new Parser(location);
                    String id = parser.getArtifact();
                    String version = parser.getVersion();
                    String key = id + "/" + version;
                    String keyDotted = id + "/" + version.replaceAll("_", ".");
                    String prefix = parser.getGroup();
                    if (parser.getGroup().contains(":")) {
                        prefix = parser.getGroup().split(":")[1];
                    }
                    String fullKey = prefix + "." + key;
                    String fullKeyDotted = prefix + "." + keyDotted;
                    candidates.add(key);
                    candidates.add(keyDotted);
                    candidates.add(fullKey);
                    candidates.add(fullKeyDotted);
                    for (Iterator<Resource> iter = states.keySet().iterator(); iter.hasNext(); ) {
                        Resource res = iter.next();
                        String resourceStringRepresentation = res.toString();
                        if (candidates.contains(resourceStringRepresentation)) {
                            states.put(res, Constants.RequestedState.Installed);
                            break;
                        }
                    }
                }
            }
        }
    }
    // Only keep bundles resources
    states.keySet().retainAll(resolver.getBundles().keySet());
    // Derive the bundles to resolve, start or stop from the requested states
    for (Map.Entry<Resource, Constants.RequestedState> entry : states.entrySet()) {
        Bundle bundle = deployment.resToBnd.get(entry.getKey());
        if (bundle != null) {
            switch(entry.getValue()) {
                case Started:
                    toResolve.add(bundle);
                    toStart.add(bundle);
                    break;
                case Resolved:
                    toResolve.add(bundle);
                    toStop.add(bundle);
                    break;
            }
        }
    }
    // 
    // Compute all bundle start levels and the start levels to update
    // 
    Map<Resource, Integer> startLevels = new HashMap<>();
    final Map<Bundle, Integer> toUpdateStartLevel = new HashMap<>();
    for (Map.Entry<String, Set<Resource>> entry : resolver.getBundlesPerRegions().entrySet()) {
        String region = entry.getKey();
        for (Resource resource : entry.getValue()) {
            BundleInfo bi = bundleInfos.get(region).get(getUri(resource));
            if (bi != null) {
                int sl = bi.getStartLevel() > 0 ? bi.getStartLevel() : dstate.initialBundleStartLevel;
                startLevels.put(resource, sl);
                Bundle bundle = deployment.resToBnd.get(resource);
                if (bundle != null) {
                    int curSl = bundle.adapt(BundleStartLevel.class).getStartLevel();
                    if (sl != curSl) {
                        toUpdateStartLevel.put(bundle, sl);
                        if (sl > dstate.currentStartLevel) {
                            toStop.add(bundle);
                        }
                    }
                }
            }
        }
    }
    // 
    // Log deployment
    // 
    logDeployment(deployment, display);
    if (!noRefresh && !toRefresh.isEmpty()) {
        print("  Bundles to refresh:", display);
        for (Map.Entry<Bundle, String> entry : toRefresh.entrySet()) {
            Bundle bundle = entry.getKey();
            print("    " + bundle.getSymbolicName() + " / " + bundle.getVersion() + " (" + entry.getValue() + ")", display);
        }
    }
    if (!toManage.isEmpty()) {
        print("  Managing bundle:", display);
        for (Bundle bundle : toManage) {
            print("    " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
        }
    }
    if (simulate) {
        return;
    }
    callback.phase("installing");
    // 
    // Execute deployment
    // 
    // #1: stop bundles that need to be updated, uninstalled or refreshed, in order
    // #2: uninstall needed bundles
    // #3: update regions
    // #4: update bundles
    // #5: install bundles
    // #6: save state
    // #7: install configuration
    // #8: refresh bundles
    // #9: start bundles in order
    // #10: send events
    // 
    // 
    // Handle updates on the agent bundle
    // 
    Deployer.RegionDeployment rootRegionDeployment = deployment.regions.get(Constants.ROOT_REGION);
    // We don't support uninstalling the bundle
    if (rootRegionDeployment != null && rootRegionDeployment.toDelete.contains(dstate.serviceBundle)) {
        throw new UnsupportedOperationException("Uninstalling the agent bundle is not supported");
    }
    // Ensure all classes are loaded if the agent will be refreshed
    if (toRefresh.containsKey(dstate.serviceBundle)) {
        OsgiUtils.ensureAllClassesLoaded(dstate.serviceBundle);
    }
    // When restarting, the resolution will be attempted again
    if (rootRegionDeployment != null && rootRegionDeployment.toUpdate.containsKey(dstate.serviceBundle)) {
        callback.phase("updating agent");
        callback.persistResolveRequest(request);
        // save the new checksum persistently
        if (deployment.bundleChecksums.containsKey(dstate.serviceBundle.getBundleId())) {
            State state = dstate.state.copy();
            state.bundleChecksums.put(dstate.serviceBundle.getBundleId(), deployment.bundleChecksums.get(dstate.serviceBundle.getBundleId()));
            callback.saveState(state);
        }
        Resource resource = rootRegionDeployment.toUpdate.get(dstate.serviceBundle);
        String uri = getUri(resource);
        print("The agent bundle needs is being updated with " + uri, display);
        toRefresh.clear();
        toRefresh.put(dstate.serviceBundle, "DeploymentAgent bundle is being updated");
        computeBundlesToRefresh(toRefresh, dstate.bundles.values(), Collections.<Resource, Bundle>emptyMap(), Collections.<Resource, List<Wire>>emptyMap());
        callback.stopBundle(dstate.serviceBundle, STOP_TRANSIENT);
        try (InputStream is = getBundleInputStream(resource, providers)) {
            callback.updateBundle(dstate.serviceBundle, uri, is);
        }
        callback.refreshPackages(toRefresh.keySet());
        callback.startBundle(dstate.serviceBundle);
        return;
    }
    // Download configuration files for newly installed features
    if (!newFeatures.isEmpty()) {
        Set<Feature> set = apply(flatten(newFeatures), map(dstate.features));
        for (Feature feature : set) {
            Downloader downloader = manager.createDownloader();
            for (ConfigFile configFile : feature.getConfigurationFiles()) {
                downloader.download(configFile.getLocation(), null);
            }
            downloader.await();
        }
    }
    // Compute the set of bundles to stop: bundles to be updated or deleted, plus (unless refresh is disabled) bundles to be refreshed
    for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
        toStop.addAll(regionDeployment.toUpdate.keySet());
        toStop.addAll(regionDeployment.toDelete);
    }
    if (!noRefresh) {
        Set<Bundle> toRefreshToStopEarly = new HashSet<>(toRefresh.keySet());
        toRefreshToStopEarly.remove(dstate.serviceBundle);
        toStop.addAll(toRefreshToStopEarly);
        toStart.addAll(toRefreshToStopEarly);
    }
    removeFragmentsAndBundlesInState(toStop, UNINSTALLED | RESOLVED | STOPPING);
    if (!toStop.isEmpty()) {
        callback.phase("updating (stopping bundles)");
        print("Stopping bundles:", display);
        while (!toStop.isEmpty()) {
            List<Bundle> bs = getBundlesToStop(toStop);
            for (final Bundle bundle : bs) {
                print("  " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
                List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {

                    @Override
                    public Void call() throws Exception {
                        try {
                            LOGGER.info("Scheduled stop for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
                            // If the bundle start level will be changed, stop it persistently to
                            // avoid a restart when the start level is actually changed
                            callback.stopBundle(bundle, toUpdateStartLevel.containsKey(bundle) ? 0 : STOP_TRANSIENT);
                        } catch (BundleException e) {
                            LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
                        }
                        return null;
                    }
                }), request.bundleStartTimeout, TimeUnit.SECONDS);
                // synch on Future's output, limited by the TimeUnit above
                for (Future<Void> f : futures) {
                    try {
                        f.get();
                    } catch (CancellationException e) {
                        LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
                    }
                }
                toStop.remove(bundle);
            }
        }
    }
    // 
    // Delete bundles
    // 
    boolean hasToDelete = false;
    for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
        if (hasToDelete = !regionDeployment.toDelete.isEmpty()) {
            break;
        }
    }
    if (hasToDelete) {
        callback.phase("updating (uninstalling bundles)");
        print("Uninstalling bundles:", display);
        for (Map.Entry<String, RegionDeployment> entry : deployment.regions.entrySet()) {
            String name = entry.getKey();
            Deployer.RegionDeployment regionDeployment = entry.getValue();
            for (Bundle bundle : regionDeployment.toDelete) {
                print("  " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
                callback.uninstall(bundle);
                removeFromMapSet(managedBundles, name, bundle.getBundleId());
            }
        }
    }
    // 
    // Update regions
    // 
    {
        // Add bundles
        Map<String, Set<Long>> bundles = new HashMap<>();
        add(bundles, apply(unmanagedBundles, bundleId()));
        add(bundles, managedBundles);
        // Compute policies
        RegionDigraph computedDigraph = resolver.getFlatDigraph();
        Map<String, Map<String, Map<String, Set<String>>>> policies = copy(dstate.filtersPerRegion);
        // Only keep regions which still have bundles
        policies.keySet().retainAll(bundles.keySet());
        // Fix broken filters
        for (String name : policies.keySet()) {
            policies.get(name).keySet().retainAll(policies.keySet());
        }
        // Update managed regions
        for (Region computedRegion : computedDigraph.getRegions()) {
            String name = computedRegion.getName();
            Map<String, Map<String, Set<String>>> policy = policies.get(name);
            if (policy == null) {
                policy = new HashMap<>();
                policies.put(name, policy);
            }
            for (RegionDigraph.FilteredRegion fr : computedRegion.getEdges()) {
                String r2 = fr.getRegion().getName();
                Map<String, Set<String>> filters = new HashMap<>();
                Map<String, Collection<String>> current = fr.getFilter().getSharingPolicy();
                for (String ns : current.keySet()) {
                    for (String f : current.get(ns)) {
                        addToMapSet(filters, ns, f);
                    }
                }
                policy.put(r2, filters);
            }
        }
        // Apply all changes
        callback.replaceDigraph(policies, bundles);
    }
    // 
    // Update bundles
    // 
    boolean hasToUpdate = false;
    for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
        if (hasToUpdate = !regionDeployment.toUpdate.isEmpty()) {
            break;
        }
    }
    if (hasToUpdate) {
        callback.phase("updating (updating bundles)");
        print("Updating bundles:", display);
        for (Map.Entry<String, RegionDeployment> rde : deployment.regions.entrySet()) {
            for (Map.Entry<Bundle, Resource> entry : rde.getValue().toUpdate.entrySet()) {
                Bundle bundle = entry.getKey();
                Resource resource = entry.getValue();
                String uri = getUri(resource);
                print("  " + uri, display);
                try (InputStream is = getBundleInputStream(resource, providers)) {
                    callback.updateBundle(bundle, uri, is);
                }
                toStart.add(bundle);
            }
        }
    }
    // Update bundle start levels
    for (Map.Entry<Bundle, Integer> entry : toUpdateStartLevel.entrySet()) {
        Bundle bundle = entry.getKey();
        int sl = entry.getValue();
        callback.setBundleStartLevel(bundle, sl);
    }
    // 
    // Install bundles
    // 
    boolean hasToInstall = false;
    for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
        if (hasToInstall = !regionDeployment.toInstall.isEmpty()) {
            break;
        }
    }
    if (hasToInstall) {
        callback.phase("updating (installing bundles)");
        print("Installing bundles:", display);
        for (Map.Entry<String, RegionDeployment> entry : deployment.regions.entrySet()) {
            String name = entry.getKey();
            Deployer.RegionDeployment regionDeployment = entry.getValue();
            for (Resource resource : regionDeployment.toInstall) {
                String uri = getUri(resource);
                print("  " + uri, display);
                Bundle bundle;
                long crc;
                try (ChecksumUtils.CRCInputStream is = new ChecksumUtils.CRCInputStream(getBundleInputStream(resource, providers))) {
                    bundle = callback.installBundle(name, uri, is);
                    // calculate CRC normally
                    crc = is.getCRC();
                    try {
                        URI resourceURI = new URI(uri);
                        if ("blueprint".equals(resourceURI.getScheme())) {
                            // ENTESB-6957 calculate proper blueprint file CRC during installation
                            InputStream bis = getBlueprintInputStream(getBundleInputStream(resource, providers));
                            // original stream is closed in either case
                            if (bis != null) {
                                crc = ChecksumUtils.checksum(bis);
                            }
                        }
                    } catch (URISyntaxException ignored) {
                    }
                }
                addToMapSet(managedBundles, name, bundle.getBundleId());
                deployment.resToBnd.put(resource, bundle);
                // save a checksum of installed snapshot bundle
                if (Constants.UPDATE_SNAPSHOTS_CRC.equals(request.updateSnaphots) && isUpdateable(resource) && !deployment.bundleChecksums.containsKey(bundle.getBundleId())) {
                    deployment.bundleChecksums.put(bundle.getBundleId(), crc);
                }
                if (startLevels.containsKey(resource)) {
                    int startLevel = startLevels.get(resource);
                    if (startLevel != dstate.initialBundleStartLevel) {
                        callback.setBundleStartLevel(bundle, startLevel);
                    }
                }
                Constants.RequestedState reqState = states.get(resource);
                if (reqState == null) {
                    reqState = Constants.RequestedState.Started;
                }
                switch(reqState) {
                    case Started:
                        toResolve.add(bundle);
                        toStart.add(bundle);
                        break;
                    case Resolved:
                        toResolve.add(bundle);
                        break;
                }
            }
        }
    }
    // 
    // Update and save state
    // 
    State newState = new State();
    newState.bundleChecksums.putAll(deployment.bundleChecksums);
    newState.requirements.putAll(request.requirements);
    newState.installedFeatures.putAll(installedFeatures);
    newState.stateFeatures.putAll(stateFeatures);
    newState.managedBundles.putAll(managedBundles);
    callback.saveState(newState);
    // Install configurations for newly installed features
    if (!newFeatures.isEmpty()) {
        // check if configadmin is started
        callback.phase("updating (installing configurations)");
        Set<Feature> set = apply(flatten(newFeatures), map(dstate.features));
        callback.restoreConfigAdminIfNeeded();
        for (Feature feature : set) {
            callback.installFeatureConfigs(feature);
        }
    }
    callback.phase("finalizing");
    if (!noRefresh) {
        toStop = new HashSet<>();
        toStop.addAll(toRefresh.keySet());
        removeFragmentsAndBundlesInState(toStop, UNINSTALLED | RESOLVED | STOPPING);
        if (!toStop.isEmpty()) {
            callback.phase("finalizing (stopping bundles)");
            print("Stopping bundles:", display);
            while (!toStop.isEmpty()) {
                List<Bundle> bs = getBundlesToStop(toStop);
                for (final Bundle bundle : bs) {
                    print("  " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
                    List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {

                        @Override
                        public Void call() throws Exception {
                            try {
                                LOGGER.info("Scheduled stop for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
                                callback.stopBundle(bundle, STOP_TRANSIENT);
                            } catch (BundleException e) {
                                LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
                            }
                            return null;
                        }
                    }), request.bundleStartTimeout, TimeUnit.SECONDS);
                    // synch on Future's output, limited by the TimeUnit above
                    for (Future<Void> f : futures) {
                        try {
                            f.get();
                        } catch (CancellationException e) {
                            LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
                        }
                    }
                    toStop.remove(bundle);
                    toStart.add(bundle);
                }
            }
        }
        if (!toRefresh.isEmpty()) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("    Bundle refresh explanation:");
                for (Map.Entry entry : toRefresh.entrySet()) {
                    LOGGER.debug("{} is refreshed because of [{}]", entry.getKey(), entry.getValue());
                }
            }
            callback.phase("finalizing (refreshing bundles)");
            print("Refreshing bundles:", display);
            for (Map.Entry<Bundle, String> entry : toRefresh.entrySet()) {
                Bundle bundle = entry.getKey();
                print("    " + bundle.getSymbolicName() + " / " + bundle.getVersion() + " (" + entry.getValue() + ")", display);
            }
            if (!toRefresh.isEmpty()) {
                callback.refreshPackages(toRefresh.keySet());
            }
        }
    }
    // Resolve bundles
    callback.phase("finalizing (resolving bundles)");
    toResolve.addAll(toStart);
    toResolve.addAll(toRefresh.keySet());
    removeBundlesInState(toResolve, UNINSTALLED);
    callback.resolveBundles(toResolve, resolver.getWiring(), deployment.resToBnd);
    final boolean[] agentStarted = new boolean[] { false };
    // Compute bundles to start
    removeFragmentsAndBundlesInState(toStart, UNINSTALLED | ACTIVE | STARTING);
    if (!toStart.isEmpty()) {
        // Compute correct start order
        final List<Throwable> exceptions = new ArrayList<>();
        callback.phase("finalizing (starting bundles)");
        print("Starting bundles:", display);
        while (!toStart.isEmpty()) {
            List<Bundle> bs = getBundlesToStart(toStart, dstate.serviceBundle);
            for (final Bundle bundle : bs) {
                print("  " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
                if ("io.fabric8.fabric-agent".equals(bundle.getSymbolicName())) {
                    agentStarted[0] = true;
                }
                List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {

                    @Override
                    public Void call() throws Exception {
                        try {
                            LOGGER.info("Scheduled start for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
                            callback.startBundle(bundle);
                        } catch (BundleException e) {
                            exceptions.add(e);
                        }
                        return null;
                    }
                }), request.bundleStartTimeout, TimeUnit.SECONDS);
                // synch on Future's output, limited by the TimeUnit above
                for (Future<Void> f : futures) {
                    try {
                        f.get();
                    } catch (CancellationException e) {
                        exceptions.add(new BundleException("Unable to start bundle [" + bundle.getSymbolicName() + "] within " + request.bundleStartTimeout + " seconds"));
                    }
                }
                toStart.remove(bundle);
            }
        }
        deploymentsExecutor.shutdown();
        if (!exceptions.isEmpty()) {
            throw new MultiException("Error restarting bundles", exceptions);
        }
    }
    // Info about final list of deployed bundles
    callback.provisionList(deployment.resToBnd.keySet());
    // list of bundles in special "fabric-startup" feature
    List<String> urls = new LinkedList<>();
    for (Feature ft : dstate.features.values()) {
        if (ft.getName().equals("fabric-startup") && ft.getBundles() != null) {
            for (BundleInfo bi : ft.getBundles()) {
                urls.add(bi.getLocation());
            }
        }
        // special case for Fuse/AMQ...
        if (ft.getName().equals("esb-commands-startup") && ft.getBundles() != null) {
            for (BundleInfo bi : ft.getBundles()) {
                urls.add(bi.getLocation());
            }
        }
    }
    // let's resolve these URIs and make them available from ${karaf.default.repository}
    try {
        LOGGER.info("Storing startup artifacts in default repository: {}", urls);
        AgentUtils.downloadLocations(manager, urls, true);
    } catch (Exception e) {
        LOGGER.warn(e.getMessage(), e);
    }
    if (!noRefresh) {
        // Clear pax-logging's cached MDC context (similar to KARAF-4686/ENTESB-6045 - cleaning the sun.rmi.transport.tcp.TCPEndpoint.localEndpoints cache)
        try {
            Class<?> cls = getClass().getClassLoader().loadClass("org.ops4j.pax.logging.slf4j.Slf4jMDCAdapter");
            Field m_contextField = cls.getDeclaredField("m_context");
            m_contextField.setAccessible(true);
            // nullify org.ops4j.pax.logging.slf4j.Slf4jMDCAdapter.m_context, so it'll be reinitialized from
            // fresh pax-logging-service wiring
            m_contextField.set(null, null);
        } catch (Exception ignored) {
        }
    }
    if (callback.done(agentStarted[0], urls)) {
        print("Done.", display);
    }
}
Also used : BundleStartLevel(org.osgi.framework.startlevel.BundleStartLevel) Downloader(io.fabric8.agent.download.Downloader) Feature(io.fabric8.agent.model.Feature) BundleException(org.osgi.framework.BundleException) StreamProvider(io.fabric8.agent.download.StreamProvider) NamedThreadFactory(io.fabric8.utils.NamedThreadFactory) SubsystemResolver(io.fabric8.agent.region.SubsystemResolver) Parser(io.fabric8.maven.util.Parser) MultiException(io.fabric8.common.util.MultiException) MapUtils.addToMapSet(io.fabric8.agent.internal.MapUtils.addToMapSet) MapUtils.removeFromMapSet(io.fabric8.agent.internal.MapUtils.removeFromMapSet) URI(java.net.URI) FeatureResource(io.fabric8.agent.resolver.FeatureResource) VersionRange(org.apache.felix.utils.version.VersionRange) Wire(org.osgi.resource.Wire) BundleWire(org.osgi.framework.wiring.BundleWire) BundleInfo(io.fabric8.agent.model.BundleInfo) BundleRevision(org.osgi.framework.wiring.BundleRevision) ConfigFile(io.fabric8.agent.model.ConfigFile) Resource(org.osgi.resource.Resource) FeatureResource(io.fabric8.agent.resolver.FeatureResource) Region(org.eclipse.equinox.region.Region) URISyntaxException(java.net.URISyntaxException) Field(java.lang.reflect.Field) RegionDigraph(org.eclipse.equinox.region.RegionDigraph) Bundle(org.osgi.framework.Bundle) ByteArrayInputStream(java.io.ByteArrayInputStream) ZipInputStream(java.util.zip.ZipInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) URISyntaxException(java.net.URISyntaxException) BundleException(org.osgi.framework.BundleException) InvalidSyntaxException(org.osgi.framework.InvalidSyntaxException) IOException(java.io.IOException) MultiException(io.fabric8.common.util.MultiException) ChecksumUtils(io.fabric8.common.util.ChecksumUtils)
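
For orientation, a minimal sketch of assembling a DeploymentRequest before calling deploy(). The field names come from the recursive prerequisite call inside the method above; the requirement string format, the EnumSet type of the options field, the empty overrides set and the null globalRepository are assumptions, and other fields are omitted.

// Hypothetical caller: dstate and the Deployer instance are obtained from the deployment agent elsewhere.
DeploymentRequest req = new DeploymentRequest();
req.bundleUpdateRange = "[==,=+)";                       // illustrative version range, not taken from the source
req.featureResolutionRange = "[====,====]";              // illustrative
req.options = EnumSet.of(Constants.Option.Verbose);      // assumed the options field accepts an EnumSet of Constants.Option
req.overrides = Collections.emptySet();                  // assumed to be a (possibly empty) set of override definitions
req.stateChanges = Collections.emptyMap();
req.updateSnaphots = Constants.UPDATE_SNAPSHOTS_CRC;     // constant referenced by the method above
req.globalRepository = null;                             // assumed optional
req.requirements = new HashMap<>();
addToMapSet(req.requirements, Constants.ROOT_REGION, "example-feature/0.0.0"); // "name/version" form, as parsed for prerequisites above
deployer.deploy(dstate, req);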

Example 23 with Configuration

use of io.fabric8.annotations.Configuration in project fabric8 by jboss-fuse.

the class FeatureConfigInstaller method installFeatureConfigs.

void installFeatureConfigs(Feature feature) throws IOException, InvalidSyntaxException {
    for (Config config : feature.getConfigurations()) {
        Properties props = config.getProperties();
        String[] split = parsePid(config.getName());
        // see http://felix.apache.org/documentation/subprojects/apache-felix-file-install.html#configurations
        String pid = split[0];
        String subname = split[1];
        Configuration cfg = findExistingConfiguration(configAdmin, pid, subname);
        if (cfg == null) {
            Dictionary<String, String> cfgProps = convertToDict(props);
            cfg = createConfiguration(configAdmin, pid, subname);
            String key = createConfigurationKey(pid, subname);
            cfgProps.put(CONFIG_KEY, key);
            cfg.update(cfgProps);
        } else if (config.isAppend()) {
            Dictionary<String, Object> properties = cfg.getProperties();
            // Ignore already managed configurations
            String fabricManagedPid = (String) properties.get(FABRIC_ZOOKEEPER_PID);
            if (Strings.isNotBlank(fabricManagedPid)) {
                continue;
            }
            for (Enumeration<String> propKeys = properties.keys(); propKeys.hasMoreElements(); ) {
                String key = propKeys.nextElement();
                // remove existing entry, since it's about appending.
                if (props.containsKey(key)) {
                    props.remove(key);
                }
            }
            if (props.size() > 0) {
                // convert props to dictionary
                Dictionary<String, String> cfgProps = convertToDict(props);
                cfg.update(cfgProps);
            }
        }
    }
    for (ConfigFile configFile : feature.getConfigurationFiles()) {
        installConfigurationFile(configFile.getLocation(), configFile.getFinalname(), configFile.isOverride());
    }
}
Also used : Dictionary(java.util.Dictionary) Enumeration(java.util.Enumeration) Configuration(org.osgi.service.cm.Configuration) ConfigFile(io.fabric8.agent.model.ConfigFile) Config(io.fabric8.agent.model.Config) Properties(java.util.Properties)
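
The findExistingConfiguration and createConfiguration helpers are not shown above. Below is a rough sketch of what they presumably wrap, using only the standard OSGi ConfigurationAdmin API; the lookup filter on CONFIG_KEY, the key format and the factory-configuration split for pid/subname pairs are assumptions, not the fabric8 implementation.

// Hedged sketch, not the actual FeatureConfigInstaller helpers.
Configuration findOrCreateConfiguration(ConfigurationAdmin admin, String pid, String subname)
        throws IOException, InvalidSyntaxException {
    String key = subname == null ? pid : pid + "-" + subname;                 // key format assumed
    Configuration[] existing = admin.listConfigurations("(" + CONFIG_KEY + "=" + key + ")");
    if (existing != null && existing.length > 0) {
        return existing[0];
    }
    // plain pids become singleton configurations, pid/subname pairs factory configurations (assumed)
    return subname == null
            ? admin.getConfiguration(pid, null)
            : admin.createFactoryConfiguration(pid, null);
}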

Example 24 with Configuration

use of io.fabric8.annotations.Configuration in project fabric8 by jboss-fuse.

the class FabricServiceImpl method getZookeeperInfo.

// FIXME public access on the impl
public String getZookeeperInfo(String name) {
    assertValid();
    String zooKeeperUrl = null;
    // This is also required for the integration with the IDE.
    try {
        if (curator.get().getZookeeperClient().isConnected()) {
            Version defaultVersion = getDefaultVersion();
            if (defaultVersion != null) {
                Profile profile = defaultVersion.getRequiredProfile("default");
                if (profile != null) {
                    Map<String, String> zookeeperConfig = profile.getConfiguration(Constants.ZOOKEEPER_CLIENT_PID);
                    if (zookeeperConfig != null) {
                        zooKeeperUrl = getSubstitutedData(curator.get(), zookeeperConfig.get(name));
                    }
                }
            }
        }
    } catch (Exception e) {
    // Ignore it.
    }
    if (zooKeeperUrl == null) {
        try {
            Configuration config = configAdmin.get().getConfiguration(Constants.ZOOKEEPER_CLIENT_PID, null);
            zooKeeperUrl = (String) config.getProperties().get(name);
        } catch (Exception e) {
        // Ignore it.
        }
    }
    return zooKeeperUrl;
}
Also used : BootstrapConfiguration(io.fabric8.zookeeper.bootstrap.BootstrapConfiguration) Configuration(org.osgi.service.cm.Configuration) Version(io.fabric8.api.Version) Profile(io.fabric8.api.Profile) ProfileDependencyException(io.fabric8.api.ProfileDependencyException) EncryptionOperationNotPossibleException(org.jasypt.exceptions.EncryptionOperationNotPossibleException) FabricException(io.fabric8.api.FabricException) IOException(java.io.IOException)
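
A hypothetical caller, to make the lookup order concrete: the Constants.ZOOKEEPER_CLIENT_PID configuration of the default version's "default" profile is consulted first, with ConfigAdmin as the fallback. The property name used here is an assumption.

// Hypothetical usage; "zookeeper.url" is assumed to be a key stored under Constants.ZOOKEEPER_CLIENT_PID.
String zkUrl = fabricService.getZookeeperInfo("zookeeper.url");
if (zkUrl == null) {
    // neither the default profile nor ConfigAdmin provided the property
}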

Example 25 with Configuration

use of io.fabric8.annotations.Configuration in project fabric8 by jboss-fuse.

the class ComponentConfigurer method configure.

@Override
public <T> Map<String, ?> configure(final Map<String, ?> configuration, T target, String... ignorePrefix) throws Exception {
    assertValid();
    Map<String, Object> result = new HashMap<>();
    final PropertiesProvider runtimeProperties = new PropertiesProvider() {

        @Override
        public Object getProperty(String key) {
            return bundleContext.getProperty(key);
        }

        @Override
        public Object getRequiredProperty(String key) {
            String value = bundleContext.getProperty(key);
            IllegalStateAssertion.assertNotNull(value, "Cannot obtain property: " + key);
            return value;
        }

        @Override
        public Object getProperty(String key, Object defaultValue) {
            String value = bundleContext.getProperty(key);
            return value != null ? value : defaultValue;
        }
    };
    final PropertiesProvider configurationProvider = new MapPropertiesProvider((Map<String, Object>) configuration);
    final PropertiesProvider[] propertiesProviders = new PropertiesProvider[] { configurationProvider, runtimeProperties };
    PropertiesProvider provider = new SubstitutionPropertiesProvider(propertiesProviders);
    for (Map.Entry<String, ?> entry : configuration.entrySet()) {
        String key = entry.getKey();
        Object value = provider.getProperty(key);
        result.put(key, value);
    }
    ConfigInjection.applyConfiguration(result, target, ignorePrefix);
    return result;
}
Also used : MapPropertiesProvider(io.fabric8.api.gravia.MapPropertiesProvider) PropertiesProvider(io.fabric8.api.gravia.PropertiesProvider) SubstitutionPropertiesProvider(io.fabric8.api.gravia.SubstitutionPropertiesProvider) SubstitutionPropertiesProvider(io.fabric8.api.gravia.SubstitutionPropertiesProvider) MapPropertiesProvider(io.fabric8.api.gravia.MapPropertiesProvider) HashMap(java.util.HashMap) HashMap(java.util.HashMap) Map(java.util.Map)
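
An illustrative call, showing why a SubstitutionPropertiesProvider is layered over the configuration map and the bundle context: values may reference framework properties, and these are resolved before ConfigInjection applies the result to the target. The keys, the ${...} placeholder syntax and the target type below are assumptions.

// Hypothetical usage of configure(); MyComponent is an assumed target bean with matching fields.
Map<String, Object> raw = new HashMap<>();
raw.put("name", "example-component");
raw.put("dataDir", "${karaf.data}/example");   // assumed placeholder resolved against bundleContext properties
MyComponent target = new MyComponent();
Map<String, ?> resolved = configurer.configure(raw, target);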

Aggregations

IOException (java.io.IOException) 29
HashMap (java.util.HashMap) 23
File (java.io.File) 22
Configuration (org.osgi.service.cm.Configuration) 20
Map (java.util.Map) 16
BootstrapConfiguration (io.fabric8.zookeeper.bootstrap.BootstrapConfiguration) 15
Test (org.junit.Test) 13
ArrayList (java.util.ArrayList) 12
Container (io.fabric8.api.Container) 11
Profile (io.fabric8.api.Profile) 11
RuntimeProperties (io.fabric8.api.RuntimeProperties) 9
HashSet (java.util.HashSet) 9
ConfigMap (io.fabric8.kubernetes.api.model.ConfigMap) 8
MojoExecutionException (org.apache.maven.plugin.MojoExecutionException) 8
FabricException (io.fabric8.api.FabricException) 7
FabricService (io.fabric8.api.FabricService) 7
Properties (java.util.Properties) 7
DefaultKubernetesClient (io.fabric8.kubernetes.client.DefaultKubernetesClient) 6
Util.readAsString (io.fabric8.arquillian.utils.Util.readAsString) 5
URL (java.net.URL) 5