Use of io.fabric8.agent.download.StreamProvider in project fabric8 by jboss-fuse.
The class Agent, method loadResources:
//
// State support
//
public static Callable<Map<String, Resource>> loadResources(DownloadManager manager, Map<String, Map<VersionRange, Map<String, String>>> metadata, Set<String> uris) throws MultiException, InterruptedException, MalformedURLException {
    final Map<String, Resource> resources = new HashMap<>();
    final Downloader downloader = manager.createDownloader();
    final MetadataBuilder builder = new MetadataBuilder(metadata);
    final DownloadCallback callback = new DownloadCallback() {
        @Override
        public void downloaded(StreamProvider provider) throws Exception {
            String uri = provider.getUrl();
            Map<String, String> headers = builder.getMetadata(uri, provider.getFile());
            Resource resource = ResourceBuilder.build(uri, headers);
            synchronized (resources) {
                resources.put(uri, resource);
            }
        }
    };
    for (String uri : uris) {
        downloader.download(uri, callback);
    }
    return new Callable<Map<String, Resource>>() {
        @Override
        public Map<String, Resource> call() throws Exception {
            downloader.await();
            return resources;
        }
    };
}
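A minimal usage sketch (not taken from the fabric8 sources): the returned Callable blocks until all downloads have completed and then yields the resolved resources. Here manager is assumed to be an existing DownloadManager and the mvn: URI is a placeholder.
// Hypothetical caller of Agent.loadResources(); identifiers below are illustrative only.
Map<String, Map<VersionRange, Map<String, String>>> metadata = new HashMap<>();
Set<String> uris = new HashSet<>(Arrays.asList("mvn:org.example/example-bundle/1.0.0"));
Callable<Map<String, Resource>> loader = Agent.loadResources(manager, metadata, uris);
// call() waits on the Downloader and returns the resources keyed by their URI
Map<String, Resource> resources = loader.call();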
Use of io.fabric8.agent.download.StreamProvider in project fabric8 by jboss-fuse.
The class Deployer, method deploy:
/**
* Performs the deployment described by the given request against the current deployment state.
* @param dstate the current deployment state
* @param request the deployment request to apply
* @throws Exception if the deployment fails
*/
public void deploy(DeploymentState dstate, final DeploymentRequest request) throws Exception {
String threadFactoryName = deploymentAgentId != null ? String.format("%s-deployer", deploymentAgentId) : "deployer";
ExecutorService deploymentsExecutor = Executors.newSingleThreadExecutor(new NamedThreadFactory(threadFactoryName));
boolean noRefreshUnmanaged = request.options.contains(Constants.Option.NoAutoRefreshUnmanagedBundles);
boolean noRefreshManaged = request.options.contains(Constants.Option.NoAutoRefreshManagedBundles);
boolean noRefresh = request.options.contains(Constants.Option.NoAutoRefreshBundles);
boolean noStart = request.options.contains(Constants.Option.NoAutoStartBundles);
boolean verbose = request.options.contains(Constants.Option.Verbose);
boolean silent = request.options.contains(Constants.Option.Silent);
boolean simulate = request.options.contains(Constants.Option.Simulate);
boolean noManageBundles = request.options.contains(Constants.Option.NoAutoManageBundles);
int display = silent ? 0 : verbose ? DISPLAY_LOG | DISPLAY_STDOUT : DISPLAY_LOG;
// TODO: add an option to unmanage bundles instead of uninstalling those
Map<String, Set<Long>> managedBundles = copy(dstate.state.managedBundles);
Map<String, Set<Bundle>> unmanagedBundles = apply(diff(dstate.bundlesPerRegion, dstate.state.managedBundles), map(dstate.bundles));
// Resolve
SubsystemResolver resolver = new SubsystemResolver(manager);
resolver.prepare(dstate.features.values(), request.requirements, apply(unmanagedBundles, adapt(BundleRevision.class)));
Set<String> prereqs = resolver.collectPrerequisites();
if (!prereqs.isEmpty()) {
for (Iterator<String> iterator = prereqs.iterator(); iterator.hasNext(); ) {
String prereq = iterator.next();
String[] parts = prereq.split("/");
VersionRange range;
if (parts[1].equals("0.0.0")) {
range = VersionRange.ANY_VERSION;
} else if (!parts[1].startsWith("[") && !parts[1].startsWith("(")) {
range = new VersionRange(Macro.transform(request.featureResolutionRange, parts[1]));
} else {
range = new VersionRange(parts[1]);
}
boolean found = false;
for (Set<String> featureSet : dstate.state.installedFeatures.values()) {
for (String feature : featureSet) {
String[] p = feature.split("/");
found = parts[0].equals(p[0]) && range.contains(VersionTable.getVersion(p[1]));
if (found)
break;
}
if (found)
break;
}
if (found) {
iterator.remove();
}
}
}
if (!prereqs.isEmpty()) {
DeploymentRequest newRequest = new DeploymentRequest();
newRequest.bundleUpdateRange = request.bundleUpdateRange;
newRequest.featureResolutionRange = request.featureResolutionRange;
newRequest.globalRepository = request.globalRepository;
newRequest.options = request.options;
newRequest.overrides = request.overrides;
newRequest.requirements = copy(dstate.state.requirements);
for (String prereq : prereqs) {
addToMapSet(newRequest.requirements, Constants.ROOT_REGION, prereq);
}
newRequest.stateChanges = Collections.emptyMap();
newRequest.updateSnaphots = request.updateSnaphots;
deploy(dstate, newRequest);
throw new PartialDeploymentException(prereqs);
}
callback.phase("resolving");
resolver.resolve(new MetadataBuilder(request.metadata), request.overrides, request.featureResolutionRange, request.globalRepository);
Map<String, StreamProvider> providers = resolver.getProviders();
Map<String, Set<Resource>> featuresPerRegion = resolver.getFeaturesPerRegions();
Map<String, Set<String>> installedFeatures = apply(featuresPerRegion, featureId());
Map<String, Set<String>> newFeatures = diff(installedFeatures, dstate.state.installedFeatures);
Map<String, Set<String>> delFeatures = diff(dstate.state.installedFeatures, installedFeatures);
//
// Compute requested features state
//
Map<String, Map<String, String>> stateFeatures = copy(dstate.state.stateFeatures);
for (Map.Entry<String, Set<String>> entry : delFeatures.entrySet()) {
Map<String, String> map = stateFeatures.get(entry.getKey());
if (map != null) {
map.keySet().removeAll(entry.getValue());
if (map.isEmpty()) {
stateFeatures.remove(entry.getKey());
}
}
}
for (Map.Entry<String, Map<String, Constants.RequestedState>> entry1 : request.stateChanges.entrySet()) {
String region = entry1.getKey();
Map<String, String> regionStates = stateFeatures.get(region);
if (regionStates != null) {
for (Map.Entry<String, Constants.RequestedState> entry2 : entry1.getValue().entrySet()) {
String feature = entry2.getKey();
if (regionStates.containsKey(feature)) {
regionStates.put(feature, entry2.getValue().name());
}
}
}
}
for (Map.Entry<String, Set<String>> entry : newFeatures.entrySet()) {
for (String feature : entry.getValue()) {
Map<String, String> map = stateFeatures.get(entry.getKey());
if (map == null) {
map = new HashMap<>();
stateFeatures.put(entry.getKey(), map);
}
map.put(feature, noStart ? Constants.RequestedState.Installed.name() : Constants.RequestedState.Started.name());
}
}
// Compute information for each bundle
Map<String, Map<String, BundleInfo>> bundleInfos = resolver.getBundleInfos();
//
// Compute deployment
//
Deployer.Deployment deployment = computeDeployment(dstate, request, resolver);
//
// Compute the set of bundles to refresh
//
// sort is only used for display
Map<Bundle, String> toRefresh = new TreeMap<>(new BundleComparator());
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
for (Bundle b : regionDeployment.toDelete) {
toRefresh.put(b, "Bundle will be uninstalled");
}
for (Bundle b : regionDeployment.toUpdate.keySet()) {
toRefresh.put(b, "Bundle will be updated");
}
}
if (!noRefreshManaged) {
computeBundlesToRefresh(toRefresh, dstate.bundles.values(), deployment.resToBnd, resolver.getWiring());
}
if (noRefreshUnmanaged) {
toRefresh.keySet().removeAll(flatten(unmanagedBundles));
}
// Automatically turn unmanaged bundles into managed bundles
// if they are required by a feature and no other unmanaged
// bundle has a requirement on them
// sort is only used for display
Set<Bundle> toManage = new TreeSet<>(new BundleComparator());
if (!noManageBundles) {
Set<Resource> features = resolver.getFeatures().keySet();
Set<? extends Resource> unmanaged = apply(flatten(unmanagedBundles), adapt(BundleRevision.class));
Set<Resource> requested = new HashSet<>();
// Gather bundles required by a feature
if (resolver.getWiring() != null) {
for (List<Wire> wires : resolver.getWiring().values()) {
for (Wire wire : wires) {
if (features.contains(wire.getRequirer()) && unmanaged.contains(wire.getProvider())) {
requested.add(wire.getProvider());
}
}
}
}
// Now, we know which bundles are completely unmanaged
unmanaged.removeAll(requested);
// Drop candidates that are still required (wired to) by a genuinely unmanaged bundle
if (resolver.getWiring() != null) {
for (List<Wire> wires : resolver.getWiring().values()) {
for (Wire wire : wires) {
if (requested.contains(wire.getProvider()) && unmanaged.contains(wire.getRequirer())) {
requested.remove(wire.getProvider());
}
}
}
}
if (!requested.isEmpty()) {
Map<Long, String> bundleToRegion = new HashMap<>();
for (Map.Entry<String, Set<Long>> entry : dstate.bundlesPerRegion.entrySet()) {
for (long id : entry.getValue()) {
bundleToRegion.put(id, entry.getKey());
}
}
for (Resource rev : requested) {
Bundle bundle = ((BundleRevision) rev).getBundle();
long id = bundle.getBundleId();
addToMapSet(managedBundles, bundleToRegion.get(id), id);
toManage.add(bundle);
}
}
}
Set<Bundle> toStart = new HashSet<>();
Set<Bundle> toResolve = new HashSet<>();
Set<Bundle> toStop = new HashSet<>();
//
// Compute bundle states
//
Map<Resource, Constants.RequestedState> states = new HashMap<>();
// Find all features state
Map<Resource, Constants.RequestedState> featuresState = new HashMap<>();
Map<Resource, Set<Resource>> conditionals = new HashMap<>();
if (resolver.getFeaturesPerRegions() != null) {
for (Map.Entry<String, Set<Resource>> entry : resolver.getFeaturesPerRegions().entrySet()) {
String region = entry.getKey();
Map<String, String> fss = stateFeatures.get(region);
for (Resource feature : entry.getValue()) {
Set<Resource> conditions = new HashSet<>();
for (Wire wire : resolver.getWiring().get(feature)) {
if (IDENTITY_NAMESPACE.equals(wire.getRequirement().getNamespace()) && FeatureResource.CONDITIONAL_TRUE.equals(wire.getRequirement().getDirectives().get(FeatureResource.REQUIREMENT_CONDITIONAL_DIRECTIVE))) {
conditions.add(wire.getProvider());
}
}
if (conditions.isEmpty()) {
String fs = fss.get(getFeatureId(feature));
featuresState.put(feature, Constants.RequestedState.valueOf(fs));
} else {
conditionals.put(feature, conditions);
}
}
}
}
// Compute conditional features state
for (Resource feature : conditionals.keySet()) {
Constants.RequestedState state = null;
for (Resource cond : conditionals.get(feature)) {
Constants.RequestedState s = featuresState.get(cond);
if (state == null) {
state = s;
} else if (state == Constants.RequestedState.Started && s == Constants.RequestedState.Resolved) {
state = Constants.RequestedState.Resolved;
}
}
featuresState.put(feature, state);
}
// Propagate Resolved state
for (Resource feature : featuresState.keySet()) {
if (featuresState.get(feature) == Constants.RequestedState.Resolved) {
propagateState(states, feature, Constants.RequestedState.Resolved, resolver);
}
}
// Propagate Started state
for (Resource feature : featuresState.keySet()) {
if (featuresState.get(feature) == Constants.RequestedState.Started) {
propagateState(states, feature, Constants.RequestedState.Started, resolver);
}
}
// Put default Started state for other bundles
for (Resource resource : resolver.getBundles().keySet()) {
if (!states.containsKey(resource)) {
states.put(resource, Constants.RequestedState.Started);
}
}
// Override explicit disabled state
for (Resource resource : featuresState.keySet()) {
if (resource instanceof FeatureResource) {
FeatureResource featureResource = (FeatureResource) resource;
List<BundleInfo> bundles = featureResource.getFeature().getBundles();
for (BundleInfo bundleInfo : bundles) {
if (!bundleInfo.isStart()) {
Set<String> candidates = new HashSet<>();
String fullLocation = bundleInfo.getLocation();
int protocolMarker = fullLocation.lastIndexOf(":");
fullLocation = fullLocation.substring(protocolMarker + 1);
String[] split = fullLocation.split("/");
if (split.length >= 3) {
String fullLocationKey = split[0] + "." + split[1] + "/" + split[2];
candidates.add(fullLocationKey);
}
String location = bundleInfo.getLocation();
Parser parser = new Parser(location);
String id = parser.getArtifact();
String version = parser.getVersion();
String key = id + "/" + version;
String keyDotted = id + "/" + version.replaceAll("_", ".");
String prefix = parser.getGroup();
if (parser.getGroup().contains(":")) {
prefix = parser.getGroup().split(":")[1];
}
String fullKey = prefix + "." + key;
String fullKeyDotted = prefix + "." + keyDotted;
candidates.add(key);
candidates.add(keyDotted);
candidates.add(fullKey);
candidates.add(fullKeyDotted);
for (Iterator<Resource> iter = states.keySet().iterator(); iter.hasNext(); ) {
Resource res = iter.next();
String resourceStringRepresentation = res.toString();
if (candidates.contains(resourceStringRepresentation)) {
states.put(res, Constants.RequestedState.Installed);
break;
}
}
}
}
}
}
// Only keep bundle resources
states.keySet().retainAll(resolver.getBundles().keySet());
//
for (Map.Entry<Resource, Constants.RequestedState> entry : states.entrySet()) {
Bundle bundle = deployment.resToBnd.get(entry.getKey());
if (bundle != null) {
switch(entry.getValue()) {
case Started:
toResolve.add(bundle);
toStart.add(bundle);
break;
case Resolved:
toResolve.add(bundle);
toStop.add(bundle);
break;
}
}
}
//
// Compute all bundle start levels and the start levels to update
//
Map<Resource, Integer> startLevels = new HashMap<>();
final Map<Bundle, Integer> toUpdateStartLevel = new HashMap<>();
for (Map.Entry<String, Set<Resource>> entry : resolver.getBundlesPerRegions().entrySet()) {
String region = entry.getKey();
for (Resource resource : entry.getValue()) {
BundleInfo bi = bundleInfos.get(region).get(getUri(resource));
if (bi != null) {
int sl = bi.getStartLevel() > 0 ? bi.getStartLevel() : dstate.initialBundleStartLevel;
startLevels.put(resource, sl);
Bundle bundle = deployment.resToBnd.get(resource);
if (bundle != null) {
int curSl = bundle.adapt(BundleStartLevel.class).getStartLevel();
if (sl != curSl) {
toUpdateStartLevel.put(bundle, sl);
if (sl > dstate.currentStartLevel) {
toStop.add(bundle);
}
}
}
}
}
}
//
// Log deployment
//
logDeployment(deployment, display);
if (!noRefresh && !toRefresh.isEmpty()) {
print(" Bundles to refresh:", display);
for (Map.Entry<Bundle, String> entry : toRefresh.entrySet()) {
Bundle bundle = entry.getKey();
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion() + " (" + entry.getValue() + ")", display);
}
}
if (!toManage.isEmpty()) {
print(" Managing bundle:", display);
for (Bundle bundle : toManage) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
}
}
if (simulate) {
return;
}
callback.phase("installing");
//
// Execute deployment
//
// #1: stop bundles that need to be updated, uninstalled or refreshed, in order
// #2: uninstall needed bundles
// #3: update regions
// #4: update bundles
// #5: install bundles
// #6: save state
// #7: install configuration
// #8: refresh bundles
// #9: start bundles in order
// #10: send events
//
//
// Handle updates on the agent bundle
//
Deployer.RegionDeployment rootRegionDeployment = deployment.regions.get(Constants.ROOT_REGION);
// We don't support uninstalling the bundle
if (rootRegionDeployment != null && rootRegionDeployment.toDelete.contains(dstate.serviceBundle)) {
throw new UnsupportedOperationException("Uninstalling the agent bundle is not supported");
}
// Ensure all classes are loaded if the agent will be refreshed
if (toRefresh.containsKey(dstate.serviceBundle)) {
OsgiUtils.ensureAllClassesLoaded(dstate.serviceBundle);
}
// When restarting, the resolution will be attempted again
if (rootRegionDeployment != null && rootRegionDeployment.toUpdate.containsKey(dstate.serviceBundle)) {
callback.phase("updating agent");
callback.persistResolveRequest(request);
// save the new checksum persistently
if (deployment.bundleChecksums.containsKey(dstate.serviceBundle.getBundleId())) {
State state = dstate.state.copy();
state.bundleChecksums.put(dstate.serviceBundle.getBundleId(), deployment.bundleChecksums.get(dstate.serviceBundle.getBundleId()));
callback.saveState(state);
}
Resource resource = rootRegionDeployment.toUpdate.get(dstate.serviceBundle);
String uri = getUri(resource);
print("The agent bundle needs is being updated with " + uri, display);
toRefresh.clear();
toRefresh.put(dstate.serviceBundle, "DeploymentAgent bundle is being updated");
computeBundlesToRefresh(toRefresh, dstate.bundles.values(), Collections.<Resource, Bundle>emptyMap(), Collections.<Resource, List<Wire>>emptyMap());
callback.stopBundle(dstate.serviceBundle, STOP_TRANSIENT);
try (InputStream is = getBundleInputStream(resource, providers)) {
callback.updateBundle(dstate.serviceBundle, uri, is);
}
callback.refreshPackages(toRefresh.keySet());
callback.startBundle(dstate.serviceBundle);
return;
}
//
if (!newFeatures.isEmpty()) {
Set<Feature> set = apply(flatten(newFeatures), map(dstate.features));
for (Feature feature : set) {
Downloader downloader = manager.createDownloader();
for (ConfigFile configFile : feature.getConfigurationFiles()) {
downloader.download(configFile.getLocation(), null);
}
downloader.await();
}
}
//
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
toStop.addAll(regionDeployment.toUpdate.keySet());
toStop.addAll(regionDeployment.toDelete);
}
if (!noRefresh) {
Set<Bundle> toRefreshToStopEarly = new HashSet<>(toRefresh.keySet());
toRefreshToStopEarly.remove(dstate.serviceBundle);
toStop.addAll(toRefreshToStopEarly);
toStart.addAll(toRefreshToStopEarly);
}
removeFragmentsAndBundlesInState(toStop, UNINSTALLED | RESOLVED | STOPPING);
if (!toStop.isEmpty()) {
callback.phase("updating (stopping bundles)");
print("Stopping bundles:", display);
while (!toStop.isEmpty()) {
List<Bundle> bs = getBundlesToStop(toStop);
for (final Bundle bundle : bs) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {
@Override
public Void call() throws Exception {
try {
LOGGER.info("Scheduled stop for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
// If the bundle start level will be changed, stop it persistently to
// avoid a restart when the start level is actually changed
callback.stopBundle(bundle, toUpdateStartLevel.containsKey(bundle) ? 0 : STOP_TRANSIENT);
} catch (BundleException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
return null;
}
}), request.bundleStartTimeout, TimeUnit.SECONDS);
// wait for the Futures to complete, bounded by the timeout passed to invokeAll above
for (Future<Void> f : futures) {
try {
f.get();
} catch (CancellationException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
}
toStop.remove(bundle);
}
}
}
//
// Delete bundles
//
boolean hasToDelete = false;
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
if (hasToDelete = !regionDeployment.toDelete.isEmpty()) {
break;
}
}
if (hasToDelete) {
callback.phase("updating (uninstalling bundles)");
print("Uninstalling bundles:", display);
for (Map.Entry<String, RegionDeployment> entry : deployment.regions.entrySet()) {
String name = entry.getKey();
Deployer.RegionDeployment regionDeployment = entry.getValue();
for (Bundle bundle : regionDeployment.toDelete) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
callback.uninstall(bundle);
removeFromMapSet(managedBundles, name, bundle.getBundleId());
}
}
}
//
// Update regions
//
{
// Add bundles
Map<String, Set<Long>> bundles = new HashMap<>();
add(bundles, apply(unmanagedBundles, bundleId()));
add(bundles, managedBundles);
// Compute policies
RegionDigraph computedDigraph = resolver.getFlatDigraph();
Map<String, Map<String, Map<String, Set<String>>>> policies = copy(dstate.filtersPerRegion);
// Only keep regions which still have bundles
policies.keySet().retainAll(bundles.keySet());
// Fix broken filters
for (String name : policies.keySet()) {
policies.get(name).keySet().retainAll(policies.keySet());
}
// Update managed regions
for (Region computedRegion : computedDigraph.getRegions()) {
String name = computedRegion.getName();
Map<String, Map<String, Set<String>>> policy = policies.get(name);
if (policy == null) {
policy = new HashMap<>();
policies.put(name, policy);
}
for (RegionDigraph.FilteredRegion fr : computedRegion.getEdges()) {
String r2 = fr.getRegion().getName();
Map<String, Set<String>> filters = new HashMap<>();
Map<String, Collection<String>> current = fr.getFilter().getSharingPolicy();
for (String ns : current.keySet()) {
for (String f : current.get(ns)) {
addToMapSet(filters, ns, f);
}
}
policy.put(r2, filters);
}
}
// Apply all changes
callback.replaceDigraph(policies, bundles);
}
//
// Update bundles
//
boolean hasToUpdate = false;
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
if (hasToUpdate = !regionDeployment.toUpdate.isEmpty()) {
break;
}
}
if (hasToUpdate) {
callback.phase("updating (updating bundles)");
print("Updating bundles:", display);
for (Map.Entry<String, RegionDeployment> rde : deployment.regions.entrySet()) {
for (Map.Entry<Bundle, Resource> entry : rde.getValue().toUpdate.entrySet()) {
Bundle bundle = entry.getKey();
Resource resource = entry.getValue();
String uri = getUri(resource);
print(" " + uri, display);
try (InputStream is = getBundleInputStream(resource, providers)) {
callback.updateBundle(bundle, uri, is);
}
toStart.add(bundle);
}
}
}
//
for (Map.Entry<Bundle, Integer> entry : toUpdateStartLevel.entrySet()) {
Bundle bundle = entry.getKey();
int sl = entry.getValue();
callback.setBundleStartLevel(bundle, sl);
}
//
// Install bundles
//
boolean hasToInstall = false;
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
if (hasToInstall = !regionDeployment.toInstall.isEmpty()) {
break;
}
}
if (hasToInstall) {
callback.phase("updating (installing bundles)");
print("Installing bundles:", display);
for (Map.Entry<String, RegionDeployment> entry : deployment.regions.entrySet()) {
String name = entry.getKey();
Deployer.RegionDeployment regionDeployment = entry.getValue();
for (Resource resource : regionDeployment.toInstall) {
String uri = getUri(resource);
print(" " + uri, display);
Bundle bundle;
long crc;
try (ChecksumUtils.CRCInputStream is = new ChecksumUtils.CRCInputStream(getBundleInputStream(resource, providers))) {
bundle = callback.installBundle(name, uri, is);
// calculate CRC normally
crc = is.getCRC();
try {
URI resourceURI = new URI(uri);
if ("blueprint".equals(resourceURI.getScheme())) {
// ENTESB-6957 calculate proper blueprint file CRC during installation
InputStream bis = getBlueprintInputStream(getBundleInputStream(resource, providers));
// original stream is closed in either case
if (bis != null) {
crc = ChecksumUtils.checksum(bis);
}
}
} catch (URISyntaxException ignored) {
}
}
addToMapSet(managedBundles, name, bundle.getBundleId());
deployment.resToBnd.put(resource, bundle);
// save a checksum of installed snapshot bundle
if (Constants.UPDATE_SNAPSHOTS_CRC.equals(request.updateSnaphots) && isUpdateable(resource) && !deployment.bundleChecksums.containsKey(bundle.getBundleId())) {
deployment.bundleChecksums.put(bundle.getBundleId(), crc);
}
if (startLevels.containsKey(resource)) {
int startLevel = startLevels.get(resource);
if (startLevel != dstate.initialBundleStartLevel) {
callback.setBundleStartLevel(bundle, startLevel);
}
}
Constants.RequestedState reqState = states.get(resource);
if (reqState == null) {
reqState = Constants.RequestedState.Started;
}
switch(reqState) {
case Started:
toResolve.add(bundle);
toStart.add(bundle);
break;
case Resolved:
toResolve.add(bundle);
break;
}
}
}
}
//
// Update and save state
//
State newState = new State();
newState.bundleChecksums.putAll(deployment.bundleChecksums);
newState.requirements.putAll(request.requirements);
newState.installedFeatures.putAll(installedFeatures);
newState.stateFeatures.putAll(stateFeatures);
newState.managedBundles.putAll(managedBundles);
callback.saveState(newState);
//
if (!newFeatures.isEmpty()) {
// check if configadmin is started
callback.phase("updating (installing configurations)");
Set<Feature> set = apply(flatten(newFeatures), map(dstate.features));
callback.restoreConfigAdminIfNeeded();
for (Feature feature : set) {
callback.installFeatureConfigs(feature);
}
}
callback.phase("finalizing");
if (!noRefresh) {
toStop = new HashSet<>();
toStop.addAll(toRefresh.keySet());
removeFragmentsAndBundlesInState(toStop, UNINSTALLED | RESOLVED | STOPPING);
if (!toStop.isEmpty()) {
callback.phase("finalizing (stopping bundles)");
print("Stopping bundles:", display);
while (!toStop.isEmpty()) {
List<Bundle> bs = getBundlesToStop(toStop);
for (final Bundle bundle : bs) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {
@Override
public Void call() throws Exception {
try {
LOGGER.info("Scheduled stop for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
callback.stopBundle(bundle, STOP_TRANSIENT);
} catch (BundleException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
return null;
}
}), request.bundleStartTimeout, TimeUnit.SECONDS);
// wait for the Futures to complete, bounded by the timeout passed to invokeAll above
for (Future<Void> f : futures) {
try {
f.get();
} catch (CancellationException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
}
toStop.remove(bundle);
toStart.add(bundle);
}
}
}
if (!toRefresh.isEmpty()) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(" Bundle refresh explanation:");
for (Map.Entry entry : toRefresh.entrySet()) {
LOGGER.debug("{} is refreshed because of [{}]", entry.getKey(), entry.getValue());
}
}
callback.phase("finalizing (refreshing bundles)");
print("Refreshing bundles:", display);
for (Map.Entry<Bundle, String> entry : toRefresh.entrySet()) {
Bundle bundle = entry.getKey();
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion() + " (" + entry.getValue() + ")", display);
}
if (!toRefresh.isEmpty()) {
callback.refreshPackages(toRefresh.keySet());
}
}
}
// Resolve bundles
callback.phase("finalizing (resolving bundles)");
toResolve.addAll(toStart);
toResolve.addAll(toRefresh.keySet());
removeBundlesInState(toResolve, UNINSTALLED);
callback.resolveBundles(toResolve, resolver.getWiring(), deployment.resToBnd);
final boolean[] agentStarted = new boolean[] { false };
// Compute bundles to start
removeFragmentsAndBundlesInState(toStart, UNINSTALLED | ACTIVE | STARTING);
if (!toStart.isEmpty()) {
// Compute correct start order
final List<Throwable> exceptions = new ArrayList<>();
callback.phase("finalizing (starting bundles)");
print("Starting bundles:", display);
while (!toStart.isEmpty()) {
List<Bundle> bs = getBundlesToStart(toStart, dstate.serviceBundle);
for (final Bundle bundle : bs) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
if ("io.fabric8.fabric-agent".equals(bundle.getSymbolicName())) {
agentStarted[0] = true;
}
List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {
@Override
public Void call() throws Exception {
try {
LOGGER.info("Scheduled start for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
callback.startBundle(bundle);
} catch (BundleException e) {
exceptions.add(e);
}
return null;
}
}), request.bundleStartTimeout, TimeUnit.SECONDS);
// wait for the Futures to complete, bounded by the timeout passed to invokeAll above
for (Future<Void> f : futures) {
try {
f.get();
} catch (CancellationException e) {
exceptions.add(new BundleException("Unable to start bundle [" + bundle.getSymbolicName() + "] within " + request.bundleStartTimeout + " seconds"));
}
}
toStart.remove(bundle);
}
}
deploymentsExecutor.shutdown();
if (!exceptions.isEmpty()) {
throw new MultiException("Error restarting bundles", exceptions);
}
}
// Info about final list of deployed bundles
callback.provisionList(deployment.resToBnd.keySet());
// list of bundles in special "fabric-startup" feature
List<String> urls = new LinkedList<>();
for (Feature ft : dstate.features.values()) {
if (ft.getName().equals("fabric-startup") && ft.getBundles() != null) {
for (BundleInfo bi : ft.getBundles()) {
urls.add(bi.getLocation());
}
}
// special case for Fuse/AMQ...
if (ft.getName().equals("esb-commands-startup") && ft.getBundles() != null) {
for (BundleInfo bi : ft.getBundles()) {
urls.add(bi.getLocation());
}
}
}
// let's resolve these URIs and make them available from ${karaf.default.repository}
try {
LOGGER.info("Storing startup artifacts in default repository: {}", urls);
AgentUtils.downloadLocations(manager, urls, true);
} catch (Exception e) {
LOGGER.warn(e.getMessage(), e);
}
if (!noRefresh) {
// (similar to KARAF-4686/ENTESB-6045 - cleaning the sun.rmi.transport.tcp.TCPEndpoint.localEndpoints cache)
try {
Class<?> cls = getClass().getClassLoader().loadClass("org.ops4j.pax.logging.slf4j.Slf4jMDCAdapter");
Field m_contextField = cls.getDeclaredField("m_context");
m_contextField.setAccessible(true);
// nullify org.ops4j.pax.logging.slf4j.Slf4jMDCAdapter.m_context, so it'll be reinitialized from
// fresh pax-logging-service wiring
m_contextField.set(null, null);
} catch (Exception ignored) {
}
}
if (callback.done(agentStarted[0], urls)) {
print("Done.", display);
}
}
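For orientation, a hedged sketch of how a caller might populate a DeploymentRequest before invoking deploy(). The field names mirror the ones this method reads; the exact field types, the constants, and the deployer/dstate objects are assumptions, and all values are placeholders.
// Hedged example only - these are not the project's actual defaults.
DeploymentRequest request = new DeploymentRequest();
request.featureResolutionRange = DEFAULT_FEATURE_RESOLUTION_RANGE;  // same constants handed to Agent by DeploymentAgent below
request.bundleUpdateRange = DEFAULT_BUNDLE_UPDATE_RANGE;
request.updateSnaphots = Constants.UPDATE_SNAPSHOTS_CRC;
request.bundleStartTimeout = Constants.BUNDLE_START_TIMEOUT;
request.globalRepository = null;
request.options = EnumSet.of(Constants.Option.Simulate, Constants.Option.Verbose);  // assuming a Set of Option
request.overrides = Collections.emptySet();
request.metadata = Collections.emptyMap();
request.stateChanges = Collections.emptyMap();
request.requirements = new HashMap<>();
// requirements use the "name/version-range" form seen in the prerequisite handling above;
// addToMapSet is the same MapUtils helper used by deploy() itself
addToMapSet(request.requirements, Constants.ROOT_REGION, "example-feature/0.0.0");
deployer.deploy(dstate, request);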
Use of io.fabric8.agent.download.StreamProvider in project fabric8 by jboss-fuse.
The class AgentUtils, method storeInDefaultKarafRepository:
/**
* Tries to store a resource resolved by some {@link StreamProvider} in the default Karaf repository.
* @param finalTargetLocation root directory of the default Karaf repository (may be null)
* @param file the locally resolved file
* @param uri the original Maven URI of the resource
*/
private static void storeInDefaultKarafRepository(File finalTargetLocation, File file, String uri) {
    if (finalTargetLocation != null && file != null && file.isFile()) {
        try {
            String path = Utils.mvnurlToPath(uri);
            if (path != null) {
                File target = new File(finalTargetLocation, path);
                if (!target.isFile()) {
                    LOGGER.info("Copying resolved {} to {}", file, finalTargetLocation);
                    target.getParentFile().mkdirs();
                    Files.copy(file, target);
                }
            } else {
                LOGGER.warn("Can't resolve Maven URI {} to path", uri);
            }
        } catch (Exception e) {
            LOGGER.warn(e.getMessage(), e);
        }
    }
}
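The helper Utils.mvnurlToPath is not shown on this page; assuming it follows the standard Maven repository layout, the copy behaves roughly as sketched below (an illustration, not the helper's documented contract):
// Assumed mapping, standard Maven repository layout:
//   mvn:org.example/example-bundle/1.0.0  ->  org/example/example-bundle/1.0.0/example-bundle-1.0.0.jar
// so the resolved file would be copied to
//   new File(finalTargetLocation, "org/example/example-bundle/1.0.0/example-bundle-1.0.0.jar")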
Use of io.fabric8.agent.download.StreamProvider in project fabric8 by jboss-fuse.
The class AgentUtils, method downloadLocations:
public static Map<String, File> downloadLocations(DownloadManager manager, Collection<String> uris, final boolean storeInDefaultKarafRepository) throws MultiException, InterruptedException, MalformedURLException {
    final Map<String, File> files = new HashMap<>();
    final Downloader downloader = manager.createDownloader();
    final File targetLocation = storeInDefaultKarafRepository ? getDefaultKarafRepository() : null;
    for (String uri : uris) {
        downloader.download(uri, new DownloadCallback() {
            @Override
            public void downloaded(StreamProvider provider) throws Exception {
                String uri = provider.getUrl();
                File file = provider.getFile();
                synchronized (files) {
                    files.put(uri, file);
                    if (storeInDefaultKarafRepository) {
                        storeInDefaultKarafRepository(targetLocation, file, uri);
                    }
                }
            }
        });
    }
    downloader.await();
    return files;
}
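A short, hedged caller example (manager is an existing DownloadManager; the URI is a placeholder). The deploy() method above makes the same call to pre-populate the default Karaf repository with startup bundles:
// Hypothetical caller: download a set of locations and copy them into ${karaf.default.repository}.
List<String> startupUris = Arrays.asList("mvn:org.example/example-bundle/1.0.0");
Map<String, File> files = AgentUtils.downloadLocations(manager, startupUris, true);
File local = files.get("mvn:org.example/example-bundle/1.0.0");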
Use of io.fabric8.agent.download.StreamProvider in project fabric8 by jboss-fuse.
The class DeploymentAgent, method doUpdate:
public boolean doUpdate(Dictionary<String, ?> props) throws Exception {
if (props == null || Boolean.parseBoolean((String) props.get("disabled"))) {
return false;
}
final Hashtable<String, String> properties = new Hashtable<>();
for (Enumeration e = props.keys(); e.hasMoreElements(); ) {
Object key = e.nextElement();
Object val = props.get(key);
if (!"service.pid".equals(key) && !FeatureConfigInstaller.FABRIC_ZOOKEEPER_PID.equals(key)) {
properties.put(key.toString(), val.toString());
}
}
updateStatus("analyzing", null);
// Building configuration
curatorCompleteService.waitForService(TimeUnit.SECONDS.toMillis(30));
String httpUrl;
List<URI> mavenRepoURIs;
// force reading of updated information from ZK
if (!fabricService.isEmpty()) {
updateMavenRepositoryConfiguration(fabricService.getService());
}
try {
fabricServiceOperations.lock();
// no one will change the members now
httpUrl = this.httpUrl;
mavenRepoURIs = this.mavenRepoURIs;
} finally {
fabricServiceOperations.unlock();
}
addMavenProxies(properties, httpUrl, mavenRepoURIs);
final MavenResolver resolver = MavenResolvers.createMavenResolver(properties, "org.ops4j.pax.url.mvn");
final DownloadManager manager = DownloadManagers.createDownloadManager(resolver, getDownloadExecutor());
manager.addListener(new DownloadCallback() {
@Override
public void downloaded(StreamProvider provider) throws Exception {
int pending = manager.pending();
updateStatus(pending > 0 ? "downloading (" + pending + " pending)" : "downloading", null);
}
});
// Update framework, libs, system and config props
final Object lock = new Object();
final AtomicBoolean restart = new AtomicBoolean();
final Set<String> libsToRemove = new HashSet<>(managedLibs.keySet());
final Set<String> endorsedLibsToRemove = new HashSet<>(managedEndorsedLibs.keySet());
final Set<String> extensionLibsToRemove = new HashSet<>(managedExtensionLibs.keySet());
final Set<String> sysPropsToRemove = new HashSet<>(managedSysProps.keySet());
final Set<String> configPropsToRemove = new HashSet<>(managedConfigProps.keySet());
final Set<String> etcsToRemove = new HashSet<>(managedEtcs.keySet());
final Properties configProps = new Properties(new File(KARAF_BASE + File.separator + "etc" + File.separator + "config.properties"));
final Properties systemProps = new Properties(new File(KARAF_BASE + File.separator + "etc" + File.separator + "system.properties"));
Downloader downloader = manager.createDownloader();
for (String key : properties.keySet()) {
if (key.equals("framework")) {
String url = properties.get(key);
if (!url.startsWith("mvn:")) {
throw new IllegalArgumentException("Framework url must use the mvn: protocol");
}
downloader.download(url, new DownloadCallback() {
@Override
public void downloaded(StreamProvider provider) throws Exception {
File file = provider.getFile();
String path = file.getPath();
if (path.startsWith(KARAF_HOME)) {
path = path.substring(KARAF_HOME.length() + 1);
}
synchronized (lock) {
if (!path.equals(configProps.get("karaf.framework.felix"))) {
configProps.put("karaf.framework", "felix");
configProps.put("karaf.framework.felix", path);
restart.set(true);
}
}
}
});
} else if (key.startsWith("config.")) {
String k = key.substring("config.".length());
String v = properties.get(key);
synchronized (lock) {
managedConfigProps.put(k, v);
configPropsToRemove.remove(k);
if (!v.equals(configProps.get(k))) {
configProps.put(k, v);
restart.set(true);
}
}
} else if (key.startsWith("system.")) {
String k = key.substring("system.".length());
synchronized (lock) {
String v = properties.get(key);
managedSysProps.put(k, v);
sysPropsToRemove.remove(k);
if (!v.equals(systemProps.get(k))) {
systemProps.put(k, v);
restart.set(true);
}
}
} else if (key.startsWith("lib.")) {
String value = properties.get(key);
downloader.download(value, new DownloadCallback() {
@Override
public void downloaded(StreamProvider provider) throws Exception {
File libFile = provider.getFile();
String libName = libFile.getName();
Long checksum = ChecksumUtils.checksum(libFile);
boolean update;
synchronized (lock) {
managedLibs.put(libName, "true");
libsToRemove.remove(libName);
update = !Long.toString(checksum).equals(libChecksums.getProperty(libName));
}
if (update) {
Files.copy(libFile, new File(LIB_PATH, libName));
restart.set(true);
}
}
});
} else if (key.startsWith("endorsed.")) {
String value = properties.get(key);
downloader.download(value, new DownloadCallback() {
@Override
public void downloaded(StreamProvider provider) throws Exception {
File libFile = provider.getFile();
String libName = libFile.getName();
Long checksum = ChecksumUtils.checksum(new FileInputStream(libFile));
boolean update;
synchronized (lock) {
managedEndorsedLibs.put(libName, "true");
endorsedLibsToRemove.remove(libName);
update = !Long.toString(checksum).equals(endorsedChecksums.getProperty(libName));
}
if (update) {
Files.copy(libFile, new File(LIB_ENDORSED_PATH, libName));
restart.set(true);
}
}
});
} else if (key.startsWith("extension.")) {
String value = properties.get(key);
downloader.download(value, new DownloadCallback() {
@Override
public void downloaded(StreamProvider provider) throws Exception {
File libFile = provider.getFile();
String libName = libFile.getName();
Long checksum = ChecksumUtils.checksum(libFile);
boolean update;
synchronized (lock) {
managedExtensionLibs.put(libName, "true");
extensionLibsToRemove.remove(libName);
update = !Long.toString(checksum).equals(extensionChecksums.getProperty(libName));
}
if (update) {
Files.copy(libFile, new File(LIB_EXT_PATH, libName));
restart.set(true);
}
}
});
} else if (key.startsWith("etc.")) {
String value = properties.get(key);
downloader.download(value, new DownloadCallback() {
@Override
public void downloaded(StreamProvider provider) throws Exception {
File etcFile = provider.getFile();
String etcName = etcFile.getName();
Long checksum = ChecksumUtils.checksum(new FileInputStream(etcFile));
boolean update;
synchronized (lock) {
managedEtcs.put(etcName, "true");
etcsToRemove.remove(etcName);
update = !Long.toString(checksum).equals(etcChecksums.getProperty(etcName));
}
if (update) {
Files.copy(etcFile, new File(KARAF_ETC, etcName));
}
}
});
}
}
downloader.await();
// Remove unused libs, system & config properties
for (String sysProp : sysPropsToRemove) {
systemProps.remove(sysProp);
managedSysProps.remove(sysProp);
System.clearProperty(sysProp);
restart.set(true);
}
for (String configProp : configPropsToRemove) {
configProps.remove(configProp);
managedConfigProps.remove(configProp);
restart.set(true);
}
for (String lib : libsToRemove) {
File libFile = new File(LIB_PATH, lib);
libFile.delete();
libChecksums.remove(lib);
managedLibs.remove(lib);
restart.set(true);
}
for (String lib : endorsedLibsToRemove) {
File libFile = new File(LIB_ENDORSED_PATH, lib);
libFile.delete();
endorsedChecksums.remove(lib);
managedEndorsedLibs.remove(lib);
restart.set(true);
}
for (String lib : extensionLibsToRemove) {
File libFile = new File(LIB_EXT_PATH, lib);
libFile.delete();
extensionChecksums.remove(lib);
managedExtensionLibs.remove(lib);
restart.set(true);
}
for (String etc : etcsToRemove) {
File etcFile = new File(KARAF_ETC, etc);
etcFile.delete();
etcChecksums.remove(etc);
managedEtcs.remove(etc);
}
libChecksums.save();
endorsedChecksums.save();
extensionChecksums.save();
etcChecksums.save();
managedLibs.save();
managedEndorsedLibs.save();
managedExtensionLibs.save();
managedConfigProps.save();
managedSysProps.save();
managedEtcs.save();
if (restart.get()) {
updateStatus("restarting", null);
configProps.save();
systemProps.save();
System.setProperty("karaf.restart", "true");
bundleContext.getBundle(0).stop();
return false;
}
FeatureConfigInstaller configInstaller = null;
ServiceReference configAdminServiceReference = bundleContext.getServiceReference(ConfigurationAdmin.class.getName());
if (configAdminServiceReference != null) {
ConfigurationAdmin configAdmin = (ConfigurationAdmin) bundleContext.getService(configAdminServiceReference);
configInstaller = new FeatureConfigInstaller(bundleContext, configAdmin, manager);
}
int bundleStartTimeout = Constants.BUNDLE_START_TIMEOUT;
String overriddenTimeout = properties.get(Constants.BUNDLE_START_TIMEOUT_PID_KEY);
try {
if (overriddenTimeout != null)
bundleStartTimeout = Integer.parseInt(overriddenTimeout);
} catch (Exception e) {
LOGGER.warn("Failed to set {} value: [{}], applying default value: {}", Constants.BUNDLE_START_TIMEOUT_PID_KEY, overriddenTimeout, Constants.BUNDLE_START_TIMEOUT);
}
Agent agent = new Agent(bundleContext.getBundle(), systemBundleContext, manager, configInstaller, null, DEFAULT_FEATURE_RESOLUTION_RANGE, DEFAULT_BUNDLE_UPDATE_RANGE, DEFAULT_UPDATE_SNAPSHOTS, bundleContext.getDataFile(STATE_FILE), bundleStartTimeout) {
@Override
public void updateStatus(String status) {
DeploymentAgent.this.updateStatus(status, null, false);
}
@Override
public void updateStatus(String status, boolean force) {
DeploymentAgent.this.updateStatus(status, null, force);
}
@Override
protected void saveState(State newState) throws IOException {
super.saveState(newState);
DeploymentAgent.this.state.replace(newState);
}
@Override
protected void provisionList(Set<Resource> resources) {
DeploymentAgent.this.provisionList = resources;
}
@Override
protected boolean done(boolean agentStarted, List<String> urls) {
if (agentStarted) {
// let's do patch-management "last touch" only if new agent wasn't started.
return true;
}
// agent finished provisioning, we can call back to low level patch management
ServiceReference<PatchManagement> srPm = systemBundleContext.getServiceReference(PatchManagement.class);
ServiceReference<FabricService> srFs = systemBundleContext.getServiceReference(FabricService.class);
if (srPm != null && srFs != null) {
PatchManagement pm = systemBundleContext.getService(srPm);
FabricService fs = systemBundleContext.getService(srFs);
if (pm != null && fs != null) {
LOGGER.info("Validating baseline information");
this.updateStatus("validating baseline information", true);
Profile profile = fs.getCurrentContainer().getOverlayProfile();
Map<String, String> versions = profile.getConfiguration("io.fabric8.version");
File localRepository = resolver.getLocalRepository();
if (pm.alignTo(versions, urls, localRepository, new PatchSynchronization())) {
this.updateStatus("requires full restart", true);
// let's reuse the same flag
restart.set(true);
return false;
}
if (handleRestartJvmFlag(profile, restart)) {
return false;
}
}
}
return true;
}
};
agent.setDeploymentAgentId(deploymentAgentId);
agent.provision(getPrefixedProperties(properties, "repository."), getPrefixedProperties(properties, "feature."), getPrefixedProperties(properties, "bundle."), getPrefixedProperties(properties, "req."), getPrefixedProperties(properties, "override."), getPrefixedProperties(properties, "optional."), getMetadata(properties, "metadata#"));
if (restart.get()) {
// prevent updating status to "success"
return false;
}
return true;
}
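A hedged illustration of the kind of configuration dictionary doUpdate() consumes, based on the key prefixes it inspects above; all values are placeholders and deploymentAgent is an existing DeploymentAgent instance:
// Hypothetical provisioning properties; keys follow the prefixes handled in doUpdate().
Hashtable<String, String> props = new Hashtable<>();
props.put("framework", "mvn:org.apache.felix/org.apache.felix.framework/4.4.1");  // must be a mvn: URL (placeholder version)
props.put("config.example.key", "value");                                         // written to etc/config.properties
props.put("system.example.flag", "true");                                         // written to etc/system.properties
props.put("lib.example", "mvn:org.example/example-lib/1.0.0");                    // downloaded into lib/
props.put("etc.example.cfg", "mvn:org.example/example-cfg/1.0.0/cfg");            // downloaded into etc/
// repository.*, feature.*, bundle.*, req.*, override.*, optional.* and metadata#* entries
// are passed on to Agent.provision(...) as shown above
boolean upToDate = deploymentAgent.doUpdate(props);  // false when a restart was triggered or the agent is disabled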