use of io.fabric8.agent.model.Feature in project fabric8 by jboss-fuse.
the class ServiceImpl method bundleUpdatesInPatch.
/**
* Returns a list of {@link BundleUpdate} for a single patch, taking into account already discovered updates.
* @param patch the patch being processed
* @param allBundles all bundles currently installed in the framework
* @param bundleUpdateLocations out parameter that gathers update locations for bundles across patches
* @param history bundle version history (symbolic name -> version -> location)
* @param updatesForBundleKeys out parameter mapping [symbolic name|updateable-version] to the newest update across patches
* @param kind the kind of patch being installed (ROLLUP or NON_ROLLUP)
* @param coreBundles core bundles (etc/startup.properties), keyed by symbolic name
* @param featureUpdatesInThisPatch feature updates already determined for this patch
* @return a list of bundle updates shipped with this patch
* @throws Exception
*/
private List<BundleUpdate> bundleUpdatesInPatch(Patch patch, Bundle[] allBundles, Map<Bundle, String> bundleUpdateLocations, BundleVersionHistory history, Map<String, BundleUpdate> updatesForBundleKeys, PatchKind kind, Map<String, Bundle> coreBundles, List<FeatureUpdate> featureUpdatesInThisPatch) throws Exception {
List<BundleUpdate> updatesInThisPatch = new LinkedList<>();
// for ROLLUP patch we can check which bundles AREN'T updated by this patch - we have to reinstall them
// at the same version as the existing one. "no update" means "require install after clearing cache"
// Initially all bundles need update. If we find an update in patch, we remove a key from this map
Map<String, Bundle> updateNotRequired = new LinkedHashMap<>();
// // let's keep {symbolic name -> list of versions} mapping
// MultiMap<String, Version> allBundleVersions = new MultiMap<>();
// bundle location -> bundle key (symbolic name|updateable version)
Map<String, String> locationsOfBundleKeys = new HashMap<>();
for (Bundle b : allBundles) {
if (b.getSymbolicName() == null) {
continue;
}
Version v = b.getVersion();
Version updateableVersion = new Version(v.getMajor(), v.getMinor(), 0);
String key = String.format("%s|%s", stripSymbolicName(b.getSymbolicName()), updateableVersion.toString());
// bundles share a key when they have the same symbolic name and differ at micro version only
if (!coreBundles.containsKey(stripSymbolicName(b.getSymbolicName()))) {
updateNotRequired.put(key, b);
} else {
// let's key core (etc/startup.properties) bundles by symbolic name only - there should be only
// one version per symbolic name
updateNotRequired.put(stripSymbolicName(b.getSymbolicName()), b);
}
// allBundleVersions.put(stripSymbolicName(b.getSymbolicName()), b.getVersion());
String location = b.getLocation();
if (location != null && location.startsWith("mvn:") && location.contains("//")) {
// special case for mvn:org.ops4j.pax.url/pax-url-wrap/2.4.7//uber
location = location.replace("//", "/jar/");
}
locationsOfBundleKeys.put(location, key);
}
// let's prepare a set of bundle keys that are part of features that will be updated/reinstalled - those
// bundle keys don't have to be reinstalled separately
Set<String> bundleKeysFromFeatures = new HashSet<>();
if (featureUpdatesInThisPatch != null) {
for (FeatureUpdate featureUpdate : featureUpdatesInThisPatch) {
if (featureUpdate.getName() != null) {
// this is either installation or update of single feature
String fName = featureUpdate.getName();
String fVersion = featureUpdate.getPreviousVersion();
Feature f = featuresService.getFeature(fName, fVersion);
for (BundleInfo bundleInfo : f.getBundles()) {
if (/*!bundleInfo.isDependency() && */
locationsOfBundleKeys.containsKey(bundleInfo.getLocation())) {
bundleKeysFromFeatures.add(locationsOfBundleKeys.get(bundleInfo.getLocation()));
}
}
for (Conditional cond : f.getConditional()) {
for (BundleInfo bundleInfo : cond.getBundles()) {
if (/*!bundleInfo.isDependency() && */
locationsOfBundleKeys.containsKey(bundleInfo.getLocation())) {
bundleKeysFromFeatures.add(locationsOfBundleKeys.get(bundleInfo.getLocation()));
}
}
}
}
}
}
for (String newLocation : patch.getPatchData().getBundles()) {
// [symbolicName, version] of the new bundle
String[] symbolicNameVersion = helper.getBundleIdentity(newLocation);
if (symbolicNameVersion == null || symbolicNameVersion[0] == null) {
continue;
}
String sn = stripSymbolicName(symbolicNameVersion[0]);
String vr = symbolicNameVersion[1];
Version newVersion = VersionTable.getVersion(vr);
Version updateableVersion = new Version(newVersion.getMajor(), newVersion.getMinor(), 0);
// this bundle update from a patch may be applied only to relevant bundle|updateable-version, not to
// *every* bundle with exact symbolic name
String key = null;
if (!coreBundles.containsKey(sn)) {
key = String.format("%s|%s", sn, updateableVersion.toString());
} else {
key = sn;
}
// if existing bundle is within this range, update is possible
VersionRange range = getUpdateableRange(patch, newLocation, newVersion);
if (coreBundles.containsKey(sn)) {
// core bundles may be updated from any currently installed version, so we lower the floor of the range
if (range == null) {
range = new VersionRange(false, Version.emptyVersion, newVersion, true);
} else {
range = new VersionRange(false, Version.emptyVersion, range.getCeiling(), true);
}
} else if (range != null) {
// if a range is specified for a non-core bundle, the key should be different - the updateable
// version should be taken from the range
key = String.format("%s|%s", sn, range.getFloor().toString());
}
Bundle bundle = updateNotRequired.get(key);
if (bundle == null && coreBundles.containsKey(sn)) {
bundle = updateNotRequired.get(sn);
}
if (bundle == null || range == null) {
// this patch ships a bundle that can't be used as an update for ANY currently installed bundle
if (kind == PatchKind.NON_ROLLUP) {
// which is strange, because non-rollup patches should update existing bundles...
if (range == null) {
System.err.printf("Skipping bundle %s - unable to process bundle without a version range configuration%n", newLocation);
} else {
// the range is fine, we simply didn't find an installed bundle at all - the bundle from the patch
// will be stored in ${karaf.default.repository}, but not used as an update
}
}
continue;
}
Version oldVersion = bundle.getVersion();
if (range.contains(oldVersion)) {
String oldLocation = history.getLocation(bundle);
if ("org.ops4j.pax.url.mvn".equals(sn)) {
Artifact artifact = Utils.mvnurlToArtifact(newLocation, true);
if (artifact != null) {
URL location = new File(repository, String.format("org/ops4j/pax/url/pax-url-aether/%1$s/pax-url-aether-%1$s.jar", artifact.getVersion())).toURI().toURL();
newLocation = location.toString();
}
}
int startLevel = bundle.adapt(BundleStartLevel.class).getStartLevel();
int state = bundle.getState();
BundleUpdate update = new BundleUpdate(sn, newVersion.toString(), newLocation, oldVersion.toString(), oldLocation, startLevel, state);
if (bundleKeysFromFeatures.contains(key) || coreBundles.containsKey(sn)) {
update.setIndependent(false);
}
updatesInThisPatch.add(update);
updateNotRequired.remove(key);
if (coreBundles.containsKey(sn)) {
updateNotRequired.remove(sn);
}
// Merge result
BundleUpdate oldUpdate = updatesForBundleKeys.get(key);
if (oldUpdate != null) {
Version upv = null;
if (oldUpdate.getNewVersion() != null) {
upv = VersionTable.getVersion(oldUpdate.getNewVersion());
}
if (upv == null || upv.compareTo(newVersion) < 0) {
// other patch contains newer update for a bundle
updatesForBundleKeys.put(key, update);
bundleUpdateLocations.put(bundle, newLocation);
}
} else {
// this is the first update of the bundle
updatesForBundleKeys.put(key, update);
bundleUpdateLocations.put(bundle, newLocation);
}
}
}
if (kind == PatchKind.ROLLUP) {
// bundles not updated by this patch (for example ones installed by user features) still have to be reinstalled (or at least we have to try) after restart, at their current version.
for (Bundle b : updateNotRequired.values()) {
if (b.getSymbolicName() == null) {
continue;
}
String symbolicName = stripSymbolicName(b.getSymbolicName());
Version v = b.getVersion();
Version updateableVersion = new Version(v.getMajor(), v.getMinor(), 0);
String key = String.format("%s|%s", symbolicName, updateableVersion.toString());
int startLevel = b.adapt(BundleStartLevel.class).getStartLevel();
int state = b.getState();
BundleUpdate update = new BundleUpdate(symbolicName, null, null, v.toString(), history.getLocation(b), startLevel, state);
if (bundleKeysFromFeatures.contains(key) || coreBundles.containsKey(symbolicName)) {
// we don't have to install it separately
update.setIndependent(false);
}
updatesInThisPatch.add(update);
updatesForBundleKeys.put(key, update);
}
}
return updatesInThisPatch;
}
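A note on the "bundle key" used throughout the method above: it combines the stripped symbolic name with an "updateable version", i.e. the current version with the micro part zeroed. A minimal standalone sketch of that computation, using only the OSGi Version API (the class and method names below are illustrative, not part of the patch service):
import org.osgi.framework.Version;
// Sketch only: two bundles that differ just in the micro version map to the same key,
// which is what makes a micro-version update from a patch applicable to an installed bundle.
public class BundleKeySketch {
    static String bundleKey(String symbolicName, Version current) {
        Version updateable = new Version(current.getMajor(), current.getMinor(), 0);
        return String.format("%s|%s", symbolicName, updateable);
    }
    public static void main(String[] args) {
        // both calls print "org.ops4j.pax.url.mvn|2.4.0"
        System.out.println(bundleKey("org.ops4j.pax.url.mvn", new Version(2, 4, 7)));
        System.out.println(bundleKey("org.ops4j.pax.url.mvn", new Version(2, 4, 9)));
    }
}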
use of io.fabric8.agent.model.Feature in project fabric8 by jboss-fuse.
the class ServiceImpl method install.
/**
* <p>Main installation method. Installing a patch in non-fabric mode is a matter of correct merge (cherry-pick, merge,
* rebase) of patch branch into <code>master</code> branch.</p>
* <p>Static changes are handled by git, runtime changes (bundles, features) are handled depending on patch type:<ul>
* <li>Rollup: clear OSGi bundle cache, reinstall features that were installed after restart</li>
* <li>Non-Rollup: update bundles, generate overrides.properties and update scripts to reference new versions</li>
* </ul></p>
* <p>For Rollup patches we don't update bundles - we clear the bundle cache instead.</p>
* @param patches the patches to install
* @param simulate if <code>true</code>, no files or runtime state are modified
* @param synchronous if <code>true</code>, runtime changes are applied in the calling thread
* @return a map of patch id to {@link PatchResult}
*/
private Map<String, PatchResult> install(final Collection<Patch> patches, final boolean simulate, boolean synchronous) {
PatchKind kind = checkConsistency(patches);
checkPrerequisites(patches);
checkStandaloneChild(patches);
// checkFabric();
String transaction = null;
try {
// Compute individual patch results (patchId -> Result)
final Map<String, PatchResult> results = new LinkedHashMap<String, PatchResult>();
// current state of the framework
Bundle[] allBundles = bundleContext.getBundles();
// bundle -> url to update the bundle from (used for non-rollup patch)
final Map<Bundle, String> bundleUpdateLocations = new HashMap<>();
/* A "key" is name + "update'able version". Such version is current version with micro version == 0 */
// [symbolic name|updateable-version] -> newest update for the bundle out of all installed patches
final Map<String, BundleUpdate> updatesForBundleKeys = new LinkedHashMap<>();
// [feature name|updateable-version] -> newest update for the feature out of all installed patches
final Map<String, FeatureUpdate> updatesForFeatureKeys = new LinkedHashMap<>();
// symbolic name -> version -> location
final BundleVersionHistory history = createBundleVersionHistory();
// beginning installation transaction = creating of temporary branch in git
transaction = this.patchManagement.beginInstallation(kind);
// bundles from etc/startup.properties + felix.framework = all bundles not managed by features
// these bundles will be treated in a special way
// symbolic name -> Bundle
final Map<String, Bundle> coreBundles = helper.getCoreBundles(allBundles);
// runtime info is prepared to apply runtime changes and static info is prepared to update KARAF_HOME files
for (Patch patch : patches) {
List<FeatureUpdate> featureUpdatesInThisPatch = null;
if (kind == PatchKind.ROLLUP) {
// list of feature updates for the current patch
featureUpdatesInThisPatch = featureUpdatesInPatch(patch, updatesForFeatureKeys, kind);
helper.sortFeatureUpdates(featureUpdatesInThisPatch);
}
// list of bundle updates for the current patch - for ROLLUP patch, we minimize the list of bundles
// to "restore" (install after clearing data/cache) by not including bundles that are
// already updated as part of a feature update
List<BundleUpdate> bundleUpdatesInThisPatch = bundleUpdatesInPatch(patch, allBundles, bundleUpdateLocations, history, updatesForBundleKeys, kind, coreBundles, featureUpdatesInThisPatch);
// each patch may change files, we're not updating the main files yet - it'll be done when
// install transaction is committed
patchManagement.install(transaction, patch, bundleUpdatesInThisPatch);
// each patch may ship a migrator
if (!simulate) {
installMigratorBundle(patch);
}
// prepare patch result before doing runtime changes
PatchResult result = null;
if (patch.getResult() != null) {
result = patch.getResult();
if (patchManagement.isStandaloneChild()) {
// ENTESB-5120: "result" is actually a result of patch installation in the root container
// we need a dedicated result for an admin:create-based child container
PatchResult childResult = new PatchResult(patch.getPatchData(), simulate, System.currentTimeMillis(), bundleUpdatesInThisPatch, featureUpdatesInThisPatch, result);
result.addChildResult(System.getProperty("karaf.name"), childResult);
}
} else {
result = new PatchResult(patch.getPatchData(), simulate, System.currentTimeMillis(), bundleUpdatesInThisPatch, featureUpdatesInThisPatch);
}
result.getKarafBases().add(String.format("%s | %s", System.getProperty("karaf.name"), System.getProperty("karaf.base")));
results.put(patch.getPatchData().getId(), result);
}
// One special case
if (kind == PatchKind.NON_ROLLUP) {
// for rollup patch, this bundle will be installed from scratch
for (Map.Entry<Bundle, String> entry : bundleUpdateLocations.entrySet()) {
Bundle bundle = entry.getKey();
if (bundle.getSymbolicName() != null && "org.ops4j.pax.url.mvn".equals(stripSymbolicName(bundle.getSymbolicName()))) {
// handle this bundle specially - update it here
URL location = new URL(entry.getValue());
System.out.printf("Special update of bundle \"%s\" from \"%s\"%n", bundle.getSymbolicName(), location);
if (!simulate) {
BundleUtils.update(bundle, location);
bundle.start();
}
// replace location - to be stored in result
bundleUpdateLocations.put(bundle, location.toString());
}
}
}
Presentation.displayFeatureUpdates(updatesForFeatureKeys.values(), true);
// effectively, we will update all the bundles from this list - even if some bundles will be "updated"
// as part of feature installation
Presentation.displayBundleUpdates(updatesForBundleKeys.values(), true);
// for a rollup patch we restart with a clean cache - required repositories, features and bundles will then be reinstalled
if (kind == PatchKind.ROLLUP) {
if (!simulate) {
if (patches.size() == 1) {
Patch patch = patches.iterator().next();
PatchResult result = results.get(patch.getPatchData().getId());
patch.setResult(result);
// single shot
if (patchManagement.isStandaloneChild()) {
backupService.backupDataFiles(result.getChildPatches().get(System.getProperty("karaf.name")), Pending.ROLLUP_INSTALLATION);
} else {
backupService.backupDataFiles(result, Pending.ROLLUP_INSTALLATION);
}
for (Bundle b : coreBundles.values()) {
if (b.getSymbolicName() != null && Utils.stripSymbolicName(b.getSymbolicName()).equals("org.apache.felix.fileinstall")) {
b.stop(Bundle.STOP_TRANSIENT);
break;
}
}
// update KARAF_HOME
patchManagement.commitInstallation(transaction);
if (patchManagement.isStandaloneChild()) {
result.getChildPatches().get(System.getProperty("karaf.name")).setPending(Pending.ROLLUP_INSTALLATION);
} else {
result.setPending(Pending.ROLLUP_INSTALLATION);
}
result.store();
// Some updates need a full JVM restart.
if (isJvmRestartNeeded(results)) {
boolean handlesFullRestart = Boolean.getBoolean("karaf.restart.jvm.supported");
if (handlesFullRestart) {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " installed. Restarting Karaf..");
System.setProperty("karaf.restart.jvm", "true");
} else {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " installed. Shutting down Karaf, please restart...");
}
} else {
// We don't need a JVM restart, so let's just do an OSGi framework restart
System.setProperty("karaf.restart", "true");
}
File karafData = new File(bundleContext.getProperty("karaf.data"));
File cleanCache = new File(karafData, "clean_cache");
cleanCache.createNewFile();
Thread.currentThread().setContextClassLoader(bundleContext.getBundle(0l).adapt(BundleWiring.class).getClassLoader());
bundleContext.getBundle(0l).stop();
}
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
patchManagement.rollbackInstallation(transaction);
}
return results;
}
// update KARAF_HOME
if (!simulate) {
patchManagement.commitInstallation(transaction);
} else {
patchManagement.rollbackInstallation(transaction);
}
if (!simulate) {
Runnable task = new Runnable() {
@Override
public void run() {
try {
// update bundles
applyChanges(bundleUpdateLocations);
// persist results of all installed patches
for (Patch patch : patches) {
PatchResult result = results.get(patch.getPatchData().getId());
patch.setResult(result);
result.store();
}
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
}
}
};
if (synchronous) {
task.run();
} else {
new Thread(task).start();
}
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
}
return results;
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
if (transaction != null && patchManagement != null) {
patchManagement.rollbackInstallation(transaction);
}
throw new PatchException(e.getMessage(), e);
} finally {
System.out.flush();
}
}
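The rollup branch above ends by choosing between a full JVM restart and an OSGi framework restart through Karaf system properties. Here is a minimal sketch of just that decision, with the surrounding patch bookkeeping stripped away (the jvmRestartNeeded flag is assumed to come from the isJvmRestartNeeded(results) check used above):
// Sketch only: mirrors the restart-property handling in the rollup branch.
static void requestRestart(boolean jvmRestartNeeded, String patchId) {
    if (jvmRestartNeeded) {
        if (Boolean.getBoolean("karaf.restart.jvm.supported")) {
            // the launcher supports full restarts, so ask it to restart the whole JVM
            System.out.println("Rollup patch " + patchId + " installed. Restarting Karaf..");
            System.setProperty("karaf.restart.jvm", "true");
        } else {
            // no launcher support - the JVM has to be restarted manually
            System.out.println("Rollup patch " + patchId + " installed. Shutting down Karaf, please restart...");
        }
    } else {
        // a JVM restart is not required, an OSGi framework restart is enough
        System.setProperty("karaf.restart", "true");
    }
}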
use of io.fabric8.agent.model.Feature in project fabric8 by jboss-fuse.
the class ExtendedJoinTest method testJoinAndAddToEnsemble.
/**
* This is a test for FABRIC-353.
*/
@Test
@Ignore
public void testJoinAndAddToEnsemble() throws Exception {
System.err.println(CommandSupport.executeCommand("fabric:create --force --clean -n --wait-for-provisioning"));
// System.out.println(executeCommand("shell:info"));
// System.out.println(executeCommand("fabric:info"));
// System.out.println(executeCommand("fabric:profile-list"));
BundleContext moduleContext = ServiceLocator.getSystemContext();
ServiceProxy<FabricService> fabricProxy = ServiceProxy.createServiceProxy(moduleContext, FabricService.class);
try {
FabricService fabricService = fabricProxy.getService();
AdminService adminService = ServiceLocator.awaitService(AdminService.class);
String version = System.getProperty("fabric.version");
System.out.println(CommandSupport.executeCommand("admin:create --featureURL mvn:io.fabric8/fabric8-karaf/" + version + "/xml/features --feature fabric-git --feature fabric-agent --feature fabric-boot-commands basic_cnt_f"));
System.out.println(CommandSupport.executeCommand("admin:create --featureURL mvn:io.fabric8/fabric8-karaf/" + version + "/xml/features --feature fabric-git --feature fabric-agent --feature fabric-boot-commands basic_cnt_g"));
try {
System.out.println(CommandSupport.executeCommand("admin:start basic_cnt_f"));
System.out.println(CommandSupport.executeCommand("admin:start basic_cnt_g"));
ProvisionSupport.instanceStarted(Arrays.asList("basic_cnt_f", "basic_cnt_g"), ProvisionSupport.PROVISION_TIMEOUT);
System.out.println(CommandSupport.executeCommand("admin:list"));
String joinCommand = "fabric:join -f --zookeeper-password " + fabricService.getZookeeperPassword() + " " + fabricService.getZookeeperUrl();
String response = "";
for (int i = 0; i < 10 && !response.contains("true"); i++) {
response = CommandSupport.executeCommand("ssh:ssh -l karaf -P karaf -p " + adminService.getInstance("basic_cnt_f").getSshPort() + " localhost " + WAIT_FOR_JOIN_SERVICE);
Thread.sleep(1000);
}
response = "";
for (int i = 0; i < 10 && !response.contains("true"); i++) {
response = CommandSupport.executeCommand("ssh:ssh -l karaf -P karaf -p " + adminService.getInstance("basic_cnt_g").getSshPort() + " localhost " + WAIT_FOR_JOIN_SERVICE);
Thread.sleep(1000);
}
System.err.println(CommandSupport.executeCommand("ssh:ssh -l karaf -P karaf -p " + adminService.getInstance("basic_cnt_f").getSshPort() + " localhost " + joinCommand));
System.err.println(CommandSupport.executeCommand("ssh:ssh -l karaf -P karaf -p " + adminService.getInstance("basic_cnt_g").getSshPort() + " localhost " + joinCommand));
ProvisionSupport.containersExist(Arrays.asList("basic_cnt_f", "basic_cnt_g"), ProvisionSupport.PROVISION_TIMEOUT);
Container cntF = fabricService.getContainer("basic_cnt_f");
Container cntG = fabricService.getContainer("basic_cnt_g");
ProvisionSupport.containerStatus(Arrays.asList(cntF, cntG), "success", ProvisionSupport.PROVISION_TIMEOUT);
EnsembleSupport.addToEnsemble(fabricService, cntF, cntG);
System.out.println(CommandSupport.executeCommand("fabric:container-list"));
EnsembleSupport.removeFromEnsemble(fabricService, cntF, cntG);
System.out.println(CommandSupport.executeCommand("fabric:container-list"));
} finally {
System.out.println(CommandSupport.executeCommand("admin:stop basic_cnt_f"));
System.out.println(CommandSupport.executeCommand("admin:stop basic_cnt_g"));
}
} finally {
fabricProxy.close();
}
}
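The test above polls the child containers over ssh until the wait-for-join service reports "true". That retry loop can be expressed as a small helper such as the following (waitForMarker is a hypothetical name and not part of the test; it simply mirrors the bounded retry-and-sleep pattern):
import java.util.function.Supplier;
// Hypothetical helper: run a command repeatedly until its output contains the
// expected marker, or give up after the configured number of attempts.
static boolean waitForMarker(Supplier<String> command, String marker, int attempts) throws InterruptedException {
    String response = "";
    for (int i = 0; i < attempts && !response.contains(marker); i++) {
        response = command.get();
        Thread.sleep(1000); // pause between attempts, as the test does
    }
    return response.contains(marker);
}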
use of io.fabric8.agent.model.Feature in project fabric8 by jboss-fuse.
the class DeploymentAgentTest method testFeatureRepoResolution.
@Test
@SuppressWarnings("unchecked")
public void testFeatureRepoResolution() throws Exception {
CommandSupport.executeCommand("fabric:create --force --clean -n --wait-for-provisioning");
// We just want to use a feature repository that is not part of the distribution.
CommandSupport.executeCommand("fabric:profile-create --parent feature-camel test-profile");
CommandSupport.executeCommand("fabric:version-create --parent 1.0 1.1");
CommandSupport.executeCommand("fabric:profile-edit --repository mvn:io.fabric8.examples.fabric-camel-dosgi/features/" + System.getProperty("fabric.version") + "/xml/features test-profile 1.1");
CommandSupport.executeCommand("fabric:profile-edit --feature fabric-dosgi test-profile 1.1");
// We remove all repositories from the agent config except Maven Central, so the containers rely on the fabric-maven-proxy.
// We also remove the local repository.
CommandSupport.executeCommand("fabric:profile-edit --pid io.fabric8.agent/org.ops4j.pax.url.mvn.repositories=http://repo1.maven.org/maven2@id=m2central default 1.1");
CommandSupport.executeCommand("fabric:profile-edit --pid test-profile 1.1");
BundleContext moduleContext = ServiceLocator.getSystemContext();
ServiceProxy<FabricService> fabricProxy = ServiceProxy.createServiceProxy(moduleContext, FabricService.class);
try {
FabricService fabricService = fabricProxy.getService();
Set<Container> containers = ContainerBuilder.create().withName("smoke_cnt_a").withProfiles("test-profile").assertProvisioningResult().build(fabricService);
try {
// We want to remove all repositories from fabric-agent.
for (Container container : containers) {
CommandSupport.executeCommand("fabric:container-upgrade 1.1 " + container.getId());
System.out.flush();
}
ProvisionSupport.provisioningSuccess(containers, ProvisionSupport.PROVISION_TIMEOUT);
CommandSupport.executeCommand("fabric:container-list");
for (Container container : containers) {
CommandSupport.executeCommand("fabric:container-connect -u admin -p admin " + container.getId() + " osgi:list");
CommandSupport.executeCommand("fabric:container-connect -u admin -p admin " + container.getId() + " config:proplist --pid org.ops4j.pax.url.mvn");
System.out.flush();
}
} finally {
ContainerBuilder.stop(fabricService, containers);
}
} finally {
fabricProxy.close();
}
}
use of io.fabric8.agent.model.Feature in project fabric8 by jboss-fuse.
the class CreateChildContainerTest method testCreateChildContainerWithCustomZKServerPort.
@Test
public void testCreateChildContainerWithCustomZKServerPort() throws Exception {
System.err.println(CommandSupport.executeCommand("fabric:create --force --clean -n --wait-for-provisioning --zookeeper-server-port 2345"));
System.err.println(CommandSupport.executeCommand("fabric:profile-create --parent default p1"));
System.err.println(CommandSupport.executeCommand("fabric:profile-edit --feature fabric-zookeeper-commands p1"));
BundleContext moduleContext = ServiceLocator.getSystemContext();
ServiceProxy<FabricService> fabricProxy = ServiceProxy.createServiceProxy(moduleContext, FabricService.class);
try {
FabricService fabricService = fabricProxy.getService();
Set<Container> containers = ContainerBuilder.child(1).withName("smoke_child_b").withProfiles("p1").build(fabricService);
ProvisionSupport.provisioningSuccess(containers, ProvisionSupport.PROVISION_TIMEOUT);
try {
Container child = containers.iterator().next();
String ensembleUrl = CommandSupport.executeCommand("fabric:container-connect -u admin -p admin " + child.getId() + " zk:get /fabric/configs/ensemble/url");
Assert.assertTrue("Child should use custom ZK server port, but was: " + ensembleUrl, ensembleUrl.contains("${zk:root/ip}:2345"));
} finally {
ContainerBuilder.stop(fabricService, containers);
}
} finally {
fabricProxy.close();
}
}