use of io.fabric8.karaf.checks.Check in project fabric8 by jboss-fuse.
the class GitPatchManagementServiceImpl method trackPatch.
/**
* <p>This method turns static information about a patch into a managed patch, i.e., a patch added to the git
* repository.</p>
*
* <p>Such a patch has its own branch, ready to be merged when the patch is installed. Before installation we can
* verify the patch, examine its content, check the differences and conflicts, and perform a simulation (a merge
* to a temporary branch created from the main patch branch).</p>
*
* <p>The strategy is as follows:<ul>
* <li>The <em>main patch branch</em> in the git repository tracks all changes (from baselines, the
* patch-management system, patches and user changes)</li>
* <li>Initially there are 3 commits: the baseline, the patch-management bundle installation in
* etc/startup.properties, and the initial user changes</li>
* <li>We always <strong>tag the baseline commit</strong></li>
* <li>User changes may be applied each time the framework is restarted</li>
* <li>When we add a patch, we create a <em>named branch</em> from the <strong>latest baseline</strong></li>
* <li>When we install a patch, we <strong>merge</strong> the patch branch with the <em>main patch branch</em>
* (which may contain additional user changes)</li>
* <li>When the patch ZIP contains a new baseline distribution, we tag the merge commit on the
* <em>main patch branch</em> as the new baseline after merging the patch branch</li>
* <li>Branches for new patches will then be created from the new baseline commit</li>
* </ul></p>
* @param patchData static information about the patch
* @return a {@link Patch} tracking the added patch
*/
@Override
public Patch trackPatch(PatchData patchData) throws PatchException {
try {
awaitInitialization();
} catch (InterruptedException e) {
throw new PatchException("Patch management system is not ready yet");
}
Git fork = null;
try {
Git mainRepository = gitPatchRepository.findOrCreateMainGitRepository();
// prepare single fork for all the below operations
fork = gitPatchRepository.cloneRepository(mainRepository, true);
// 1. find current baseline
RevTag latestBaseline = gitPatchRepository.findCurrentBaseline(fork);
if (latestBaseline == null) {
throw new PatchException("Can't find baseline distribution tracked in patch management. Is patch management initialized?");
}
// the commit from the patch should be available from main patch branch
RevCommit commit = new RevWalk(fork.getRepository()).parseCommit(latestBaseline.getObject());
// create dedicated branch for this patch. We'll immediately add patch content there so we can examine the
// changes from the latest baseline
gitPatchRepository.checkout(fork).setCreateBranch(true).setName("patch-" + patchData.getId()).setStartPoint(commit).call();
// copy patch resources (but not maven artifacts from system/ or repository/) to working copy
if (patchData.getPatchDirectory() != null) {
boolean removeTargetDir = patchData.isRollupPatch();
copyManagedDirectories(patchData.getPatchDirectory(), fork.getRepository().getWorkTree(), removeTargetDir, false, false);
}
// add the changes
fork.add().addFilepattern(".").call();
// remove the deletes (without touching specially-managed etc/overrides.properties)
for (String missing : fork.status().call().getMissing()) {
if (!"etc/overrides.properties".equals(missing)) {
fork.rm().addFilepattern(missing).call();
}
}
// record information about other "patches" included in the added patch (e.g., a Fuse patch
// may contain patches for admin:create based containers in standalone mode)
StringWriter sw = new StringWriter();
sw.append("# tags for patches included in \"").append(patchData.getId()).append("\"\n");
for (String bundle : patchData.getBundles()) {
// for child containers that want to patch:install patches added in the root container
if (bundle.contains("mvn:org.apache.karaf.admin/org.apache.karaf.admin.core/")) {
Artifact a = Utils.mvnurlToArtifact(bundle, true);
if (a != null) {
sw.append(String.format(EnvType.STANDALONE_CHILD.getBaselineTagFormat(), a.getVersion())).append("\n");
}
break;
}
}
FileUtils.write(new File(fork.getRepository().getWorkTree(), "patch-info.txt"), sw.toString());
fork.add().addFilepattern(".").call();
// commit the changes (patch vs. baseline) to patch branch
gitPatchRepository.prepareCommit(fork, String.format("[PATCH] Tracking patch %s", patchData.getId())).call();
// push the patch branch
gitPatchRepository.push(fork, "patch-" + patchData.getId());
// track other kinds of baselines found in the patch
if (env.isFabric()) {
trackBaselinesForRootContainer(fork);
trackBaselinesForChildContainers(fork);
trackBaselinesForSSHContainers(fork);
} else {
// for admin:create child containers
trackBaselinesForChildContainers(fork);
}
return new Patch(patchData, gitPatchRepository.getManagedPatch(patchData.getId()));
} catch (IOException | GitAPIException e) {
throw new PatchException(e.getMessage(), e);
} finally {
if (fork != null) {
gitPatchRepository.closeRepository(fork, true);
}
}
}
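
The branching strategy described in the Javadoc above boils down to a handful of JGit calls. Below is a minimal, hedged sketch of that flow - it is not the project's code; the repository path and the tag/branch names (baseline-6.2.0, patch-my-patch-1, container-history, baseline-6.2.1) are assumptions for illustration only.

import java.io.File;

import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.lib.ObjectId;

public class PatchBranchingSketch {
    public static void main(String[] args) throws Exception {
        // open an existing patch-management repository (path is an assumption)
        try (Git git = Git.open(new File("target/karaf/patches/.management/history"))) {
            // 1. resolve the latest baseline tag (tag name assumed)
            ObjectId baseline = git.getRepository().resolve("refs/tags/baseline-6.2.0^{commit}");

            // 2. adding a patch: create a dedicated branch starting at the baseline
            git.checkout()
                    .setCreateBranch(true)
                    .setName("patch-my-patch-1")
                    .setStartPoint(baseline.getName())
                    .call();
            // ... copy patch content into the working tree, add and commit it here ...

            // 3. installing the patch: merge the patch branch into the main patch branch
            git.checkout().setName("container-history").call(); // main patch branch name assumed
            MergeResult merge = git.merge()
                    .include(git.getRepository().resolve("patch-my-patch-1"))
                    .call();

            // 4. for a rollup patch, the merge commit becomes the new baseline (tag name assumed)
            if (merge.getMergeStatus().isSuccessful()) {
                git.tag()
                        .setName("baseline-6.2.1")
                        .setObjectId(git.getRepository().parseCommit(merge.getNewHead()))
                        .call();
            }
        }
    }
}

In the real service, the forked repository, tag formats, pushing and cleanup are all handled by GitPatchRepository; the sketch only mirrors the sequence of git operations.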
use of io.fabric8.karaf.checks.Check in project fabric8 by jboss-fuse.
the class GitPatchManagementServiceImpl method handleConflict.
private void handleConflict(File patchDirectory, Git fork, boolean preferNew, String cpPrefix, boolean performBackup, String choose, String backup, boolean rollback) throws GitAPIException, IOException {
Map<String, IndexDiff.StageState> conflicts = fork.status().call().getConflictingStageState();
DirCache cache = fork.getRepository().readDirCache();
// path -> [oursObjectId, baseObjectId, theirsObjectId]
Map<String, ObjectId[]> threeWayMerge = new HashMap<>();
// collect conflicts info
for (int i = 0; i < cache.getEntryCount(); i++) {
DirCacheEntry entry = cache.getEntry(i);
if (entry.getStage() == DirCacheEntry.STAGE_0) {
continue;
}
if (!threeWayMerge.containsKey(entry.getPathString())) {
threeWayMerge.put(entry.getPathString(), new ObjectId[] { null, null, null });
}
if (entry.getStage() == DirCacheEntry.STAGE_1) {
// base
threeWayMerge.get(entry.getPathString())[1] = entry.getObjectId();
}
if (entry.getStage() == DirCacheEntry.STAGE_2) {
// ours
threeWayMerge.get(entry.getPathString())[0] = entry.getObjectId();
}
if (entry.getStage() == DirCacheEntry.STAGE_3) {
// theirs
threeWayMerge.get(entry.getPathString())[2] = entry.getObjectId();
}
}
// resolve conflicts
ObjectReader objectReader = fork.getRepository().newObjectReader();
for (Map.Entry<String, ObjectId[]> entry : threeWayMerge.entrySet()) {
if (entry.getKey().equals("patch-info.txt")) {
fork.rm().addFilepattern(entry.getKey()).call();
continue;
}
Resolver resolver = conflictResolver.getResolver(entry.getKey());
// resolved version - either by a custom resolver or by the automatic algorithm
String resolved = null;
if (resolver != null && entry.getValue()[0] != null && entry.getValue()[2] != null) {
// custom conflict resolution (don't expect DELETED_BY_X kind of conflict, only BOTH_MODIFIED)
String message = String.format(" - %s (%s): %s", entry.getKey(), conflicts.get(entry.getKey()), "Using " + resolver.getClass().getName() + " to resolve the conflict");
Activator.log2(LogService.LOG_INFO, message);
// when doing custom conflict resolution, we know that both the user and the patch have changed the file
// in a non-mergeable way.
// If there was no resolver, we would simply choose by the "preferNew" flag.
// But because we have a custom resolver, we use the "preferNew" flag to check which STAGE points to the
// patch's version, and we pass that version of the conflicting file as the less important file to the
// custom resolver
File base = null, first = null, second = null;
try {
ObjectLoader loader = null;
if (entry.getValue()[1] != null) {
base = new File(fork.getRepository().getWorkTree(), entry.getKey() + ".1");
loader = objectReader.open(entry.getValue()[1]);
try (FileOutputStream fos = new FileOutputStream(base)) {
loader.copyTo(fos);
}
}
// if preferNew == true (P patch), then the "first" (less important) file will be the file
// provided by the patch ("theirs", STAGE_3)
first = new File(fork.getRepository().getWorkTree(), entry.getKey() + ".2");
loader = objectReader.open(entry.getValue()[preferNew ? 2 : 0]);
try (FileOutputStream fos = new FileOutputStream(first)) {
loader.copyTo(fos);
}
// "second", more important file will be user change
second = new File(fork.getRepository().getWorkTree(), entry.getKey() + ".3");
loader = objectReader.open(entry.getValue()[preferNew ? 0 : 2]);
try (FileOutputStream fos = new FileOutputStream(second)) {
loader.copyTo(fos);
}
// resolvers treat patch change as less important - user lines overwrite patch lines
if (resolver instanceof PropertiesFileResolver) {
// TODO: use options from patch:install / patch:fabric-install command
// by default we use a file that comes from patch and we may add property changes
// from user
// in R patch, preferNew == false, because patch comes first
// in P patch, preferNew == true, because patch comes last
// in R patch + fabric mode, preferNew == true, because we *merge* patch branch into version
// branch
boolean useFirstChangeAsBase = true;
if (entry.getKey().startsWith("etc/")) {
// for files under etc/, which change is used as the base depends on whether we are rolling back
if (rollback) {
useFirstChangeAsBase = true;
} else {
useFirstChangeAsBase = false;
}
}
resolved = ((ResolverEx) resolver).resolve(first, base, second, useFirstChangeAsBase, rollback);
} else {
resolved = resolver.resolve(first, base, second);
}
if (resolved != null) {
FileUtils.write(new File(fork.getRepository().getWorkTree(), entry.getKey()), resolved);
fork.add().addFilepattern(entry.getKey()).call();
}
} finally {
if (base != null) {
base.delete();
}
if (first != null) {
first.delete();
}
if (second != null) {
second.delete();
}
}
}
if (resolved == null) {
// automatic conflict resolution
String message = String.format(" - %s (%s): Choosing %s", entry.getKey(), conflicts.get(entry.getKey()), choose);
ObjectLoader loader = null;
ObjectLoader loaderForBackup = null;
// longer code, but more readable than a series of ternary (?:) operators
if (preferNew) {
switch(conflicts.get(entry.getKey())) {
case BOTH_ADDED:
case BOTH_MODIFIED:
loader = objectReader.open(entry.getValue()[2]);
loaderForBackup = objectReader.open(entry.getValue()[0]);
break;
case BOTH_DELETED:
break;
case DELETED_BY_THEM:
// ENTESB-6003: special case: when R patch removes something and we've modified it
// let's preserve our version
message = String.format(" - %s (%s): Keeping custom change", entry.getKey(), conflicts.get(entry.getKey()));
loader = objectReader.open(entry.getValue()[0]);
break;
case DELETED_BY_US:
loader = objectReader.open(entry.getValue()[2]);
break;
}
} else {
switch(conflicts.get(entry.getKey())) {
case BOTH_ADDED:
case BOTH_MODIFIED:
loader = objectReader.open(entry.getValue()[0]);
loaderForBackup = objectReader.open(entry.getValue()[2]);
break;
case DELETED_BY_THEM:
loader = objectReader.open(entry.getValue()[0]);
break;
case BOTH_DELETED:
case DELETED_BY_US:
break;
}
}
Activator.log2(LogService.LOG_WARNING, message);
if (loader != null) {
try (FileOutputStream fos = new FileOutputStream(new File(fork.getRepository().getWorkTree(), entry.getKey()))) {
loader.copyTo(fos);
}
fork.add().addFilepattern(entry.getKey()).call();
} else {
fork.rm().addFilepattern(entry.getKey()).call();
}
if (performBackup) {
// the other entry should be backed up
if (loaderForBackup != null) {
File target = new File(patchDirectory.getParent(), patchDirectory.getName() + ".backup");
if (isStandaloneChild()) {
target = new File(patchDirectory.getParent(), patchDirectory.getName() + "." + System.getProperty("karaf.name") + ".backup");
}
if (cpPrefix != null) {
target = new File(target, cpPrefix);
}
File file = new File(target, entry.getKey());
message = String.format("Backing up %s to \"%s\"", backup, file.getCanonicalPath());
Activator.log2(LogService.LOG_DEBUG, message);
file.getParentFile().mkdirs();
try (FileOutputStream fos = new FileOutputStream(file)) {
loaderForBackup.copyTo(fos);
}
}
}
}
}
}
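
The conflict handling above starts from the index stages JGit records for each conflicting path (STAGE_1 = base, STAGE_2 = ours, STAGE_3 = theirs). The standalone sketch below shows just that collection step, using the same path -> [ours, base, theirs] layout; the repository path is an assumption and the class is not part of the project.

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.lib.ObjectId;

public class ConflictStagesSketch {
    public static void main(String[] args) throws Exception {
        try (Git git = Git.open(new File("/tmp/repo"))) { // path is an assumption
            DirCache cache = git.getRepository().readDirCache();
            // path -> [ours (STAGE_2), base (STAGE_1), theirs (STAGE_3)] - same layout as above
            Map<String, ObjectId[]> threeWayMerge = new HashMap<>();
            for (int i = 0; i < cache.getEntryCount(); i++) {
                DirCacheEntry entry = cache.getEntry(i);
                if (entry.getStage() == DirCacheEntry.STAGE_0) {
                    continue; // merged entry, no conflict for this path
                }
                ObjectId[] sides = threeWayMerge.computeIfAbsent(
                        entry.getPathString(), k -> new ObjectId[3]);
                switch (entry.getStage()) {
                    case DirCacheEntry.STAGE_1: sides[1] = entry.getObjectId(); break; // base
                    case DirCacheEntry.STAGE_2: sides[0] = entry.getObjectId(); break; // ours
                    case DirCacheEntry.STAGE_3: sides[2] = entry.getObjectId(); break; // theirs
                    default: break;
                }
            }
            // at this point a resolver could be given all three versions, as in handleConflict()
            threeWayMerge.forEach((path, sides) ->
                    System.out.printf("%s base=%s ours=%s theirs=%s%n",
                            path, sides[1], sides[0], sides[2]));
        }
    }
}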
use of io.fabric8.karaf.checks.Check in project fabric8 by jboss-fuse.
the class GitPatchManagementServiceIT method rollbackInstalledRollupPatch.
@Test
public void rollbackInstalledRollupPatch() throws IOException, GitAPIException {
freshKarafStandaloneDistro();
GitPatchRepository repository = patchManagement();
PatchManagement management = (PatchManagement) pm;
preparePatchZip("src/test/resources/content/patch1", "target/karaf/patches/source/patch-1.zip", false);
preparePatchZip("src/test/resources/content/patch4", "target/karaf/patches/source/patch-4.zip", false);
List<PatchData> patches = management.fetchPatches(new File("target/karaf/patches/source/patch-1.zip").toURI().toURL());
Patch patch1 = management.trackPatch(patches.get(0));
patches = management.fetchPatches(new File("target/karaf/patches/source/patch-4.zip").toURI().toURL());
Patch patch4 = management.trackPatch(patches.get(0));
Git fork = repository.cloneRepository(repository.findOrCreateMainGitRepository(), true);
ObjectId master1 = fork.getRepository().resolve(GitPatchRepository.HISTORY_BRANCH);
String tx = management.beginInstallation(PatchKind.ROLLUP);
management.install(tx, patch4, null);
management.commitInstallation(tx);
// install P patch to check if rolling back rollup patch will remove P patch's tag
tx = management.beginInstallation(PatchKind.NON_ROLLUP);
management.install(tx, patch1, null);
management.commitInstallation(tx);
fork = repository.cloneRepository(repository.findOrCreateMainGitRepository(), true);
assertTrue(repository.containsTag(fork, "patch-my-patch-1"));
management.rollback(patch4.getPatchData());
repository.closeRepository(fork, true);
fork = repository.cloneRepository(repository.findOrCreateMainGitRepository(), true);
ObjectId master2 = fork.getRepository().resolve(GitPatchRepository.HISTORY_BRANCH);
assertThat(master1, not(equalTo(master2)));
assertThat(fork.tagList().call().size(), equalTo(2));
assertTrue(repository.containsTag(fork, "patch-management"));
assertTrue(repository.containsTag(fork, "baseline-6.2.0"));
assertFalse("When rolling back rollup patch, newer P patches' tags should be removed", repository.containsTag(fork, "patch-my-patch-1"));
assertThat(repository.findCurrentBaseline(fork).getTagName(), equalTo("baseline-6.2.0"));
// TODO: There should be a version restored from the backed-up conflict,
// but we've changed the way a rolled-back R patch is handled - we copy the entire working copy after rollback
// String binStart = FileUtils.readFileToString(new File(karafHome, "bin/start"));
// assertTrue("bin/start should be at previous version",
// binStart.contains("echo \"This is user's change\""));
}
use of io.fabric8.karaf.checks.Check in project fabric8 by jboss-fuse.
the class ServiceImpl method rollback.
@Override
public void rollback(final Patch patch, boolean simulate, boolean force) throws PatchException {
final PatchResult result = !patchManagement.isStandaloneChild() ? patch.getResult() : patch.getResult().getChildPatches().get(System.getProperty("karaf.name"));
if (result == null) {
throw new PatchException("Patch " + patch.getPatchData().getId() + " is not installed");
}
if (patch.getPatchData().isRollupPatch()) {
// we already have the "state" (feature repositories, features, bundles and their states, datafiles
// and start-level info) stored in *.result file
Presentation.displayFeatureUpdates(result.getFeatureUpdates(), false);
Presentation.displayBundleUpdates(result.getBundleUpdates(), false);
try {
if (!simulate) {
// let's back up data files before configadmin detects changes to etc/* files.
backupService.backupDataFiles(result, Pending.ROLLUP_ROLLBACK);
for (Bundle b : this.bundleContext.getBundles()) {
if (b.getSymbolicName() != null && Utils.stripSymbolicName(b.getSymbolicName()).equals("org.apache.felix.fileinstall")) {
b.stop(Bundle.STOP_TRANSIENT);
break;
}
}
patchManagement.rollback(patch.getPatchData());
result.setPending(Pending.ROLLUP_ROLLBACK);
if (patchManagement.isStandaloneChild()) {
result.getParent().store();
} else {
result.store();
}
if (isJvmRestartNeeded(result)) {
boolean handlesFullRestart = Boolean.getBoolean("karaf.restart.jvm.supported");
if (handlesFullRestart) {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " rolled back. Restarting Karaf..");
System.setProperty("karaf.restart.jvm", "true");
} else {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " rolled back. Shutting down Karaf, please restart...");
}
} else {
// We don't need a JVM restart, so let's just do an OSGi framework restart
System.setProperty("karaf.restart", "true");
}
File karafData = new File(bundleContext.getProperty("karaf.data"));
File cleanCache = new File(karafData, "clean_cache");
cleanCache.createNewFile();
bundleContext.getBundle(0l).stop();
// stop/shutdown occurs on another thread
return;
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
return;
}
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
throw new PatchException(e.getMessage(), e);
}
}
// continue with NON_ROLLUP patch
// current state of the framework
Bundle[] allBundles = bundleContext.getBundles();
// check if all the bundles that were updated in patch are available (installed)
List<BundleUpdate> badUpdates = new ArrayList<BundleUpdate>();
for (BundleUpdate update : result.getBundleUpdates()) {
boolean found = false;
Version v = Version.parseVersion(update.getNewVersion() == null ? update.getPreviousVersion() : update.getNewVersion());
for (Bundle bundle : allBundles) {
if (bundle.getSymbolicName() == null || update.getSymbolicName() == null) {
continue;
}
if (stripSymbolicName(bundle.getSymbolicName()).equals(stripSymbolicName(update.getSymbolicName())) && bundle.getVersion().equals(v)) {
found = true;
break;
}
}
if (!found) {
badUpdates.add(update);
}
}
if (!badUpdates.isEmpty() && !force) {
StringBuilder sb = new StringBuilder();
sb.append("Unable to rollback patch ").append(patch.getPatchData().getId()).append(" because of the following missing bundles:\n");
for (BundleUpdate up : badUpdates) {
String version = up.getNewVersion() == null ? up.getPreviousVersion() : up.getNewVersion();
sb.append(" - ").append(up.getSymbolicName()).append("/").append(version).append("\n");
}
throw new PatchException(sb.toString());
}
if (!simulate) {
// bundle -> location of the previous version to downgrade to
final Map<Bundle, String> toUpdate = new HashMap<Bundle, String>();
for (BundleUpdate update : result.getBundleUpdates()) {
Version v = Version.parseVersion(update.getNewVersion() == null ? update.getPreviousVersion() : update.getNewVersion());
for (Bundle bundle : allBundles) {
if (bundle.getSymbolicName() == null || update.getSymbolicName() == null) {
continue;
}
if (stripSymbolicName(bundle.getSymbolicName()).equals(stripSymbolicName(update.getSymbolicName())) && bundle.getVersion().equals(v)) {
toUpdate.put(bundle, update.getPreviousLocation());
}
}
}
final boolean isStandaloneChild = patchManagement.isStandaloneChild();
patchManagement.rollback(patch.getPatchData());
Executors.newSingleThreadExecutor().execute(new Runnable() {
@Override
public void run() {
try {
applyChanges(toUpdate);
} catch (Exception e) {
throw new PatchException("Unable to rollback patch " + patch.getPatchData().getId() + ": " + e.getMessage(), e);
}
patch.setResult(null);
File file = new File(patchDir, result.getPatchData().getId() + ".patch.result");
if (isStandaloneChild) {
file = new File(patchDir, result.getPatchData().getId() + "." + System.getProperty("karaf.name") + ".patch.result");
}
file.delete();
}
});
}
}
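
For the rollup branch above, the interesting part after patchManagement.rollback(...) is how the restart is signalled: two system properties and a clean_cache marker file. Below is a condensed, hypothetical helper illustrating that decision - the property and file names are taken from the code above, but the helper class itself is not part of the project.

import java.io.File;
import java.io.IOException;

public class RestartSignalSketch {

    /** Signals how Karaf should be restarted after a rollup patch rollback. */
    static void signalRestart(boolean jvmRestartNeeded, File karafData) throws IOException {
        if (jvmRestartNeeded) {
            // the launcher only honours karaf.restart.jvm when it advertised support for it
            if (Boolean.getBoolean("karaf.restart.jvm.supported")) {
                System.setProperty("karaf.restart.jvm", "true");
                System.out.println("Rolled back. Restarting Karaf...");
            } else {
                System.out.println("Rolled back. Shutting down Karaf, please restart...");
            }
        } else {
            // an OSGi framework restart is enough
            System.setProperty("karaf.restart", "true");
        }
        // force the bundle cache to be cleared on the next start
        new File(karafData, "clean_cache").createNewFile();
        // the caller then stops the system bundle: bundleContext.getBundle(0L).stop();
    }
}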
use of io.fabric8.karaf.checks.Check in project fabric8 by jboss-fuse.
the class ServiceImpl method bundleUpdatesInPatch.
/**
* Returns a list of {@link BundleUpdate} for a single patch, taking into account already discovered updates.
* @param patch the patch being analyzed
* @param allBundles bundles currently installed in the framework
* @param bundleUpdateLocations out parameter that gathers update locations for bundles across patches
* @param history bundle version history used to resolve the original locations of installed bundles
* @param updatesForBundleKeys out parameter that gathers {@link BundleUpdate}s per bundle key across patches
* @param kind the kind of patch (rollup or non-rollup)
* @param coreBundles core (etc/startup.properties) bundles, keyed by stripped symbolic name
* @param featureUpdatesInThisPatch feature updates already discovered for this patch
* @return the list of bundle updates provided by this patch
* @throws IOException
*/
private List<BundleUpdate> bundleUpdatesInPatch(Patch patch, Bundle[] allBundles, Map<Bundle, String> bundleUpdateLocations, BundleVersionHistory history, Map<String, BundleUpdate> updatesForBundleKeys, PatchKind kind, Map<String, Bundle> coreBundles, List<FeatureUpdate> featureUpdatesInThisPatch) throws Exception {
List<BundleUpdate> updatesInThisPatch = new LinkedList<>();
// for a ROLLUP patch we can check which bundles AREN'T updated by this patch - we have to reinstall them
// at the same version as the existing one. "no update" means "requires installation after clearing the cache"
// Initially all bundles need an update. If we find an update in the patch, we remove its key from this map
Map<String, Bundle> updateNotRequired = new LinkedHashMap<>();
// // let's keep {symbolic name -> list of versions} mapping
// MultiMap<String, Version> allBundleVersions = new MultiMap<>();
// bundle location -> bundle key (symbolic name|updateable version)
Map<String, String> locationsOfBundleKeys = new HashMap<>();
for (Bundle b : allBundles) {
if (b.getSymbolicName() == null) {
continue;
}
Version v = b.getVersion();
Version updateableVersion = new Version(v.getMajor(), v.getMinor(), 0);
String key = String.format("%s|%s", stripSymbolicName(b.getSymbolicName()), updateableVersion.toString());
// the key combines the symbolic name with a version that may differ at the micro level only
if (!coreBundles.containsKey(stripSymbolicName(b.getSymbolicName()))) {
updateNotRequired.put(key, b);
} else {
// let's key core (etc/startup.properties) bundles by symbolic name only - there should be only
// one version per symbolic name
updateNotRequired.put(stripSymbolicName(b.getSymbolicName()), b);
}
// allBundleVersions.put(stripSymbolicName(b.getSymbolicName()), b.getVersion());
String location = b.getLocation();
if (location != null && location.startsWith("mvn:") && location.contains("//")) {
// special case for mvn:org.ops4j.pax.url/pax-url-wrap/2.4.7//uber
location = location.replace("//", "/jar/");
}
locationsOfBundleKeys.put(location, key);
}
// let's prepare a set of bundle keys that are part of features that will be updated/reinstalled - those
// bundle keys don't have to be reinstalled separately
Set<String> bundleKeysFromFeatures = new HashSet<>();
if (featureUpdatesInThisPatch != null) {
for (FeatureUpdate featureUpdate : featureUpdatesInThisPatch) {
if (featureUpdate.getName() != null) {
// this is either installation or update of single feature
String fName = featureUpdate.getName();
String fVersion = featureUpdate.getPreviousVersion();
Feature f = featuresService.getFeature(fName, fVersion);
for (BundleInfo bundleInfo : f.getBundles()) {
if (/*!bundleInfo.isDependency() && */
locationsOfBundleKeys.containsKey(bundleInfo.getLocation())) {
bundleKeysFromFeatures.add(locationsOfBundleKeys.get(bundleInfo.getLocation()));
}
}
for (Conditional cond : f.getConditional()) {
for (BundleInfo bundleInfo : cond.getBundles()) {
if (/*!bundleInfo.isDependency() && */
locationsOfBundleKeys.containsKey(bundleInfo.getLocation())) {
bundleKeysFromFeatures.add(locationsOfBundleKeys.get(bundleInfo.getLocation()));
}
}
}
}
}
}
for (String newLocation : patch.getPatchData().getBundles()) {
// [symbolicName, version] of the new bundle
String[] symbolicNameVersion = helper.getBundleIdentity(newLocation);
if (symbolicNameVersion == null || symbolicNameVersion[0] == null) {
continue;
}
String sn = stripSymbolicName(symbolicNameVersion[0]);
String vr = symbolicNameVersion[1];
Version newVersion = VersionTable.getVersion(vr);
Version updateableVersion = new Version(newVersion.getMajor(), newVersion.getMinor(), 0);
// this bundle update from a patch may be applied only to relevant bundle|updateable-version, not to
// *every* bundle with exact symbolic name
String key = null;
if (!coreBundles.containsKey(sn)) {
key = String.format("%s|%s", sn, updateableVersion.toString());
} else {
key = sn;
}
// if existing bundle is within this range, update is possible
VersionRange range = getUpdateableRange(patch, newLocation, newVersion);
if (coreBundles.containsKey(sn)) {
// so we lower the floor of the range - any installed version of a core bundle may be updated
if (range == null) {
range = new VersionRange(false, Version.emptyVersion, newVersion, true);
} else {
range = new VersionRange(false, Version.emptyVersion, range.getCeiling(), true);
}
} else if (range != null) {
// if range is specified on non core bundle, the key should be different - updateable
// version should be taken from range
key = String.format("%s|%s", sn, range.getFloor().toString());
}
Bundle bundle = updateNotRequired.get(key);
if (bundle == null && coreBundles.containsKey(sn)) {
bundle = updateNotRequired.get(sn);
}
if (bundle == null || range == null) {
// this patch ships a bundle that can't be used as an update for ANY currently installed bundle
if (kind == PatchKind.NON_ROLLUP) {
// which is strange, because non rollup patches should update existing bundles...
if (range == null) {
System.err.printf("Skipping bundle %s - unable to process bundle without a version range configuration%n", newLocation);
} else {
// range is fine, we simply didn't find installed bundle at all - bundle from patch
// will be stored in ${karaf.default.repository}, but not used as an update
}
}
continue;
}
Version oldVersion = bundle.getVersion();
if (range.contains(oldVersion)) {
String oldLocation = history.getLocation(bundle);
if ("org.ops4j.pax.url.mvn".equals(sn)) {
Artifact artifact = Utils.mvnurlToArtifact(newLocation, true);
if (artifact != null) {
URL location = new File(repository, String.format("org/ops4j/pax/url/pax-url-aether/%1$s/pax-url-aether-%1$s.jar", artifact.getVersion())).toURI().toURL();
newLocation = location.toString();
}
}
int startLevel = bundle.adapt(BundleStartLevel.class).getStartLevel();
int state = bundle.getState();
BundleUpdate update = new BundleUpdate(sn, newVersion.toString(), newLocation, oldVersion.toString(), oldLocation, startLevel, state);
if (bundleKeysFromFeatures.contains(key) || coreBundles.containsKey(sn)) {
update.setIndependent(false);
}
updatesInThisPatch.add(update);
updateNotRequired.remove(key);
if (coreBundles.containsKey(sn)) {
updateNotRequired.remove(sn);
}
// Merge result
BundleUpdate oldUpdate = updatesForBundleKeys.get(key);
if (oldUpdate != null) {
Version upv = null;
if (oldUpdate.getNewVersion() != null) {
upv = VersionTable.getVersion(oldUpdate.getNewVersion());
}
if (upv == null || upv.compareTo(newVersion) < 0) {
// this patch contains a newer update for the bundle than the one recorded so far
updatesForBundleKeys.put(key, update);
bundleUpdateLocations.put(bundle, newLocation);
}
} else {
// this is the first update of the bundle
updatesForBundleKeys.put(key, update);
bundleUpdateLocations.put(bundle, newLocation);
}
}
}
if (kind == PatchKind.ROLLUP) {
// for a ROLLUP patch, bundles that are NOT updated by the patch (e.g., ones coming from
// user features) still have to be (at least attempted to be) installed after restart.
for (Bundle b : updateNotRequired.values()) {
if (b.getSymbolicName() == null) {
continue;
}
String symbolicName = stripSymbolicName(b.getSymbolicName());
Version v = b.getVersion();
Version updateableVersion = new Version(v.getMajor(), v.getMinor(), 0);
String key = String.format("%s|%s", symbolicName, updateableVersion.toString());
int startLevel = b.adapt(BundleStartLevel.class).getStartLevel();
int state = b.getState();
BundleUpdate update = new BundleUpdate(symbolicName, null, null, v.toString(), history.getLocation(b), startLevel, state);
if (bundleKeysFromFeatures.contains(key) || coreBundles.containsKey(symbolicName)) {
// we don't have to install it separately
update.setIndependent(false);
}
updatesInThisPatch.add(update);
updatesForBundleKeys.put(key, update);
}
}
return updatesInThisPatch;
}
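
Most of the matching above revolves around the "bundle key" - the stripped symbolic name plus an "updateable" version truncated to major.minor.0 - and around normalizing mvn: URLs that have an empty type segment. Below is a small sketch of just those two rules; the helper methods and class are made up for illustration and are not part of the project.

import org.osgi.framework.Version;

public class BundleKeySketch {

    /** Builds the key used above: "symbolic-name|major.minor.0". */
    static String bundleKey(String symbolicName, Version v) {
        Version updateable = new Version(v.getMajor(), v.getMinor(), 0);
        return String.format("%s|%s", symbolicName, updateable);
    }

    /** Normalizes mvn: URLs with an empty type segment, e.g. mvn:g/a/1.0//uber -> mvn:g/a/1.0/jar/uber. */
    static String normalizeLocation(String location) {
        if (location != null && location.startsWith("mvn:") && location.contains("//")) {
            return location.replace("//", "/jar/");
        }
        return location;
    }

    public static void main(String[] args) {
        System.out.println(bundleKey("org.apache.camel.camel-core", new Version("2.15.1.redhat-621084")));
        // -> org.apache.camel.camel-core|2.15.0
        System.out.println(normalizeLocation("mvn:org.ops4j.pax.url/pax-url-wrap/2.4.7//uber"));
        // -> mvn:org.ops4j.pax.url/pax-url-wrap/2.4.7/jar/uber
    }
}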