Use of org.jboss.fuse.patch.management.BundleUpdate in project fuse-karaf by jboss-fuse.
The class FileBackupService, method backupDataFiles.
/**
 * Invoked just before the framework is restarted and the data/cache directory is removed. We copy the existing
 * data directories of current bundles and record which bundle$$version each backup belongs to.
 * @param result used to create backup directories.
 * @param pending whether a rollup patch installation or a rollback is pending.
 * @throws IOException
 */
@Override
public void backupDataFiles(PatchResult result, Pending pending) throws IOException {
Map<String, Bundle> bundlesWithData = new HashMap<>();
// bundle.getDataFile("xxx") creates the data dir if it doesn't exist - that's not what we want
String storageLocation = systemContext.getProperty("org.osgi.framework.storage");
if (storageLocation == null) {
Activator.log(LogService.LOG_INFO, "Can't determine \"org.osgi.framework.storage\" property value");
return;
}
File cacheDir = new File(storageLocation);
if (!cacheDir.isDirectory()) {
return;
}
for (Bundle b : systemContext.getBundles()) {
if (b.getSymbolicName() != null) {
String sn = Utils.stripSymbolicName(b.getSymbolicName());
if ("org.apache.karaf.features.core".equals(sn)) {
// we start with fresh features service state
continue;
}
// a bit of knowledge of how Felix works below...
File dataDir = new File(cacheDir, "bundle" + b.getBundleId() + "/data");
if (dataDir.isDirectory()) {
String key = String.format("%s$$%s", sn, b.getVersion().toString());
bundlesWithData.put(key, b);
}
}
}
// this properties file will be used to map a full symbolicName$$version to the location where bundle data
// is stored - the data must be restored during both R patch installation and rollback
Properties properties = new Properties();
String dirName = result.getPatchData().getId() + ".datafiles";
if (result.getParent() != null) {
dirName = result.getPatchData().getId() + "." + System.getProperty("karaf.name") + ".datafiles";
}
File dataBackupDir = new File(result.getPatchData().getPatchLocation(), dirName);
String prefix = pending == Pending.ROLLUP_INSTALLATION ? "install" : "rollback";
for (BundleUpdate update : result.getBundleUpdates()) {
// same update for both updated and reinstalled bundle
String key = String.format("%s$$%s", update.getSymbolicName(), pending == Pending.ROLLUP_INSTALLATION ? update.getPreviousVersion() : (update.getNewVersion() == null ? update.getPreviousVersion() : update.getNewVersion()));
if (bundlesWithData.containsKey(key)) {
File dataFileBackupDir = new File(dataBackupDir, prefix + "/" + key + "/data");
dataFileBackupDir.mkdirs();
final Bundle b = bundlesWithData.get(key);
FileUtils.copyDirectory(b.getDataFile(""), dataFileBackupDir, new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.isDirectory() || !b.getSymbolicName().equals("org.apache.felix.configadmin") || pathname.getName().endsWith(".config");
}
});
properties.setProperty(key, key);
properties.setProperty(String.format("%s$$%s", update.getSymbolicName(), update.getPreviousVersion()), key);
if (update.getNewVersion() != null) {
properties.setProperty(String.format("%s$$%s", update.getSymbolicName(), update.getNewVersion()), key);
}
}
}
FileOutputStream propsFile = new FileOutputStream(new File(dataBackupDir, "backup-" + prefix + ".properties"));
properties.store(propsFile, "Data files to restore after \"" + result.getPatchData().getId() + "\" " + (pending == Pending.ROLLUP_INSTALLATION ? "installation" : "rollback"));
propsFile.close();
}
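A note on the backup layout produced above: backup-<prefix>.properties maps every symbolicName$$version key (for the previous and, if present, the new version) to the directory name under <patch-id>.datafiles/<prefix>/ that holds the copied data. The following minimal sketch is not part of fuse-karaf - the class and method names are invented for illustration - and shows how that mapping could be read back to locate a bundle's backed-up data directory.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class BackupLookupSketch {
    // Hypothetical helper, not part of fuse-karaf - mirrors the key convention used by backupDataFiles() above.
    static File findBackupDataDir(File dataBackupDir, String prefix, String symbolicName, String version) throws IOException {
        Properties mapping = new Properties();
        try (FileInputStream in = new FileInputStream(new File(dataBackupDir, "backup-" + prefix + ".properties"))) {
            mapping.load(in);
        }
        String key = mapping.getProperty(symbolicName + "$$" + version);
        if (key == null) {
            // no data was backed up for this bundle/version
            return null;
        }
        return new File(dataBackupDir, prefix + "/" + key + "/data");
    }
}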
Use of org.jboss.fuse.patch.management.BundleUpdate in project fuse-karaf by jboss-fuse.
The class GitPatchManagementServiceImpl, method rollback.
@Override
public void rollback(PatchData patchData) {
Git fork = null;
try {
fork = gitPatchRepository.cloneRepository(gitPatchRepository.findOrCreateMainGitRepository(), true);
Ref installationBranch = null;
PatchKind kind = patchData.isRollupPatch() ? PatchKind.ROLLUP : PatchKind.NON_ROLLUP;
switch(kind) {
case ROLLUP:
{
Activator.log2(LogService.LOG_INFO, String.format("Rolling back rollup patch \"%s\"", patchData.getId()));
// rolling back a rollup patch should rebase (cherry-pick) all user commits made after the current baseline
// onto the previous baseline
RevTag currentBaseline = gitPatchRepository.findCurrentBaseline(fork);
RevCommit c1 = new RevWalk(fork.getRepository()).parseCommit(fork.getRepository().resolve(currentBaseline.getTagName() + "^{commit}"));
// remember the commit to discover P patch tags installed on top of the rolled-back baseline
RevCommit since = c1;
RevCommit c2 = new RevWalk(fork.getRepository()).parseCommit(fork.getRepository().resolve("HEAD"));
RevCommit to = c2;
Iterable<RevCommit> mainChangesSinceRollupPatch = fork.log().addRange(c1, c2).call();
List<RevCommit> userChanges = new LinkedList<>();
for (RevCommit rc : mainChangesSinceRollupPatch) {
if (isUserChangeCommit(rc)) {
userChanges.add(rc);
}
}
if (env == EnvType.STANDALONE) {
// remove the tag
fork.tagDelete().setTags(currentBaseline.getTagName()).call();
}
// baselines are stacked on each other
RevTag previousBaseline = gitPatchRepository.findNthPreviousBaseline(fork, env == EnvType.STANDALONE ? 0 : 1);
c1 = new RevWalk(fork.getRepository()).parseCommit(fork.getRepository().resolve(previousBaseline.getTagName() + "^{commit}"));
// hard reset of the main patch branch - to point to the commit of the previous baseline
fork.reset().setMode(ResetCommand.ResetType.HARD).setRef(previousBaseline.getTagName() + "^{commit}").call();
// reapply those user changes that are not conflicting
ListIterator<RevCommit> it = userChanges.listIterator(userChanges.size());
Status status = fork.status().call();
if (!status.isClean()) {
// unstage any garbage
fork.reset().setMode(ResetCommand.ResetType.MIXED).call();
for (String p : status.getModified()) {
gitPatchRepository.checkout(fork).addPath(p).call();
}
}
while (it.hasPrevious()) {
RevCommit userChange = it.previous();
CherryPickResult cpr = fork.cherryPick().include(userChange.getId()).setNoCommit(true).call();
// this time prefer user change on top of previous baseline - this change shouldn't be
// conflicting, because when rolling back, patch change was preferred over user change
handleCherryPickConflict(patchData.getPatchDirectory(), fork, cpr, userChange, true, PatchKind.ROLLUP, null, false, true);
// restore backed up content from the reapplied user change
String[] commitMessage = userChange.getFullMessage().split("\n\n");
if (commitMessage.length > 1) {
// we have original commit (that had conflicts) stored in this commit's full message
String ref = commitMessage[commitMessage.length - 1];
File backupDir = new File(patchesDir, patchData.getId() + ".backup");
if (isStandaloneChild()) {
backupDir = new File(patchesDir, patchData.getId() + "." + System.getProperty("karaf.name") + ".backup");
}
backupDir = new File(backupDir, ref);
if (backupDir.exists() && backupDir.isDirectory()) {
Activator.log2(LogService.LOG_DEBUG, String.format("Restoring content of %s", backupDir.getCanonicalPath()));
copyManagedDirectories(backupDir, karafBase, false, false, false);
}
}
gitPatchRepository.prepareCommit(fork, userChange.getFullMessage()).call();
}
// rollback should not lead to restoration of old patch management features
String productVersion = determineVersion(fork.getRepository().getWorkTree());
File featuresCfg = new File(fork.getRepository().getWorkTree(), "etc/org.apache.karaf.features.cfg");
if (featuresCfg.isFile()) {
if (setCurrentPatchManagementVersion(featuresCfg, productVersion)) {
// artificial updates to etc/startup.properties
String pmNew = String.format("mvn:org.jboss.fuse.modules.patch/patch-management/%s", bundleContext.getBundle().getVersion().toString());
String pmOld = String.format("mvn:org.jboss.fuse.modules.patch/patch-management/%s", productVersion);
BundleUpdate update = new BundleUpdate(null, null, pmNew, null, pmOld);
List<BundleUpdate> patchManagementUpdates = Collections.singletonList(update);
updateReferences(fork, "etc/startup.properties", "", Utils.collectLocationUpdates(patchManagementUpdates));
fork.add().addFilepattern("etc/org.apache.karaf.features.cfg").addFilepattern("etc/startup.properties").call();
gitPatchRepository.prepareCommit(fork, String.format(MARKER_BASELINE_REPLACE_PATCH_FEATURE_PATTERN, productVersion, bundleContext.getBundle().getVersion().toString())).call();
}
}
gitPatchRepository.push(fork);
if (env == EnvType.STANDALONE) {
// remove remote tag
fork.push().setRefSpecs(new RefSpec().setSource(null).setDestination("refs/tags/" + currentBaseline.getTagName())).call();
}
// remove tags related to non-rollup patches installed between the
// rolled-back baseline and the previous HEAD, because rolling back to the previous rollup patch
// (previous baseline) is effectively equal to starting from a fresh baseline
RevWalk walk = new RevWalk(fork.getRepository());
Map<String, RevTag> tags = gitPatchRepository.findTagsBetween(fork, since, to);
for (Map.Entry<String, RevTag> entry : tags.entrySet()) {
if (entry.getKey().startsWith("patch-")) {
fork.tagDelete().setTags(entry.getKey()).call();
fork.push().setRefSpecs(new RefSpec().setSource(null).setDestination("refs/tags/" + entry.getKey())).call();
// and remove karaf base from tracked patch result, or even remove the result itself
String patchId = entry.getKey().substring("patch-".length());
Patch patch = loadPatch(new PatchDetailsRequest(patchId));
if (patch != null && patch.getResult() != null) {
boolean removed = false;
for (Iterator<String> iterator = patch.getResult().getKarafBases().iterator(); iterator.hasNext(); ) {
String base = iterator.next();
if (base.contains("|")) {
String[] kb = base.split("\\s*\\|\\s*");
String containerId = kb[0];
if (System.getProperty("karaf.name", "").equals(containerId)) {
iterator.remove();
removed = true;
break;
}
}
}
if (removed) {
if (patch.getResult().getKarafBases().size() == 0) {
// just remove the result entirely
new File(patch.getPatchData().getPatchLocation(), patchId + ".patch.result").delete();
} else {
patch.getResult().store();
}
if (isStandaloneChild()) {
File file = new File(patch.getPatchData().getPatchLocation(), patchId + "." + System.getProperty("karaf.name") + ".patch.result");
if (file.isFile()) {
file.delete();
}
}
}
}
}
}
// HEAD of main patch branch after reset and cherry-picks
c2 = new RevWalk(fork.getRepository()).parseCommit(fork.getRepository().resolve("HEAD"));
// applyChanges(fork, c1, c2);
applyChanges(fork, false);
break;
}
case NON_ROLLUP:
{
Activator.log2(LogService.LOG_INFO, String.format("Rolling back non-rollup patch \"%s\"", patchData.getId()));
// rolling back a non-rollup patch is a revert of the patch commit and removal of patch tag
String patchTagName = String.format("patch-%s", env == EnvType.STANDALONE ? patchData.getId() : patchData.getId() + "-" + gitPatchRepository.getStandaloneChildkarafName());
ObjectId oid = fork.getRepository().resolve(patchTagName);
if (oid == null) {
throw new PatchException(String.format("Can't find installed patch (tag %s is missing)", patchTagName));
}
RevCommit commit = new RevWalk(fork.getRepository()).parseCommit(oid);
RevertCommand revertCommand = fork.revert().include(commit);
RevCommit reverted = revertCommand.call();
if (reverted == null) {
List<String> unmerged = revertCommand.getUnmergedPaths();
Activator.log2(LogService.LOG_WARNING, "Problem rolling back patch \"" + patchData.getId() + "\". The following files were updated later:");
for (String path : unmerged) {
Activator.log2(LogService.LOG_WARNING, " - " + path);
}
RevWalk walk = new RevWalk(fork.getRepository());
RevCommit head = walk.parseCommit(fork.getRepository().resolve("HEAD"));
Map<String, RevTag> tags = gitPatchRepository.findTagsBetween(fork, commit, head);
List<RevTag> laterPatches = new LinkedList<>();
if (tags.size() > 0) {
for (Map.Entry<String, RevTag> tag : tags.entrySet()) {
if (tag.getKey().startsWith("patch-")) {
laterPatches.add(tag.getValue());
}
}
Activator.log2(LogService.LOG_INFO, "The following patches were installed after \"" + patchData.getId() + "\":");
for (RevTag t : laterPatches) {
String message = " - " + t.getTagName().substring("patch-".length());
RevObject object = walk.peel(t);
if (object != null) {
RevCommit c = walk.parseCommit(object.getId());
String date = GitPatchRepository.FULL_DATE.format(new Date(c.getCommitTime() * 1000L));
message += " (" + date + ")";
}
Activator.log2(LogService.LOG_INFO, message);
}
}
return;
}
// TODO: should we restore the backup possibly created when installing a P patch?
// remove the tag
fork.tagDelete().setTags(patchTagName).call();
gitPatchRepository.push(fork);
// remove remote tag
fork.push().setRefSpecs(new RefSpec().setSource(null).setDestination(String.format("refs/tags/%s", patchTagName))).call();
// HEAD of main patch branch after reset and cherry-picks
RevCommit c = new RevWalk(fork.getRepository()).parseCommit(fork.getRepository().resolve("HEAD"));
applyChanges(fork, c.getParent(0), c);
break;
}
}
} catch (IOException | GitAPIException e) {
throw new PatchException(e.getMessage(), e);
} finally {
if (fork != null) {
gitPatchRepository.closeRepository(fork, true);
}
}
}
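The rollback above repeatedly deletes a local patch-* tag and then pushes the deletion to the remote by using a RefSpec with a null source. The standalone JGit sketch below isolates that pattern; the class name is invented and it assumes a default remote is configured on the repository.

import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.transport.RefSpec;

public class TagCleanupSketch {
    // Hypothetical helper - deletes a tag locally and on the default remote,
    // the same pattern the rollback above applies to tags of rolled-back patches.
    static void deleteTagEverywhere(Git git, String tagName) throws GitAPIException {
        git.tagDelete().setTags(tagName).call();
        // a RefSpec with a null source and a tag destination pushes a deletion of that tag
        git.push()
                .setRefSpecs(new RefSpec().setSource(null).setDestination("refs/tags/" + tagName))
                .call();
    }
}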
Use of org.jboss.fuse.patch.management.BundleUpdate in project fuse-karaf by jboss-fuse.
The class PatchServiceImpl, method install.
/**
 * <p>Main installation method. Installing a patch in standalone mode is a matter of a correct merge (cherry-pick, merge,
 * rebase) of the patch branch into the <code>master</code> branch.</p>
 * <p>Static changes are handled by git, runtime changes (bundles, features) are handled depending on patch type:<ul>
 * <li>Rollup: clear OSGi bundle cache, reinstall features that were installed after restart</li>
 * <li>Non-Rollup: update bundles, generate overrides.properties and update scripts to reference new versions</li>
 * </ul></p>
 * <p>For Rollup patches we don't update bundles - we clear the bundle cache instead.</p>
 * @param patches patches to install
 * @param simulate whether to only simulate the installation, without modifying files or runtime state
 * @param synchronous whether to apply runtime changes on the calling thread instead of a background thread
 * @return map of patch id to its {@link PatchResult}
 */
private Map<String, PatchResult> install(final Collection<Patch> patches, final boolean simulate, boolean synchronous) {
PatchKind kind = checkConsistency(patches);
checkPrerequisites(patches);
checkStandaloneChild(patches);
String transaction = null;
try {
// Compute individual patch results (patchId -> Result)
final Map<String, PatchResult> results = new LinkedHashMap<String, PatchResult>();
// current state of the framework
Bundle[] allBundles = bundleContext.getBundles();
// bundle -> url to update the bundle from (used for non-rollup patch)
final Map<Bundle, String> bundleUpdateLocations = new HashMap<>();
/* A "key" is name + "update'able version". Such version is current version with micro version == 0 */
// [symbolic name|updateable-version] -> newest update for the bundle out of all installed patches
final Map<String, BundleUpdate> updatesForBundleKeys = new LinkedHashMap<>();
// [feature name|updateable-version] -> newest update for the feature out of all installed patches
final Map<String, FeatureUpdate> updatesForFeatureKeys = new LinkedHashMap<>();
final List<String> overridesForFeatureKeys = new LinkedList<>();
// symbolic name -> version -> location
final BundleVersionHistory history = createBundleVersionHistory();
// beginning the installation transaction = creating a temporary branch in git
transaction = this.patchManagement.beginInstallation(kind);
// bundles from etc/startup.properties + felix.framework = all bundles not managed by features
// these bundles will be treated in special way
// symbolic name -> Bundle
final Map<String, Bundle> coreBundles = helper.getCoreBundles(allBundles);
// runtime info is prepared to apply runtime changes and static info is prepared to update KARAF_HOME files
for (Patch patch : patches) {
List<FeatureUpdate> featureUpdatesInThisPatch = null;
List<String> featureOverridesInThisPatch = null;
if (kind == PatchKind.ROLLUP) {
// list of feature updates for the current patch
featureUpdatesInThisPatch = featureUpdatesInPatch(patch, updatesForFeatureKeys, kind);
helper.sortFeatureUpdates(featureUpdatesInThisPatch);
} else {
// list of feature overrides (new Karaf 4.2 feature override mechanism)
// this is collected for the purpose of summary, not to collect information needed
// for actual override
featureOverridesInThisPatch = featureOverridesInPatch(patch, kind);
overridesForFeatureKeys.addAll(featureOverridesInThisPatch);
}
// list of bundle updates for the current patch - for ROLLUP patch, we minimize the list of bundles
// to "restore" (install after clearing data/cache) by not including bundles that are
// already updated as part of features update
List<BundleUpdate> bundleUpdatesInThisPatch = bundleUpdatesInPatch(patch, allBundles, bundleUpdateLocations, history, updatesForBundleKeys, kind, coreBundles, featureUpdatesInThisPatch);
// prepare patch result before doing runtime changes
PatchResult result = null;
if (patch.getResult() != null) {
result = patch.getResult();
if (patchManagement.isStandaloneChild()) {
// ENTESB-5120: "result" is actually a result of patch installation in root container
// we need dedicated result for admin:create based child container
PatchResult childResult = new PatchResult(patch.getPatchData(), simulate, System.currentTimeMillis(), bundleUpdatesInThisPatch, featureUpdatesInThisPatch, featureOverridesInThisPatch, result);
result.addChildResult(System.getProperty("karaf.name"), childResult);
}
} else {
result = new PatchResult(patch.getPatchData(), simulate, System.currentTimeMillis(), bundleUpdatesInThisPatch, featureUpdatesInThisPatch, featureOverridesInThisPatch);
}
result.getKarafBases().add(String.format("%s | %s", System.getProperty("karaf.name"), System.getProperty("karaf.base")));
results.put(patch.getPatchData().getId(), result);
patch.setResult(result);
// each patch may change files, we're not updating the main files yet - it'll be done when
// install transaction is committed
patchManagement.install(transaction, patch, bundleUpdatesInThisPatch);
}
// One special case
if (kind == PatchKind.NON_ROLLUP) {
// for rollup patch, this bundle will be installed from scratch
for (Map.Entry<Bundle, String> entry : bundleUpdateLocations.entrySet()) {
Bundle bundle = entry.getKey();
if (bundle.getSymbolicName() != null && "org.ops4j.pax.url.mvn".equals(stripSymbolicName(bundle.getSymbolicName()))) {
// handle this bundle specially - update it here
URL location = new URL(entry.getValue());
System.out.printf("Special update of bundle \"%s\" from \"%s\"%n", bundle.getSymbolicName(), location);
if (!simulate) {
update(bundle, location);
bundle.start();
}
// replace location - to be stored in result
bundleUpdateLocations.put(bundle, location.toString());
}
}
}
if (kind == PatchKind.ROLLUP) {
Presentation.displayFeatureUpdates(updatesForFeatureKeys.values(), true);
} else {
Presentation.displayFeatureOverrides(overridesForFeatureKeys, true);
}
// effectively, we will update all the bundles from this list - even if some bundles will be "updated"
// as part of feature installation
Presentation.displayBundleUpdates(updatesForBundleKeys.values(), true);
// then required repositories, features and bundles will be reinstalled
if (kind == PatchKind.ROLLUP) {
if (!simulate) {
if (patches.size() == 1) {
Patch patch = patches.iterator().next();
PatchResult result = results.get(patch.getPatchData().getId());
// single shot
if (patchManagement.isStandaloneChild()) {
backupService.backupDataFiles(result.getChildPatches().get(System.getProperty("karaf.name")), Pending.ROLLUP_INSTALLATION);
} else {
backupService.backupDataFiles(result, Pending.ROLLUP_INSTALLATION);
}
for (Bundle b : coreBundles.values()) {
if (b.getSymbolicName() != null && Utils.stripSymbolicName(b.getSymbolicName()).equals("org.apache.felix.fileinstall")) {
b.stop(Bundle.STOP_TRANSIENT);
break;
}
}
// update KARAF_HOME
patchManagement.commitInstallation(transaction);
if (patchManagement.isStandaloneChild()) {
result.getChildPatches().get(System.getProperty("karaf.name")).setPending(Pending.ROLLUP_INSTALLATION);
} else {
result.setPending(Pending.ROLLUP_INSTALLATION);
}
result.store();
// Some updates need a full JVM restart.
if (isJvmRestartNeeded(results)) {
boolean handlesFullRestart = Boolean.getBoolean("karaf.restart.jvm.supported");
if (handlesFullRestart) {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " installed. Restarting Karaf..");
// KARAF-5179 - we need both properties set to true
System.setProperty("karaf.restart", "true");
System.setProperty("karaf.restart.jvm", "true");
} else {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " installed. Shutting down Karaf, please restart...");
}
} else {
// We don't need a JVM restart, so let's just do an OSGi framework restart
System.setProperty("karaf.restart", "true");
}
File karafData = new File(bundleContext.getProperty("karaf.data"));
File cleanCache = new File(karafData, "clean_cache");
cleanCache.createNewFile();
Thread.currentThread().setContextClassLoader(bundleContext.getBundle(0L).adapt(BundleWiring.class).getClassLoader());
bundleContext.getBundle(0L).stop();
// stop/shutdown occurs on another thread
}
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
patchManagement.rollbackInstallation(transaction);
}
return results;
}
// update KARAF_HOME
if (!simulate) {
patchManagement.commitInstallation(transaction);
} else {
patchManagement.rollbackInstallation(transaction);
}
if (!simulate) {
Runnable task = () -> {
try {
// update bundles
applyChanges(bundleUpdateLocations);
for (String featureOverride : overridesForFeatureKeys) {
System.out.println("overriding feature: " + featureOverride);
}
if (overridesForFeatureKeys.size() > 0) {
System.out.println("refreshing features");
featuresService.refreshFeatures(EnumSet.noneOf(FeaturesService.Option.class));
}
// persist results of all installed patches
for (Patch patch : patches) {
PatchResult result = results.get(patch.getPatchData().getId());
System.out.printf("Summary of patch %s:%n", patch.getPatchData().getId());
PatchReport report = patch.getResult().getReport();
System.out.printf(" - Bundles updated: %d%n", report.getUpdatedBundles());
System.out.printf(" - Features updated: %d%n", report.getUpdatedFeatures());
System.out.printf(" - Features overriden: %d%n", report.getOverridenFeatures());
System.out.flush();
result.store();
}
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
}
};
if (synchronous) {
task.run();
} else {
new Thread(task).start();
}
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
}
return results;
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
if (transaction != null && patchManagement != null) {
patchManagement.rollbackInstallation(transaction);
}
throw new PatchException(e.getMessage(), e);
} finally {
System.out.flush();
}
}
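The comment in install() defines a bundle "key" as the symbolic name plus its "updateable version", i.e. the current version with the micro component set to 0. The sketch below illustrates that convention with org.osgi.framework.Version; the helper name is invented and fuse-karaf's actual key-building code may differ in details.

import org.osgi.framework.Version;

public class BundleKeySketch {
    // Hypothetical helper - builds a "symbolic name|updateable version" key as described above:
    // the updateable version is the current version with the micro part zeroed and the qualifier dropped.
    static String updateableKey(String symbolicName, Version current) {
        Version updateable = new Version(current.getMajor(), current.getMinor(), 0);
        return symbolicName + "|" + updateable;
    }
}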
Use of org.jboss.fuse.patch.management.BundleUpdate in project fuse-karaf by jboss-fuse.
The class PatchServiceImpl, method resumePendingPatchTasks.
/**
 * Upon startup (activation), we check if there are any *.patch.pending files. If there are, we finish the
 * installation.
 */
private void resumePendingPatchTasks() {
LOG.info("Performing \"resume pending patch tasks\"");
try {
File[] pendingPatches = patchDir.listFiles(pathname -> pathname.exists() && pathname.getName().endsWith(".pending"));
if (pendingPatches == null || pendingPatches.length == 0) {
return;
}
for (File pending : pendingPatches) {
Pending what = Pending.valueOf(FileUtils.readFileToString(pending, "UTF-8"));
String name = pending.getName().replaceFirst("\\.pending$", "");
if (patchManagement.isStandaloneChild()) {
if (name.endsWith("." + System.getProperty("karaf.name") + ".patch")) {
name = name.replaceFirst("\\." + System.getProperty("karaf.name"), "");
} else {
continue;
}
}
File patchFile = new File(pending.getParentFile(), name);
if (!patchFile.isFile()) {
System.out.println("Ignoring patch result file: " + patchFile.getName());
continue;
}
PatchData patchData = PatchData.load(new FileInputStream(patchFile));
Patch patch = patchManagement.loadPatch(new PatchDetailsRequest(patchData.getId()));
System.out.printf("Resume %s of %spatch \"%s\"%n", what == Pending.ROLLUP_INSTALLATION ? "installation" : "rollback", patch.getPatchData().isRollupPatch() ? "rollup " : "", patch.getPatchData().getId());
PatchResult result = patch.getResult();
if (patchManagement.isStandaloneChild()) {
result = result.getChildPatches().get(System.getProperty("karaf.name"));
if (result == null) {
System.out.println("Ignoring patch result file: " + patchFile.getName());
continue;
}
}
// feature time
Set<String> newRepositories = new LinkedHashSet<>();
Set<String> features = new LinkedHashSet<>();
for (FeatureUpdate featureUpdate : result.getFeatureUpdates()) {
if (featureUpdate.getName() == null && featureUpdate.getPreviousRepository() != null) {
// feature was not shipped by patch
newRepositories.add(featureUpdate.getPreviousRepository());
} else if (featureUpdate.getNewRepository() == null) {
// feature was not changed by patch
newRepositories.add(featureUpdate.getPreviousRepository());
features.add(String.format("%s|%s", featureUpdate.getName(), featureUpdate.getPreviousVersion()));
} else {
// feature was shipped by patch
if (what == Pending.ROLLUP_INSTALLATION) {
newRepositories.add(featureUpdate.getNewRepository());
features.add(String.format("%s|%s", featureUpdate.getName(), featureUpdate.getNewVersion()));
} else {
newRepositories.add(featureUpdate.getPreviousRepository());
features.add(String.format("%s|%s", featureUpdate.getName(), featureUpdate.getPreviousVersion()));
}
}
}
System.out.println("Restoring feature repositories");
for (String repo : newRepositories) {
try {
URI repositoryUri = URI.create(repo);
if (featuresService.getRepository(repositoryUri) == null) {
System.out.println("Restoring feature repository: " + repo);
featuresService.addRepository(repositoryUri);
}
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace(System.err);
System.err.flush();
}
}
Set<String> installedFeatures = null;
try {
installedFeatures = Arrays.stream(featuresService.listInstalledFeatures()).map(f -> String.format("%s|%s", f.getName(), f.getVersion())).collect(Collectors.toSet());
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace(System.err);
System.err.flush();
}
EnumSet<FeaturesService.Option> options = EnumSet.noneOf(FeaturesService.Option.class);
Set<String> toInstall = new LinkedHashSet<>();
System.out.println("Restoring features");
for (String f : features) {
if (installedFeatures == null || !installedFeatures.contains(f)) {
String[] fv = f.split("\\|");
String fid = String.format("%s/%s", fv[0], fv[1]);
System.out.printf("Restoring feature %s%n", fid);
toInstall.add(fid);
}
}
try {
if (!toInstall.isEmpty()) {
featuresService.installFeatures(toInstall, options);
}
System.out.println("Refreshing features service");
featuresService.refreshFeatures(options);
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace(System.err);
System.err.flush();
}
for (BundleUpdate update : result.getBundleUpdates()) {
if (!update.isIndependent()) {
continue;
}
String location = null;
if (update.getNewVersion() == null) {
System.out.printf("Restoring bundle %s from %s%n", update.getSymbolicName(), update.getPreviousLocation());
location = update.getPreviousLocation();
} else {
if (what == Pending.ROLLUP_INSTALLATION) {
System.out.printf("Updating bundle %s from %s%n", update.getSymbolicName(), update.getNewLocation());
location = update.getNewLocation();
} else {
System.out.printf("Downgrading bundle %s from %s%n", update.getSymbolicName(), update.getPreviousLocation());
location = update.getPreviousLocation();
}
}
try {
Bundle b = bundleContext.installBundle(location);
if (update.getStartLevel() > -1) {
b.adapt(BundleStartLevel.class).setStartLevel(update.getStartLevel());
}
switch(update.getState()) {
// ?
case Bundle.UNINSTALLED:
case Bundle.INSTALLED:
case Bundle.STARTING:
case Bundle.STOPPING:
break;
case Bundle.RESOLVED:
// ?bundleContext.getBundle(0L).adapt(org.osgi.framework.wiring.FrameworkWiring.class).resolveBundles(...);
break;
case Bundle.ACTIVE:
b.start();
break;
}
} catch (BundleException e) {
System.err.println(" - " + e.getMessage());
// e.printStackTrace(System.err);
System.err.flush();
}
}
pending.delete();
System.out.printf("%spatch \"%s\" %s successfully%n", patch.getPatchData().isRollupPatch() ? "Rollup " : "", patchData.getId(), what == Pending.ROLLUP_INSTALLATION ? "installed" : "rolled back");
if (what == Pending.ROLLUP_INSTALLATION) {
System.out.printf("Summary of patch %s:%n", patch.getPatchData().getId());
PatchReport report = patch.getResult().getReport();
System.out.printf(" - Bundles updated: %d%n", report.getUpdatedBundles());
System.out.printf(" - Features updated: %d%n", report.getUpdatedFeatures());
System.out.printf(" - Features overriden: %d%n", report.getOverridenFeatures());
System.out.printf("Detailed report: %s%n", new File(patch.getPatchData().getPatchLocation(), patch.getPatchData().getId() + ".patch.result.html").getCanonicalPath());
System.out.flush();
}
if (what == Pending.ROLLUP_ROLLBACK) {
List<String> bases = patch.getResult().getKarafBases();
bases.removeIf(s -> s.startsWith(System.getProperty("karaf.name")));
result.setPending(null);
patch.getResult().store();
if (patch.getResult().getKarafBases().size() == 0) {
File file = new File(patchDir, patchData.getId() + ".patch.result");
file.delete();
}
if (patchManagement.isStandaloneChild()) {
File file = new File(patchDir, patchData.getId() + "." + System.getProperty("karaf.name") + ".patch.result");
if (file.isFile()) {
file.delete();
}
}
}
}
} catch (IOException e) {
LOG.error("Error resuming a patch: " + e.getMessage(), e);
}
}
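When restoring features, resumePendingPatchTasks() tracks feature identifiers internally as "name|version" pairs and converts them to the "name/version" form accepted by FeaturesService.installFeatures(). A small sketch of that conversion - the helper is hypothetical and not part of the patch service:

import java.util.LinkedHashSet;
import java.util.Set;

public class FeatureIdSketch {
    // Hypothetical helper - converts internal "name|version" entries into the
    // "name/version" ids passed to FeaturesService.installFeatures() above.
    static Set<String> toFeatureIds(Set<String> nameVersionPairs) {
        Set<String> ids = new LinkedHashSet<>();
        for (String pair : nameVersionPairs) {
            String[] nv = pair.split("\\|");
            ids.add(nv[0] + "/" + nv[1]);
        }
        return ids;
    }
}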
Use of org.jboss.fuse.patch.management.BundleUpdate in project fuse-karaf by jboss-fuse.
The class PatchServiceImpl, method rollback.
@Override
public void rollback(final Patch patch, boolean simulate, boolean force) throws PatchException {
final PatchResult result = !patchManagement.isStandaloneChild() ? patch.getResult() : patch.getResult().getChildPatches().get(System.getProperty("karaf.name"));
if (result == null) {
throw new PatchException("Patch " + patch.getPatchData().getId() + " is not installed");
}
if (patch.getPatchData().isRollupPatch()) {
// we already have the "state" (feature repositories, features, bundles and their states, datafiles
// and start-level info) stored in *.result file
Presentation.displayFeatureUpdates(result.getFeatureUpdates(), false);
Presentation.displayBundleUpdates(result.getBundleUpdates(), false);
try {
if (!simulate) {
// let's back up data files before configadmin detects changes to etc/* files.
backupService.backupDataFiles(result, Pending.ROLLUP_ROLLBACK);
for (Bundle b : this.bundleContext.getBundles()) {
if (b.getSymbolicName() != null && Utils.stripSymbolicName(b.getSymbolicName()).equals("org.apache.felix.fileinstall")) {
b.stop(Bundle.STOP_TRANSIENT);
break;
}
}
patchManagement.rollback(patch.getPatchData());
result.setPending(Pending.ROLLUP_ROLLBACK);
if (patchManagement.isStandaloneChild()) {
result.getParent().store();
} else {
result.store();
}
if (isJvmRestartNeeded(result)) {
boolean handlesFullRestart = Boolean.getBoolean("karaf.restart.jvm.supported");
if (handlesFullRestart) {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " rolled back. Restarting Karaf..");
// KARAF-5179 - we need both properties set to true
System.setProperty("karaf.restart", "true");
System.setProperty("karaf.restart.jvm", "true");
} else {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " rolled back. Shutting down Karaf, please restart...");
}
} else {
// We don't need a JVM restart, so let's just do an OSGi framework restart
System.setProperty("karaf.restart", "true");
}
File karafData = new File(bundleContext.getProperty("karaf.data"));
File cleanCache = new File(karafData, "clean_cache");
cleanCache.createNewFile();
Thread.currentThread().setContextClassLoader(bundleContext.getBundle(0L).adapt(BundleWiring.class).getClassLoader());
bundleContext.getBundle(0L).stop();
// stop/shutdown occurs on another thread
return;
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
return;
}
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
throw new PatchException(e.getMessage(), e);
}
}
// continue with NON_ROLLUP patch
// current state of the framework
Bundle[] allBundles = bundleContext.getBundles();
// check if all the bundles that were updated in patch are available (installed)
List<BundleUpdate> badUpdates = new ArrayList<BundleUpdate>();
for (BundleUpdate update : result.getBundleUpdates()) {
boolean found = false;
Version v = Version.parseVersion(update.getNewVersion() == null ? update.getPreviousVersion() : update.getNewVersion());
for (Bundle bundle : allBundles) {
if (bundle.getSymbolicName() == null || update.getSymbolicName() == null) {
continue;
}
if (stripSymbolicName(bundle.getSymbolicName()).equals(stripSymbolicName(update.getSymbolicName())) && bundle.getVersion().equals(v)) {
found = true;
break;
}
}
if (!found) {
badUpdates.add(update);
}
}
if (!badUpdates.isEmpty() && !force) {
StringBuilder sb = new StringBuilder();
sb.append("Unable to rollback patch ").append(patch.getPatchData().getId()).append(" because of the following missing bundles:\n");
for (BundleUpdate up : badUpdates) {
String version = up.getNewVersion() == null ? up.getPreviousVersion() : up.getNewVersion();
sb.append(" - ").append(up.getSymbolicName()).append("/").append(version).append("\n");
}
throw new PatchException(sb.toString());
}
if (!simulate) {
// bundle -> old location of the bundle to downgrade from
final Map<Bundle, String> toUpdate = new HashMap<Bundle, String>();
for (BundleUpdate update : result.getBundleUpdates()) {
Version v = Version.parseVersion(update.getNewVersion() == null ? update.getPreviousVersion() : update.getNewVersion());
for (Bundle bundle : allBundles) {
if (bundle.getSymbolicName() == null || update.getSymbolicName() == null) {
continue;
}
if (stripSymbolicName(bundle.getSymbolicName()).equals(stripSymbolicName(update.getSymbolicName())) && bundle.getVersion().equals(v)) {
toUpdate.put(bundle, update.getPreviousLocation());
}
}
}
final boolean isStandaloneChild = patchManagement.isStandaloneChild();
patchManagement.rollback(patch.getPatchData());
Executors.newSingleThreadExecutor().execute(() -> {
try {
applyChanges(toUpdate);
for (String featureOverride : result.getFeatureOverrides()) {
System.out.println("removing overriden feature: " + featureOverride);
}
if (result.getFeatureOverrides().size() > 0) {
System.out.println("refreshing features");
featuresService.refreshFeatures(EnumSet.noneOf(FeaturesService.Option.class));
}
} catch (Exception e) {
throw new PatchException("Unable to rollback patch " + patch.getPatchData().getId() + ": " + e.getMessage(), e);
}
try {
List<String> bases = patch.getResult().getKarafBases();
bases.removeIf(s -> s.startsWith(System.getProperty("karaf.name")));
patch.getResult().store();
if (patch.getResult().getKarafBases().size() == 0) {
File file = new File(patchDir, result.getPatchData().getId() + ".patch.result");
file.delete();
patch.setResult(null);
}
if (patchManagement.isStandaloneChild()) {
File file = new File(patchDir, result.getPatchData().getId() + "." + System.getProperty("karaf.name") + ".patch.result");
if (file.isFile()) {
file.delete();
}
}
} catch (Exception e) {
LOG.warn("Problem updating metadata for patch \"" + patch.getPatchData().getId() + "\": " + e.getMessage());
}
});
}
}
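Both loops in the non-rollup branch above match installed bundles against BundleUpdate entries by comparing stripped symbolic names and the parsed target version (the new version if present, otherwise the previous one). The sketch below factors that check into a helper; the class is hypothetical and the Utils import path is assumed from the usages shown above.

import org.jboss.fuse.patch.management.BundleUpdate;
import org.jboss.fuse.patch.management.Utils;
import org.osgi.framework.Bundle;
import org.osgi.framework.Version;

public class UpdateMatchSketch {
    // Hypothetical helper - mirrors the matching logic of the rollback method above.
    static boolean matches(Bundle bundle, BundleUpdate update) {
        if (bundle.getSymbolicName() == null || update.getSymbolicName() == null) {
            return false;
        }
        Version v = Version.parseVersion(update.getNewVersion() == null
                ? update.getPreviousVersion() : update.getNewVersion());
        return Utils.stripSymbolicName(bundle.getSymbolicName())
                .equals(Utils.stripSymbolicName(update.getSymbolicName()))
                && bundle.getVersion().equals(v);
    }
}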