Use of org.jboss.fuse.patch.management.PatchException in the fuse-karaf project (by jboss-fuse):
class GitPatchManagementServiceImpl, method install.
/**
 * Installs a single patch inside a pending installation transaction (a temporary Git branch).
 * For {@code ROLLUP} patches, the rollup commit is cherry-picked onto the transaction branch,
 * overrides are reset, all user changes between the previous baseline and main patch branch are
 * reapplied (conflicts prefer the patch version), obsolete P-patch tags are removed and an HTML
 * diff report is generated. For {@code NON_ROLLUP} patches, the patch commit is cherry-picked,
 * file references are updated and the result is tagged.
 *
 * @param transaction id of a pending transaction created by {@code beginInstallation()}
 * @param patch the patch to install
 * @param bundleUpdatesInThisPatch bundle updates whose new locations should be written into
 *        ${karaf.home} files referencing the old ones
 * @throws PatchException wrapping any underlying Git or I/O problem
 */
@Override
public void install(String transaction, Patch patch, List<BundleUpdate> bundleUpdatesInThisPatch) {
    transactionIsValid(transaction, patch);
    Git fork = pendingTransactions.get(transaction);
    // for report preparation purposes - the 4 commits below delimit the ranges rendered in the diff report
    RevWalk walk = new RevWalk(fork.getRepository());
    RevCommit reportCommitBase;
    RevCommit reportCommitOurs;
    RevCommit reportCommitPatch;
    RevCommit reportCommitResolved;
    try {
        switch (pendingTransactionsTypes.get(transaction)) {
            case ROLLUP: {
                Activator.log2(LogService.LOG_INFO, String.format("Installing rollup patch \"%s\"", patch.getPatchData().getId()));
                // We can install only one rollup patch within single transaction
                // and it is equal to cherry-picking all user changes on top of transaction branch
                // after cherry-picking the commit from the rollup patch branch.
                // Rollup patches do their own update to etc/startup.properties
                // We're operating on patch branch, HEAD of the patch branch points to the baseline
                ObjectId since = fork.getRepository().resolve("HEAD^{commit}");
                reportCommitBase = walk.parseCommit(since);
                // we'll pick all user changes between baseline and main patch branch
                // we'll consider all real user changes and some P-patch changes if HF-patches install newer
                // bundles than currently installed R-patch (very rare situation)
                ObjectId to = fork.getRepository().resolve(gitPatchRepository.getMainBranchName() + "^{commit}");
                // Custom changes: since..to
                reportCommitOurs = walk.parseCommit(to);
                Iterable<RevCommit> mainChanges = fork.log().addRange(since, to).call();
                List<RevCommit> userChanges = new LinkedList<>();
                // gather lines of HF patches - patches that have *only* bundle updates
                // if any of HF patches provide newer version of artifact than currently installed R patch,
                // we will leave the relevant line in etc/org.apache.karaf.features.xml
                List<PatchData> hfChanges = new LinkedList<>();
                for (RevCommit rc : mainChanges) {
                    if (isUserChangeCommit(rc)) {
                        userChanges.add(rc);
                    } else {
                        String hfPatchId = isHfChangeCommit(rc);
                        if (hfPatchId != null) {
                            hfChanges.add(gatherOverrides(hfPatchId, patch));
                        }
                    }
                }
                String patchRef = patch.getManagedPatch().getCommitId();
                if (env == EnvType.STANDALONE_CHILD) {
                    // we're in a slightly different situation:
                    // - patch was patch:added in root container
                    // - its main commit should be used when patching full Fuse/AMQ container
                    // - it created "side" commits (with tags) for this case of patching instance:create based containers
                    // - those tags are stored in special patch-info.txt file within patch' commit
                    String patchInfo = gitPatchRepository.getFileContent(fork, patchRef, "patch-info.txt");
                    if (patchInfo != null) {
                        BufferedReader reader = new BufferedReader(new StringReader(patchInfo));
                        String line = null;
                        while ((line = reader.readLine()) != null) {
                            if (line.startsWith("#")) {
                                // skip comment lines in patch-info.txt
                                continue;
                            }
                            Pattern p = Pattern.compile(env.getBaselineTagFormat().replace("%s", "(.*)"));
                            if (p.matcher(line).matches()) {
                                // this means we have another commit/tag that we should cherry-pick as a patch
                                // for this standalone child container
                                patchRef = line.trim();
                            }
                        }
                    } else {
                        // hmm, we actually can't patch standalone child container then...
                        Activator.log2(LogService.LOG_WARNING, String.format("Can't install rollup patch \"%s\" in instance:create-based container - no information about child container patch", patch.getPatchData().getId()));
                        return;
                    }
                }
                if (env == EnvType.STANDALONE) {
                    // pick the rollup patch
                    fork.cherryPick().include(fork.getRepository().resolve(patchRef)).setNoCommit(true).call();
                    gitPatchRepository.prepareCommit(fork, String.format(MARKER_R_PATCH_INSTALLATION_PATTERN, patch.getPatchData().getId())).call();
                } else if (env == EnvType.STANDALONE_CHILD) {
                    // rebase on top of rollup patch
                    fork.reset().setMode(ResetCommand.ResetType.HARD).setRef("refs/tags/" + patchRef + "^{commit}").call();
                }
                // next commit - reset overrides - this is 2nd step of installing rollup patch
                // if there are hot fix patches applied before rollup patch and the changes are newer (very rare
                // situation), we have to add these overrides after patch' etc/org.apache.karaf.features.xml
                // we always remove etc/overrides.properties
                resetOverrides(fork, fork.getRepository().getWorkTree(), hfChanges);
                fork.add().addFilepattern("etc/" + featureProcessing).call();
                if (new File(fork.getRepository().getWorkTree(), "etc/" + featureProcessingVersions).isFile()) {
                    fork.add().addFilepattern("etc/" + featureProcessingVersions).call();
                }
                RevCommit c = gitPatchRepository.prepareCommit(fork, String.format(MARKER_R_PATCH_RESET_OVERRIDES_PATTERN, patch.getPatchData().getId())).call();
                // R-patch changes: since..c
                reportCommitPatch = walk.parseCommit(c);
                if (env == EnvType.STANDALONE) {
                    // tag the new rollup patch as new baseline
                    String newFuseVersion = determineVersion(fork.getRepository().getWorkTree());
                    fork.tag().setName(String.format(EnvType.STANDALONE.getBaselineTagFormat(), newFuseVersion)).setObjectId(c).call();
                }
                // reapply those user changes that are not conflicting
                // for each conflicting cherry-pick we do a backup of user files, to be able to restore them
                // when rollup patch is rolled back
                ListIterator<RevCommit> it = userChanges.listIterator(userChanges.size());
                int prefixSize = Integer.toString(userChanges.size()).length();
                int count = 1;
                // when there are no user changes, the "resolved" point will be just after cherry-picking patch
                // commit. If there are user changes - these will be latest
                reportCommitResolved = c;
                Set<String> conflicts = new LinkedHashSet<>();
                while (it.hasPrevious()) {
                    RevCommit userChange = it.previous();
                    String prefix = String.format("%0" + prefixSize + "d-%s", count++, userChange.getName());
                    CherryPickResult result = fork.cherryPick().include(userChange).setNoCommit(true).call();
                    // ENTESB-5492: remove etc/overrides.properties if there is such file left from old patch
                    // mechanism
                    File overrides = new File(fork.getRepository().getWorkTree(), "etc/overrides.properties");
                    if (overrides.isFile()) {
                        overrides.delete();
                        fork.rm().addFilepattern("etc/overrides.properties").call();
                    }
                    // if there's conflict here, prefer patch version (which is "ours" (first) in this case)
                    Set<String> conflicting = handleCherryPickConflict(patch.getPatchData().getPatchDirectory(), fork, result, userChange, false, PatchKind.ROLLUP, prefix, true, false);
                    if (conflicting != null) {
                        conflicts.addAll(conflicting);
                    }
                    // always commit even empty changes - to be able to restore user changes when rolling back
                    // rollup patch.
                    // commit has the original commit id appended to the message.
                    // when we rebase on OLDER baseline (rollback) we restore backed up files based on this
                    // commit id (from patches/patch-id.backup/number-commit directory)
                    String newMessage = userChange.getFullMessage() + "\n\n";
                    newMessage += prefix;
                    reportCommitResolved = gitPatchRepository.prepareCommit(fork, newMessage).call();
                    // we may have unadded changes - when file mode is changed
                    fork.reset().setMode(ResetCommand.ResetType.HARD).call();
                }
                // finally - let's get rid of all the tags related to non-rollup patches installed between
                // previous baseline and previous HEAD, because installing rollup patch makes all previous P
                // patches obsolete
                RevCommit c1 = walk.parseCommit(since);
                RevCommit c2 = walk.parseCommit(to);
                Map<String, RevTag> tags = gitPatchRepository.findTagsBetween(fork, c1, c2);
                for (Map.Entry<String, RevTag> entry : tags.entrySet()) {
                    if (entry.getKey().startsWith("patch-")) {
                        fork.tagDelete().setTags(entry.getKey()).call();
                        fork.push().setRefSpecs(new RefSpec().setSource(null).setDestination("refs/tags/" + entry.getKey())).call();
                    }
                }
                // we have 4 commits and we can now prepare the report
                File reportFile = new File(patch.getPatchData().getPatchLocation(), patch.getPatchData().getId() + ".patch.result.html");
                try (FileWriter writer = new FileWriter(reportFile)) {
                    DiffUtils.generateDiffReport(patch, patch.getResult(), fork, conflicts, reportCommitBase, reportCommitOurs, reportCommitPatch, reportCommitResolved, writer);
                } catch (Exception e) {
                    // report generation is best-effort - a failure here must not abort the installation
                    Activator.log(LogService.LOG_WARNING, "Problem generating patch report for patch " + patch.getPatchData().getId() + ": " + e.getMessage());
                }
                break;
            }
            case NON_ROLLUP: {
                Activator.log2(LogService.LOG_INFO, String.format("Installing non-rollup patch \"%s\"", patch.getPatchData().getId()));
                // simply cherry-pick patch commit to transaction branch
                // non-rollup patches require manual change to artifact references in all files
                // pick the non-rollup patch (reuse the report RevWalk instead of leaking a fresh one)
                RevCommit commit = walk.parseCommit(fork.getRepository().resolve(patch.getManagedPatch().getCommitId()));
                CherryPickResult result = fork.cherryPick().include(commit).setNoCommit(true).call();
                handleCherryPickConflict(patch.getPatchData().getPatchDirectory(), fork, result, commit, true, PatchKind.NON_ROLLUP, null, true, false);
                // there are several files in ${karaf.home} that need to be changed together with patch
                // commit, to make them reference updated bundles (paths, locations, ...)
                updateFileReferences(fork, patch.getPatchData(), bundleUpdatesInThisPatch);
                updateOverrides(fork.getRepository().getWorkTree(), Collections.singletonList(patch.getPatchData()));
                fork.add().addFilepattern(".").call();
                // always commit non-rollup patch
                RevCommit c = gitPatchRepository.prepareCommit(fork, String.format(MARKER_P_PATCH_INSTALLATION_PATTERN, patch.getPatchData().getId())).call();
                // we may have unadded changes - when file mode is changed
                fork.reset().setMode(ResetCommand.ResetType.MIXED).call();
                fork.reset().setMode(ResetCommand.ResetType.HARD).call();
                // tag the installed patch (to easily rollback and to prevent another installation)
                String tagName = String.format("patch-%s", patch.getPatchData().getId().replace(' ', '-'));
                if (env == EnvType.STANDALONE_CHILD) {
                    tagName += "-" + gitPatchRepository.getStandaloneChildkarafName();
                }
                fork.tag().setName(tagName).setObjectId(c).call();
                break;
            }
        }
    } catch (IOException | GitAPIException e) {
        throw new PatchException(e.getMessage(), e);
    } finally {
        // RevWalk holds an ObjectReader on the repository - always release it (also on the
        // early return in the STANDALONE_CHILD branch above)
        walk.close();
    }
}
Use of org.jboss.fuse.patch.management.PatchException in the fuse-karaf project (by jboss-fuse):
class GitPatchManagementServiceImpl, method fetchPatches.
/**
 * Downloads content from the given URL and retrieves patch descriptors from it.
 * The URL may point either to a single *.patch descriptor or to a ZIP file containing a
 * descriptor and/or patch resources (artifacts under "system/" or "repository/" go to the
 * system repository; everything else is staged for ${karaf.home}). When a ZIP contains no
 * descriptor, a rollup-patch descriptor is generated on the fly from the tracked content.
 *
 * @param url location of a *.patch descriptor or of a patch ZIP file
 * @return list of discovered (or generated) patch data
 * @throws PatchException when the content can't be downloaded or is not a valid patch
 */
@Override
public List<PatchData> fetchPatches(URL url) throws PatchException {
try {
List<PatchData> patches = new ArrayList<>(1);
// download to a uniquely-named temporary file first; it is renamed (descriptor case) or
// deleted (ZIP case) further below
File patchFile = new File(patchesDir, Long.toString(System.currentTimeMillis()) + ".patch.tmp");
InputStream input = url.openStream();
FileOutputStream output = new FileOutputStream(patchFile);
ZipFile zf = null;
try {
IOUtils.copy(input, output);
} finally {
Utils.closeQuietly(input);
Utils.closeQuietly(output);
}
// probe the download: if it opens as a ZIP we extract it; otherwise it must be a bare
// *.patch descriptor (checked by URL extension)
try {
zf = new ZipFile(patchFile);
} catch (IOException ignored) {
if (!FilenameUtils.getExtension(url.getFile()).equals("patch")) {
throw new PatchException("Patch should be ZIP file or *.patch descriptor");
}
}
// patchFile may "be" a patch descriptor or be a ZIP file containing descriptor
PatchData patchData = null;
// in case patch ZIP file has no descriptor, we'll "generate" patch data on the fly
// no descriptor -> assume we have rollup patch or even full, new distribution
PatchData fallbackPatchData = new PatchData(FilenameUtils.getBaseName(url.getPath()));
fallbackPatchData.setGenerated(true);
fallbackPatchData.setRollupPatch(true);
fallbackPatchData.setPatchDirectory(new File(patchesDir, fallbackPatchData.getId()));
fallbackPatchData.setPatchLocation(patchesDir);
if (zf != null) {
File systemRepo = getSystemRepository(karafHome, systemContext);
try {
List<ZipArchiveEntry> otherResources = new LinkedList<>();
// some distributions wrap all ZIP content in a single "fuse-karaf-*" root directory;
// when detected, that first path segment is stripped from subsequent entry names
boolean skipRootDir = false;
for (Enumeration<ZipArchiveEntry> e = zf.getEntries(); e.hasMoreElements(); ) {
ZipArchiveEntry entry = e.nextElement();
if (!skipRootDir && entry.isDirectory() && entry.getName().startsWith("fuse-karaf-")) {
skipRootDir = true;
}
if (entry.isDirectory() || entry.isUnixSymlink()) {
continue;
}
String name = entry.getName();
if (skipRootDir) {
name = name.substring(name.indexOf('/') + 1);
}
if (!name.contains("/") && name.endsWith(".patch")) {
// patch descriptor in ZIP's root directory
if (patchData == null) {
// load data from patch descriptor inside ZIP. This may or may not be a rollup
// patch
File target = new File(patchesDir, name);
extractZipEntry(zf, entry, target);
patchData = loadPatchData(target);
// ENTESB-4600: try checking the target version of the patch
Version version = Utils.findVersionInName(patchData.getId());
if (version.getMajor() == 6 && version.getMinor() == 1) {
throw new PatchException("Can't install patch \"" + patchData.getId() + "\", it is released for version 6.x of the product");
}
patchData.setGenerated(false);
File targetDirForPatchResources = new File(patchesDir, patchData.getId());
patchData.setPatchDirectory(targetDirForPatchResources);
patchData.setPatchLocation(patchesDir);
target.renameTo(new File(patchesDir, patchData.getId() + ".patch"));
patches.add(patchData);
} else {
throw new PatchException(String.format("Multiple patch descriptors: already have patch %s and now encountered entry %s", patchData.getId(), name));
}
} else {
File target = null;
String relativeName = null;
if (name.startsWith("system/")) {
// copy to ${karaf.default.repository}
relativeName = name.substring("system/".length());
target = new File(systemRepo, relativeName);
} else if (name.startsWith("repository/")) {
// copy to ${karaf.default.repository}
relativeName = name.substring("repository/".length());
target = new File(systemRepo, relativeName);
} else {
// other files that should be applied to ${karaf.home} when the patch is installed
otherResources.add(entry);
}
if (target != null) {
// we unzip to system repository
extractAndTrackZipEntry(fallbackPatchData, zf, entry, target, skipRootDir);
}
}
}
File targetDirForPatchResources = new File(patchesDir, patchData == null ? fallbackPatchData.getId() : patchData.getId());
// now copy non-maven resources (we should now know where to copy them)
for (ZipArchiveEntry entry : otherResources) {
String name = entry.getName();
if (skipRootDir) {
// NOTE(review): unlike the skipRootDir handling above, indexOf('/') is used here
// WITHOUT "+ 1", so the leading separator stays in "name". java.io.File(parent, child)
// normalizes a leading separator, but confirm this asymmetry is intended.
name = name.substring(name.indexOf('/'));
}
File target = new File(targetDirForPatchResources, name);
extractAndTrackZipEntry(fallbackPatchData, zf, entry, target, skipRootDir);
}
} finally {
// the temporary download is no longer needed - its content was extracted or copied
if (zf != null) {
zf.close();
}
if (patchFile != null) {
patchFile.delete();
}
}
} else {
// If the file is not a zip/jar, assume it's a single patch file
patchData = loadPatchData(patchFile);
// no patch directory - no attached content, assuming only references to bundles
patchData.setPatchDirectory(null);
patchFile.renameTo(new File(patchesDir, patchData.getId() + ".patch"));
patches.add(patchData);
}
// no descriptor was found anywhere - persist the generated (fallback) one
if (patches.size() == 0) {
// let's use generated patch descriptor
File generatedPatchDescriptor = new File(patchesDir, fallbackPatchData.getId() + ".patch");
FileOutputStream out = new FileOutputStream(generatedPatchDescriptor);
try {
fallbackPatchData.storeTo(out);
} finally {
Utils.closeQuietly(out);
}
patches.add(fallbackPatchData);
}
return patches;
} catch (IOException e) {
throw new PatchException("Unable to download patch from url " + url, e);
}
}
Use of org.jboss.fuse.patch.management.PatchException in the fuse-karaf project (by jboss-fuse):
class GitPatchManagementServiceImpl, method commitInstallation.
/**
 * Commits a pending installation transaction: the main patch branch is moved to the transaction
 * branch's HEAD (hard reset for rollup patches, fast-forward merge for non-rollup ones), the
 * result is pushed and the changes are applied to ${karaf.home}. The transaction is removed
 * from the pending maps on success.
 *
 * @param transaction id of a pending transaction created by {@code beginInstallation()}
 * @throws PatchException wrapping any underlying Git or I/O problem
 */
@Override
public void commitInstallation(String transaction) {
    transactionIsValid(transaction, null);
    Git fork = pendingTransactions.get(transaction);
    // a single RevWalk (closed via try-with-resources) replaces the previous per-call
    // "new RevWalk(...)" instances which leaked their ObjectReaders
    try (RevWalk walk = new RevWalk(fork.getRepository())) {
        switch (pendingTransactionsTypes.get(transaction)) {
            case ROLLUP: {
                // hard reset of main patch branch to point to transaction branch + apply changes to ${karaf.home}
                gitPatchRepository.checkout(fork).setName(gitPatchRepository.getMainBranchName()).call();
                // before we reset main patch branch to originate from new baseline, let's find previous baseline
                RevTag baseline = gitPatchRepository.findCurrentBaseline(fork);
                // c1/c2 delimit previous-baseline..new-HEAD; currently unused by the non-range
                // applyChanges(fork, false) call, but kept for the commented-out range variant
                RevCommit c1 = walk.parseCommit(fork.getRepository().resolve(baseline.getTagName() + "^{commit}"));
                // hard reset of main patch branch - to point to other branch, originating from new baseline
                fork.reset().setMode(ResetCommand.ResetType.HARD).setRef(transaction).call();
                gitPatchRepository.push(fork);
                RevCommit c2 = walk.parseCommit(fork.getRepository().resolve("HEAD"));
                // apply changes from single range of commits
                // applyChanges(fork, c1, c2);
                applyChanges(fork, false);
                break;
            }
            case NON_ROLLUP: {
                // fast forward merge of main patch branch with transaction branch
                gitPatchRepository.checkout(fork).setName(gitPatchRepository.getMainBranchName()).call();
                // current version of ${karaf.home}
                RevCommit c1 = walk.parseCommit(fork.getRepository().resolve("HEAD"));
                // fast forward over patch-installation branch - possibly over more than 1 commit
                fork.merge().setFastForward(MergeCommand.FastForwardMode.FF_ONLY).include(fork.getRepository().resolve(transaction)).call();
                gitPatchRepository.push(fork);
                // apply a change from commits of all installed patches
                RevCommit c2 = walk.parseCommit(fork.getRepository().resolve("HEAD"));
                applyChanges(fork, c1, c2);
                // applyChanges(fork);
                break;
            }
        }
        gitPatchRepository.push(fork);
    } catch (GitAPIException | IOException e) {
        throw new PatchException(e.getMessage(), e);
    } finally {
        gitPatchRepository.closeRepository(fork, true);
    }
    pendingTransactions.remove(transaction);
    pendingTransactionsTypes.remove(transaction);
}
Use of org.jboss.fuse.patch.management.PatchException in the fuse-karaf project (by jboss-fuse):
class PatchServiceImpl, method activate.
/**
 * SCR activation callback: resolves the patch directory (from the {@code PATCH_LOCATION}
 * property, defaulting to ${karaf.home}/patches), initializes helper services, loads existing
 * patch data and starts tracking the boot-complete event.
 *
 * @param componentContext SCR-provided component context
 * @throws IOException when the default patch directory path can't be canonicalized
 */
@Activate
void activate(ComponentContext componentContext) throws IOException {
    // Use system bundle' bundle context to avoid running into
    // "Invalid BundleContext" exceptions when updating bundles
    bundleContext = componentContext.getBundleContext().getBundle(0).getBundleContext();
    pool = Executors.newFixedThreadPool(1, ThreadUtils.namedThreadFactory("patch-service"));

    String configuredLocation = this.bundleContext.getProperty(PATCH_LOCATION);
    if (configuredLocation == null) {
        // property not set - fall back to ${karaf.home}/patches
        patchDir = new File(bundleContext.getProperty("karaf.home"), "patches");
        LOG.info("Can't find {} property, defaulting to {}", PATCH_LOCATION, patchDir.getCanonicalPath());
    } else {
        patchDir = new File(configuredLocation);
    }
    if (!patchDir.isDirectory()) {
        Utils.mkdirs(patchDir);
        if (!patchDir.isDirectory()) {
            throw new PatchException("Unable to create patch folder");
        }
    }

    this.karafHome = new File(bundleContext.getProperty("karaf.home"));
    this.repository = new File(bundleContext.getProperty("karaf.default.repository"));
    helper = new OSGiPatchHelper(karafHome, bundleContext);
    load(true);

    bootTracker = new BootDoneTracker(bundleContext);
    bootTracker.open();
}
Use of org.jboss.fuse.patch.management.PatchException in the fuse-karaf project (by jboss-fuse):
class PatchServiceImpl, method install.
/**
 * <p>Main installation method. Installing a patch in standalone mode is a matter of correct merge (cherry-pick, merge,
 * rebase) of patch branch into <code>master</code> branch.</p>
 * <p>Static changes are handled by git, runtime changes (bundles, features) are handled depending on patch type:<ul>
 * <li>Rollup: clear OSGi bundle cache, reinstall features that were installed after restart</li>
 * <li>Non-Rollup: update bundles, generate overrides.properties and update scripts to reference new versions</li>
 * </ul></p>
 * <p>For Rollup patches we don't update bundles - we clear the bundle cache instead.</p>
 * @param patches patches to install in one transaction (checked for consistency - all must be of the same kind)
 * @param simulate whether to only simulate the installation (the transaction is rolled back, no files or runtime state are modified)
 * @param synchronous whether the runtime part (bundle updates, result persisting) runs in the calling thread instead of a background thread
 * @return map of patch id to its installation result
 */
private Map<String, PatchResult> install(final Collection<Patch> patches, final boolean simulate, boolean synchronous) {
PatchKind kind = checkConsistency(patches);
checkPrerequisites(patches);
checkStandaloneChild(patches);
// transaction id - obtained from patch management below; non-null means there is something to roll back on error
String transaction = null;
try {
// Compute individual patch results (patchId -> Result)
final Map<String, PatchResult> results = new LinkedHashMap<String, PatchResult>();
// current state of the framework
Bundle[] allBundles = bundleContext.getBundles();
// bundle -> url to update the bundle from (used for non-rollup patch)
final Map<Bundle, String> bundleUpdateLocations = new HashMap<>();
/* A "key" is name + "update'able version". Such version is current version with micro version == 0 */
// [symbolic name|updateable-version] -> newest update for the bundle out of all installed patches
final Map<String, BundleUpdate> updatesForBundleKeys = new LinkedHashMap<>();
// [feature name|updateable-version] -> newest update for the feature out of all installed patches
final Map<String, FeatureUpdate> updatesForFeatureKeys = new LinkedHashMap<>();
final List<String> overridesForFeatureKeys = new LinkedList<>();
// symbolic name -> version -> location
final BundleVersionHistory history = createBundleVersionHistory();
// beginning installation transaction = creating of temporary branch in git
transaction = this.patchManagement.beginInstallation(kind);
// bundles from etc/startup.properties + felix.framework = all bundles not managed by features
// these bundles will be treated in special way
// symbolic name -> Bundle
final Map<String, Bundle> coreBundles = helper.getCoreBundles(allBundles);
// runtime info is prepared to apply runtime changes and static info is prepared to update KARAF_HOME files
for (Patch patch : patches) {
List<FeatureUpdate> featureUpdatesInThisPatch = null;
List<String> featureOverridesInThisPatch = null;
if (kind == PatchKind.ROLLUP) {
// list of feature updates for the current patch
featureUpdatesInThisPatch = featureUpdatesInPatch(patch, updatesForFeatureKeys, kind);
helper.sortFeatureUpdates(featureUpdatesInThisPatch);
} else {
// list of feature overrides (new Karaf 4.2 feature override mechanism)
// this is collected for the purpose of summary, not to collect information needed
// for actual override
featureOverridesInThisPatch = featureOverridesInPatch(patch, kind);
overridesForFeatureKeys.addAll(featureOverridesInThisPatch);
}
// list of bundle updates for the current patch - for ROLLUP patch, we minimize the list of bundles
// to "restore" (install after clearing data/cache) by not including bundles that are
// already updated as part of features update
List<BundleUpdate> bundleUpdatesInThisPatch = bundleUpdatesInPatch(patch, allBundles, bundleUpdateLocations, history, updatesForBundleKeys, kind, coreBundles, featureUpdatesInThisPatch);
// prepare patch result before doing runtime changes
PatchResult result = null;
if (patch.getResult() != null) {
result = patch.getResult();
if (patchManagement.isStandaloneChild()) {
// ENTESB-5120: "result" is actually a result of patch installation in root container
// we need dedicated result for admin:create based child container
PatchResult childResult = new PatchResult(patch.getPatchData(), simulate, System.currentTimeMillis(), bundleUpdatesInThisPatch, featureUpdatesInThisPatch, featureOverridesInThisPatch, result);
result.addChildResult(System.getProperty("karaf.name"), childResult);
}
} else {
result = new PatchResult(patch.getPatchData(), simulate, System.currentTimeMillis(), bundleUpdatesInThisPatch, featureUpdatesInThisPatch, featureOverridesInThisPatch);
}
// record which container (name | base dir) this result applies to
result.getKarafBases().add(String.format("%s | %s", System.getProperty("karaf.name"), System.getProperty("karaf.base")));
results.put(patch.getPatchData().getId(), result);
patch.setResult(result);
// each patch may change files, we're not updating the main files yet - it'll be done when
// install transaction is committed
patchManagement.install(transaction, patch, bundleUpdatesInThisPatch);
}
// One special case
if (kind == PatchKind.NON_ROLLUP) {
// for rollup patch, this bundle will be installed from scratch
for (Map.Entry<Bundle, String> entry : bundleUpdateLocations.entrySet()) {
Bundle bundle = entry.getKey();
if (bundle.getSymbolicName() != null && "org.ops4j.pax.url.mvn".equals(stripSymbolicName(bundle.getSymbolicName()))) {
// handle this bundle specially - update it here
URL location = new URL(entry.getValue());
System.out.printf("Special update of bundle \"%s\" from \"%s\"%n", bundle.getSymbolicName(), location);
if (!simulate) {
update(bundle, location);
bundle.start();
}
// replace location - to be stored in result
bundleUpdateLocations.put(bundle, location.toString());
}
}
}
// print the summary of planned feature/bundle changes before doing anything destructive
if (kind == PatchKind.ROLLUP) {
Presentation.displayFeatureUpdates(updatesForFeatureKeys.values(), true);
} else {
Presentation.displayFeatureOverrides(overridesForFeatureKeys, true);
}
// effectively, we will update all the bundles from this list - even if some bundles will be "updated"
// as part of feature installation
Presentation.displayBundleUpdates(updatesForBundleKeys.values(), true);
// then required repositories, features and bundles will be reinstalled
if (kind == PatchKind.ROLLUP) {
if (!simulate) {
// NOTE(review): the rollup runtime handling below only runs for a single patch;
// confirm that installing more than one rollup patch at once is rejected earlier (checkConsistency/checkPrerequisites)
if (patches.size() == 1) {
Patch patch = patches.iterator().next();
PatchResult result = results.get(patch.getPatchData().getId());
// single shot
if (patchManagement.isStandaloneChild()) {
backupService.backupDataFiles(result.getChildPatches().get(System.getProperty("karaf.name")), Pending.ROLLUP_INSTALLATION);
} else {
backupService.backupDataFiles(result, Pending.ROLLUP_INSTALLATION);
}
// stop fileinstall so it doesn't react to the files being rewritten below
for (Bundle b : coreBundles.values()) {
if (b.getSymbolicName() != null && Utils.stripSymbolicName(b.getSymbolicName()).equals("org.apache.felix.fileinstall")) {
b.stop(Bundle.STOP_TRANSIENT);
break;
}
}
// update KARAF_HOME
patchManagement.commitInstallation(transaction);
if (patchManagement.isStandaloneChild()) {
result.getChildPatches().get(System.getProperty("karaf.name")).setPending(Pending.ROLLUP_INSTALLATION);
} else {
result.setPending(Pending.ROLLUP_INSTALLATION);
}
result.store();
// Some updates need a full JVM restart.
if (isJvmRestartNeeded(results)) {
boolean handlesFullRestart = Boolean.getBoolean("karaf.restart.jvm.supported");
if (handlesFullRestart) {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " installed. Restarting Karaf..");
// KARAF-5179 - we need both properties set to true
System.setProperty("karaf.restart", "true");
System.setProperty("karaf.restart.jvm", "true");
} else {
System.out.println("Rollup patch " + patch.getPatchData().getId() + " installed. Shutting down Karaf, please restart...");
}
} else {
// We don't need a JVM restart, so lets just do a OSGi framework restart
System.setProperty("karaf.restart", "true");
}
// marker file - presumably instructs Karaf to clear the OSGi bundle cache on next
// start (TODO confirm against the startup scripts)
File karafData = new File(bundleContext.getProperty("karaf.data"));
File cleanCache = new File(karafData, "clean_cache");
cleanCache.createNewFile();
Thread.currentThread().setContextClassLoader(bundleContext.getBundle(0L).adapt(BundleWiring.class).getClassLoader());
bundleContext.getBundle(0L).stop();
// stop/shutdown occurs on another thread
}
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
patchManagement.rollbackInstallation(transaction);
}
return results;
}
// update KARAF_HOME
if (!simulate) {
patchManagement.commitInstallation(transaction);
} else {
patchManagement.rollbackInstallation(transaction);
}
if (!simulate) {
Runnable task = () -> {
try {
// update bundles
applyChanges(bundleUpdateLocations);
for (String featureOverride : overridesForFeatureKeys) {
System.out.println("overriding feature: " + featureOverride);
}
if (overridesForFeatureKeys.size() > 0) {
System.out.println("refreshing features");
featuresService.refreshFeatures(EnumSet.noneOf(FeaturesService.Option.class));
}
// persist results of all installed patches
for (Patch patch : patches) {
PatchResult result = results.get(patch.getPatchData().getId());
System.out.printf("Summary of patch %s:%n", patch.getPatchData().getId());
PatchReport report = patch.getResult().getReport();
System.out.printf(" - Bundles updated: %d%n", report.getUpdatedBundles());
System.out.printf(" - Features updated: %d%n", report.getUpdatedFeatures());
System.out.printf(" - Features overriden: %d%n", report.getOverridenFeatures());
System.out.flush();
result.store();
}
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
}
};
if (synchronous) {
task.run();
} else {
new Thread(task).start();
}
} else {
System.out.println("Simulation only - no files and runtime data will be modified.");
}
return results;
} catch (Exception e) {
e.printStackTrace(System.err);
System.err.flush();
// roll back the git transaction so a failed installation leaves no pending branch
if (transaction != null && patchManagement != null) {
patchManagement.rollbackInstallation(transaction);
}
throw new PatchException(e.getMessage(), e);
} finally {
System.out.flush();
}
}
Aggregations