Use of io.fabric8.agent.model.Repository in project fabric8 by jboss-fuse.
The class MavenProxySnapshotResolutionTest, method snapshotIsAvailableInDefaultRepository.
@Test
public void snapshotIsAvailableInDefaultRepository() throws IOException, InvalidMavenArtifactRequest {
File defaultRepository = initFileRepository("dr");
MavenResolver resolver = new ResolverBuilder().withRemoteRepositories(Collections.<File>emptyList()).withUpdatePolicy(RepositoryPolicy.UPDATE_POLICY_NEVER).withDefaultRepositories(Collections.singletonList(defaultRepository)).build();
MavenDownloadProxyServlet servlet = new MavenDownloadProxyServlet(resolver, runtime, null, 1, 0);
servlet.start();
mvnInstall(defaultRepository, "io.fabric8.test", "universalis-api", "0.1.0-SNAPSHOT", at("10:00"), "a");
// Here's the expected state of the repository after the SNAPSHOT was `mvn install`ed
assertFalse(new File(defaultRepository, "io/fabric8/test/universalis-api/0.1.0-SNAPSHOT/maven-metadata.xml").isFile());
assertTrue(new File(defaultRepository, "io/fabric8/test/universalis-api/0.1.0-SNAPSHOT/maven-metadata-local.xml").isFile());
File file = servlet.download("io/fabric8/test/universalis-api/0.1.0-SNAPSHOT/maven-metadata.xml");
Metadata metadata = readMetadata(file);
boolean checked = false;
assertThat(metadata.getVersioning().getSnapshot().isLocalCopy(), is(true));
for (SnapshotVersion snapshotVersion : metadata.getVersioning().getSnapshotVersions()) {
if ("jar".equals(snapshotVersion.getExtension())) {
assertThat(snapshotVersion.getVersion(), is("0.1.0-SNAPSHOT"));
checked = true;
}
}
assertTrue("We should find snapshot metadata", checked);
// if metadata says it's "0.1.0-SNAPSHOT", we should have no problem downloading this artifact without
// version transformation
file = servlet.download("io/fabric8/test/universalis-api/0.1.0-SNAPSHOT/universalis-api-0.1.0-SNAPSHOT.jar");
assertThat(FileUtils.readFileToString(file), equalTo("a"));
mvnInstall(defaultRepository, "io.fabric8.test", "universalis-api", "0.1.0-SNAPSHOT", at("11:00"), "b");
file = servlet.download("io/fabric8/test/universalis-api/0.1.0-SNAPSHOT/universalis-api-0.1.0-SNAPSHOT.jar");
assertThat("No policy should prevent us from seeing newer snapshot from defaultRepository", FileUtils.readFileToString(file), equalTo("b"));
}
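The helpers referenced above (initFileRepository, at, mvnInstall, readMetadata) live elsewhere in the test class and are not shown on this page. As a purely hypothetical sketch of what at and mvnInstall could look like, assuming commons-io FileUtils and java.text.SimpleDateFormat (not the actual fabric8 test code):
// Hypothetical sketch only - the real helpers are defined in the fabric8 test sources.
private static Date at(String hourMinute) throws ParseException {
    // interpret "HH:mm" as a timestamp on a fixed test day, so "11:00" is newer than "10:00"
    return new SimpleDateFormat("yyyy-MM-dd HH:mm").parse("2018-01-01 " + hourMinute);
}

private void mvnInstall(File repo, String groupId, String artifactId, String version,
                        Date timestamp, String content) throws IOException {
    // lay the artifact out the way `mvn install` would in a local repository
    File dir = new File(repo, groupId.replace('.', '/') + "/" + artifactId + "/" + version);
    dir.mkdirs();
    File jar = new File(dir, artifactId + "-" + version + ".jar");
    FileUtils.write(jar, content);              // "a", "b", ... checked by the assertions
    jar.setLastModified(timestamp.getTime());
    // the real helper also writes/updates maven-metadata-local.xml with <localCopy>true</localCopy>,
    // which is what the first two assertions in the test verify
}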
Use of io.fabric8.agent.model.Repository in project fabric8 by jboss-fuse.
The class FabricGitServiceImpl, method activate.
@Activate
@VisibleForTesting
public void activate() throws IOException {
RuntimeProperties sysprops = runtimeProperties.get();
localRepo = sysprops.getDataPath().resolve(DEFAULT_GIT_PATH).toFile();
if (!localRepo.exists() && !localRepo.mkdirs()) {
throw new IOException("Failed to create local repository at: " + localRepo.getAbsolutePath());
}
git = openOrInit(localRepo);
activateComponent();
}
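The openOrInit helper is not shown here. A minimal sketch of what such a helper typically looks like, assuming the JGit API (not necessarily the actual fabric8 implementation):
// Minimal sketch, assuming org.eclipse.jgit.api.Git and org.eclipse.jgit.api.errors.GitAPIException.
private Git openOrInit(File localRepo) throws IOException {
    try {
        if (new File(localRepo, ".git").exists()) {
            // reuse the repository created by a previous activation
            return Git.open(localRepo);
        }
        // otherwise initialize a fresh local repository
        return Git.init().setDirectory(localRepo).call();
    } catch (GitAPIException e) {
        throw new IOException("Unable to open or initialize git repository at " + localRepo, e);
    }
}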
Use of io.fabric8.agent.model.Repository in project fabric8 by jboss-fuse.
The class Deployer, method deploy.
/**
* Computes and executes a deployment of the given request against the current deployment state.
*
* @param dstate the current deployment state
* @param request the deployment request to apply
* @throws Exception if resolution or deployment fails
*/
public void deploy(DeploymentState dstate, final DeploymentRequest request) throws Exception {
String threadFactoryName = deploymentAgentId != null ? String.format("%s-deployer", deploymentAgentId) : "deployer";
ExecutorService deploymentsExecutor = Executors.newSingleThreadExecutor(new NamedThreadFactory(threadFactoryName));
boolean noRefreshUnmanaged = request.options.contains(Constants.Option.NoAutoRefreshUnmanagedBundles);
boolean noRefreshManaged = request.options.contains(Constants.Option.NoAutoRefreshManagedBundles);
boolean noRefresh = request.options.contains(Constants.Option.NoAutoRefreshBundles);
boolean noStart = request.options.contains(Constants.Option.NoAutoStartBundles);
boolean verbose = request.options.contains(Constants.Option.Verbose);
boolean silent = request.options.contains(Constants.Option.Silent);
boolean simulate = request.options.contains(Constants.Option.Simulate);
boolean noManageBundles = request.options.contains(Constants.Option.NoAutoManageBundles);
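// display is a bitmask: silent suppresses all output, verbose adds stdout output on top of the log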
int display = silent ? 0 : verbose ? DISPLAY_LOG | DISPLAY_STDOUT : DISPLAY_LOG;
// TODO: add an option to unmanage bundles instead of uninstalling them
Map<String, Set<Long>> managedBundles = copy(dstate.state.managedBundles);
Map<String, Set<Bundle>> unmanagedBundles = apply(diff(dstate.bundlesPerRegion, dstate.state.managedBundles), map(dstate.bundles));
// Resolve
SubsystemResolver resolver = new SubsystemResolver(manager);
resolver.prepare(dstate.features.values(), request.requirements, apply(unmanagedBundles, adapt(BundleRevision.class)));
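// "Prerequisite" features must be provisioned before the features that depend on them;
// first, drop every prerequisite that is already satisfied by an installed feature.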
Set<String> prereqs = resolver.collectPrerequisites();
if (!prereqs.isEmpty()) {
for (Iterator<String> iterator = prereqs.iterator(); iterator.hasNext(); ) {
String prereq = iterator.next();
String[] parts = prereq.split("/");
VersionRange range;
if (parts[1].equals("0.0.0")) {
range = VersionRange.ANY_VERSION;
} else if (!parts[1].startsWith("[") && !parts[1].startsWith("(")) {
range = new VersionRange(Macro.transform(request.featureResolutionRange, parts[1]));
} else {
range = new VersionRange(parts[1]);
}
boolean found = false;
for (Set<String> featureSet : dstate.state.installedFeatures.values()) {
for (String feature : featureSet) {
String[] p = feature.split("/");
found = parts[0].equals(p[0]) && range.contains(VersionTable.getVersion(p[1]));
if (found)
break;
}
if (found)
break;
}
if (found) {
iterator.remove();
}
}
}
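// Any prerequisites still unsatisfied are deployed in a separate, preliminary pass;
// the PartialDeploymentException below tells the caller to re-run the original request afterwards.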
if (!prereqs.isEmpty()) {
DeploymentRequest newRequest = new DeploymentRequest();
newRequest.bundleUpdateRange = request.bundleUpdateRange;
newRequest.featureResolutionRange = request.featureResolutionRange;
newRequest.globalRepository = request.globalRepository;
newRequest.options = request.options;
newRequest.overrides = request.overrides;
newRequest.requirements = copy(dstate.state.requirements);
for (String prereq : prereqs) {
addToMapSet(newRequest.requirements, Constants.ROOT_REGION, prereq);
}
newRequest.stateChanges = Collections.emptyMap();
newRequest.updateSnaphots = request.updateSnaphots;
deploy(dstate, newRequest);
throw new PartialDeploymentException(prereqs);
}
callback.phase("resolving");
resolver.resolve(new MetadataBuilder(request.metadata), request.overrides, request.featureResolutionRange, request.globalRepository);
Map<String, StreamProvider> providers = resolver.getProviders();
Map<String, Set<Resource>> featuresPerRegion = resolver.getFeaturesPerRegions();
Map<String, Set<String>> installedFeatures = apply(featuresPerRegion, featureId());
Map<String, Set<String>> newFeatures = diff(installedFeatures, dstate.state.installedFeatures);
Map<String, Set<String>> delFeatures = diff(dstate.state.installedFeatures, installedFeatures);
//
// Compute requested features state
//
Map<String, Map<String, String>> stateFeatures = copy(dstate.state.stateFeatures);
for (Map.Entry<String, Set<String>> entry : delFeatures.entrySet()) {
Map<String, String> map = stateFeatures.get(entry.getKey());
if (map != null) {
map.keySet().removeAll(entry.getValue());
if (map.isEmpty()) {
stateFeatures.remove(entry.getKey());
}
}
}
for (Map.Entry<String, Map<String, Constants.RequestedState>> entry1 : request.stateChanges.entrySet()) {
String region = entry1.getKey();
Map<String, String> regionStates = stateFeatures.get(region);
if (regionStates != null) {
for (Map.Entry<String, Constants.RequestedState> entry2 : entry1.getValue().entrySet()) {
String feature = entry2.getKey();
if (regionStates.containsKey(feature)) {
regionStates.put(feature, entry2.getValue().name());
}
}
}
}
for (Map.Entry<String, Set<String>> entry : newFeatures.entrySet()) {
for (String feature : entry.getValue()) {
Map<String, String> map = stateFeatures.get(entry.getKey());
if (map == null) {
map = new HashMap<>();
stateFeatures.put(entry.getKey(), map);
}
map.put(feature, noStart ? Constants.RequestedState.Installed.name() : Constants.RequestedState.Started.name());
}
}
// Compute information for each bundle
Map<String, Map<String, BundleInfo>> bundleInfos = resolver.getBundleInfos();
//
// Compute deployment
//
Deployer.Deployment deployment = computeDeployment(dstate, request, resolver);
//
// Compute the set of bundles to refresh
//
// sort is only used for display
Map<Bundle, String> toRefresh = new TreeMap<>(new BundleComparator());
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
for (Bundle b : regionDeployment.toDelete) {
toRefresh.put(b, "Bundle will be uninstalled");
}
for (Bundle b : regionDeployment.toUpdate.keySet()) {
toRefresh.put(b, "Bundle will be updated");
}
}
if (!noRefreshManaged) {
computeBundlesToRefresh(toRefresh, dstate.bundles.values(), deployment.resToBnd, resolver.getWiring());
}
if (noRefreshUnmanaged) {
toRefresh.keySet().removeAll(flatten(unmanagedBundles));
}
// Automatically turn unmanaged bundles into managed bundles
// if they are required by a feature and no other unmanaged
// bundle has a requirement on them
// sort is only used for display
Set<Bundle> toManage = new TreeSet<>(new BundleComparator());
if (!noManageBundles) {
Set<Resource> features = resolver.getFeatures().keySet();
Set<? extends Resource> unmanaged = apply(flatten(unmanagedBundles), adapt(BundleRevision.class));
Set<Resource> requested = new HashSet<>();
// Gather bundles required by a feature
if (resolver.getWiring() != null) {
for (List<Wire> wires : resolver.getWiring().values()) {
for (Wire wire : wires) {
if (features.contains(wire.getRequirer()) && unmanaged.contains(wire.getProvider())) {
requested.add(wire.getProvider());
}
}
}
}
// Now, we know which bundles are completely unmanaged
unmanaged.removeAll(requested);
// Check if bundles have wires from really unmanaged bundles
if (resolver.getWiring() != null) {
for (List<Wire> wires : resolver.getWiring().values()) {
for (Wire wire : wires) {
if (requested.contains(wire.getProvider()) && unmanaged.contains(wire.getRequirer())) {
requested.remove(wire.getProvider());
}
}
}
}
if (!requested.isEmpty()) {
Map<Long, String> bundleToRegion = new HashMap<>();
for (Map.Entry<String, Set<Long>> entry : dstate.bundlesPerRegion.entrySet()) {
for (long id : entry.getValue()) {
bundleToRegion.put(id, entry.getKey());
}
}
for (Resource rev : requested) {
Bundle bundle = ((BundleRevision) rev).getBundle();
long id = bundle.getBundleId();
addToMapSet(managedBundles, bundleToRegion.get(id), id);
toManage.add(bundle);
}
}
}
Set<Bundle> toStart = new HashSet<>();
Set<Bundle> toResolve = new HashSet<>();
Set<Bundle> toStop = new HashSet<>();
//
// Compute bundle states
//
Map<Resource, Constants.RequestedState> states = new HashMap<>();
// Find all features state
Map<Resource, Constants.RequestedState> featuresState = new HashMap<>();
Map<Resource, Set<Resource>> conditionals = new HashMap<>();
if (resolver.getFeaturesPerRegions() != null) {
for (Map.Entry<String, Set<Resource>> entry : resolver.getFeaturesPerRegions().entrySet()) {
String region = entry.getKey();
Map<String, String> fss = stateFeatures.get(region);
for (Resource feature : entry.getValue()) {
Set<Resource> conditions = new HashSet<>();
for (Wire wire : resolver.getWiring().get(feature)) {
if (IDENTITY_NAMESPACE.equals(wire.getRequirement().getNamespace()) && FeatureResource.CONDITIONAL_TRUE.equals(wire.getRequirement().getDirectives().get(FeatureResource.REQUIREMENT_CONDITIONAL_DIRECTIVE))) {
conditions.add(wire.getProvider());
}
}
if (conditions.isEmpty()) {
String fs = fss.get(getFeatureId(feature));
featuresState.put(feature, Constants.RequestedState.valueOf(fs));
} else {
conditionals.put(feature, conditions);
}
}
}
}
// Compute conditional features state
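// A conditional feature is started only if all features it is conditional on are started;
// if any of them is merely resolved, the conditional feature is downgraded to Resolved as well.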
for (Resource feature : conditionals.keySet()) {
Constants.RequestedState state = null;
for (Resource cond : conditionals.get(feature)) {
Constants.RequestedState s = featuresState.get(cond);
if (state == null) {
state = s;
} else if (state == Constants.RequestedState.Started && s == Constants.RequestedState.Resolved) {
state = Constants.RequestedState.Resolved;
}
}
featuresState.put(feature, state);
}
// Propagate Resolved state
for (Resource feature : featuresState.keySet()) {
if (featuresState.get(feature) == Constants.RequestedState.Resolved) {
propagateState(states, feature, Constants.RequestedState.Resolved, resolver);
}
}
// Propagate Started state
for (Resource feature : featuresState.keySet()) {
if (featuresState.get(feature) == Constants.RequestedState.Started) {
propagateState(states, feature, Constants.RequestedState.Started, resolver);
}
}
// Put default Started state for other bundles
for (Resource resource : resolver.getBundles().keySet()) {
if (!states.containsKey(resource)) {
states.put(resource, Constants.RequestedState.Started);
}
}
// Override explicit disabled state
for (Resource resource : featuresState.keySet()) {
if (resource instanceof FeatureResource) {
FeatureResource featureResource = (FeatureResource) resource;
List<BundleInfo> bundles = featureResource.getFeature().getBundles();
for (BundleInfo bundleInfo : bundles) {
if (!bundleInfo.isStart()) {
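// A bundle flagged start="false" in its feature must not be auto-started: build a set of
// candidate "name/version" keys (plain and group-prefixed, underscores vs. dots) and force
// any matching resource back to the Installed state.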
Set<String> candidates = new HashSet<>();
String fullLocation = bundleInfo.getLocation();
int protocolMarker = fullLocation.lastIndexOf(":");
fullLocation = fullLocation.substring(protocolMarker + 1);
String[] split = fullLocation.split("/");
if (split.length >= 3) {
String fullLocationKey = split[0] + "." + split[1] + "/" + split[2];
candidates.add(fullLocationKey);
}
String location = bundleInfo.getLocation();
Parser parser = new Parser(location);
String id = parser.getArtifact();
String version = parser.getVersion();
String key = id + "/" + version;
String keyDotted = id + "/" + version.replaceAll("_", ".");
String prefix = parser.getGroup();
if (parser.getGroup().contains(":")) {
prefix = parser.getGroup().split(":")[1];
}
String fullKey = prefix + "." + key;
String fullKeyDotted = prefix + "." + keyDotted;
candidates.add(key);
candidates.add(keyDotted);
candidates.add(fullKey);
candidates.add(fullKeyDotted);
for (Iterator<Resource> iter = states.keySet().iterator(); iter.hasNext(); ) {
Resource res = iter.next();
String resourceStringRepresentation = res.toString();
if (candidates.contains(resourceStringRepresentation)) {
states.put(res, Constants.RequestedState.Installed);
break;
}
}
}
}
}
}
// Only keep bundles resources
states.keySet().retainAll(resolver.getBundles().keySet());
//
for (Map.Entry<Resource, Constants.RequestedState> entry : states.entrySet()) {
Bundle bundle = deployment.resToBnd.get(entry.getKey());
if (bundle != null) {
switch(entry.getValue()) {
case Started:
toResolve.add(bundle);
toStart.add(bundle);
break;
case Resolved:
toResolve.add(bundle);
toStop.add(bundle);
break;
}
}
}
//
// Compute bundle all start levels and start levels to update
//
Map<Resource, Integer> startLevels = new HashMap<>();
final Map<Bundle, Integer> toUpdateStartLevel = new HashMap<>();
for (Map.Entry<String, Set<Resource>> entry : resolver.getBundlesPerRegions().entrySet()) {
String region = entry.getKey();
for (Resource resource : entry.getValue()) {
BundleInfo bi = bundleInfos.get(region).get(getUri(resource));
if (bi != null) {
int sl = bi.getStartLevel() > 0 ? bi.getStartLevel() : dstate.initialBundleStartLevel;
startLevels.put(resource, sl);
Bundle bundle = deployment.resToBnd.get(resource);
if (bundle != null) {
int curSl = bundle.adapt(BundleStartLevel.class).getStartLevel();
if (sl != curSl) {
toUpdateStartLevel.put(bundle, sl);
if (sl > dstate.currentStartLevel) {
toStop.add(bundle);
}
}
}
}
}
}
//
// Log deployment
//
logDeployment(deployment, display);
if (!noRefresh && !toRefresh.isEmpty()) {
print(" Bundles to refresh:", display);
for (Map.Entry<Bundle, String> entry : toRefresh.entrySet()) {
Bundle bundle = entry.getKey();
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion() + " (" + entry.getValue() + ")", display);
}
}
if (!toManage.isEmpty()) {
print(" Managing bundle:", display);
for (Bundle bundle : toManage) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
}
}
if (simulate) {
return;
}
callback.phase("installing");
//
// Execute deployment
//
// #1: stop bundles that need to be updated, uninstalled or refreshed, in order
// #2: uninstall needed bundles
// #3: update regions
// #4: update bundles
// #5: install bundles
// #6: save state
// #7: install configuration
// #8: refresh bundles
// #9: start bundles in order
// #10: send events
//
//
// Handle updates on the agent bundle
//
Deployer.RegionDeployment rootRegionDeployment = deployment.regions.get(Constants.ROOT_REGION);
// We don't support uninstalling the bundle
if (rootRegionDeployment != null && rootRegionDeployment.toDelete.contains(dstate.serviceBundle)) {
throw new UnsupportedOperationException("Uninstalling the agent bundle is not supported");
}
// Ensure all classes are loaded if the agent will be refreshed
if (toRefresh.containsKey(dstate.serviceBundle)) {
OsgiUtils.ensureAllClassesLoaded(dstate.serviceBundle);
}
// When restarting, the resolution will be attempted again
if (rootRegionDeployment != null && rootRegionDeployment.toUpdate.containsKey(dstate.serviceBundle)) {
callback.phase("updating agent");
callback.persistResolveRequest(request);
// save the new checksum persistently
if (deployment.bundleChecksums.containsKey(dstate.serviceBundle.getBundleId())) {
State state = dstate.state.copy();
state.bundleChecksums.put(dstate.serviceBundle.getBundleId(), deployment.bundleChecksums.get(dstate.serviceBundle.getBundleId()));
callback.saveState(state);
}
Resource resource = rootRegionDeployment.toUpdate.get(dstate.serviceBundle);
String uri = getUri(resource);
print("The agent bundle needs is being updated with " + uri, display);
toRefresh.clear();
toRefresh.put(dstate.serviceBundle, "DeploymentAgent bundle is being updated");
computeBundlesToRefresh(toRefresh, dstate.bundles.values(), Collections.<Resource, Bundle>emptyMap(), Collections.<Resource, List<Wire>>emptyMap());
callback.stopBundle(dstate.serviceBundle, STOP_TRANSIENT);
try (InputStream is = getBundleInputStream(resource, providers)) {
callback.updateBundle(dstate.serviceBundle, uri, is);
}
callback.refreshPackages(toRefresh.keySet());
callback.startBundle(dstate.serviceBundle);
return;
}
//
if (!newFeatures.isEmpty()) {
Set<Feature> set = apply(flatten(newFeatures), map(dstate.features));
for (Feature feature : set) {
Downloader downloader = manager.createDownloader();
for (ConfigFile configFile : feature.getConfigurationFiles()) {
downloader.download(configFile.getLocation(), null);
}
downloader.await();
}
}
//
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
toStop.addAll(regionDeployment.toUpdate.keySet());
toStop.addAll(regionDeployment.toDelete);
}
if (!noRefresh) {
Set<Bundle> toRefreshToStopEarly = new HashSet<>(toRefresh.keySet());
toRefreshToStopEarly.remove(dstate.serviceBundle);
toStop.addAll(toRefreshToStopEarly);
toStart.addAll(toRefreshToStopEarly);
}
removeFragmentsAndBundlesInState(toStop, UNINSTALLED | RESOLVED | STOPPING);
if (!toStop.isEmpty()) {
callback.phase("updating (stopping bundles)");
print("Stopping bundles:", display);
while (!toStop.isEmpty()) {
List<Bundle> bs = getBundlesToStop(toStop);
for (final Bundle bundle : bs) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {
@Override
public Void call() throws Exception {
try {
LOGGER.info("Scheduled stop for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
// If the bundle start level will be changed, stop it persistently to
// avoid a restart when the start level is actually changed
callback.stopBundle(bundle, toUpdateStartLevel.containsKey(bundle) ? 0 : STOP_TRANSIENT);
} catch (BundleException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
return null;
}
}), request.bundleStartTimeout, TimeUnit.SECONDS);
// wait on each Future's result, limited by the timeout above
for (Future<Void> f : futures) {
try {
f.get();
} catch (CancellationException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
}
toStop.remove(bundle);
}
}
}
//
// Delete bundles
//
boolean hasToDelete = false;
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
if (hasToDelete = !regionDeployment.toDelete.isEmpty()) {
break;
}
}
if (hasToDelete) {
callback.phase("updating (uninstalling bundles)");
print("Uninstalling bundles:", display);
for (Map.Entry<String, RegionDeployment> entry : deployment.regions.entrySet()) {
String name = entry.getKey();
Deployer.RegionDeployment regionDeployment = entry.getValue();
for (Bundle bundle : regionDeployment.toDelete) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
callback.uninstall(bundle);
removeFromMapSet(managedBundles, name, bundle.getBundleId());
}
}
}
//
// Update regions
//
{
// Add bundles
Map<String, Set<Long>> bundles = new HashMap<>();
add(bundles, apply(unmanagedBundles, bundleId()));
add(bundles, managedBundles);
// Compute policies
RegionDigraph computedDigraph = resolver.getFlatDigraph();
Map<String, Map<String, Map<String, Set<String>>>> policies = copy(dstate.filtersPerRegion);
// Only keep regions which still have bundles
policies.keySet().retainAll(bundles.keySet());
// Fix broken filters
for (String name : policies.keySet()) {
policies.get(name).keySet().retainAll(policies.keySet());
}
// Update managed regions
for (Region computedRegion : computedDigraph.getRegions()) {
String name = computedRegion.getName();
Map<String, Map<String, Set<String>>> policy = policies.get(name);
if (policy == null) {
policy = new HashMap<>();
policies.put(name, policy);
}
for (RegionDigraph.FilteredRegion fr : computedRegion.getEdges()) {
String r2 = fr.getRegion().getName();
Map<String, Set<String>> filters = new HashMap<>();
Map<String, Collection<String>> current = fr.getFilter().getSharingPolicy();
for (String ns : current.keySet()) {
for (String f : current.get(ns)) {
addToMapSet(filters, ns, f);
}
}
policy.put(r2, filters);
}
}
// Apply all changes
callback.replaceDigraph(policies, bundles);
}
//
// Update bundles
//
boolean hasToUpdate = false;
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
if (hasToUpdate = !regionDeployment.toUpdate.isEmpty()) {
break;
}
}
if (hasToUpdate) {
callback.phase("updating (updating bundles)");
print("Updating bundles:", display);
for (Map.Entry<String, RegionDeployment> rde : deployment.regions.entrySet()) {
for (Map.Entry<Bundle, Resource> entry : rde.getValue().toUpdate.entrySet()) {
Bundle bundle = entry.getKey();
Resource resource = entry.getValue();
String uri = getUri(resource);
print(" " + uri, display);
try (InputStream is = getBundleInputStream(resource, providers)) {
callback.updateBundle(bundle, uri, is);
}
toStart.add(bundle);
}
}
}
//
for (Map.Entry<Bundle, Integer> entry : toUpdateStartLevel.entrySet()) {
Bundle bundle = entry.getKey();
int sl = entry.getValue();
callback.setBundleStartLevel(bundle, sl);
}
//
// Install bundles
//
boolean hasToInstall = false;
for (Deployer.RegionDeployment regionDeployment : deployment.regions.values()) {
if (hasToInstall = !regionDeployment.toInstall.isEmpty()) {
break;
}
}
if (hasToInstall) {
callback.phase("updating (installing bundles)");
print("Installing bundles:", display);
for (Map.Entry<String, RegionDeployment> entry : deployment.regions.entrySet()) {
String name = entry.getKey();
Deployer.RegionDeployment regionDeployment = entry.getValue();
for (Resource resource : regionDeployment.toInstall) {
String uri = getUri(resource);
print(" " + uri, display);
Bundle bundle;
long crc;
try (ChecksumUtils.CRCInputStream is = new ChecksumUtils.CRCInputStream(getBundleInputStream(resource, providers))) {
bundle = callback.installBundle(name, uri, is);
// calculate CRC normally
crc = is.getCRC();
try {
URI resourceURI = new URI(uri);
if ("blueprint".equals(resourceURI.getScheme())) {
// ENTESB-6957 calculate proper blueprint file CRC during installation
InputStream bis = getBlueprintInputStream(getBundleInputStream(resource, providers));
// original stream is closed in either case
if (bis != null) {
crc = ChecksumUtils.checksum(bis);
}
}
} catch (URISyntaxException ignored) {
}
}
addToMapSet(managedBundles, name, bundle.getBundleId());
deployment.resToBnd.put(resource, bundle);
// save a checksum of installed snapshot bundle
if (Constants.UPDATE_SNAPSHOTS_CRC.equals(request.updateSnaphots) && isUpdateable(resource) && !deployment.bundleChecksums.containsKey(bundle.getBundleId())) {
deployment.bundleChecksums.put(bundle.getBundleId(), crc);
}
if (startLevels.containsKey(resource)) {
int startLevel = startLevels.get(resource);
if (startLevel != dstate.initialBundleStartLevel) {
callback.setBundleStartLevel(bundle, startLevel);
}
}
Constants.RequestedState reqState = states.get(resource);
if (reqState == null) {
reqState = Constants.RequestedState.Started;
}
switch(reqState) {
case Started:
toResolve.add(bundle);
toStart.add(bundle);
break;
case Resolved:
toResolve.add(bundle);
break;
}
}
}
}
//
// Update and save state
//
State newState = new State();
newState.bundleChecksums.putAll(deployment.bundleChecksums);
newState.requirements.putAll(request.requirements);
newState.installedFeatures.putAll(installedFeatures);
newState.stateFeatures.putAll(stateFeatures);
newState.managedBundles.putAll(managedBundles);
callback.saveState(newState);
//
if (!newFeatures.isEmpty()) {
// check if configadmin is started
callback.phase("updating (installing configurations)");
Set<Feature> set = apply(flatten(newFeatures), map(dstate.features));
callback.restoreConfigAdminIfNeeded();
for (Feature feature : set) {
callback.installFeatureConfigs(feature);
}
}
callback.phase("finalizing");
if (!noRefresh) {
toStop = new HashSet<>();
toStop.addAll(toRefresh.keySet());
removeFragmentsAndBundlesInState(toStop, UNINSTALLED | RESOLVED | STOPPING);
if (!toStop.isEmpty()) {
callback.phase("finalizing (stopping bundles)");
print("Stopping bundles:", display);
while (!toStop.isEmpty()) {
List<Bundle> bs = getBundlesToStop(toStop);
for (final Bundle bundle : bs) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {
@Override
public Void call() throws Exception {
try {
LOGGER.info("Scheduled stop for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
callback.stopBundle(bundle, STOP_TRANSIENT);
} catch (BundleException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
return null;
}
}), request.bundleStartTimeout, TimeUnit.SECONDS);
// wait on each Future's result, limited by the timeout above
for (Future<Void> f : futures) {
try {
f.get();
} catch (CancellationException e) {
LOGGER.warn("Error while trying to stop bundle {}", bundle.getSymbolicName(), e);
}
}
toStop.remove(bundle);
toStart.add(bundle);
}
}
}
if (!toRefresh.isEmpty()) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(" Bundle refresh explanation:");
for (Map.Entry entry : toRefresh.entrySet()) {
LOGGER.debug("{} is refreshed because of [{}]", entry.getKey(), entry.getValue());
}
}
callback.phase("finalizing (refreshing bundles)");
print("Refreshing bundles:", display);
for (Map.Entry<Bundle, String> entry : toRefresh.entrySet()) {
Bundle bundle = entry.getKey();
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion() + " (" + entry.getValue() + ")", display);
}
if (!toRefresh.isEmpty()) {
callback.refreshPackages(toRefresh.keySet());
}
}
}
// Resolve bundles
callback.phase("finalizing (resolving bundles)");
toResolve.addAll(toStart);
toResolve.addAll(toRefresh.keySet());
removeBundlesInState(toResolve, UNINSTALLED);
callback.resolveBundles(toResolve, resolver.getWiring(), deployment.resToBnd);
final boolean[] agentStarted = new boolean[] { false };
// Compute bundles to start
removeFragmentsAndBundlesInState(toStart, UNINSTALLED | ACTIVE | STARTING);
if (!toStart.isEmpty()) {
// Compute correct start order
final List<Throwable> exceptions = new ArrayList<>();
callback.phase("finalizing (starting bundles)");
print("Starting bundles:", display);
while (!toStart.isEmpty()) {
List<Bundle> bs = getBundlesToStart(toStart, dstate.serviceBundle);
for (final Bundle bundle : bs) {
print(" " + bundle.getSymbolicName() + " / " + bundle.getVersion(), display);
if ("io.fabric8.fabric-agent".equals(bundle.getSymbolicName())) {
agentStarted[0] = true;
}
List<Future<Void>> futures = deploymentsExecutor.invokeAll(Arrays.asList(new Callable<Void>() {
@Override
public Void call() throws Exception {
try {
LOGGER.info("Scheduled start for bundle:" + bundle.getSymbolicName() + " with a timeout limit of " + request.bundleStartTimeout + " seconds");
callback.startBundle(bundle);
} catch (BundleException e) {
exceptions.add(e);
}
return null;
}
}), request.bundleStartTimeout, TimeUnit.SECONDS);
// wait on each Future's result, limited by the timeout above
for (Future<Void> f : futures) {
try {
f.get();
} catch (CancellationException e) {
exceptions.add(new BundleException("Unable to start bundle [" + bundle.getSymbolicName() + "] within " + request.bundleStartTimeout + " seconds"));
}
}
toStart.remove(bundle);
}
}
deploymentsExecutor.shutdown();
if (!exceptions.isEmpty()) {
throw new MultiException("Error restarting bundles", exceptions);
}
}
// Info about final list of deployed bundles
callback.provisionList(deployment.resToBnd.keySet());
// list of bundles in special "fabric-startup" feature
List<String> urls = new LinkedList<>();
for (Feature ft : dstate.features.values()) {
if (ft.getName().equals("fabric-startup") && ft.getBundles() != null) {
for (BundleInfo bi : ft.getBundles()) {
urls.add(bi.getLocation());
}
}
// special case for Fuse/AMQ...
if (ft.getName().equals("esb-commands-startup") && ft.getBundles() != null) {
for (BundleInfo bi : ft.getBundles()) {
urls.add(bi.getLocation());
}
}
}
// let's resolve these URIs and make them available from ${karaf.default.repository}
try {
LOGGER.info("Storing startup artifacts in default repository: {}", urls);
AgentUtils.downloadLocations(manager, urls, true);
} catch (Exception e) {
LOGGER.warn(e.getMessage(), e);
}
if (!noRefresh) {
// (similar to KARAF-4686/ENTESB-6045 - cleaning the sun.rmi.transport.tcp.TCPEndpoint.localEndpoints cache)
try {
Class<?> cls = getClass().getClassLoader().loadClass("org.ops4j.pax.logging.slf4j.Slf4jMDCAdapter");
Field m_contextField = cls.getDeclaredField("m_context");
m_contextField.setAccessible(true);
// nullify org.ops4j.pax.logging.slf4j.Slf4jMDCAdapter.m_context, so it'll be reinitialized from
// fresh pax-logging-service wiring
m_contextField.set(null, null);
} catch (Exception ignored) {
}
}
if (callback.done(agentStarted[0], urls)) {
print("Done.", display);
}
}
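The deploy method above leans heavily on small map/set helpers (copy, diff, apply, flatten, addToMapSet, removeFromMapSet) from the agent's utility classes, which are not shown on this page. As a rough sketch of the semantics assumed above (illustrative bodies, assuming java.util imports, not the actual fabric8 utility code):
// Hedged sketch of the helper semantics used in deploy(); names match the calls above.
static <S, T> void addToMapSet(Map<S, Set<T>> map, S key, T value) {
    map.computeIfAbsent(key, k -> new HashSet<>()).add(value);   // create the set on first use
}

static <S, T> void removeFromMapSet(Map<S, Set<T>> map, S key, T value) {
    Set<T> values = map.get(key);
    if (values != null) {
        values.remove(value);
        if (values.isEmpty()) {
            map.remove(key);                                     // drop empty regions
        }
    }
}

static <S, T> Map<S, Set<T>> diff(Map<S, Set<T>> m1, Map<S, Set<T>> m2) {
    // everything present in m1 but not in m2, keyed the same way (e.g. region -> features)
    Map<S, Set<T>> result = new HashMap<>();
    for (Map.Entry<S, Set<T>> e : m1.entrySet()) {
        for (T t : e.getValue()) {
            if (!m2.getOrDefault(e.getKey(), Collections.<T>emptySet()).contains(t)) {
                addToMapSet(result, e.getKey(), t);
            }
        }
    }
    return result;
}

static <T> Set<T> flatten(Map<String, Set<T>> map) {
    Set<T> result = new HashSet<>();
    for (Set<T> set : map.values()) {
        result.addAll(set);                                      // union across all regions
    }
    return result;
}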
Use of io.fabric8.agent.model.Repository in project fabric8 by jboss-fuse.
The class AgentUtils, method storeInDefaultKarafRepository.
/**
* Tries to store a resource resolved by some {@link StreamProvider} into the default Karaf repository.
* @param finalTargetLocation root directory of the default Karaf repository
* @param file the locally resolved file to copy
* @param uri the original Maven URI of the resource
*/
private static void storeInDefaultKarafRepository(File finalTargetLocation, File file, String uri) {
if (finalTargetLocation != null && file != null && file.isFile()) {
try {
String path = Utils.mvnurlToPath(uri);
if (path != null) {
File target = new File(finalTargetLocation, path);
if (!target.isFile()) {
LOGGER.info("Copying resolved {} to {}", file, finalTargetLocation);
target.getParentFile().mkdirs();
Files.copy(file, target);
}
} else {
LOGGER.warn("Can't resolve Maven URI {} to path", uri);
}
} catch (Exception e) {
LOGGER.warn(e.getMessage(), e);
}
}
}
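Utils.mvnurlToPath converts an mvn: URL into the relative path used by the Maven repository layout. A hedged sketch of that conversion for the common group/artifact/version case (the real helper also deals with repository prefixes, classifiers and non-jar types):
// Illustrative only: handles mvn:group/artifact/version[/type[/classifier]] in the simple case.
static String mvnurlToPath(String uri) {
    if (uri == null || !uri.startsWith("mvn:")) {
        return null;
    }
    String[] parts = uri.substring("mvn:".length()).split("/");
    if (parts.length < 3) {
        return null;
    }
    String group = parts[0].replace('.', '/');
    String artifact = parts[1];
    String version = parts[2];
    String type = parts.length > 3 && !parts[3].isEmpty() ? parts[3] : "jar";
    String classifier = parts.length > 4 && !parts[4].isEmpty() ? "-" + parts[4] : "";
    // e.g. mvn:io.fabric8/fabric-agent/1.2.0 -> io/fabric8/fabric-agent/1.2.0/fabric-agent-1.2.0.jar
    return group + "/" + artifact + "/" + version + "/" + artifact + "-" + version + classifier + "." + type;
}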
Use of io.fabric8.agent.model.Repository in project fabric8 by jboss-fuse.
The class DeployToProfileMojo, method execute.
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
if (isIgnoreProject())
return;
try {
ProjectRequirements requirements = new ProjectRequirements();
if (isIncludeArtifact()) {
DependencyDTO rootDependency = loadRootDependency();
requirements.setRootDependency(rootDependency);
}
configureRequirements(requirements);
// validate requirements
if (requirements.getProfileId() != null) {
// make sure the profile id is a valid name
FabricValidations.validateProfileName(requirements.getProfileId());
}
boolean newUserAdded = false;
fabricServer = mavenSettings.getServer(serverId);
// we may have username and password from jolokiaUrl
String jolokiaUsername = null;
String jolokiaPassword = null;
try {
URL url = new URL(jolokiaUrl);
String s = url.getUserInfo();
if (Strings.isNotBlank(s) && s.indexOf(':') > 0) {
int idx = s.indexOf(':');
jolokiaUsername = s.substring(0, idx);
jolokiaPassword = s.substring(idx + 1);
customUsernameAndPassword = true;
}
} catch (MalformedURLException e) {
throw new IllegalArgumentException("Option jolokiaUrl is invalid due to " + e.getMessage());
}
// jolokia url overrides username/password configured in maven settings
if (jolokiaUsername != null) {
if (fabricServer == null) {
fabricServer = new Server();
}
getLog().info("Using username: " + jolokiaUsername + " and password from provided jolokiaUrl option");
fabricServer.setId(serverId);
fabricServer.setUsername(jolokiaUsername);
fabricServer.setPassword(jolokiaPassword);
}
if (fabricServer == null) {
boolean create = false;
if (mavenSettings.isInteractiveMode() && mavenSettingsWriter != null) {
System.out.println("Maven settings file: " + mavenSettingsFile.getAbsolutePath());
System.out.println();
System.out.println();
System.out.println("There is no <server> section in your ~/.m2/settings.xml file for the server id: " + serverId);
System.out.println();
System.out.println("You can enter the username/password now and have the settings.xml updated or you can do this by hand if you prefer.");
System.out.println();
while (true) {
String value = readInput("Would you like to update the settings.xml file now? (y/n): ").toLowerCase();
if (value.startsWith("n")) {
System.out.println();
System.out.println();
break;
} else if (value.startsWith("y")) {
create = true;
break;
}
}
if (create) {
System.out.println("Please let us know the login details for this server: " + serverId);
System.out.println();
String userName = readInput("Username: ");
String password = readPassword("Password: ");
String password2 = readPassword("Repeat Password: ");
while (!password.equals(password2)) {
System.out.println("Passwords do not match, please try again.");
password = readPassword("Password: ");
password2 = readPassword("Repeat Password: ");
}
System.out.println();
fabricServer = new Server();
fabricServer.setId(serverId);
fabricServer.setUsername(userName);
fabricServer.setPassword(password);
mavenSettings.addServer(fabricServer);
if (mavenSettingsFile.exists()) {
int counter = 1;
while (true) {
File backupFile = new File(mavenSettingsFile.getAbsolutePath() + ".backup-" + counter++ + ".xml");
if (!backupFile.exists()) {
System.out.println("Copied original: " + mavenSettingsFile.getAbsolutePath() + " to: " + backupFile.getAbsolutePath());
Files.copy(mavenSettingsFile, backupFile);
break;
}
}
}
Map<String, Object> config = new HashMap<String, Object>();
mavenSettingsWriter.write(mavenSettingsFile, config, mavenSettings);
System.out.println("Updated settings file: " + mavenSettingsFile.getAbsolutePath());
System.out.println();
newUserAdded = true;
}
}
}
if (fabricServer == null) {
String message = "No <server> element can be found in ~/.m2/settings.xml for the server <id>" + serverId + "</id> so we cannot connect to fabric8!\n\n" + "Please add the following to your ~/.m2/settings.xml file (using the correct user/password values):\n\n" + "<servers>\n" + " <server>\n" + " <id>" + serverId + "</id>\n" + " <username>admin</username>\n" + " <password>admin</password>\n" + " </server>\n" + "</servers>\n";
getLog().error(message);
throw new MojoExecutionException(message);
}
// now lets invoke the mbean
J4pClient client = createJolokiaClient();
if (upload) {
uploadDeploymentUnit(client, newUserAdded || customUsernameAndPassword);
} else {
getLog().info("Uploading to the fabric8 maven repository is disabled");
}
DeployResults results = uploadRequirements(client, requirements);
if (results != null) {
uploadReadMeFile(client, results);
uploadProfileConfigurations(client, results);
refreshProfile(client, results);
}
} catch (MojoExecutionException e) {
throw e;
} catch (Exception e) {
throw new MojoExecutionException("Error executing", e);
}
}
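createJolokiaClient() is not shown above. With the Jolokia Java client it typically reduces to something like the following sketch, under the assumption that fabricServer carries the credentials resolved earlier in execute() (not necessarily the actual mojo implementation):
// Sketch: build a Jolokia client from the resolved jolokiaUrl and server credentials.
protected J4pClient createJolokiaClient() {
    String user = fabricServer != null ? fabricServer.getUsername() : null;
    String password = fabricServer != null ? fabricServer.getPassword() : null;
    return J4pClient.url(jolokiaUrl)
            .user(user)
            .password(password)
            .build();
}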