Use of io.fabric8.openshift.api.model.Policy in the project fabric8 by jboss-fuse.
The class Agent, method provision:
/**
 * Computes and applies a deployment that satisfies the requested features, bundles and
 * generic requirements, retrying while the deployer reports missing prerequisites.
 *
 * @param allFeatures all known features, keyed by feature id
 * @param features    feature names/ids that must be provisioned
 * @param bundles     bundle locations that must be provisioned
 * @param reqs        additional generic requirements ("req:" prefixed)
 * @param overrides   bundle override definitions passed through to the deployer
 * @param optionals   optional resources to make available for resolution
 * @param metadata    per-artifact metadata (version range -> header overrides)
 * @throws Exception if resolution fails or prerequisite handling enters a loop
 */
public void provision(Map<String, Feature> allFeatures, Set<String> features, Set<String> bundles, Set<String> reqs, Set<String> overrides, Set<String> optionals, Map<String, Map<VersionRange, Map<String, String>>> metadata) throws Exception {
    Callable<Map<String, Resource>> res = loadResources(manager, metadata, optionals);
    // TODO: requirements should be able to be assigned to a region
    // All requirements are currently attached to the root region, prefixed by their type.
    Map<String, Set<String>> requirements = new HashMap<>();
    for (String feature : features) {
        addToMapSet(requirements, ROOT_REGION, "feature:" + feature);
    }
    for (String bundle : bundles) {
        addToMapSet(requirements, ROOT_REGION, "bundle:" + bundle);
    }
    for (String req : reqs) {
        addToMapSet(requirements, ROOT_REGION, "req:" + req);
    }
    // Build the deployment request from agent configuration plus the computed requirements.
    Deployer.DeploymentRequest request = new Deployer.DeploymentRequest();
    request.updateSnaphots = updateSnaphots;
    request.bundleUpdateRange = bundleUpdateRange;
    request.featureResolutionRange = featureResolutionRange;
    request.globalRepository = new StaticRepository(res.call().values());
    request.overrides = overrides;
    request.requirements = requirements;
    request.stateChanges = Collections.emptyMap();
    request.options = options;
    request.metadata = metadata;
    request.bundleStartTimeout = bundleStartTimeout;
    // Capture the current state of the framework so the deployer can diff against it.
    Deployer.DeploymentState dstate = new Deployer.DeploymentState();
    // Service bundle
    dstate.serviceBundle = serviceBundle;
    // Start level
    FrameworkStartLevel fsl = systemBundleContext.getBundle().adapt(FrameworkStartLevel.class);
    dstate.initialBundleStartLevel = fsl.getInitialBundleStartLevel();
    dstate.currentStartLevel = fsl.getStartLevel();
    // Bundles
    dstate.bundles = new HashMap<>();
    for (Bundle bundle : systemBundleContext.getBundles()) {
        dstate.bundles.put(bundle.getBundleId(), bundle);
    }
    // Features
    dstate.features = allFeatures;
    // Region -> bundles mapping
    // Region -> policy mapping
    dstate.bundlesPerRegion = new HashMap<>();
    dstate.filtersPerRegion = new HashMap<>();
    if (digraph == null) {
        // No region support: treat every installed bundle as belonging to the root region.
        for (Long id : dstate.bundles.keySet()) {
            addToMapSet(dstate.bundlesPerRegion, ROOT_REGION, id);
        }
    } else {
        // Snapshot the region digraph so later mutations don't affect the computed state.
        RegionDigraph clone = digraph.copy();
        for (Region region : clone.getRegions()) {
            // Get bundles
            dstate.bundlesPerRegion.put(region.getName(), new HashSet<>(region.getBundleIds()));
            // Get policies: flatten each outgoing edge's sharing policy into
            // namespace -> set of filter strings, keyed by the target region name.
            Map<String, Map<String, Set<String>>> edges = new HashMap<>();
            for (RegionDigraph.FilteredRegion fr : clone.getEdges(region)) {
                Map<String, Set<String>> policy = new HashMap<>();
                Map<String, Collection<String>> current = fr.getFilter().getSharingPolicy();
                for (String ns : current.keySet()) {
                    for (String f : current.get(ns)) {
                        addToMapSet(policy, ns, f);
                    }
                }
                edges.put(fr.getRegion().getName(), policy);
            }
            dstate.filtersPerRegion.put(region.getName(), edges);
        }
    }
    // Load the persisted agent state; on failure fall back to an empty state and log.
    final State state = new State();
    try {
        storage.load(state);
    } catch (IOException e) {
        LOGGER.warn("Error loading agent state", e);
    }
    // First run (no persisted managed bundles): consider every non-system bundle managed.
    if (state.managedBundles.isEmpty()) {
        for (Bundle b : systemBundleContext.getBundles()) {
            if (b.getBundleId() != 0) {
                addToMapSet(state.managedBundles, ROOT_REGION, b.getBundleId());
            }
        }
    }
    // For updateable bundles without a recorded checksum, reflectively resolve the
    // corresponding jar url and use that one to compute the checksum of the bundle.
    for (Map.Entry<Long, Bundle> entry : dstate.bundles.entrySet()) {
        long id = entry.getKey();
        Bundle bundle = entry.getValue();
        if (id > 0 && isUpdateable(bundle) && !state.bundleChecksums.containsKey(id)) {
            try {
                URL url = bundle.getResource("META-INF/MANIFEST.MF");
                URLConnection con = url.openConnection();
                // getLocalURL is a non-public framework method; access it reflectively
                // to map the bundle resource URL back to the on-disk jar.
                Method method = con.getClass().getDeclaredMethod("getLocalURL");
                method.setAccessible(true);
                String jarUrl = ((URL) method.invoke(con)).toExternalForm();
                if (jarUrl.startsWith("jar:")) {
                    String jar = jarUrl.substring("jar:".length(), jarUrl.indexOf("!/"));
                    jar = new URL(jar).getFile();
                    long checksum = ChecksumUtils.checksumFile(new File(jar));
                    state.bundleChecksums.put(id, checksum);
                }
            } catch (Throwable t) {
                // Best effort only; checksum will simply be recomputed/skipped later.
                // NOTE: SLF4J placeholders are "{}", not "%s" — "%s" was never substituted.
                LOGGER.debug("Error calculating checksum for bundle: {}", bundle, t);
            }
        }
    }
    dstate.state = state;
    // Deploy, retrying once per newly discovered set of missing prerequisites.
    // If the same prerequisites show up twice we are looping and must abort.
    Set<String> prereqs = new HashSet<>();
    while (true) {
        try {
            Deployer.DeployCallback callback = new BaseDeployCallback() {
                @Override
                public void phase(String message) {
                    Agent.this.updateStatus(message);
                }
                @Override
                public void saveState(State newState) {
                    // Keep the in-memory state in sync before persisting.
                    state.replace(newState);
                    try {
                        Agent.this.saveState(newState);
                    } catch (IOException e) {
                        LOGGER.warn("Error storing agent state", e);
                    }
                }
                @Override
                public void provisionList(Set<Resource> resources) {
                    Agent.this.provisionList(resources);
                }
                @Override
                public void restoreConfigAdminIfNeeded() {
                    if (configInstaller != null) {
                        configInstaller.restoreConfigAdminIfNeeded();
                    }
                }
                @Override
                public boolean done(boolean agentStarted, List<String> urls) {
                    return Agent.this.done(agentStarted, urls);
                }
            };
            // FABRIC-790, FABRIC-981 - wait for ProfileUrlHandler before attempting to load bundles (in subsystem.resolve())
            // (which may be the case with bundle.xxx=blueprint:profile:xxx URLs in io.fabric8.agent PID)
            // https://developer.jboss.org/message/920681 - 30 seconds is too low sometimes
            // there was "url.handler.timeouts" option for agent, but it was removed during migration to karaf 4.x resolver
            // LOGGER.debug("Waiting for ProfileUrlHandler");
            // awaitService(URLStreamHandlerService.class, "(url.handler.protocol=profile)", 30, TimeUnit.SECONDS);
            // LOGGER.debug("Waiting for ProfileUrlHandler finished");
            Deployer deployer = new Deployer(manager, callback);
            deployer.setDeploymentAgentId(deploymentAgentId);
            deployer.deploy(dstate, request);
            break;
        } catch (Deployer.PartialDeploymentException e) {
            if (!prereqs.containsAll(e.getMissing())) {
                prereqs.addAll(e.getMissing());
            } else {
                throw new Exception("Deployment aborted due to loop in missing prerequisites: " + e.getMissing());
            }
        }
    }
}
Use of io.fabric8.openshift.api.model.Policy in the project jointware by isdream.
The class KubernetesKeyValueStyleGeneratorTest, method testOpenShiftWithAllKind:
/**
 * Exercises the OpenShift key/value-style document generator against one freshly
 * constructed model object of every supported OpenShift resource kind.
 */
protected static void testOpenShiftWithAllKind() throws Exception {
    // The generator class name is identical for every call; compute it once.
    final String generatorName = OpenShiftDocumentKeyValueStyleGenerator.class.getName();
    info(OPENSHIFT_KIND, generatorName, new Policy());
    info(OPENSHIFT_KIND, generatorName, new Group());
    info(OPENSHIFT_KIND, generatorName, new User());
    info(OPENSHIFT_KIND, generatorName, new OAuthClient());
    info(OPENSHIFT_KIND, generatorName, new ClusterRoleBinding());
    info(OPENSHIFT_KIND, generatorName, new ImageStreamTag());
    info(OPENSHIFT_KIND, generatorName, new ImageStream());
    info(OPENSHIFT_KIND, generatorName, new Build());
    info(OPENSHIFT_KIND, generatorName, new BuildConfig());
    info(OPENSHIFT_KIND, generatorName, new RoleBinding());
    info(OPENSHIFT_KIND, generatorName, new Route());
    info(OPENSHIFT_KIND, generatorName, new PolicyBinding());
    info(OPENSHIFT_KIND, generatorName, new OAuthAuthorizeToken());
    info(OPENSHIFT_KIND, generatorName, new Role());
    info(OPENSHIFT_KIND, generatorName, new Project());
    info(OPENSHIFT_KIND, generatorName, new OAuthAccessToken());
    info(OPENSHIFT_KIND, generatorName, new DeploymentConfig());
}
Aggregations