Search in sources:

Example 11 with Dummy

use of io.fabric8.kubernetes.examples.crds.Dummy in project jkube by eclipse.

In the class TriggersAnnotationEnricherTest, the method testNoEnrichment.

@Test
public void testNoEnrichment() {
    // Build a Job carrying an unrelated annotation; the enricher is expected to leave it untouched.
    Job job = new JobBuilder()
            .withNewMetadata().addToAnnotations("dummy", "annotation").endMetadata()
            .withNewSpec()
                .withNewTemplate()
                    .withNewSpec()
                        .withContainers(createContainers("c1", "is1:latest", "c2", "is2:latest"))
                    .endSpec()
                .endTemplate()
            .endSpec()
            .build();
    KubernetesListBuilder builder = new KubernetesListBuilder().addToItems(job);
    TriggersAnnotationEnricher enricher = new TriggersAnnotationEnricher(context);
    enricher.enrich(PlatformMode.kubernetes, builder);
    Job enriched = (Job) builder.build().getItems().get(0);
    // No triggers annotation should have been added to this resource.
    assertNull(enriched.getMetadata().getAnnotations().get("image.openshift.io/triggers"));
}
Also used : KubernetesListBuilder(io.fabric8.kubernetes.api.model.KubernetesListBuilder) JobBuilder(io.fabric8.kubernetes.api.model.batch.v1.JobBuilder) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) Test(org.junit.Test)

Example 12 with Dummy

use of io.fabric8.kubernetes.examples.crds.Dummy in project kas-fleetshard by bf2fc6cc711aee1a0c2a.

In the class IngressControllerManagerTest, the method testGetManagedKafkaRoutesFor.

@Test
public void testGetManagedKafkaRoutesFor() {
    final String mkName = "my-managedkafka";
    // ManagedKafka under test; name and namespace are identical.
    ManagedKafka mk = new ManagedKafkaBuilder()
            .withNewMetadata().withName(mkName).withNamespace(mkName).endMetadata()
            .withSpec(new ManagedKafkaSpecBuilder()
                    .withNewEndpoint().withBootstrapServerHost("bs.bf2.example.tld").endEndpoint()
                    .build())
            .build();
    // For each broker suffix, a Route owned by the Kafka CR that targets the broker service.
    final Function<? super String, ? extends Route> brokerToRoute = broker -> new RouteBuilder()
            .editOrNewMetadata()
                .withName(mkName + "-" + broker.replace("broker", "kafka"))
                .withNamespace(mkName)
                .addNewOwnerReference()
                    .withApiVersion(Kafka.V1BETA2)
                    .withKind(Kafka.RESOURCE_KIND)
                    .withName(AbstractKafkaCluster.kafkaClusterName(mk))
                .endOwnerReference()
            .endMetadata()
            .editOrNewSpec()
                .withHost(broker + "-bs.bf2.example.tld")
                .withTo(new RouteTargetReferenceBuilder()
                        .withKind("Service")
                        .withName(mkName + "-" + broker)
                        .withWeight(100)
                        .build())
            .endSpec()
            .build();
    // Service per broker, selected by a dummy label shared with the matching pod.
    final Function<? super String, ? extends Service> suffixToService = suffix -> new ServiceBuilder()
            .editOrNewMetadata().withName(mkName + "-" + suffix).withNamespace(mkName).endMetadata()
            .editOrNewSpec().withSelector(Map.of("dummy-label", mkName + "-" + suffix)).endSpec()
            .build();
    // Pod per broker, labeled as a strimzi-managed kafka pod and pinned to a per-suffix node.
    final Function<? super String, ? extends Pod> suffixToPod = suffix -> new PodBuilder()
            .editOrNewMetadata()
                .withName(mkName + "-" + suffix)
                .withNamespace(mkName)
                .addToLabels(Map.of("dummy-label", mkName + "-" + suffix, "app.kubernetes.io/name", "kafka", OperandUtils.MANAGED_BY_LABEL, OperandUtils.STRIMZI_OPERATOR_NAME))
            .endMetadata()
            .editOrNewSpec().withNodeName("zone" + "-" + suffix).endSpec()
            .build();
    // Worker node per suffix, carrying the topology (zone) label.
    final Function<? super String, ? extends Node> suffixToNode = suffix -> new NodeBuilder()
            .editOrNewMetadata()
                .withName("zone" + "-" + suffix)
                .withLabels(Map.of(IngressControllerManager.TOPOLOGY_KEY, "zone" + "-" + suffix, IngressControllerManager.WORKER_NODE_LABEL, "true"))
            .endMetadata()
            .build();
    List<String> suffixes = List.of("broker-0", "broker-1", "broker-2");
    suffixes.stream().map(brokerToRoute).forEach(route -> openShiftClient.routes().inNamespace(mkName).createOrReplace(route));
    suffixes.stream().map(suffixToService).forEach(svc -> openShiftClient.services().inNamespace(mkName).createOrReplace(svc));
    suffixes.stream().map(suffixToPod).forEach(pod -> openShiftClient.pods().inNamespace(mkName).createOrReplace(pod));
    suffixes.stream().map(suffixToNode).forEach(node -> openShiftClient.nodes().createOrReplace(node));
    ingressControllerManager.reconcileIngressControllers();
    List<ManagedKafkaRoute> managedKafkaRoutes = ingressControllerManager.getManagedKafkaRoutesFor(mk);
    // admin-server + bootstrap + one route per broker.
    assertEquals(5, managedKafkaRoutes.size());
    assertEquals(managedKafkaRoutes.stream().sorted(Comparator.comparing(ManagedKafkaRoute::getName)).collect(Collectors.toList()), managedKafkaRoutes, "Expected list of ManagedKafkaRoutes to be sorted by name");
    assertEquals("admin-server", managedKafkaRoutes.get(0).getName());
    assertEquals("admin-server", managedKafkaRoutes.get(0).getPrefix());
    assertEquals("ingresscontroller.kas.testing.domain.tld", managedKafkaRoutes.get(0).getRouter());
    assertEquals("bootstrap", managedKafkaRoutes.get(1).getName());
    assertEquals("", managedKafkaRoutes.get(1).getPrefix());
    assertEquals("ingresscontroller.kas.testing.domain.tld", managedKafkaRoutes.get(1).getRouter());
    // Broker routes are zone-specific and follow the broker index.
    for (int i = 0; i < 3; i++) {
        ManagedKafkaRoute brokerRoute = managedKafkaRoutes.get(2 + i);
        assertEquals("broker-" + i, brokerRoute.getName());
        assertEquals("broker-" + i, brokerRoute.getPrefix());
        assertEquals("ingresscontroller.kas-zone-broker-" + i + ".testing.domain.tld", brokerRoute.getRouter());
    }
}
Also used : Quantity(io.fabric8.kubernetes.api.model.Quantity) IntStream(java.util.stream.IntStream) BeforeEach(org.junit.jupiter.api.BeforeEach) KubernetesServerTestResource(io.quarkus.test.kubernetes.client.KubernetesServerTestResource) QuarkusMock(io.quarkus.test.junit.QuarkusMock) KubernetesCrudDispatcher(io.fabric8.kubernetes.client.server.mock.KubernetesCrudDispatcher) ManagedKafkaRoute(org.bf2.operator.resources.v1alpha1.ManagedKafkaRoute) Function(java.util.function.Function) QuarkusTest(io.quarkus.test.junit.QuarkusTest) Inject(javax.inject.Inject) Route(io.fabric8.openshift.api.model.Route) NodeBuilder(io.fabric8.kubernetes.api.model.NodeBuilder) PodBuilder(io.fabric8.kubernetes.api.model.PodBuilder) Map(java.util.Map) AbstractKafkaCluster(org.bf2.operator.operands.AbstractKafkaCluster) Service(io.fabric8.kubernetes.api.model.Service) Assertions.assertEquals(org.junit.jupiter.api.Assertions.assertEquals) Node(io.fabric8.kubernetes.api.model.Node) ServiceBuilder(io.fabric8.kubernetes.api.model.ServiceBuilder) LongSummaryStatistics(java.util.LongSummaryStatistics) QuarkusTestResource(io.quarkus.test.common.QuarkusTestResource) KubernetesServer(io.fabric8.kubernetes.client.server.mock.KubernetesServer) OperandUtils(org.bf2.common.OperandUtils) RouteBuilder(io.fabric8.openshift.api.model.RouteBuilder) ManagedKafkaSpecBuilder(org.bf2.operator.resources.v1alpha1.ManagedKafkaSpecBuilder) Pod(io.fabric8.kubernetes.api.model.Pod) OpenShiftClient(io.fabric8.openshift.client.OpenShiftClient) Collectors(java.util.stream.Collectors) Test(org.junit.jupiter.api.Test) Mockito(org.mockito.Mockito) AfterEach(org.junit.jupiter.api.AfterEach) List(java.util.List) KafkaCluster(org.bf2.operator.operands.KafkaCluster) Assertions.assertTrue(org.junit.jupiter.api.Assertions.assertTrue) KubernetesTestServer(io.quarkus.test.kubernetes.client.KubernetesTestServer) Kafka(io.strimzi.api.kafka.model.Kafka) Comparator(java.util.Comparator) 
RouteTargetReferenceBuilder(io.fabric8.openshift.api.model.RouteTargetReferenceBuilder) IngressController(io.fabric8.openshift.api.model.operator.v1.IngressController) ManagedKafka(org.bf2.operator.resources.v1alpha1.ManagedKafka) Collections(java.util.Collections) ManagedKafkaBuilder(org.bf2.operator.resources.v1alpha1.ManagedKafkaBuilder) RouteBuilder(io.fabric8.openshift.api.model.RouteBuilder) PodBuilder(io.fabric8.kubernetes.api.model.PodBuilder) ManagedKafkaBuilder(org.bf2.operator.resources.v1alpha1.ManagedKafkaBuilder) NodeBuilder(io.fabric8.kubernetes.api.model.NodeBuilder) ServiceBuilder(io.fabric8.kubernetes.api.model.ServiceBuilder) ManagedKafka(org.bf2.operator.resources.v1alpha1.ManagedKafka) ManagedKafkaRoute(org.bf2.operator.resources.v1alpha1.ManagedKafkaRoute) ManagedKafkaSpecBuilder(org.bf2.operator.resources.v1alpha1.ManagedKafkaSpecBuilder) RouteTargetReferenceBuilder(io.fabric8.openshift.api.model.RouteTargetReferenceBuilder) QuarkusTest(io.quarkus.test.junit.QuarkusTest) Test(org.junit.jupiter.api.Test)

Example 13 with Dummy

use of io.fabric8.kubernetes.examples.crds.Dummy in project quick by bakdata.

In the class KubernetesResources, the method forDeletion.

/**
 * Builds a minimal resource object carrying only a name, e.g. for issuing a deletion.
 *
 * @param k8sResourceClass class of the resource
 * @param name             name of the resource
 * @param <T>              type of the resource
 * @return a resource object with the given name
 */
// No need for handling reflective exceptions independently
@SuppressWarnings("OverlyBroadCatchBlock")
public static <T extends HasMetadata> T forDeletion(final Class<T> k8sResourceClass, final String name) {
    final T resource;
    try {
        // Resource classes are expected to expose a no-arg constructor.
        resource = k8sResourceClass.getDeclaredConstructor().newInstance();
    } catch (final ReflectiveOperationException exception) {
        throw new IllegalArgumentException(
                String.format("Could not create object of class %s. It must be a serializable k8s resource.", k8sResourceClass.getName()),
                exception);
    }
    final ObjectMeta metadata = new ObjectMeta();
    metadata.setName(name);
    resource.setMetadata(metadata);
    return resource;
}
Also used : ObjectMeta(io.fabric8.kubernetes.api.model.ObjectMeta)

Example 14 with Dummy

use of io.fabric8.kubernetes.examples.crds.Dummy in project fabric8 by jboss-fuse.

In the class ProjectDeployerImpl, the method resolveProfileDeployments.

/**
 * Applies the given project requirements to a profile: resolves the root dependency's
 * features/bundles, updates the profile through the builder, records minimum-instance
 * requirements, and returns the deployment result with a hawtio profile URL.
 * NOTE(review): behavior inferred from visible code only; confirm against callers.
 */
private DeployResults resolveProfileDeployments(ProjectRequirements requirements, FabricService fabric, Profile profile, ProfileBuilder builder, boolean mergeWithOldProfile) throws Exception {
    DependencyDTO rootDependency = requirements.getRootDependency();
    ProfileService profileService = fabricService.get().adapt(ProfileService.class);
    if (!mergeWithOldProfile) {
        // If we're not merging with the old profile, then create a dummy empty profile to merge with instead.
        ProfileBuilder emptyProfile = ProfileBuilder.Factory.create(profile.getVersion(), profile.getId()).setOverlay(true);
        profile = emptyProfile.getProfile();
    }
    if (rootDependency != null) {
        // as a hack lets just add this bundle in
        LOG.info("Got root: " + rootDependency);
        List<String> parentIds = profile.getParentIds();
        Profile overlay = profileService.getOverlayProfile(profile);
        String bundleUrl = rootDependency.toBundleUrlWithType();
        LOG.info("Using resolver to add extra features and bundles on " + bundleUrl);
        // Start from the profile's current feature/bundle/optional lists and extend them.
        List<String> features = new ArrayList<String>(profile.getFeatures());
        List<String> bundles = new ArrayList<String>(profile.getBundles());
        List<String> optionals = new ArrayList<String>(profile.getOptionals());
        if (requirements.getFeatures() != null) {
            features.addAll(requirements.getFeatures());
        }
        if (requirements.getBundles() != null) {
            bundles.addAll(requirements.getBundles());
        }
        bundles.add(bundleUrl);
        LOG.info("Adding bundle: " + bundleUrl);
        // TODO we maybe should detect a karaf based container in a nicer way than this?
        boolean isKarafContainer = parentIds.contains("karaf") || parentIds.contains("containers-karaf");
        boolean addBundleDependencies = Objects.equal("bundle", rootDependency.getType()) || isKarafContainer;
        if (addBundleDependencies && requirements.isUseResolver()) {
            // lets build up a list of all current active features and bundles along with all discovered features
            List<Feature> availableFeatures = new ArrayList<Feature>();
            addAvailableFeaturesFromProfile(availableFeatures, fabric, overlay);
            Set<String> currentBundleLocations = new HashSet<>();
            currentBundleLocations.addAll(bundles);
            // lets add the current features
            DownloadManager downloadManager = DownloadManagers.createDownloadManager(fabric, executorService);
            Set<Feature> currentFeatures = AgentUtils.getFeatures(fabric, downloadManager, overlay);
            addBundlesFromProfile(currentBundleLocations, overlay);
            List<String> parentProfileIds = requirements.getParentProfiles();
            if (parentProfileIds != null) {
                // Parent profiles contribute their features and bundles to the "already present" sets.
                for (String parentProfileId : parentProfileIds) {
                    Profile parentProfile = profileService.getProfile(profile.getVersion(), parentProfileId);
                    Profile parentOverlay = profileService.getOverlayProfile(parentProfile);
                    Set<Feature> parentFeatures = AgentUtils.getFeatures(fabric, downloadManager, parentOverlay);
                    currentFeatures.addAll(parentFeatures);
                    addAvailableFeaturesFromProfile(availableFeatures, fabric, parentOverlay);
                    addBundlesFromProfile(currentBundleLocations, parentOverlay);
                }
            }
            // lets add all known features from the known repositories
            for (DependencyDTO dependency : rootDependency.getChildren()) {
                // test/provided scoped dependencies are not deployed
                if ("test".equals(dependency.getScope()) || "provided".equals(dependency.getScope())) {
                    continue;
                }
                if ("jar".equals(dependency.getType())) {
                    // Prefer a known servicemix OSGi-fied bundle over a plain jar, when one exists.
                    String match = getAllServiceMixBundles().get(dependency.getGroupId() + ":" + dependency.getArtifactId() + ":" + dependency.getVersion());
                    if (match != null) {
                        LOG.info("Replacing artifact " + dependency + " with servicemix bundle " + match);
                        String[] parts = match.split(":");
                        dependency.setGroupId(parts[0]);
                        dependency.setArtifactId(parts[1]);
                        dependency.setVersion(parts[2]);
                        dependency.setType("bundle");
                    }
                }
                String prefix = dependency.toBundleUrlWithoutVersion();
                // Resolution order: already-installed feature, then available feature, then raw bundle.
                Feature feature = findFeatureWithBundleLocationPrefix(currentFeatures, prefix);
                if (feature != null) {
                    LOG.info("Feature is already is in the profile " + feature.getId() + " for " + dependency.toBundleUrl());
                } else {
                    feature = findFeatureWithBundleLocationPrefix(availableFeatures, prefix);
                    if (feature != null) {
                        String name = feature.getName();
                        if (features.contains(name)) {
                            LOG.info("Feature is already added " + name + " for " + dependency.toBundleUrl());
                        } else {
                            LOG.info("Found a matching feature for bundle " + dependency.toBundleUrl() + ": " + feature.getId());
                            features.add(name);
                        }
                    } else {
                        String bundleUrlWithType = dependency.toBundleUrlWithType();
                        String foundBundleUri = findBundleUri(currentBundleLocations, prefix);
                        if (foundBundleUri != null) {
                            LOG.info("Bundle already included " + foundBundleUri + " for " + bundleUrlWithType);
                        } else {
                            // Some bundle prefixes are explicitly excluded from resolver-driven additions.
                            boolean ignore = false;
                            String bundleWithoutMvnPrefix = getMavenCoords(bundleUrlWithType);
                            for (String ignoreBundlePrefix : RESOLVER_IGNORE_BUNDLE_PREFIXES) {
                                if (bundleWithoutMvnPrefix.startsWith(ignoreBundlePrefix)) {
                                    ignore = true;
                                    break;
                                }
                            }
                            if (ignore) {
                                LOG.info("Ignoring bundle: " + bundleUrlWithType);
                            } else {
                                boolean optional = dependency.isOptional();
                                LOG.info("Adding " + (optional ? "optional " : "") + " bundle: " + bundleUrlWithType);
                                if (optional) {
                                    optionals.add(bundleUrlWithType);
                                } else {
                                    bundles.add(bundleUrlWithType);
                                }
                            }
                        }
                    }
                }
            }
            // Modify the profile through the {@link ProfileBuilder}
            builder.setOptionals(optionals).setFeatures(features);
        }
        builder.setBundles(bundles);
    }
    // Persist the (possibly modified) profile.
    profile = profileService.updateProfile(builder.getProfile());
    Integer minimumInstances = requirements.getMinimumInstances();
    if (minimumInstances != null) {
        // Record the requested minimum instance count in the fabric requirements.
        FabricRequirements fabricRequirements = fabricService.get().getRequirements();
        ProfileRequirements profileRequirements = fabricRequirements.getOrCreateProfileRequirement(profile.getId());
        profileRequirements.setMinimumInstances(minimumInstances);
        fabricRequirements.setVersion(requirements.getVersion());
        fabricService.get().setRequirements(fabricRequirements);
    }
    // lets find a hawtio profile and version
    String profileUrl = findHawtioUrl(fabric);
    if (profileUrl == null) {
        profileUrl = "/";
    }
    if (!profileUrl.endsWith("/")) {
        profileUrl += "/";
    }
    // Deep-link into the hawtio wiki view for this profile.
    String profilePath = Profiles.convertProfileIdToPath(profile.getId());
    profileUrl += "index.html#/wiki/branch/" + profile.getVersion() + "/view/fabric/profiles/" + profilePath;
    return new DeployResults(profile, profileUrl);
}
Also used : ProfileRequirements(io.fabric8.api.ProfileRequirements) DeployResults(io.fabric8.deployer.dto.DeployResults) ArrayList(java.util.ArrayList) DependencyDTO(io.fabric8.deployer.dto.DependencyDTO) ProfileBuilder(io.fabric8.api.ProfileBuilder) Feature(io.fabric8.agent.model.Feature) DownloadManager(io.fabric8.agent.download.DownloadManager) Profile(io.fabric8.api.Profile) ProfileService(io.fabric8.api.ProfileService) FabricRequirements(io.fabric8.api.FabricRequirements) HashSet(java.util.HashSet)

Example 15 with Dummy

use of io.fabric8.kubernetes.examples.crds.Dummy in project fabric8 by jboss-fuse.

In the class SubsystemResolver, the method prepare.

/**
 * Builds the subsystem tree from the requirement map, pre-resolves it against the known
 * features, and registers the already-installed system bundles (plus their declared
 * service capabilities) as system resources on the matching subsystems.
 * Requirement/system map keys are '/'-separated subsystem paths rooted at a single root.
 */
public void prepare(Collection<Feature> allFeatures, Map<String, Set<String>> requirements, Map<String, Set<BundleRevision>> system) throws Exception {
    // Build subsystems on the fly
    for (Map.Entry<String, Set<String>> entry : requirements.entrySet()) {
        String[] parts = entry.getKey().split("/");
        if (root == null) {
            root = new Subsystem(parts[0]);
        } else if (!root.getName().equals(parts[0])) {
            // All requirement paths must share the same first segment (the root subsystem).
            throw new IllegalArgumentException("Can not use multiple roots: " + root.getName() + ", " + parts[0]);
        }
        Subsystem ss = root;
        for (int i = 1; i < parts.length; i++) {
            // Walk/create the child chain for each intermediate path segment.
            ss = getOrCreateChild(ss, parts[i]);
        }
        for (String requirement : entry.getValue()) {
            ss.require(requirement);
        }
    }
    if (root == null) {
        // No requirements at all: nothing to resolve.
        return;
    }
    // Pre-resolve
    root.build(allFeatures);
    // Add system resources
    BundleRevision sysBundleRev = null;
    boolean hasEeCap = false;
    for (Map.Entry<String, Set<BundleRevision>> entry : system.entrySet()) {
        Subsystem ss = null;
        String[] parts = entry.getKey().split("/");
        String path = parts[0];
        if (path.equals(root.getName())) {
            ss = root;
        }
        // Navigate to the subsystem addressed by this path; ss stays null if any segment is missing.
        for (int i = 1; ss != null && i < parts.length; i++) {
            path += "/" + parts[i];
            ss = ss.getChild(path);
        }
        if (ss != null) {
            // Synthetic resource aggregating the service capabilities of all system bundles here.
            ResourceImpl dummy = new ResourceImpl("dummy", "dummy", Version.emptyVersion);
            for (BundleRevision res : entry.getValue()) {
                // We need to explicitly provide service capabilities for bundles
                // We use both actual services and services declared from the headers
                // TODO: use actual services
                Map<String, String> headers = new DictionaryAsMap<>(res.getBundle().getHeaders());
                Resource tmp = ResourceBuilder.build(res.getBundle().getLocation(), headers);
                for (Capability cap : tmp.getCapabilities(ServiceNamespace.SERVICE_NAMESPACE)) {
                    dummy.addCapability(new CapabilityImpl(dummy, cap.getNamespace(), cap.getDirectives(), cap.getAttributes()));
                }
                ss.addSystemResource(res);
                // Track whether any system bundle provides an osgi.ee capability.
                for (Capability cap : res.getCapabilities(null)) {
                    hasEeCap |= cap.getNamespace().equals(EXECUTION_ENVIRONMENT_NAMESPACE);
                }
                // Bundle id 0 is the system bundle itself.
                if (res.getBundle().getBundleId() == 0) {
                    sysBundleRev = res;
                }
            }
            ss.addSystemResource(dummy);
        }
    }
    // Under Equinox, the osgi.ee capabilities are not provided by the system bundle
    if (!hasEeCap && sysBundleRev != null) {
        // Synthesize an environment resource from the system bundle's Provide-Capability header.
        String provideCaps = sysBundleRev.getBundle().getHeaders().get(PROVIDE_CAPABILITY);
        environmentResource = new ResourceImpl("environment", "karaf.environment", Version.emptyVersion);
        environmentResource.addCapabilities(ResourceBuilder.parseCapability(environmentResource, provideCaps));
        root.addSystemResource(environmentResource);
    }
}
Also used : CapabilitySet(io.fabric8.agent.resolver.CapabilitySet) Set(java.util.Set) Capability(org.osgi.resource.Capability) DictionaryAsMap(org.apache.felix.utils.collections.DictionaryAsMap) Resource(org.osgi.resource.Resource) ResourceImpl(io.fabric8.agent.resolver.ResourceImpl) CapabilityImpl(io.fabric8.agent.resolver.CapabilityImpl) BundleRevision(org.osgi.framework.wiring.BundleRevision) HashMap(java.util.HashMap) DictionaryAsMap(org.apache.felix.utils.collections.DictionaryAsMap) Map(java.util.Map)

Aggregations

Test (org.junit.jupiter.api.Test)16 Test (org.junit.Test)12 KubernetesClient (io.fabric8.kubernetes.client.KubernetesClient)8 Pod (io.fabric8.kubernetes.api.model.Pod)7 HashMap (java.util.HashMap)7 Map (java.util.Map)7 Secret (io.fabric8.kubernetes.api.model.Secret)6 List (java.util.List)6 GenericKubernetesResource (io.fabric8.kubernetes.api.model.GenericKubernetesResource)5 KubernetesClientBuilder (io.fabric8.kubernetes.client.KubernetesClientBuilder)5 Resource (io.fabric8.kubernetes.client.dsl.Resource)5 Date (java.util.Date)5 ObjectMeta (io.fabric8.kubernetes.api.model.ObjectMeta)4 PodBuilder (io.fabric8.kubernetes.api.model.PodBuilder)4 KubernetesVersion (io.strimzi.operator.KubernetesVersion)4 PlatformFeaturesAvailability (io.strimzi.operator.PlatformFeaturesAvailability)4 ClusterOperatorConfig (io.strimzi.operator.cluster.ClusterOperatorConfig)4 KafkaVersionTestUtils (io.strimzi.operator.cluster.KafkaVersionTestUtils)4 ResourceUtils (io.strimzi.operator.cluster.ResourceUtils)4 ResourceOperatorSupplier (io.strimzi.operator.cluster.operator.resource.ResourceOperatorSupplier)4