Example 11 with Role

use of io.fabric8.kubernetes.api.model.rbac.Role in project strimzi by strimzi.

the class ClusterRoleBindingOperatorIT method getModified.

@Override
protected ClusterRoleBinding getModified() {
    Subject ks = new SubjectBuilder()
            .withKind("ServiceAccount")
            .withName("my-service-account2")
            .withNamespace("my-namespace2")
            .build();
    // RoleRef cannot be changed
    RoleRef roleRef = new RoleRefBuilder()
            .withName("my-cluster-role")
            .withApiGroup("rbac.authorization.k8s.io")
            .withKind("ClusterRole")
            .build();
    return new ClusterRoleBindingBuilder()
            .withNewMetadata()
                .withName(resourceName)
                .withLabels(singletonMap("state", "modified"))
            .endMetadata()
            .withSubjects(ks)
            .withRoleRef(roleRef)
            .build();
}
Also used : RoleRef(io.fabric8.kubernetes.api.model.rbac.RoleRef) ClusterRoleBindingBuilder(io.fabric8.kubernetes.api.model.rbac.ClusterRoleBindingBuilder) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) Subject(io.fabric8.kubernetes.api.model.rbac.Subject) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder)
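In the fabric8 client, a ClusterRoleBinding built this way is applied through the cluster-scoped RBAC DSL. A minimal sketch, assuming a KubernetesClient instance named client; the helper class and method names are illustrative and not part of the test:

import io.fabric8.kubernetes.api.model.rbac.ClusterRoleBinding;
import io.fabric8.kubernetes.client.KubernetesClient;

class ClusterRoleBindingApplySketch {
    // ClusterRoleBindings are cluster-scoped, so no namespace is involved;
    // createOrReplace() creates the binding or updates an existing one with the same name.
    static ClusterRoleBinding apply(KubernetesClient client, ClusterRoleBinding binding) {
        return client.rbac().clusterRoleBindings().createOrReplace(binding);
    }
}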

Example 12 with Role

use of io.fabric8.kubernetes.api.model.rbac.Role in project strimzi by strimzi.

the class RoleBindingOperatorIT method getModified.

@Override
protected RoleBinding getModified() {
    Subject ks = new SubjectBuilder()
            .withKind("ServiceAccount")
            .withName("my-service-account2")
            .withNamespace("my-namespace2")
            .build();
    // RoleRef cannot be changed
    RoleRef roleRef = new RoleRefBuilder()
            .withName("my-cluster-role")
            .withApiGroup("rbac.authorization.k8s.io")
            .withKind("ClusterRole")
            .build();
    return new RoleBindingBuilder()
            .withNewMetadata()
                .withName(resourceName)
                .withNamespace(namespace)
                .withLabels(singletonMap("state", "modified"))
            .endMetadata()
            .withSubjects(ks)
            .withRoleRef(roleRef)
            .build();
}
Also used : RoleBindingBuilder(io.fabric8.kubernetes.api.model.rbac.RoleBindingBuilder) RoleRef(io.fabric8.kubernetes.api.model.rbac.RoleRef) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) Subject(io.fabric8.kubernetes.api.model.rbac.Subject) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder)
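Unlike the cluster-scoped binding in the previous example, a RoleBinding is namespaced, so the corresponding fabric8 call needs a target namespace. A minimal sketch, again assuming a KubernetesClient named client and an illustrative helper name:

import io.fabric8.kubernetes.api.model.rbac.RoleBinding;
import io.fabric8.kubernetes.client.KubernetesClient;

class RoleBindingApplySketch {
    // inNamespace(...) selects where the binding lives;
    // createOrReplace() creates it or updates an existing binding with the same name.
    static RoleBinding apply(KubernetesClient client, String namespace, RoleBinding binding) {
        return client.rbac().roleBindings().inNamespace(namespace).createOrReplace(binding);
    }
}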

Example 13 with Role

use of io.fabric8.kubernetes.api.model.rbac.Role in project strimzi by strimzi.

the class ConfigProviderST method testConnectWithConnectorUsingConfigAndEnvProvider.

@ParallelNamespaceTest
void testConnectWithConnectorUsingConfigAndEnvProvider(ExtensionContext extensionContext) {
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(namespace, extensionContext);
    final String producerName = "producer-" + ClientUtils.generateRandomConsumerGroup();
    final String customFileSinkPath = "/tmp/my-own-path.txt";
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build());
    Map<String, String> configData = new HashMap<>();
    configData.put("topics", topicName);
    configData.put("file", customFileSinkPath);
    configData.put("key", "org.apache.kafka.connect.storage.StringConverter");
    configData.put("value", "org.apache.kafka.connect.storage.StringConverter");
    String cmName = "connector-config";
    String configRoleName = "connector-config-role";
    ConfigMap connectorConfig = new ConfigMapBuilder().editOrNewMetadata().withName(cmName).endMetadata().withData(configData).build();
    kubeClient().getClient().configMaps().inNamespace(namespaceName).create(connectorConfig);
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1, false)
            .editOrNewMetadata()
                .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
            .endMetadata()
            .editOrNewSpec()
                .addToConfig("key.converter.schemas.enable", false)
                .addToConfig("value.converter.schemas.enable", false)
                .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
                .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
                .addToConfig("config.providers", "configmaps,env")
                .addToConfig("config.providers.configmaps.class", "io.strimzi.kafka.KubernetesConfigMapConfigProvider")
                .addToConfig("config.providers.env.class", "io.strimzi.kafka.EnvVarConfigProvider")
                .editOrNewExternalConfiguration()
                    .addNewEnv()
                        .withName("FILE_SINK_FILE")
                        .withNewValueFrom()
                            .withNewConfigMapKeyRef("file", cmName, false)
                        .endValueFrom()
                    .endEnv()
                .endExternalConfiguration()
            .endSpec()
            .build());
    LOGGER.info("Creating needed RoleBinding and Role for Kubernetes Config Provider");
    ResourceManager.getInstance().createResource(extensionContext, new RoleBindingBuilder()
            .editOrNewMetadata()
                .withName("connector-config-rb")
                .withNamespace(namespaceName)
            .endMetadata()
            .withSubjects(new SubjectBuilder()
                    .withKind("ServiceAccount")
                    .withName(clusterName + "-connect")
                    .withNamespace(namespaceName)
                    .build())
            .withRoleRef(new RoleRefBuilder()
                    .withKind("Role")
                    .withName(configRoleName)
                    .withApiGroup("rbac.authorization.k8s.io")
                    .build())
            .build());
    // create a role
    Role configRole = new RoleBuilder()
            .editOrNewMetadata()
                .withName(configRoleName)
                .withNamespace(namespaceName)
            .endMetadata()
            .addNewRule()
                .withApiGroups("")
                .withResources("configmaps")
                .withResourceNames(cmName)
                .withVerbs("get")
            .endRule()
            .build();
    kubeClient().getClient().resource(configRole).createOrReplace();
    String configPrefix = "configmaps:" + namespaceName + "/connector-config:";
    resourceManager.createResource(extensionContext, KafkaConnectorTemplates.kafkaConnector(clusterName)
            .editSpec()
                .withClassName("org.apache.kafka.connect.file.FileStreamSinkConnector")
                .addToConfig("file", "${env:FILE_SINK_FILE}")
                .addToConfig("key.converter", "${" + configPrefix + "key}")
                .addToConfig("value.converter", "${" + configPrefix + "value}")
                .addToConfig("topics", "${" + configPrefix + "topics}")
            .endSpec()
            .build());
    KafkaClients kafkaBasicClientJob = new KafkaClientsBuilder()
            .withProducerName(producerName)
            .withBootstrapAddress(KafkaResources.plainBootstrapAddress(clusterName))
            .withTopicName(topicName)
            .withMessageCount(MESSAGE_COUNT)
            .withDelayMs(0)
            .withNamespaceName(namespaceName)
            .build();
    resourceManager.createResource(extensionContext, kafkaBasicClientJob.producerStrimzi());
    String kafkaConnectPodName = kubeClient().listPods(namespaceName, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, customFileSinkPath, "Hello-world - 99");
}
Also used : Role(io.fabric8.kubernetes.api.model.rbac.Role) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) RoleBindingBuilder(io.fabric8.kubernetes.api.model.rbac.RoleBindingBuilder) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) HashMap(java.util.HashMap) ConfigMapBuilder(io.fabric8.kubernetes.api.model.ConfigMapBuilder) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) RoleBuilder(io.fabric8.kubernetes.api.model.rbac.RoleBuilder) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
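The connector config above relies on two placeholder syntaxes: "${env:FILE_SINK_FILE}", resolved by io.strimzi.kafka.EnvVarConfigProvider from the externalConfiguration environment variable, and "${configmaps:<namespace>/<configMapName>:<key>}", resolved by io.strimzi.kafka.KubernetesConfigMapConfigProvider, which is why the Role granting get on the ConfigMap is needed. A small sketch of how the ConfigMap placeholder is assembled, mirroring the configPrefix logic in the test; the class and method names are illustrative:

class ConfigProviderPlaceholderSketch {
    // Builds the "${configmaps:<namespace>/<configMapName>:<key>}" placeholder that the
    // KubernetesConfigMapConfigProvider resolves against the ConfigMap at connector start-up.
    static String configMapPlaceholder(String namespaceName, String configMapName, String key) {
        return "${configmaps:" + namespaceName + "/" + configMapName + ":" + key + "}";
    }
}

For the test above, configMapPlaceholder(namespaceName, "connector-config", "topics") produces the same string as "${" + configPrefix + "topics}".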

Example 14 with Role

use of io.fabric8.kubernetes.api.model.rbac.Role in project strimzi by strimzi.

the class Main method maybeCreateClusterRoles.

/*test*/
static Future<Void> maybeCreateClusterRoles(Vertx vertx, ClusterOperatorConfig config, KubernetesClient client) {
    if (config.isCreateClusterRoles()) {
        List<Future> futures = new ArrayList<>();
        ClusterRoleOperator cro = new ClusterRoleOperator(vertx, client);
        Map<String, String> clusterRoles = new HashMap<>(6);
        clusterRoles.put("strimzi-cluster-operator-namespaced", "020-ClusterRole-strimzi-cluster-operator-role.yaml");
        clusterRoles.put("strimzi-cluster-operator-global", "021-ClusterRole-strimzi-cluster-operator-role.yaml");
        clusterRoles.put("strimzi-kafka-broker", "030-ClusterRole-strimzi-kafka-broker.yaml");
        clusterRoles.put("strimzi-entity-operator", "031-ClusterRole-strimzi-entity-operator.yaml");
        clusterRoles.put("strimzi-kafka-client", "033-ClusterRole-strimzi-kafka-client.yaml");
        for (Map.Entry<String, String> clusterRole : clusterRoles.entrySet()) {
            LOGGER.info("Creating cluster role {}", clusterRole.getKey());
            try (BufferedReader br = new BufferedReader(new InputStreamReader(Main.class.getResourceAsStream("/cluster-roles/" + clusterRole.getValue()), StandardCharsets.UTF_8))) {
                String yaml = br.lines().collect(Collectors.joining(System.lineSeparator()));
                ClusterRole role = ClusterRoleOperator.convertYamlToClusterRole(yaml);
                Future fut = cro.reconcile(new Reconciliation("start-cluster-operator", "Deployment", config.getOperatorNamespace(), "cluster-operator"), role.getMetadata().getName(), role);
                futures.add(fut);
            } catch (IOException e) {
                LOGGER.error("Failed to create Cluster Roles.", e);
                throw new RuntimeException(e);
            }
        }
        Promise<Void> returnPromise = Promise.promise();
        CompositeFuture.all(futures).onComplete(res -> {
            if (res.succeeded()) {
                returnPromise.complete();
            } else {
                returnPromise.fail("Failed to create Cluster Roles.");
            }
        });
        return returnPromise.future();
    } else {
        return Future.succeededFuture();
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IOException(java.io.IOException) ClusterRole(io.fabric8.kubernetes.api.model.rbac.ClusterRole) ClusterRoleOperator(io.strimzi.operator.common.operator.resource.ClusterRoleOperator) Reconciliation(io.strimzi.operator.common.Reconciliation) BufferedReader(java.io.BufferedReader) CompositeFuture(io.vertx.core.CompositeFuture) Future(io.vertx.core.Future) Map(java.util.Map)
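The loop delegates the YAML-to-object conversion to ClusterRoleOperator.convertYamlToClusterRole(String), which is not shown here. A minimal sketch of one way such a conversion can be implemented with Jackson's YAML support, mirroring the approach used in the EntityOperator example below; this is an illustration, not the project's actual implementation:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import io.fabric8.kubernetes.api.model.rbac.ClusterRole;
import java.io.IOException;

class ClusterRoleYamlSketch {
    // Parses a ClusterRole manifest from YAML and rethrows the checked IOException as a
    // RuntimeException, matching the error handling style of the surrounding code.
    static ClusterRole fromYaml(String yaml) {
        try {
            return new ObjectMapper(new YAMLFactory()).readValue(yaml, ClusterRole.class);
        } catch (IOException e) {
            throw new RuntimeException("Failed to parse ClusterRole YAML", e);
        }
    }
}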

Example 15 with Role

use of io.fabric8.kubernetes.api.model.rbac.Role in project strimzi by strimzi.

the class EntityOperator method generateRole.

/**
 * Read the entity operator ClusterRole, and use the rules to create a new Role.
 * This is done to avoid duplication of the rules set defined in source code.
 * If the namespace of the role is not the same as the namespace of the parent resource (Kafka CR), we do not set
 * the owner reference.
 *
 * @param ownerNamespace        The namespace of the parent resource (the Kafka CR)
 * @param namespace             The namespace where this role will be located
 *
 * @return role for the entity operator
 */
public Role generateRole(String ownerNamespace, String namespace) {
    List<PolicyRule> rules;
    try (BufferedReader br = new BufferedReader(new InputStreamReader(Main.class.getResourceAsStream("/cluster-roles/031-ClusterRole-strimzi-entity-operator.yaml"), StandardCharsets.UTF_8))) {
        String yaml = br.lines().collect(Collectors.joining(System.lineSeparator()));
        ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory());
        ClusterRole cr = yamlReader.readValue(yaml, ClusterRole.class);
        rules = cr.getRules();
    } catch (IOException e) {
        LOGGER.errorCr(reconciliation, "Failed to read entity-operator ClusterRole.", e);
        throw new RuntimeException(e);
    }
    Role role = super.generateRole(namespace, rules);
    // We set OwnerReference only within the same namespace since it does not work cross-namespace
    if (!namespace.equals(ownerNamespace)) {
        role.getMetadata().setOwnerReferences(Collections.emptyList());
    }
    return role;
}
Also used : Role(io.fabric8.kubernetes.api.model.rbac.Role) ClusterRole(io.fabric8.kubernetes.api.model.rbac.ClusterRole) PolicyRule(io.fabric8.kubernetes.api.model.rbac.PolicyRule) InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) YAMLFactory(com.fasterxml.jackson.dataformat.yaml.YAMLFactory) IOException(java.io.IOException) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
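The call to super.generateRole(namespace, rules) comes from the shared model base class and is not shown in this snippet. A minimal sketch of what such a helper could look like on top of the fabric8 RoleBuilder, purely as an illustration of the API; the omission of labels and owner references is a simplifying assumption, not the project's actual implementation:

import io.fabric8.kubernetes.api.model.rbac.PolicyRule;
import io.fabric8.kubernetes.api.model.rbac.Role;
import io.fabric8.kubernetes.api.model.rbac.RoleBuilder;
import java.util.List;

class GenerateRoleSketch {
    // Builds a namespaced Role carrying the given rules; the real implementation is expected
    // to also set labels and the owner reference that the caller above clears for
    // cross-namespace roles.
    static Role generateRole(String name, String namespace, List<PolicyRule> rules) {
        return new RoleBuilder()
                .withNewMetadata()
                    .withName(name)
                    .withNamespace(namespace)
                .endMetadata()
                .withRules(rules)
                .build();
    }
}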

Aggregations

RoleRefBuilder (io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder) 28
RoleRef (io.fabric8.kubernetes.api.model.rbac.RoleRef) 24
SubjectBuilder (io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) 22
Role (io.fabric8.kubernetes.api.model.rbac.Role) 19
RoleBinding (io.fabric8.kubernetes.api.model.rbac.RoleBinding) 19
Subject (io.fabric8.kubernetes.api.model.rbac.Subject) 18
Reconciliation (io.strimzi.operator.common.Reconciliation) 12
ParallelTest (io.strimzi.test.annotations.ParallelTest) 12
IOException (java.io.IOException) 12
Kafka (io.strimzi.api.kafka.model.Kafka) 10
KafkaBuilder (io.strimzi.api.kafka.model.KafkaBuilder) 10
List (java.util.List) 10
RoleBindingBuilder (io.fabric8.kubernetes.api.model.rbac.RoleBindingBuilder) 8
Future (io.vertx.core.Future) 8
HashMap (java.util.HashMap) 7
ConfigMap (io.fabric8.kubernetes.api.model.ConfigMap) 5
ClusterRoleBindingBuilder (io.fabric8.kubernetes.api.model.rbac.ClusterRoleBindingBuilder) 5
ArrayList (java.util.ArrayList) 5
ServiceAccount (io.fabric8.kubernetes.api.model.ServiceAccount) 4
ClusterRole (io.fabric8.kubernetes.api.model.rbac.ClusterRole) 4