
Example 26 with SubjectBuilder

use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project strimzi-kafka-operator by strimzi.

the class ConfigProviderST method testConnectWithConnectorUsingConfigAndEnvProvider.

@ParallelNamespaceTest
void testConnectWithConnectorUsingConfigAndEnvProvider(ExtensionContext extensionContext) {
    final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
    final String namespaceName = StUtils.getNamespaceBasedOnRbac(namespace, extensionContext);
    final String producerName = "producer-" + ClientUtils.generateRandomConsumerGroup();
    final String customFileSinkPath = "/tmp/my-own-path.txt";
    resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build());
    Map<String, String> configData = new HashMap<>();
    configData.put("topics", topicName);
    configData.put("file", customFileSinkPath);
    configData.put("key", "org.apache.kafka.connect.storage.StringConverter");
    configData.put("value", "org.apache.kafka.connect.storage.StringConverter");
    String cmName = "connector-config";
    String configRoleName = "connector-config-role";
    ConfigMap connectorConfig = new ConfigMapBuilder().editOrNewMetadata().withName(cmName).endMetadata().withData(configData).build();
    kubeClient().getClient().configMaps().inNamespace(namespaceName).create(connectorConfig);
    resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1, false)
        .editOrNewMetadata()
            .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
        .endMetadata()
        .editOrNewSpec()
            .addToConfig("key.converter.schemas.enable", false)
            .addToConfig("value.converter.schemas.enable", false)
            .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
            .addToConfig("config.providers", "configmaps,env")
            .addToConfig("config.providers.configmaps.class", "io.strimzi.kafka.KubernetesConfigMapConfigProvider")
            .addToConfig("config.providers.env.class", "io.strimzi.kafka.EnvVarConfigProvider")
            .editOrNewExternalConfiguration()
                .addNewEnv()
                    .withName("FILE_SINK_FILE")
                    .withNewValueFrom()
                        .withNewConfigMapKeyRef("file", cmName, false)
                    .endValueFrom()
                .endEnv()
            .endExternalConfiguration()
        .endSpec()
        .build());
    LOGGER.info("Creating needed RoleBinding and Role for Kubernetes Config Provider");
    ResourceManager.getInstance().createResource(extensionContext, new RoleBindingBuilder()
        .editOrNewMetadata()
            .withName("connector-config-rb")
            .withNamespace(namespaceName)
        .endMetadata()
        .withSubjects(new SubjectBuilder()
            .withKind("ServiceAccount")
            .withName(clusterName + "-connect")
            .withNamespace(namespaceName)
            .build())
        .withRoleRef(new RoleRefBuilder()
            .withKind("Role")
            .withName(configRoleName)
            .withApiGroup("rbac.authorization.k8s.io")
            .build())
        .build());
    // Create a Role that only allows "get" on the connector ConfigMap
    Role configRole = new RoleBuilder()
        .editOrNewMetadata()
            .withName(configRoleName)
            .withNamespace(namespaceName)
        .endMetadata()
        .addNewRule()
            .withApiGroups("")
            .withResources("configmaps")
            .withResourceNames(cmName)
            .withVerbs("get")
        .endRule()
        .build();
    kubeClient().getClient().resource(configRole).createOrReplace();
    String configPrefix = "configmaps:" + namespaceName + "/connector-config:";
    resourceManager.createResource(extensionContext, KafkaConnectorTemplates.kafkaConnector(clusterName)
        .editSpec()
            .withClassName("org.apache.kafka.connect.file.FileStreamSinkConnector")
            .addToConfig("file", "${env:FILE_SINK_FILE}")
            .addToConfig("key.converter", "${" + configPrefix + "key}")
            .addToConfig("value.converter", "${" + configPrefix + "value}")
            .addToConfig("topics", "${" + configPrefix + "topics}")
        .endSpec()
        .build());
    KafkaClients kafkaBasicClientJob = new KafkaClientsBuilder()
        .withProducerName(producerName)
        .withBootstrapAddress(KafkaResources.plainBootstrapAddress(clusterName))
        .withTopicName(topicName)
        .withMessageCount(MESSAGE_COUNT)
        .withDelayMs(0)
        .withNamespaceName(namespaceName)
        .build();
    resourceManager.createResource(extensionContext, kafkaBasicClientJob.producerStrimzi());
    String kafkaConnectPodName = kubeClient().listPods(namespaceName, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();
    KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, customFileSinkPath, "Hello-world - 99");
}
Also used : Role(io.fabric8.kubernetes.api.model.rbac.Role) KafkaClientsBuilder(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder) RoleBindingBuilder(io.fabric8.kubernetes.api.model.rbac.RoleBindingBuilder) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) KafkaClients(io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients) HashMap(java.util.HashMap) ConfigMapBuilder(io.fabric8.kubernetes.api.model.ConfigMapBuilder) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) RoleBuilder(io.fabric8.kubernetes.api.model.rbac.RoleBuilder) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder) ParallelNamespaceTest(io.strimzi.systemtest.annotations.ParallelNamespaceTest)
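The connector config above mixes two placeholder syntaxes: ${env:FILE_SINK_FILE} is resolved by Strimzi's EnvVarConfigProvider from the environment variable injected through externalConfiguration, while ${configmaps:<namespace>/connector-config:<key>} is resolved by KubernetesConfigMapConfigProvider, which reads the ConfigMap directly from the Kubernetes API. That is why the test creates a Role and RoleBinding for the Connect ServiceAccount. The sketch below extracts that minimal RBAC pattern with the same fabric8 builders; all names are illustrative placeholders, not values from the test.

// Minimal sketch of the RBAC objects KubernetesConfigMapConfigProvider needs:
// a Role allowing "get" on one ConfigMap and a RoleBinding from the Connect
// ServiceAccount to that Role. All names are illustrative placeholders.
Role connectorConfigRole = new RoleBuilder()
    .withNewMetadata()
        .withName("connector-config-role")
        .withNamespace("my-namespace")
    .endMetadata()
    .addNewRule()
        .withApiGroups("")                       // core API group
        .withResources("configmaps")
        .withResourceNames("connector-config")   // restrict access to the single ConfigMap
        .withVerbs("get")
    .endRule()
    .build();

RoleBinding connectorConfigRoleBinding = new RoleBindingBuilder()
    .withNewMetadata()
        .withName("connector-config-rb")
        .withNamespace("my-namespace")
    .endMetadata()
    .withSubjects(new SubjectBuilder()
        .withKind("ServiceAccount")
        .withName("my-cluster-connect")          // <cluster-name>-connect ServiceAccount
        .withNamespace("my-namespace")
        .build())
    .withRoleRef(new RoleRefBuilder()
        .withKind("Role")
        .withName("connector-config-role")
        .withApiGroup("rbac.authorization.k8s.io")
        .build())
    .build();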

Example 27 with SubjectBuilder

use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project strimzi-kafka-operator by strimzi.

the class KafkaCluster method generateClusterRoleBinding.

/**
 * Creates the ClusterRoleBinding which binds the Kafka ServiceAccount to the ClusterRole
 * that grants the Kafka init container access to Kubernetes nodes (needed for rack awareness
 * and for node-port listeners).
 *
 * @param assemblyNamespace The namespace in which the ServiceAccount exists.
 * @return The ClusterRoleBinding, or null if neither rack awareness nor node-port listeners are configured.
 */
public ClusterRoleBinding generateClusterRoleBinding(String assemblyNamespace) {
    if (rack != null || isExposedWithNodePort()) {
        Subject ks = new SubjectBuilder().withKind("ServiceAccount").withName(getServiceAccountName()).withNamespace(assemblyNamespace).build();
        RoleRef roleRef = new RoleRefBuilder().withName("strimzi-kafka-broker").withApiGroup("rbac.authorization.k8s.io").withKind("ClusterRole").build();
        return getClusterRoleBinding(KafkaResources.initContainerClusterRoleBindingName(cluster, namespace), ks, roleRef);
    } else {
        return null;
    }
}
Also used : RoleRef(io.fabric8.kubernetes.api.model.rbac.RoleRef) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) Subject(io.fabric8.kubernetes.api.model.rbac.Subject) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder)
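The getClusterRoleBinding(...) helper called here is not shown in this snippet. As a rough, hypothetical sketch (not the actual Strimzi implementation), such a helper could combine the Subject and RoleRef with the fabric8 ClusterRoleBindingBuilder listed in the Aggregations below:

// Hypothetical sketch only; the real Strimzi helper may differ.
static ClusterRoleBinding getClusterRoleBinding(String name, Subject subject, RoleRef roleRef) {
    return new ClusterRoleBindingBuilder()
        .withNewMetadata()
            .withName(name)   // ClusterRoleBindings are cluster-scoped, so no namespace is set
        .endMetadata()
        .withSubjects(subject)
        .withRoleRef(roleRef)
        .build();
}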

Example 28 with SubjectBuilder

use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project strimzi-kafka-operator by strimzi.

the class KafkaConnectCluster method generateClusterRoleBinding.

/**
 * Creates the ClusterRoleBinding which binds the Kafka Connect ServiceAccount to the ClusterRole
 * that grants the Connect init container access to Kubernetes nodes (needed for rack awareness).
 *
 * @return The ClusterRoleBinding, or null if rack awareness is not configured.
 */
public ClusterRoleBinding generateClusterRoleBinding() {
    if (rack == null) {
        return null;
    }
    Subject subject = new SubjectBuilder().withKind("ServiceAccount").withName(getServiceAccountName()).withNamespace(namespace).build();
    RoleRef roleRef = new RoleRefBuilder().withName("strimzi-kafka-client").withApiGroup("rbac.authorization.k8s.io").withKind("ClusterRole").build();
    return getClusterRoleBinding(KafkaConnectResources.initContainerClusterRoleBindingName(cluster, namespace), subject, roleRef);
}
Also used : RoleRef(io.fabric8.kubernetes.api.model.rbac.RoleRef) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) Subject(io.fabric8.kubernetes.api.model.rbac.Subject) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder)
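For orientation, a minimal usage sketch of this method: because it returns null when no rack is configured, callers should apply the binding conditionally. The connectCluster and client variables here are assumed context, and the apply call follows the same client.resource(...).createOrReplace() pattern used in Example 26.

// Assumed context: connectCluster is a KafkaConnectCluster, client a fabric8 KubernetesClient.
ClusterRoleBinding crb = connectCluster.generateClusterRoleBinding();
if (crb != null) {
    // Only needed when rack awareness is enabled; otherwise there is nothing to create.
    client.resource(crb).createOrReplace();
}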

Example 29 with SubjectBuilder

use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project kubernetes-client by fabric8io.

the class RoleBindingTest method kubernetesRoleBuilderTest.

@Test
public void kubernetesRoleBuilderTest() throws Exception {
    // given
    final String originalJson = Helper.loadJson("/valid-roleBinding.json");
    // when
    RoleBinding kubernetesRoleBinding = new RoleBindingBuilder()
        .withNewMetadata()
            .withName("read-jobs")
            .withNamespace("default")
        .endMetadata()
        .addToSubjects(0, new SubjectBuilder()
            .withApiGroup("rbac.authorization.k8s.io")
            .withKind("User")
            .withName("jane")
            .withNamespace("default")
            .build())
        .withRoleRef(new RoleRefBuilder()
            .withApiGroup("rbac.authorization.k8s.io")
            .withKind("Role")
            .withName("job-reader")
            .build())
        .build();
    final String serializedJson = mapper.writeValueAsString(kubernetesRoleBinding);
    // then
    assertThatJson(serializedJson).when(IGNORING_ARRAY_ORDER, TREATING_NULL_AS_ABSENT, IGNORING_EXTRA_FIELDS).isEqualTo(originalJson);
}
Also used : RoleBindingBuilder(io.fabric8.kubernetes.api.model.rbac.RoleBindingBuilder) RoleBinding(io.fabric8.kubernetes.api.model.rbac.RoleBinding) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder) Test(org.junit.jupiter.api.Test)
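A complementary sketch, not part of the original test: the same fixture can be deserialized with the ObjectMapper and a few fields checked directly, assuming /valid-roleBinding.json contains the values used by the builder above.

// Assumes the fixture matches the builder above (name "read-jobs", subject "jane", role "job-reader").
RoleBinding parsed = mapper.readValue(originalJson, RoleBinding.class);
assertEquals("read-jobs", parsed.getMetadata().getName());
assertEquals("jane", parsed.getSubjects().get(0).getName());
assertEquals("job-reader", parsed.getRoleRef().getName());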

Example 30 with SubjectBuilder

use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project stackgres by ongres.

the class PatroniRole method createRoleBinding.

/**
 * Create the RoleBinding for patroni associated to the cluster.
 */
private RoleBinding createRoleBinding(StackGresDistributedLogsContext context) {
    final StackGresDistributedLogs cluster = context.getSource();
    final Map<String, String> labels = labelFactory.clusterLabels(cluster);
    return new RoleBindingBuilder()
        .withNewMetadata()
            .withName(roleName(context))
            .withNamespace(cluster.getMetadata().getNamespace())
            .withLabels(labels)
        .endMetadata()
        .withSubjects(new SubjectBuilder()
            .withKind("ServiceAccount")
            .withName(roleName(context))
            .withNamespace(cluster.getMetadata().getNamespace())
            .build())
        .withRoleRef(new RoleRefBuilder()
            .withKind("Role")
            .withName(roleName(context))
            .withApiGroup("rbac.authorization.k8s.io")
            .build())
        .build();
}
Also used : RoleBindingBuilder(io.fabric8.kubernetes.api.model.rbac.RoleBindingBuilder) StackGresDistributedLogs(io.stackgres.common.crd.sgdistributedlogs.StackGresDistributedLogs) SubjectBuilder(io.fabric8.kubernetes.api.model.rbac.SubjectBuilder) RoleRefBuilder(io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder)
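As an illustrative companion (not taken from StackGres), the ServiceAccount that the Subject above points at could be built with the same naming and labels using fabric8's ServiceAccountBuilder (io.fabric8.kubernetes.api.model.ServiceAccountBuilder):

// Illustrative sketch: the ServiceAccount referenced by the RoleBinding's Subject,
// reusing roleName(context), the cluster metadata and labels from createRoleBinding above.
ServiceAccount patroniServiceAccount = new ServiceAccountBuilder()
    .withNewMetadata()
        .withName(roleName(context))
        .withNamespace(cluster.getMetadata().getNamespace())
        .withLabels(labels)
    .endMetadata()
    .build();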

Aggregations

SubjectBuilder (io.fabric8.kubernetes.api.model.rbac.SubjectBuilder): 31 usages
RoleRefBuilder (io.fabric8.kubernetes.api.model.rbac.RoleRefBuilder): 29 usages
RoleRef (io.fabric8.kubernetes.api.model.rbac.RoleRef): 18 usages
Subject (io.fabric8.kubernetes.api.model.rbac.Subject): 18 usages
RoleBindingBuilder (io.fabric8.kubernetes.api.model.rbac.RoleBindingBuilder): 13 usages
ClusterRoleBindingBuilder (io.fabric8.kubernetes.api.model.rbac.ClusterRoleBindingBuilder): 10 usages
RoleBinding (io.fabric8.kubernetes.api.model.rbac.RoleBinding): 6 usages
ClusterRoleBinding (io.fabric8.kubernetes.api.model.rbac.ClusterRoleBinding): 5 usages
Test (org.junit.jupiter.api.Test): 3 usages
ConfigMap (io.fabric8.kubernetes.api.model.ConfigMap): 2 usages
ConfigMapBuilder (io.fabric8.kubernetes.api.model.ConfigMapBuilder): 2 usages
Role (io.fabric8.kubernetes.api.model.rbac.Role): 2 usages
RoleBuilder (io.fabric8.kubernetes.api.model.rbac.RoleBuilder): 2 usages
KubernetesClient (io.fabric8.kubernetes.client.KubernetesClient): 2 usages
EnableKubernetesMockClient (io.fabric8.kubernetes.client.server.mock.EnableKubernetesMockClient): 2 usages
ParallelNamespaceTest (io.strimzi.systemtest.annotations.ParallelNamespaceTest): 2 usages
KafkaClients (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClients): 2 usages
KafkaClientsBuilder (io.strimzi.systemtest.kafkaclients.internalClients.KafkaClientsBuilder): 2 usages
ArrayList (java.util.ArrayList): 2 usages
HashMap (java.util.HashMap): 2 usages