Use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project strimzi-kafka-operator by strimzi.
In class ConfigProviderST, method testConnectWithConnectorUsingConfigAndEnvProvider:
@ParallelNamespaceTest
void testConnectWithConnectorUsingConfigAndEnvProvider(ExtensionContext extensionContext) {
final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
final String namespaceName = StUtils.getNamespaceBasedOnRbac(namespace, extensionContext);
final String producerName = "producer-" + ClientUtils.generateRandomConsumerGroup();
final String customFileSinkPath = "/tmp/my-own-path.txt";
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 3).build());
Map<String, String> configData = new HashMap<>();
configData.put("topics", topicName);
configData.put("file", customFileSinkPath);
configData.put("key", "org.apache.kafka.connect.storage.StringConverter");
configData.put("value", "org.apache.kafka.connect.storage.StringConverter");
String cmName = "connector-config";
String configRoleName = "connector-config-role";
ConfigMap connectorConfig = new ConfigMapBuilder()
    .editOrNewMetadata()
        .withName(cmName)
    .endMetadata()
    .withData(configData)
    .build();
kubeClient().getClient().configMaps().inNamespace(namespaceName).create(connectorConfig);
resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, 1, false)
    .editOrNewMetadata()
        .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true")
    .endMetadata()
    .editOrNewSpec()
        .addToConfig("key.converter.schemas.enable", false)
        .addToConfig("value.converter.schemas.enable", false)
        .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .addToConfig("config.providers", "configmaps,env")
        .addToConfig("config.providers.configmaps.class", "io.strimzi.kafka.KubernetesConfigMapConfigProvider")
        .addToConfig("config.providers.env.class", "io.strimzi.kafka.EnvVarConfigProvider")
        .editOrNewExternalConfiguration()
            .addNewEnv()
                .withName("FILE_SINK_FILE")
                .withNewValueFrom()
                    .withNewConfigMapKeyRef("file", cmName, false)
                .endValueFrom()
            .endEnv()
        .endExternalConfiguration()
    .endSpec()
    .build());
LOGGER.info("Creating needed RoleBinding and Role for Kubernetes Config Provider");
ResourceManager.getInstance().createResource(extensionContext, new RoleBindingBuilder()
    .editOrNewMetadata()
        .withName("connector-config-rb")
        .withNamespace(namespaceName)
    .endMetadata()
    .withSubjects(new SubjectBuilder()
        .withKind("ServiceAccount")
        .withName(clusterName + "-connect")
        .withNamespace(namespaceName)
        .build())
    .withRoleRef(new RoleRefBuilder()
        .withKind("Role")
        .withName(configRoleName)
        .withApiGroup("rbac.authorization.k8s.io")
        .build())
    .build());
// Create a Role that allows only "get" on the ConfigMap holding the connector configuration
Role configRole = new RoleBuilder()
    .editOrNewMetadata()
        .withName(configRoleName)
        .withNamespace(namespaceName)
    .endMetadata()
    .addNewRule()
        .withApiGroups("")
        .withResources("configmaps")
        .withResourceNames(cmName)
        .withVerbs("get")
    .endRule()
    .build();
kubeClient().getClient().resource(configRole).createOrReplace();
String configPrefix = "configmaps:" + namespaceName + "/connector-config:";
resourceManager.createResource(extensionContext, KafkaConnectorTemplates.kafkaConnector(clusterName)
    .editSpec()
        .withClassName("org.apache.kafka.connect.file.FileStreamSinkConnector")
        .addToConfig("file", "${env:FILE_SINK_FILE}")
        .addToConfig("key.converter", "${" + configPrefix + "key}")
        .addToConfig("value.converter", "${" + configPrefix + "value}")
        .addToConfig("topics", "${" + configPrefix + "topics}")
    .endSpec()
    .build());
KafkaClients kafkaBasicClientJob = new KafkaClientsBuilder()
    .withProducerName(producerName)
    .withBootstrapAddress(KafkaResources.plainBootstrapAddress(clusterName))
    .withTopicName(topicName)
    .withMessageCount(MESSAGE_COUNT)
    .withDelayMs(0)
    .withNamespaceName(namespaceName)
    .build();
resourceManager.createResource(extensionContext, kafkaBasicClientJob.producerStrimzi());
String kafkaConnectPodName = kubeClient().listPods(namespaceName, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND).get(0).getMetadata().getName();
KafkaConnectUtils.waitForMessagesInKafkaConnectFileSink(namespaceName, kafkaConnectPodName, customFileSinkPath, "Hello-world - 99");
}
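Note that the connector spec above never embeds the actual values: Kafka Connect resolves each ${...} placeholder at startup through the configured providers (env for environment variables, configmaps for Strimzi's KubernetesConfigMapConfigProvider). Assuming the test runs in a hypothetical namespace my-namespace with a topic named my-topic, the resolved connector configuration would look roughly like this:
file=/tmp/my-own-path.txt
key.converter=org.apache.kafka.connect.storage.StringConverter
value.converter=org.apache.kafka.connect.storage.StringConverter
topics=my-topic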
Use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project strimzi-kafka-operator by strimzi.
In class KafkaCluster, method generateClusterRoleBinding:
/**
 * Creates the ClusterRoleBinding which binds the Kafka ServiceAccount to the ClusterRole
 * that grants the Kafka init container permission to access Kubernetes nodes (needed for rack awareness).
 *
 * @param assemblyNamespace The namespace.
 * @return The ClusterRoleBinding, or null when neither rack awareness nor a node port listener is configured.
 */
public ClusterRoleBinding generateClusterRoleBinding(String assemblyNamespace) {
if (rack != null || isExposedWithNodePort()) {
Subject ks = new SubjectBuilder()
    .withKind("ServiceAccount")
    .withName(getServiceAccountName())
    .withNamespace(assemblyNamespace)
    .build();
RoleRef roleRef = new RoleRefBuilder()
    .withName("strimzi-kafka-broker")
    .withApiGroup("rbac.authorization.k8s.io")
    .withKind("ClusterRole")
    .build();
return getClusterRoleBinding(KafkaResources.initContainerClusterRoleBindingName(cluster, namespace), ks, roleRef);
} else {
return null;
}
}
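The getClusterRoleBinding helper is not part of this snippet (the same helper is reused by KafkaConnectCluster below). A minimal sketch of what it could look like, assuming it does nothing beyond assembling the resource from its parts; the real Strimzi helper may also attach labels and owner references:
// Hypothetical sketch of the getClusterRoleBinding helper, not the actual Strimzi implementation
protected ClusterRoleBinding getClusterRoleBinding(String name, Subject subject, RoleRef roleRef) {
    return new ClusterRoleBindingBuilder()
        .withNewMetadata()
            .withName(name)
        .endMetadata()
        .withSubjects(subject)
        .withRoleRef(roleRef)
        .build();
}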
Use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project strimzi-kafka-operator by strimzi.
In class KafkaConnectCluster, method generateClusterRoleBinding:
/**
 * Creates the ClusterRoleBinding which binds the Kafka Connect ServiceAccount to the ClusterRole
 * that grants the Kafka Connect init container permission to access Kubernetes nodes (needed for rack awareness).
 *
 * @return The ClusterRoleBinding, or null when rack awareness is not configured.
 */
public ClusterRoleBinding generateClusterRoleBinding() {
if (rack == null) {
return null;
}
Subject subject = new SubjectBuilder()
    .withKind("ServiceAccount")
    .withName(getServiceAccountName())
    .withNamespace(namespace)
    .build();
RoleRef roleRef = new RoleRefBuilder()
    .withName("strimzi-kafka-client")
    .withApiGroup("rbac.authorization.k8s.io")
    .withKind("ClusterRole")
    .build();
return getClusterRoleBinding(KafkaConnectResources.initContainerClusterRoleBindingName(cluster, namespace), subject, roleRef);
}
Use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project kubernetes-client by fabric8io.
In class RoleBindingTest, method kubernetesRoleBuilderTest:
@Test
public void kubernetesRoleBuilderTest() throws Exception {
// given
final String originalJson = Helper.loadJson("/valid-roleBinding.json");
// when
RoleBinding kubernetesRoleBinding = new RoleBindingBuilder()
    .withNewMetadata()
        .withName("read-jobs")
        .withNamespace("default")
    .endMetadata()
    .addToSubjects(0, new SubjectBuilder()
        .withApiGroup("rbac.authorization.k8s.io")
        .withKind("User")
        .withName("jane")
        .withNamespace("default")
        .build())
    .withRoleRef(new RoleRefBuilder()
        .withApiGroup("rbac.authorization.k8s.io")
        .withKind("Role")
        .withName("job-reader")
        .build())
    .build();
final String serializedJson = mapper.writeValueAsString(kubernetesRoleBinding);
// then
assertThatJson(serializedJson).when(IGNORING_ARRAY_ORDER, TREATING_NULL_AS_ABSENT, IGNORING_EXTRA_FIELDS).isEqualTo(originalJson);
}
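The fixture /valid-roleBinding.json is loaded from the test resources and is not shown here. Reconstructed from the builder calls, it presumably contains something close to the following (field order and extra fields may differ, since the assertion ignores both):
{
  "apiVersion": "rbac.authorization.k8s.io/v1",
  "kind": "RoleBinding",
  "metadata": { "name": "read-jobs", "namespace": "default" },
  "subjects": [
    { "apiGroup": "rbac.authorization.k8s.io", "kind": "User", "name": "jane", "namespace": "default" }
  ],
  "roleRef": { "apiGroup": "rbac.authorization.k8s.io", "kind": "Role", "name": "job-reader" }
}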
Use of io.fabric8.kubernetes.api.model.rbac.SubjectBuilder in project stackgres by ongres.
In class PatroniRole, method createRoleBinding:
/**
 * Creates the RoleBinding for Patroni associated with the cluster.
 */
private RoleBinding createRoleBinding(StackGresDistributedLogsContext context) {
final StackGresDistributedLogs cluster = context.getSource();
final Map<String, String> labels = labelFactory.clusterLabels(cluster);
return new RoleBindingBuilder()
    .withNewMetadata()
        .withName(roleName(context))
        .withNamespace(cluster.getMetadata().getNamespace())
        .withLabels(labels)
    .endMetadata()
    .withSubjects(new SubjectBuilder()
        .withKind("ServiceAccount")
        .withName(roleName(context))
        .withNamespace(cluster.getMetadata().getNamespace())
        .build())
    .withRoleRef(new RoleRefBuilder()
        .withKind("Role")
        .withName(roleName(context))
        .withApiGroup("rbac.authorization.k8s.io")
        .build())
    .build();
}
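Across all of these usages the pattern is the same: a Subject identifies who is being bound (a ServiceAccount or a User), while a RoleRef names the Role or ClusterRole being granted. A minimal standalone sketch with hypothetical names:
Subject subject = new SubjectBuilder()
    .withKind("ServiceAccount")
    .withName("my-service-account")   // hypothetical name
    .withNamespace("my-namespace")    // hypothetical namespace
    .build();
RoleRef roleRef = new RoleRefBuilder()
    .withApiGroup("rbac.authorization.k8s.io")
    .withKind("Role")
    .withName("my-role")              // hypothetical name
    .build();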