Example usage of org.apache.pulsar.broker.authentication.AuthenticationProviderToken in the project starlight-for-kafka by DataStax.
The setup method of the class SaslMultitenantTest.
/**
 * Initializes the multi-tenant SASL test environment.
 *
 * <p>Creates an HS256 secret key and a token-based authentication provider,
 * issues JWT tokens for the two tenant users, the admin and the proxy role,
 * configures the broker for token authentication/authorization, and
 * provisions two tenants, each with a default and a non-default namespace.
 *
 * @throws Exception if broker startup or tenant/namespace provisioning fails
 */
@BeforeClass
@Override
protected void setup() throws Exception {
    SecretKey secretKey = AuthTokenUtils.createSecretKey(SignatureAlgorithm.HS256);
    AuthenticationProviderToken provider = new AuthenticationProviderToken();
    Properties properties = new Properties();
    properties.setProperty("tokenSecretKey", AuthTokenUtils.encodeKeyBase64(secretKey));
    // authConf exists only to initialize the provider; it is NOT the broker config.
    ServiceConfiguration authConf = new ServiceConfiguration();
    authConf.setProperties(properties);
    provider.initialize(authConf);

    // Per-role tokens signed with the shared secret key.
    user1Token = AuthTokenUtils.createToken(secretKey, USER1, Optional.empty());
    user2Token = AuthTokenUtils.createToken(secretKey, USER2, Optional.empty());
    adminToken = AuthTokenUtils.createToken(secretKey, ADMIN_USER, Optional.empty());
    proxyToken = AuthTokenUtils.createToken(secretKey, PROXY_USER, Optional.empty());

    super.resetConfig();
    // BUG FIX: auto topic creation was previously enabled on the throwaway
    // authConf object (a no-op, since authConf only feeds provider.initialize);
    // it must be enabled on the live broker configuration instead.
    conf.setAllowAutoTopicCreation(true);
    conf.setKafkaNamespace(NAMESPACE);
    conf.setKafkaTransactionCoordinatorEnabled(true);
    conf.setProxyRoles(Sets.newHashSet(PROXY_USER));
    // TENANT_PLACEHOLDER lets each authenticated tenant map to its own namespace.
    conf.setKopAllowedNamespaces(Collections.singleton(
            KafkaServiceConfiguration.TENANT_PLACEHOLDER + "/" + NAMESPACE));
    ((KafkaServiceConfiguration) conf).setSaslAllowedMechanisms(Sets.newHashSet("PLAIN"));
    // Deliberately poisoned value: tests must never fall back to the metadata tenant.
    ((KafkaServiceConfiguration) conf).setKafkaMetadataTenant("DONT-USE-ME");
    ((KafkaServiceConfiguration) conf).setKafkaMetadataNamespace("__kafka");
    conf.setClusterName(super.configClusterName);
    conf.setAuthorizationEnabled(true);
    conf.setAuthenticationEnabled(true);
    conf.setAuthorizationAllowWildcardsMatching(true);
    conf.setSuperUserRoles(Sets.newHashSet(ADMIN_USER, PROXY_USER));
    // Use the class literal instead of a concatenated string, consistent with
    // the other test setups, so refactors cannot silently break the reference.
    conf.setAuthenticationProviders(
            Sets.newHashSet(AuthenticationProviderToken.class.getName()));
    conf.setBrokerClientAuthenticationPlugin(AuthenticationToken.class.getName());
    conf.setBrokerClientAuthenticationParameters("token:" + adminToken);
    conf.setProperties(properties);

    super.internalSetup();

    // Two isolated tenants, each administered by its own user.
    admin.tenants().createTenant(TENANT1, TenantInfo.builder()
            .adminRoles(Collections.singleton(USER1))
            .allowedClusters(Collections.singleton(configClusterName))
            .build());
    admin.namespaces().createNamespace(TENANT1 + "/" + NAMESPACE);
    admin.namespaces().createNamespace(TENANT1 + "/" + NAMESPACE_NON_DEFAULT);
    admin.tenants().createTenant(TENANT2, TenantInfo.builder()
            .adminRoles(Collections.singleton(USER2))
            .allowedClusters(Collections.singleton(configClusterName))
            .build());
    admin.namespaces().createNamespace(TENANT2 + "/" + NAMESPACE);
    admin.namespaces().createNamespace(TENANT2 + "/" + NAMESPACE_NON_DEFAULT);
}
Example usage of org.apache.pulsar.broker.authentication.AuthenticationProviderToken in the project starlight-for-kafka by DataStax.
The setup method of the class KafkaAuthorizationTestBase.
/**
 * Prepares the authorization test base with token authentication enabled.
 *
 * <p>Generates an HS256 signing key, initializes a token authentication
 * provider, mints tokens for the simple/admin/another/proxy roles, applies
 * the broker security configuration (including schema-registry support),
 * starts the broker, and grants produce/consume rights on the test namespace.
 *
 * @throws Exception if broker startup or admin provisioning fails
 */
@BeforeClass
@Override
protected void setup() throws Exception {
    // Schema registry is exercised together with authn/authz in these tests.
    enableSchemaRegistry = true;

    SecretKey signingKey = AuthTokenUtils.createSecretKey(SignatureAlgorithm.HS256);
    AuthenticationProviderToken tokenProvider = new AuthenticationProviderToken();
    Properties brokerProps = new Properties();
    brokerProps.setProperty("tokenSecretKey", AuthTokenUtils.encodeKeyBase64(signingKey));
    ServiceConfiguration providerConf = new ServiceConfiguration();
    providerConf.setProperties(brokerProps);
    tokenProvider.initialize(providerConf);

    userToken = AuthTokenUtils.createToken(signingKey, SIMPLE_USER, Optional.empty());
    adminToken = AuthTokenUtils.createToken(signingKey, ADMIN_USER, Optional.empty());
    anotherToken = AuthTokenUtils.createToken(signingKey, ANOTHER_USER, Optional.empty());
    proxyToken = AuthTokenUtils.createToken(signingKey, PROXY_USER, Optional.empty());

    // Capture this flag before resetConfig() wipes it, then restore it after.
    boolean multiTenantMetadata = conf.isKafkaEnableMultiTenantMetadata();
    super.resetConfig();
    conf.setProxyRoles(Sets.newHashSet(PROXY_USER));
    conf.setKafkaEnableMultiTenantMetadata(multiTenantMetadata);
    conf.setSaslAllowedMechanisms(Sets.newHashSet("PLAIN"));
    conf.setKafkaMetadataTenant("internal");
    conf.setKafkaMetadataNamespace("__kafka");
    conf.setKafkaTenant(TENANT);
    conf.setKafkaNamespace(NAMESPACE);
    conf.setKopSchemaRegistryNamespace(NAMESPACE);
    conf.setClusterName(super.configClusterName);
    conf.setAuthorizationEnabled(true);
    conf.setAuthenticationEnabled(true);
    conf.setAuthorizationAllowWildcardsMatching(true);
    conf.setSuperUserRoles(Sets.newHashSet(ADMIN_USER, PROXY_USER));
    conf.setAuthenticationProviders(
            Sets.newHashSet(AuthenticationProviderToken.class.getName()));
    conf.setBrokerClientAuthenticationPlugin(AuthenticationToken.class.getName());
    conf.setBrokerClientAuthenticationParameters("token:" + adminToken);
    conf.setProperties(brokerProps);

    super.internalSetup();

    // Provision the test topic and allow the simple user to produce/consume.
    admin.namespaces().setNamespaceReplicationClusters(
            TENANT + "/" + NAMESPACE, Sets.newHashSet(super.configClusterName));
    admin.topics().createPartitionedTopic(TOPIC, 1);
    admin.namespaces().grantPermissionOnNamespace(
            TENANT + "/" + NAMESPACE, SIMPLE_USER,
            Sets.newHashSet(AuthAction.consume, AuthAction.produce));
}
Example usage of org.apache.pulsar.broker.authentication.AuthenticationProviderToken in the project kop by StreamNative.
The setup method of the class SaslPlainEndToEndTest.
/**
 * Initializes the SASL/PLAIN end-to-end test environment.
 *
 * <p>Creates an HS256 secret key, initializes a token authentication
 * provider, issues tokens for the admin, simple and "another" users,
 * configures the broker for token-based authentication/authorization with
 * the PLAIN mechanism, starts the broker, creates the test topic, and grants
 * the simple user produce/consume permission on the test namespace.
 *
 * @throws Exception if broker startup or admin provisioning fails
 */
@BeforeClass
@Override
protected void setup() throws Exception {
    SecretKey secretKey = AuthTokenUtils.createSecretKey(SignatureAlgorithm.HS256);
    AuthenticationProviderToken provider = new AuthenticationProviderToken();
    Properties properties = new Properties();
    properties.setProperty("tokenSecretKey", AuthTokenUtils.encodeKeyBase64(secretKey));
    ServiceConfiguration authConf = new ServiceConfiguration();
    authConf.setProperties(properties);
    provider.initialize(authConf);

    // adminToken is local: it is only embedded in the broker client parameters.
    String adminToken = AuthTokenUtils.createToken(secretKey, ADMIN_USER, Optional.empty());
    userToken = AuthTokenUtils.createToken(secretKey, SIMPLE_USER, Optional.empty());
    anotherToken = AuthTokenUtils.createToken(secretKey, ANOTHER_USER, Optional.empty());

    super.resetConfig();
    conf.setKopAllowedNamespaces(Collections.singleton(TENANT + "/" + NAMESPACE));
    ((KafkaServiceConfiguration) conf).setSaslAllowedMechanisms(Sets.newHashSet("PLAIN"));
    ((KafkaServiceConfiguration) conf).setKafkaMetadataTenant("internal");
    ((KafkaServiceConfiguration) conf).setKafkaMetadataNamespace("__kafka");
    conf.setClusterName(super.configClusterName);
    conf.setAuthorizationEnabled(true);
    conf.setAuthenticationEnabled(true);
    conf.setAuthorizationAllowWildcardsMatching(true);
    conf.setSuperUserRoles(Sets.newHashSet(ADMIN_USER));
    // Use the class literal rather than a concatenated string, matching the
    // sibling test setups; a refactor of the provider class now fails at
    // compile time instead of at broker startup.
    conf.setAuthenticationProviders(
            Sets.newHashSet(AuthenticationProviderToken.class.getName()));
    conf.setBrokerClientAuthenticationPlugin(AuthenticationToken.class.getName());
    conf.setBrokerClientAuthenticationParameters("token:" + adminToken);
    conf.setProperties(properties);

    super.internalSetup();

    admin.topics().createPartitionedTopic(TOPIC, 1);
    admin.namespaces().grantPermissionOnNamespace(
            TENANT + "/" + NAMESPACE, SIMPLE_USER,
            Sets.newHashSet(AuthAction.consume, AuthAction.produce));
}
Example usage of org.apache.pulsar.broker.authentication.AuthenticationProviderToken in the project kop by StreamNative.
The setup method of the class KafkaRequestHandlerWithAuthorizationTest.
/**
 * Boots the broker with token authentication/authorization and wires a
 * request handler backed by a mocked Netty channel context.
 *
 * <p>Steps: create the HS256 key and token provider, mint the admin token,
 * apply the security configuration, start the broker, ensure the internal
 * "__kafka" namespace exists with infinite retention, create the partitioned
 * test topic, build the request handler with a mock channel, record the
 * broker's service address, and run group-coordinator immigration.
 *
 * @throws Exception if broker startup or admin provisioning fails
 */
@BeforeClass
@Override
protected void setup() throws Exception {
    SecretKey signingKey = AuthTokenUtils.createSecretKey(SignatureAlgorithm.HS256);
    AuthenticationProviderToken tokenProvider = new AuthenticationProviderToken();
    Properties brokerProps = new Properties();
    brokerProps.setProperty("tokenSecretKey", AuthTokenUtils.encodeKeyBase64(signingKey));
    ServiceConfiguration providerConf = new ServiceConfiguration();
    providerConf.setProperties(brokerProps);
    tokenProvider.initialize(providerConf);

    adminToken = AuthTokenUtils.createToken(signingKey, ADMIN_USER, Optional.empty());

    super.resetConfig();
    conf.setDefaultNumPartitions(DEFAULT_PARTITION_NUM);
    conf.setSaslAllowedMechanisms(Sets.newHashSet("PLAIN"));
    conf.setKafkaMetadataTenant("internal");
    conf.setKafkaMetadataNamespace("__kafka");
    conf.setKafkaTenant(TENANT);
    conf.setKafkaNamespace(NAMESPACE);
    conf.setKafkaTransactionCoordinatorEnabled(true);
    conf.setClusterName(super.configClusterName);
    conf.setAuthorizationEnabled(true);
    conf.setAuthenticationEnabled(true);
    conf.setAuthorizationAllowWildcardsMatching(true);
    conf.setSuperUserRoles(Sets.newHashSet(ADMIN_USER));
    conf.setAuthenticationProviders(
            Sets.newHashSet(AuthenticationProviderToken.class.getName()));
    conf.setBrokerClientAuthenticationPlugin(AuthenticationToken.class.getName());
    conf.setBrokerClientAuthenticationParameters("token:" + adminToken);
    conf.setProperties(brokerProps);

    super.internalSetup();
    log.info("success internal setup");

    // Create the internal "__kafka" namespace on first run only.
    if (!admin.namespaces().getNamespaces(TENANT).contains(TENANT + "/__kafka")) {
        admin.namespaces().createNamespace(TENANT + "/__kafka");
        admin.namespaces().setNamespaceReplicationClusters(
                TENANT + "/__kafka", Sets.newHashSet("test"));
        // -1/-1 disables retention limits so coordinator state is never trimmed.
        admin.namespaces().setRetention(TENANT + "/__kafka", new RetentionPolicies(-1, -1));
    }
    admin.topics().createPartitionedTopic(TOPIC, DEFAULT_PARTITION_NUM);
    log.info("created namespaces, init handler");

    handler = newRequestHandler();
    // The handler only needs a context that yields a channel; mock both.
    ChannelHandlerContext ctx = mock(ChannelHandlerContext.class);
    Channel channel = mock(Channel.class);
    doReturn(channel).when(ctx).channel();
    handler.ctx = ctx;

    serviceAddress = new InetSocketAddress(pulsar.getBindAddress(), kafkaBrokerPort);

    // Make sure group coordinator already handle immigration
    handleGroupImmigration();
}
Example usage of org.apache.pulsar.broker.authentication.AuthenticationProviderToken in the project kop by StreamNative.
The setup method of the class DelayAuthorizationFailedCloseTest.
/**
 * Boots the broker with token authentication and a configured delay before
 * closing connections that fail authentication, then builds a Kafka client
 * {@code Selector} for driving the network-level assertions.
 *
 * @throws Exception if broker startup or admin provisioning fails
 */
@BeforeClass
@Override
protected void setup() throws Exception {
    SecretKey signingKey = AuthTokenUtils.createSecretKey(SignatureAlgorithm.HS256);
    AuthenticationProviderToken tokenProvider = new AuthenticationProviderToken();
    Properties brokerProps = new Properties();
    brokerProps.setProperty("tokenSecretKey", AuthTokenUtils.encodeKeyBase64(signingKey));
    ServiceConfiguration providerConf = new ServiceConfiguration();
    providerConf.setProperties(brokerProps);
    tokenProvider.initialize(providerConf);

    // Only the admin token is needed; it is embedded in the client parameters.
    String adminToken = AuthTokenUtils.createToken(signingKey, ADMIN_USER, Optional.empty());

    super.resetConfig();
    conf.setSaslAllowedMechanisms(Sets.newHashSet("PLAIN"));
    conf.setKafkaMetadataTenant("internal");
    conf.setKafkaMetadataNamespace("__kafka");
    conf.setKafkaTenant(TENANT);
    conf.setKafkaNamespace(NAMESPACE);
    conf.setClusterName(super.configClusterName);
    conf.setAuthorizationEnabled(true);
    conf.setAuthenticationEnabled(true);
    conf.setAuthorizationAllowWildcardsMatching(true);
    conf.setSuperUserRoles(Sets.newHashSet(ADMIN_USER));
    conf.setAuthenticationProviders(
            Sets.newHashSet(AuthenticationProviderToken.class.getName()));
    conf.setBrokerClientAuthenticationPlugin(AuthenticationToken.class.getName());
    conf.setBrokerClientAuthenticationParameters("token:" + adminToken);
    conf.setProperties(brokerProps);
    // The behavior under test: delay before closing failed-auth connections.
    conf.setFailedAuthenticationDelayMs(FAILED_AUTHENTICATION_DELAY_MS);

    super.internalSetup();
    log.info("success internal setup");

    // Create the internal "__kafka" namespace on first run only.
    if (!admin.namespaces().getNamespaces(TENANT).contains(TENANT + "/__kafka")) {
        admin.namespaces().createNamespace(TENANT + "/__kafka");
        admin.namespaces().setNamespaceReplicationClusters(
                TENANT + "/__kafka", Sets.newHashSet("test"));
        // -1/-1 disables retention limits for the internal namespace.
        admin.namespaces().setRetention(TENANT + "/__kafka", new RetentionPolicies(-1, -1));
    }
    log.info("created namespaces, init handler");

    // Build a raw Kafka Selector from the producer's security settings so the
    // test can observe connection-close timing directly.
    time = Time.SYSTEM;
    Metrics selectorMetrics = new Metrics(time);
    ProducerConfiguration producerConf = producerConfiguration();
    ChannelBuilder builder =
            ClientUtils.createChannelBuilder(new ProducerConfig(producerConf.toProperties()));
    String clientId = "clientId";
    selector = new Selector(
            DEFAULT_CONNECTION_MAX_IDLE_MS,
            selectorMetrics,
            time,
            "test-selector",
            builder,
            new LogContext(String.format("[Test Selector clientId=%s] ", clientId)));
}
Aggregations