Use of io.streamnative.kafka.client.api.ProducerConfiguration in project starlight-for-kafka by DataStax.
From class KafkaIdleConnectionTest, method setup().
@BeforeClass
@Override
protected void setup() throws Exception {
    // Configure the broker-side idle timeout before starting the test broker.
    conf.setConnectionMaxIdleMs(DEFAULT_BROKER_CONNECTION_MAX_IDLE_MS);
    super.internalSetup();

    // Build a raw Kafka client Selector so the test can open and observe
    // network connections directly, bypassing the high-level producer.
    time = Time.SYSTEM;
    Metrics metrics = new Metrics(time);
    ProducerConfiguration producerConfiguration = ProducerConfiguration.builder()
            .bootstrapServers("localhost:" + getKafkaBrokerPort())
            .keySerializer(KafkaVersion.DEFAULT.getStringSerializer())
            .valueSerializer(KafkaVersion.DEFAULT.getStringSerializer())
            .build();
    ChannelBuilder channelBuilder =
            ClientUtils.createChannelBuilder(new ProducerConfig(producerConfiguration.toProperties()));
    String clientId = "clientId";
    selector = new Selector(DEFAULT_CONNECTION_MAX_IDLE_MS, metrics, time, "test-selector",
            channelBuilder, new LogContext(String.format("[Test Selector clientId=%s] ", clientId)));
}
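For context, a minimal sketch of how a test built on this setup might exercise the idle timeout: open a connection through the Selector, poll without sending anything, and check that the broker drops it. This is not the actual test body; the node id, buffer sizes, and the testBrokerClosesIdleConnection name are illustrative, and it assumes the selector, time, and getKafkaBrokerPort() members from the setup above, plus a Kafka client version where Selector.disconnected() returns a map keyed by connection id.
@Test
public void testBrokerClosesIdleConnection() throws Exception {
    // Illustrative sketch, not the real test: connect and stay idle, then wait for
    // the broker to close the connection after DEFAULT_BROKER_CONNECTION_MAX_IDLE_MS.
    String node = "0";
    selector.connect(node, new InetSocketAddress("localhost", getKafkaBrokerPort()), 64 * 1024, 64 * 1024);
    long deadline = time.milliseconds() + 2 * DEFAULT_BROKER_CONNECTION_MAX_IDLE_MS;
    boolean closedByBroker = false;
    while (time.milliseconds() < deadline) {
        selector.poll(100);                               // keep polling without sending any request
        if (selector.disconnected().containsKey(node)) {  // broker dropped the idle channel
            closedByBroker = true;
            break;
        }
    }
    assertTrue(closedByBroker);
}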
Use of io.streamnative.kafka.client.api.ProducerConfiguration in project kop by StreamNative.
From class DelayAuthorizationFailedCloseTest, method setup().
@BeforeClass
@Override
protected void setup() throws Exception {
    // Create a token-based authentication provider and an admin token for the broker.
    SecretKey secretKey = AuthTokenUtils.createSecretKey(SignatureAlgorithm.HS256);
    AuthenticationProviderToken provider = new AuthenticationProviderToken();
    Properties properties = new Properties();
    properties.setProperty("tokenSecretKey", AuthTokenUtils.encodeKeyBase64(secretKey));
    ServiceConfiguration authConf = new ServiceConfiguration();
    authConf.setProperties(properties);
    provider.initialize(authConf);

    String adminToken = AuthTokenUtils.createToken(secretKey, ADMIN_USER, Optional.empty());

    // Enable SASL/PLAIN authentication and authorization on the test broker, and
    // configure the delay applied before closing failed-authentication connections.
    super.resetConfig();
    conf.setSaslAllowedMechanisms(Sets.newHashSet("PLAIN"));
    conf.setKafkaMetadataTenant("internal");
    conf.setKafkaMetadataNamespace("__kafka");
    conf.setKafkaTenant(TENANT);
    conf.setKafkaNamespace(NAMESPACE);
    conf.setClusterName(super.configClusterName);
    conf.setAuthorizationEnabled(true);
    conf.setAuthenticationEnabled(true);
    conf.setAuthorizationAllowWildcardsMatching(true);
    conf.setSuperUserRoles(Sets.newHashSet(ADMIN_USER));
    conf.setAuthenticationProviders(Sets.newHashSet(AuthenticationProviderToken.class.getName()));
    conf.setBrokerClientAuthenticationPlugin(AuthenticationToken.class.getName());
    conf.setBrokerClientAuthenticationParameters("token:" + adminToken);
    conf.setProperties(properties);
    conf.setFailedAuthenticationDelayMs(FAILED_AUTHENTICATION_DELAY_MS);

    super.internalSetup();
    log.info("success internal setup");

    // Make sure the Kafka metadata namespace exists and keeps data indefinitely.
    if (!admin.namespaces().getNamespaces(TENANT).contains(TENANT + "/__kafka")) {
        admin.namespaces().createNamespace(TENANT + "/__kafka");
        admin.namespaces().setNamespaceReplicationClusters(TENANT + "/__kafka", Sets.newHashSet("test"));
        admin.namespaces().setRetention(TENANT + "/__kafka", new RetentionPolicies(-1, -1));
    }
    log.info("created namespaces, init handler");

    // Build a raw Kafka client Selector for driving connections directly in the test.
    time = Time.SYSTEM;
    Metrics metrics = new Metrics(time);
    ProducerConfiguration producerConfiguration = producerConfiguration();
    ChannelBuilder channelBuilder =
            ClientUtils.createChannelBuilder(new ProducerConfig(producerConfiguration.toProperties()));
    String clientId = "clientId";
    selector = new Selector(DEFAULT_CONNECTION_MAX_IDLE_MS, metrics, time, "test-selector",
            channelBuilder, new LogContext(String.format("[Test Selector clientId=%s] ", clientId)));
}
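To show why FAILED_AUTHENTICATION_DELAY_MS matters here, a hedged sketch of the kind of check such a test can make: time how long the broker keeps a connection whose authentication is rejected before closing it. This is not the actual test body; it assumes the selector and time fields from the setup above, a getKafkaBrokerPort() helper from the test base class, and that the channel has been given credentials the broker will reject.
@Test
public void testFailedAuthenticationCloseIsDelayed() throws Exception {
    // Illustrative sketch, not the real test: the broker should hold a connection that
    // fails authentication for at least the configured delay before dropping it.
    String node = "0";
    long start = time.milliseconds();
    selector.connect(node, new InetSocketAddress("localhost", getKafkaBrokerPort()), 64 * 1024, 64 * 1024);
    while (!selector.disconnected().containsKey(node)) {
        selector.poll(100);   // wait for the broker to reject and close the connection
    }
    long elapsed = time.milliseconds() - start;
    assertTrue(elapsed >= FAILED_AUTHENTICATION_DELAY_MS);
}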
Use of io.streamnative.kafka.client.api.ProducerConfiguration in project starlight-for-kafka by DataStax.
From class DelayAuthorizationFailedCloseTest, method setup().
@BeforeClass
@Override
protected void setup() throws Exception {
    // Create a token-based authentication provider and an admin token for the broker.
    SecretKey secretKey = AuthTokenUtils.createSecretKey(SignatureAlgorithm.HS256);
    AuthenticationProviderToken provider = new AuthenticationProviderToken();
    Properties properties = new Properties();
    properties.setProperty("tokenSecretKey", AuthTokenUtils.encodeKeyBase64(secretKey));
    ServiceConfiguration authConf = new ServiceConfiguration();
    authConf.setProperties(properties);
    provider.initialize(authConf);

    String adminToken = AuthTokenUtils.createToken(secretKey, ADMIN_USER, Optional.empty());

    // Enable SASL/PLAIN authentication and authorization on the test broker, and
    // configure the delay applied before closing failed-authentication connections.
    super.resetConfig();
    conf.setSaslAllowedMechanisms(Sets.newHashSet("PLAIN"));
    conf.setKafkaMetadataTenant("internal");
    conf.setKafkaMetadataNamespace("__kafka");
    conf.setKafkaTenant(TENANT);
    conf.setKafkaNamespace(NAMESPACE);
    conf.setClusterName(super.configClusterName);
    conf.setAuthorizationEnabled(true);
    conf.setAuthenticationEnabled(true);
    conf.setAuthorizationAllowWildcardsMatching(true);
    conf.setSuperUserRoles(Sets.newHashSet(ADMIN_USER));
    conf.setAuthenticationProviders(Sets.newHashSet(AuthenticationProviderToken.class.getName()));
    conf.setBrokerClientAuthenticationPlugin(AuthenticationToken.class.getName());
    conf.setBrokerClientAuthenticationParameters("token:" + adminToken);
    conf.setProperties(properties);
    conf.setFailedAuthenticationDelayMs(FAILED_AUTHENTICATION_DELAY_MS);

    super.internalSetup();
    log.info("success internal setup");

    // Make sure the Kafka metadata namespace exists and keeps data indefinitely.
    if (!admin.namespaces().getNamespaces(TENANT).contains(TENANT + "/__kafka")) {
        admin.namespaces().createNamespace(TENANT + "/__kafka");
        admin.namespaces().setNamespaceReplicationClusters(TENANT + "/__kafka", Sets.newHashSet("test"));
        admin.namespaces().setRetention(TENANT + "/__kafka", new RetentionPolicies(-1, -1));
    }
    log.info("created namespaces, init handler");

    // Build a raw Kafka client Selector for driving connections directly in the test.
    time = Time.SYSTEM;
    Metrics metrics = new Metrics(time);
    ProducerConfiguration producerConfiguration = producerConfiguration();
    ChannelBuilder channelBuilder =
            ClientUtils.createChannelBuilder(new ProducerConfig(producerConfiguration.toProperties()));
    String clientId = "clientId";
    selector = new Selector(DEFAULT_CONNECTION_MAX_IDLE_MS, metrics, time, "test-selector",
            channelBuilder, new LogContext(String.format("[Test Selector clientId=%s] ", clientId)));
}
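The producerConfiguration() helper above is opaque in this snippet. Purely as an illustration of what it is assumed to provide on a broker configured this way, an equivalent raw-properties setup could look roughly like the following, using standard Kafka client config keys and the usual KoP token convention (username = tenant/namespace, password = token:<jwt>); the real helper's contents may differ.
// Hypothetical equivalent of producerConfiguration().toProperties() for this test's
// SASL/PLAIN token setup; adminToken, TENANT, and NAMESPACE come from the setup above.
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + getKafkaBrokerPort());
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
props.put(SaslConfigs.SASL_JAAS_CONFIG,
        "org.apache.kafka.common.security.plain.PlainLoginModule required"
                + " username=\"" + TENANT + "/" + NAMESPACE + "\""
                + " password=\"token:" + adminToken + "\";");
ChannelBuilder channelBuilder = ClientUtils.createChannelBuilder(new ProducerConfig(props));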
Use of io.streamnative.kafka.client.api.ProducerConfiguration in project kop by StreamNative.
From class KafkaIdleConnectionTest, method setup().
@BeforeClass
@Override
protected void setup() throws Exception {
    // Configure the broker-side idle timeout before starting the test broker.
    conf.setConnectionMaxIdleMs(DEFAULT_BROKER_CONNECTION_MAX_IDLE_MS);
    super.internalSetup();

    // Build a raw Kafka client Selector so the test can open and observe
    // network connections directly, bypassing the high-level producer.
    time = Time.SYSTEM;
    Metrics metrics = new Metrics(time);
    ProducerConfiguration producerConfiguration = ProducerConfiguration.builder()
            .bootstrapServers("localhost:" + getKafkaBrokerPort())
            .keySerializer(KafkaVersion.DEFAULT.getStringSerializer())
            .valueSerializer(KafkaVersion.DEFAULT.getStringSerializer())
            .build();
    ChannelBuilder channelBuilder =
            ClientUtils.createChannelBuilder(new ProducerConfig(producerConfiguration.toProperties()));
    String clientId = "clientId";
    selector = new Selector(DEFAULT_CONNECTION_MAX_IDLE_MS, metrics, time, "test-selector",
            channelBuilder, new LogContext(String.format("[Test Selector clientId=%s] ", clientId)));
}
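Beyond feeding the ChannelBuilder, the same ProducerConfiguration can also back an ordinary high-level producer, since toProperties() yields standard Kafka client properties. A minimal sketch, assuming the serializer classes set in the builder are carried through, that the code runs inside a test method declaring throws Exception, and that the test-topic name is just an example:
// Sketch only: build a regular KafkaProducer from the same configuration and send one record.
try (KafkaProducer<String, String> producer =
        new KafkaProducer<>(producerConfiguration.toProperties())) {
    producer.send(new ProducerRecord<>("test-topic", "key", "value")).get();
}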