Use of org.apache.kafka.common.security.auth.SecurityProtocol in project apache-kafka-on-k8s by banzaicloud: class SaslAuthenticatorTest, method testTokenAuthenticationOverSaslScram.
@Test
public void testTokenAuthenticationOverSaslScram() throws Exception {
    SecurityProtocol securityProtocol = SecurityProtocol.SASL_SSL;
    TestJaasConfig jaasConfig = configureMechanisms("SCRAM-SHA-256", Arrays.asList("SCRAM-SHA-256"));

    // create jaas config for token auth
    Map<String, Object> options = new HashMap<>();
    String tokenId = "token1";
    String tokenHmac = "abcdefghijkl";
    options.put("username", tokenId);                          // tokenId
    options.put("password", tokenHmac);                        // token hmac
    options.put(ScramLoginModule.TOKEN_AUTH_CONFIG, "true");   // enable token authentication
    jaasConfig.createOrUpdateEntry(TestJaasConfig.LOGIN_CONTEXT_CLIENT, ScramLoginModule.class.getName(), options);

    server = createEchoServer(securityProtocol);

    // Check invalid tokenId/tokenInfo in tokenCache
    createAndCheckClientConnectionFailure(securityProtocol, "0");

    // Check valid token Info and invalid credentials
    KafkaPrincipal owner = SecurityUtils.parseKafkaPrincipal("User:Owner");
    KafkaPrincipal renewer = SecurityUtils.parseKafkaPrincipal("User:Renewer1");
    TokenInformation tokenInfo = new TokenInformation(tokenId, owner, Collections.singleton(renewer),
        System.currentTimeMillis(), System.currentTimeMillis(), System.currentTimeMillis());
    server.tokenCache().addToken(tokenId, tokenInfo);
    createAndCheckClientConnectionFailure(securityProtocol, "0");

    // Check with valid token Info and credentials
    updateTokenCredentialCache(tokenId, tokenHmac);
    createAndCheckClientConnection(securityProtocol, "0");
}
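Outside this test harness, the same delegation-token credentials would reach a regular Kafka client through its sasl.jaas.config property rather than through TestJaasConfig. The sketch below is illustrative only: the class name TokenClientConfigSketch and the placeholder values are assumptions, but the tokenauth="true" login option corresponds to ScramLoginModule.TOKEN_AUTH_CONFIG used above.

import java.util.Properties;

// Minimal sketch (not from the test above): client properties for delegation-token
// authentication over SASL_SSL with SCRAM-SHA-256. Token id/HMAC are placeholders,
// and bootstrap.servers plus serializer/deserializer settings still need to be added.
final class TokenClientConfigSketch {
    static Properties tokenClientProps(String tokenId, String tokenHmac) {
        Properties props = new Properties();
        props.put("security.protocol", "SASL_SSL");
        props.put("sasl.mechanism", "SCRAM-SHA-256");
        props.put("sasl.jaas.config",
            "org.apache.kafka.common.security.scram.ScramLoginModule required"
                + " username=\"" + tokenId + "\""     // delegation token id
                + " password=\"" + tokenHmac + "\""   // token HMAC
                + " tokenauth=\"true\";");            // same flag as ScramLoginModule.TOKEN_AUTH_CONFIG
        return props;
    }
}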
Use of org.apache.kafka.common.security.auth.SecurityProtocol in project apache-kafka-on-k8s by banzaicloud: class SaslAuthenticatorTest, method testValidSaslPlainOverPlaintext.
/**
 * Tests good path SASL/PLAIN client and server channels using PLAINTEXT transport layer.
 */
@Test
public void testValidSaslPlainOverPlaintext() throws Exception {
    String node = "0";
    SecurityProtocol securityProtocol = SecurityProtocol.SASL_PLAINTEXT;
    configureMechanisms("PLAIN", Arrays.asList("PLAIN"));
    server = createEchoServer(securityProtocol);
    createAndCheckClientConnection(securityProtocol, node);
    server.verifyAuthenticationMetrics(1, 0);
}
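For reference, the equivalent good-path setup in an ordinary client uses SASL_PLAINTEXT with a PLAIN JAAS entry supplied via sasl.jaas.config. This is a minimal sketch with an assumed helper name (PlainClientConfigSketch) and placeholder credentials, not code from SaslAuthenticatorTest.

import java.util.Properties;

// Sketch: client-side settings for SASL/PLAIN over a PLAINTEXT transport.
// bootstrap.servers and (de)serializer settings still need to be added by the caller.
final class PlainClientConfigSketch {
    static Properties plainClientProps(String username, String password) {
        Properties props = new Properties();
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        props.put("sasl.jaas.config",
            "org.apache.kafka.common.security.plain.PlainLoginModule required"
                + " username=\"" + username + "\""
                + " password=\"" + password + "\";");
        return props;
    }
}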
Use of org.apache.kafka.common.security.auth.SecurityProtocol in project apache-kafka-on-k8s by banzaicloud: class SaslAuthenticatorTest, method testInvalidUsernameSaslPlain.
/**
 * Tests that SASL/PLAIN clients with invalid username fail authentication.
 */
@Test
public void testInvalidUsernameSaslPlain() throws Exception {
    String node = "0";
    SecurityProtocol securityProtocol = SecurityProtocol.SASL_SSL;
    TestJaasConfig jaasConfig = configureMechanisms("PLAIN", Arrays.asList("PLAIN"));
    jaasConfig.setClientOptions("PLAIN", "invaliduser", TestJaasConfig.PASSWORD);
    server = createEchoServer(securityProtocol);
    createAndCheckClientAuthenticationFailure(securityProtocol, node, "PLAIN", "Authentication failed: Invalid username or password");
    server.verifyAuthenticationMetrics(0, 1);
}
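On the client side, a rejection like the one exercised here surfaces as org.apache.kafka.common.errors.SaslAuthenticationException (introduced with KIP-152), carrying the broker-supplied message such as "Authentication failed: Invalid username or password". The sketch below is a hypothetical consumer with placeholder broker address and credentials, shown only to illustrate where that exception would be caught; it is not part of the test.

import java.util.Properties;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.SaslAuthenticationException;

// Hypothetical sketch: a consumer configured with credentials the broker rejects.
final class AuthFailureSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "broker:9093");   // placeholder address
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("security.protocol", "SASL_SSL");
        props.put("sasl.mechanism", "PLAIN");
        props.put("sasl.jaas.config",
            "org.apache.kafka.common.security.plain.PlainLoginModule required"
                + " username=\"invaliduser\" password=\"wrong-password\";");
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.listTopics();
        } catch (SaslAuthenticationException e) {
            // Retrying with the same credentials will not succeed; surface the error to the caller.
            System.err.println("Authentication failed: " + e.getMessage());
        }
    }
}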
Use of org.apache.kafka.common.security.auth.SecurityProtocol in project apache-kafka-on-k8s by banzaicloud: class ClientUtils, method createChannelBuilder.
/**
 * @param config client configs
 * @return configured ChannelBuilder based on the configs.
 */
public static ChannelBuilder createChannelBuilder(AbstractConfig config) {
    SecurityProtocol securityProtocol = SecurityProtocol.forName(config.getString(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG));
    String clientSaslMechanism = config.getString(SaslConfigs.SASL_MECHANISM);
    return ChannelBuilders.clientChannelBuilder(securityProtocol, JaasContext.Type.CLIENT, config, null, clientSaslMechanism, true);
}
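A minimal sketch of calling this helper directly follows, assuming a hand-built AbstractConfig backed by a small ConfigDef that defines only the two keys the method reads. Real clients normally pass their own config object (e.g. a producer or consumer config), and ClientUtils is an internal class, so treat this purely as an illustration.

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.ClientUtils;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.network.ChannelBuilder;

// Sketch (assumed class name): building a ChannelBuilder from a minimal config.
final class ChannelBuilderSketch {
    public static void main(String[] args) {
        // Define only the keys createChannelBuilder reads, with safe defaults.
        ConfigDef def = new ConfigDef()
            .define(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, ConfigDef.Type.STRING,
                "PLAINTEXT", ConfigDef.Importance.MEDIUM, "security protocol")
            .define(SaslConfigs.SASL_MECHANISM, ConfigDef.Type.STRING,
                SaslConfigs.DEFAULT_SASL_MECHANISM, ConfigDef.Importance.MEDIUM, "sasl mechanism");
        Map<String, Object> props = new HashMap<>();
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
        // PLAINTEXT requires no JAAS/SSL settings, so this yields a plaintext ChannelBuilder.
        ChannelBuilder builder = ClientUtils.createChannelBuilder(new AbstractConfig(def, props));
        System.out.println(builder.getClass().getSimpleName());
    }
}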
Use of org.apache.kafka.common.security.auth.SecurityProtocol in project apache-kafka-on-k8s by banzaicloud: class RequestResponseTest, method createUpdateMetadataRequest.
private UpdateMetadataRequest createUpdateMetadataRequest(int version, String rack) {
    Map<TopicPartition, UpdateMetadataRequest.PartitionState> partitionStates = new HashMap<>();
    List<Integer> isr = asList(1, 2);
    List<Integer> replicas = asList(1, 2, 3, 4);
    List<Integer> offlineReplicas = asList();
    partitionStates.put(new TopicPartition("topic5", 105),
        new UpdateMetadataRequest.PartitionState(0, 2, 1, isr, 2, replicas, offlineReplicas));
    partitionStates.put(new TopicPartition("topic5", 1),
        new UpdateMetadataRequest.PartitionState(1, 1, 1, isr, 2, replicas, offlineReplicas));
    partitionStates.put(new TopicPartition("topic20", 1),
        new UpdateMetadataRequest.PartitionState(1, 0, 1, isr, 2, replicas, offlineReplicas));

    SecurityProtocol plaintext = SecurityProtocol.PLAINTEXT;
    List<UpdateMetadataRequest.EndPoint> endPoints1 = new ArrayList<>();
    endPoints1.add(new UpdateMetadataRequest.EndPoint("host1", 1223, plaintext, ListenerName.forSecurityProtocol(plaintext)));
    List<UpdateMetadataRequest.EndPoint> endPoints2 = new ArrayList<>();
    endPoints2.add(new UpdateMetadataRequest.EndPoint("host1", 1244, plaintext, ListenerName.forSecurityProtocol(plaintext)));
    if (version > 0) {
        SecurityProtocol ssl = SecurityProtocol.SSL;
        endPoints2.add(new UpdateMetadataRequest.EndPoint("host2", 1234, ssl, ListenerName.forSecurityProtocol(ssl)));
        endPoints2.add(new UpdateMetadataRequest.EndPoint("host2", 1334, ssl, new ListenerName("CLIENT")));
    }
    Set<UpdateMetadataRequest.Broker> liveBrokers = Utils.mkSet(
        new UpdateMetadataRequest.Broker(0, endPoints1, rack),
        new UpdateMetadataRequest.Broker(1, endPoints2, rack));
    return new UpdateMetadataRequest.Builder((short) version, 1, 10, partitionStates, liveBrokers).build();
}
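As the endpoints above illustrate, a listener name defaults to the security protocol's name, while a custom label (here "CLIENT") can be mapped to any protocol through the broker's listener.security.protocol.map setting. A small sketch of that relationship, with an assumed class name:

import org.apache.kafka.common.network.ListenerName;
import org.apache.kafka.common.security.auth.SecurityProtocol;

// Sketch: default vs. custom listener names paired with a SecurityProtocol.
final class ListenerNameSketch {
    public static void main(String[] args) {
        ListenerName defaultSsl = ListenerName.forSecurityProtocol(SecurityProtocol.SSL);
        System.out.println(defaultSsl.value());   // "SSL" - name derived from the protocol
        ListenerName custom = new ListenerName("CLIENT");
        System.out.println(custom.value());       // "CLIENT" - custom name, protocol set elsewhere
    }
}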