Use of org.apache.kafka.common.security.TestSecurityConfig in project kafka by apache.
The class SslTransportLayerTest, method testClientEndpointNotValidated.
/**
* According to RFC 2818:
* <blockquote>Typically, the server has no external knowledge of what the client's
* identity ought to be and so checks (other than that the client has a
* certificate chain rooted in an appropriate CA) are not possible. If a
* server has such knowledge (typically from some source external to
* HTTP or TLS) it SHOULD check the identity as described above.</blockquote>
*
* However, the Java SSL engine does not perform endpoint validation of the client's IP address.
* Hence it is safe to avoid a reverse DNS lookup while creating the SSL engine. This test checks
* that client validation does not fail even if the client certificate has an invalid hostname.
* It also ensures that if client endpoint validation is ever added to Java, we can detect it
* and update Kafka's SSL code to enable validation on the server side and provide the hostname if required.
*/
@ParameterizedTest
@ArgumentsSource(SslTransportLayerArgumentsProvider.class)
public void testClientEndpointNotValidated(Args args) throws Exception {
String node = "0";
// Create client certificate with an invalid hostname
args.clientCertStores = certBuilder(false, "non-existent.com", args.useInlinePem).build();
args.serverCertStores = certBuilder(true, "localhost", args.useInlinePem).build();
args.sslServerConfigs = args.getTrustingConfig(args.serverCertStores, args.clientCertStores);
args.sslClientConfigs = args.getTrustingConfig(args.clientCertStores, args.serverCertStores);
// Create a server with endpoint validation enabled on the server SSL engine
SslChannelBuilder serverChannelBuilder = new TestSslChannelBuilder(Mode.SERVER) {
@Override
protected TestSslTransportLayer newTransportLayer(String id, SelectionKey key, SSLEngine sslEngine) throws IOException {
SSLParameters sslParams = sslEngine.getSSLParameters();
sslParams.setEndpointIdentificationAlgorithm("HTTPS");
sslEngine.setSSLParameters(sslParams);
return super.newTransportLayer(id, key, sslEngine);
}
};
serverChannelBuilder.configure(args.sslServerConfigs);
server = new NioEchoServer(ListenerName.forSecurityProtocol(SecurityProtocol.SSL), SecurityProtocol.SSL, new TestSecurityConfig(args.sslServerConfigs), "localhost", serverChannelBuilder, null, time);
server.start();
createSelector(args.sslClientConfigs);
InetSocketAddress addr = new InetSocketAddress("localhost", server.port());
selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE);
NetworkTestUtils.checkClientConnection(selector, node, 100, 10);
}
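The override above is the only Kafka-side change needed to switch endpoint identification on; the rest is plain JDK API. A minimal, self-contained sketch of that switch (the SSLContext, peer host, and port below are illustrative assumptions, not taken from the test):
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;

SSLEngine serverEngineWithEndpointValidation(SSLContext context, String peerHost, int peerPort) {
    // Create an engine for the given peer; in server mode the peer is the connecting client.
    SSLEngine engine = context.createSSLEngine(peerHost, peerPort);
    engine.setUseClientMode(false);
    SSLParameters params = engine.getSSLParameters();
    // "HTTPS" makes the engine validate the peer certificate's hostname during the handshake;
    // leaving it unset (the usual server-side behaviour) skips that check, which is what the test relies on.
    params.setEndpointIdentificationAlgorithm("HTTPS");
    engine.setSSLParameters(params);
    return engine;
}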
Use of org.apache.kafka.common.security.TestSecurityConfig in project kafka by apache.
The class ChannelBuildersTest, method testChannelBuilderConfigs.
@Test
public void testChannelBuilderConfigs() {
Properties props = new Properties();
props.put("listener.name.listener1.gssapi.sasl.kerberos.service.name", "testkafka");
props.put("listener.name.listener1.sasl.kerberos.service.name", "testkafkaglobal");
props.put("plain.sasl.server.callback.handler.class", "callback");
props.put("listener.name.listener1.gssapi.config1.key", "custom.config1");
props.put("custom.config2.key", "custom.config2");
TestSecurityConfig securityConfig = new TestSecurityConfig(props);
// test configs with listener prefix
Map<String, Object> configs = ChannelBuilders.channelBuilderConfigs(securityConfig, new ListenerName("listener1"));
assertNull(configs.get("listener.name.listener1.gssapi.sasl.kerberos.service.name"));
assertFalse(securityConfig.unused().contains("listener.name.listener1.gssapi.sasl.kerberos.service.name"));
assertEquals(configs.get("gssapi.sasl.kerberos.service.name"), "testkafka");
assertFalse(securityConfig.unused().contains("gssapi.sasl.kerberos.service.name"));
assertEquals(configs.get("sasl.kerberos.service.name"), "testkafkaglobal");
assertFalse(securityConfig.unused().contains("sasl.kerberos.service.name"));
assertNull(configs.get("listener.name.listener1.sasl.kerberos.service.name"));
assertFalse(securityConfig.unused().contains("listener.name.listener1.sasl.kerberos.service.name"));
assertNull(configs.get("plain.sasl.server.callback.handler.class"));
assertFalse(securityConfig.unused().contains("plain.sasl.server.callback.handler.class"));
assertEquals(configs.get("listener.name.listener1.gssapi.config1.key"), "custom.config1");
assertFalse(securityConfig.unused().contains("listener.name.listener1.gssapi.config1.key"));
assertEquals(configs.get("custom.config2.key"), "custom.config2");
assertFalse(securityConfig.unused().contains("custom.config2.key"));
// test configs without listener prefix
securityConfig = new TestSecurityConfig(props);
configs = ChannelBuilders.channelBuilderConfigs(securityConfig, null);
assertEquals(configs.get("listener.name.listener1.gssapi.sasl.kerberos.service.name"), "testkafka");
assertFalse(securityConfig.unused().contains("listener.name.listener1.gssapi.sasl.kerberos.service.name"));
assertNull(configs.get("gssapi.sasl.kerberos.service.name"));
assertFalse(securityConfig.unused().contains("gssapi.sasl.kerberos.service.name"));
assertEquals(configs.get("listener.name.listener1.sasl.kerberos.service.name"), "testkafkaglobal");
assertFalse(securityConfig.unused().contains("listener.name.listener1.sasl.kerberos.service.name"));
assertNull(configs.get("sasl.kerberos.service.name"));
assertFalse(securityConfig.unused().contains("sasl.kerberos.service.name"));
assertEquals(configs.get("plain.sasl.server.callback.handler.class"), "callback");
assertFalse(securityConfig.unused().contains("plain.sasl.server.callback.handler.class"));
assertEquals(configs.get("listener.name.listener1.gssapi.config1.key"), "custom.config1");
assertFalse(securityConfig.unused().contains("listener.name.listener1.gssapi.config1.key"));
assertEquals(configs.get("custom.config2.key"), "custom.config2");
assertFalse(securityConfig.unused().contains("custom.config2.key"));
}
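The assertions above pin down the listener-prefix behaviour: when a listener name is supplied, prefixed keys are resolved to their non-prefixed form and override the global values; when it is not, keys pass through untouched. A simplified sketch of just the prefix-resolution step (illustrative only; the real ChannelBuilders.channelBuilderConfigs additionally distinguishes known from unknown config keys, as the custom.config assertions show):
import java.util.HashMap;
import java.util.Map;

Map<String, Object> withListenerOverrides(Map<String, Object> configs, String listenerName) {
    String prefix = "listener.name." + listenerName + ".";
    Map<String, Object> result = new HashMap<>(configs);
    for (Map.Entry<String, Object> entry : configs.entrySet()) {
        if (entry.getKey().startsWith(prefix)) {
            // The listener-prefixed key wins over the corresponding non-prefixed key.
            result.remove(entry.getKey());
            result.put(entry.getKey().substring(prefix.length()), entry.getValue());
        }
    }
    return result;
}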
Use of org.apache.kafka.common.security.TestSecurityConfig in project kafka by apache.
The class SaslAuthenticatorTest, method startServerApiVersionsUnsupportedByClient.
private NioEchoServer startServerApiVersionsUnsupportedByClient(final SecurityProtocol securityProtocol, String saslMechanism) throws Exception {
final ListenerName listenerName = ListenerName.forSecurityProtocol(securityProtocol);
final Map<String, ?> configs = Collections.emptyMap();
final JaasContext jaasContext = JaasContext.loadServerContext(listenerName, saslMechanism, configs);
final Map<String, JaasContext> jaasContexts = Collections.singletonMap(saslMechanism, jaasContext);
boolean isScram = ScramMechanism.isScram(saslMechanism);
if (isScram)
ScramCredentialUtils.createCache(credentialCache, Arrays.asList(saslMechanism));
Supplier<ApiVersionsResponse> apiVersionSupplier = () -> {
ApiVersionCollection versionCollection = new ApiVersionCollection(2);
versionCollection.add(new ApiVersion().setApiKey(ApiKeys.SASL_HANDSHAKE.id).setMinVersion((short) 0).setMaxVersion((short) 100));
versionCollection.add(new ApiVersion().setApiKey(ApiKeys.SASL_AUTHENTICATE.id).setMinVersion((short) 0).setMaxVersion((short) 100));
return new ApiVersionsResponse(new ApiVersionsResponseData().setApiKeys(versionCollection));
};
SaslChannelBuilder serverChannelBuilder = new SaslChannelBuilder(Mode.SERVER, jaasContexts, securityProtocol, listenerName, false, saslMechanism, true, credentialCache, null, null, time, new LogContext(), apiVersionSupplier);
serverChannelBuilder.configure(saslServerConfigs);
server = new NioEchoServer(listenerName, securityProtocol, new TestSecurityConfig(saslServerConfigs), "localhost", serverChannelBuilder, credentialCache, time);
server.start();
return server;
}
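The supplier above advertises SaslHandshake and SaslAuthenticate with version ranges far beyond anything a real client implements. In general, a client reconciles such advertised ranges with its own by intersecting the two version intervals; a generic sketch of that reconciliation (illustrative only, not Kafka's actual NodeApiVersions implementation):
// Pick the highest request version both sides support; ranges that do not overlap are unusable.
short latestUsableVersion(short clientMin, short clientMax, short brokerMin, short brokerMax) {
    short candidate = (short) Math.min(clientMax, brokerMax);
    if (candidate < Math.max(clientMin, brokerMin))
        throw new IllegalStateException("No usable version: client [" + clientMin + ", " + clientMax
            + "], broker [" + brokerMin + ", " + brokerMax + "]");
    return candidate;
}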
Use of org.apache.kafka.common.security.TestSecurityConfig in project kafka by apache.
The class SaslAuthenticatorTest, method startServerWithoutSaslAuthenticateHeader.
private NioEchoServer startServerWithoutSaslAuthenticateHeader(final SecurityProtocol securityProtocol, String saslMechanism) throws Exception {
final ListenerName listenerName = ListenerName.forSecurityProtocol(securityProtocol);
final Map<String, ?> configs = Collections.emptyMap();
final JaasContext jaasContext = JaasContext.loadServerContext(listenerName, saslMechanism, configs);
final Map<String, JaasContext> jaasContexts = Collections.singletonMap(saslMechanism, jaasContext);
boolean isScram = ScramMechanism.isScram(saslMechanism);
if (isScram)
ScramCredentialUtils.createCache(credentialCache, Arrays.asList(saslMechanism));
Supplier<ApiVersionsResponse> apiVersionSupplier = () -> {
ApiVersionsResponse defaultApiVersionResponse = ApiVersionsResponse.defaultApiVersionsResponse(ApiMessageType.ListenerType.ZK_BROKER);
ApiVersionCollection apiVersions = new ApiVersionCollection();
for (ApiVersion apiVersion : defaultApiVersionResponse.data().apiKeys()) {
if (apiVersion.apiKey() != ApiKeys.SASL_AUTHENTICATE.id) {
// ApiVersion can NOT be reused in second ApiVersionCollection
// due to the internal pointers it contains.
apiVersions.add(apiVersion.duplicate());
}
}
ApiVersionsResponseData data = new ApiVersionsResponseData().setErrorCode(Errors.NONE.code()).setThrottleTimeMs(0).setApiKeys(apiVersions);
return new ApiVersionsResponse(data);
};
SaslChannelBuilder serverChannelBuilder = new SaslChannelBuilder(Mode.SERVER, jaasContexts, securityProtocol, listenerName, false, saslMechanism, true, credentialCache, null, null, time, new LogContext(), apiVersionSupplier) {
@Override
protected SaslServerAuthenticator buildServerAuthenticator(Map<String, ?> configs, Map<String, AuthenticateCallbackHandler> callbackHandlers, String id, TransportLayer transportLayer, Map<String, Subject> subjects, Map<String, Long> connectionsMaxReauthMsByMechanism, ChannelMetadataRegistry metadataRegistry) {
return new SaslServerAuthenticator(configs, callbackHandlers, id, subjects, null, listenerName, securityProtocol, transportLayer, connectionsMaxReauthMsByMechanism, metadataRegistry, time, apiVersionSupplier) {
@Override
protected void enableKafkaSaslAuthenticateHeaders(boolean flag) {
// Don't enable Kafka SASL_AUTHENTICATE headers
}
};
}
};
serverChannelBuilder.configure(saslServerConfigs);
server = new NioEchoServer(listenerName, securityProtocol, new TestSecurityConfig(saslServerConfigs), "localhost", serverChannelBuilder, credentialCache, time);
server.start();
return server;
}
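Stripping SASL_AUTHENTICATE from the advertised APIs simulates a broker that does not support the SaslAuthenticate request, which is why the anonymous authenticator above also leaves the SASL_AUTHENTICATE headers disabled. A hypothetical client-side capability check, using only the iteration pattern already shown above (the method name is an assumption for illustration):
boolean brokerSupportsSaslAuthenticate(ApiVersionsResponse response) {
    for (ApiVersion apiVersion : response.data().apiKeys()) {
        if (apiVersion.apiKey() == ApiKeys.SASL_AUTHENTICATE.id)
            return true;
    }
    return false;
}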
Use of org.apache.kafka.common.security.TestSecurityConfig in project kafka by apache.
The class SaslAuthenticatorTest, method testCannotReauthenticateWithDifferentMechanism.
/**
* Re-authentication must fail if the mechanism changes
*/
@Test
public void testCannotReauthenticateWithDifferentMechanism() throws Exception {
String node = "0";
SecurityProtocol securityProtocol = SecurityProtocol.SASL_SSL;
configureMechanisms("DIGEST-MD5", Arrays.asList("DIGEST-MD5", "PLAIN"));
configureDigestMd5ServerCallback(securityProtocol);
server = createEchoServer(securityProtocol);
String saslMechanism = (String) saslClientConfigs.get(SaslConfigs.SASL_MECHANISM);
Map<String, ?> configs = new TestSecurityConfig(saslClientConfigs).values();
this.channelBuilder = new AlternateSaslChannelBuilder(Mode.CLIENT, Collections.singletonMap(saslMechanism, JaasContext.loadClientContext(configs)), securityProtocol, null, false, saslMechanism, true, credentialCache, null, time);
this.channelBuilder.configure(configs);
// initial authentication must succeed
this.selector = NetworkTestUtils.createSelector(channelBuilder, time);
InetSocketAddress addr = new InetSocketAddress("localhost", server.port());
selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE);
checkClientConnection(node);
// ensure metrics are as expected before trying to re-authenticate
server.verifyAuthenticationMetrics(1, 0);
server.verifyReauthenticationMetrics(0, 0);
/*
* Now re-authenticate with a different mechanism and ensure it fails. We have
* to sleep long enough so that the next write will trigger a re-authentication.
*/
delay((long) (CONNECTIONS_MAX_REAUTH_MS_VALUE * 1.1));
assertThrows(AssertionFailedError.class, () -> checkClientConnection(node));
server.verifyAuthenticationMetrics(1, 0);
server.verifyReauthenticationMetrics(0, 1);
}
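Re-authentication in this test is driven by the broker-side connections.max.reauth.ms setting; once that interval elapses, the next client write triggers a re-authentication attempt, which is why the test sleeps for slightly more than CONNECTIONS_MAX_REAUTH_MS_VALUE before reconnecting. A hedged configuration sketch (the helper name and value below are illustrative assumptions):
import java.util.HashMap;
import java.util.Map;

Map<String, Object> reauthEnabledServerConfigs(long maxReauthMs) {
    Map<String, Object> configs = new HashMap<>();
    // Broker-side setting: sessions older than this must re-authenticate on their next request.
    configs.put("connections.max.reauth.ms", maxReauthMs);
    return configs;
}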