Use of org.apache.kafka.common.network.DefaultChannelMetadataRegistry in the apache/kafka project: class SaslServerAuthenticatorTest, method testOversizeRequest.
@Test
public void testOversizeRequest() throws IOException {
    TransportLayer transportLayer = mock(TransportLayer.class);
    Map<String, ?> configs = Collections.singletonMap(BrokerSecurityConfigs.SASL_ENABLED_MECHANISMS_CONFIG,
        Collections.singletonList(SCRAM_SHA_256.mechanismName()));
    SaslServerAuthenticator authenticator = setupAuthenticator(configs, transportLayer,
        SCRAM_SHA_256.mechanismName(), new DefaultChannelMetadataRegistry());

    // The single read supplies a 4-byte size field larger than the maximum allowed receive size,
    // so authenticate() must reject the connection.
    when(transportLayer.read(any(ByteBuffer.class))).then(invocation -> {
        invocation.<ByteBuffer>getArgument(0).putInt(SaslServerAuthenticator.MAX_RECEIVE_SIZE + 1);
        return 4;
    });

    assertThrows(InvalidReceiveException.class, authenticator::authenticate);
    verify(transportLayer).read(any(ByteBuffer.class));
}
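Outside the mocked authenticator, DefaultChannelMetadataRegistry is simply a per-channel holder for client and cipher metadata. A minimal sketch of exercising it directly, assuming the registry exposes a registerClientInformation(ClientInformation) setter alongside the clientInformation() accessor used in testApiVersionsRequest below; the test name and expected values are illustrative:

// Hypothetical standalone check of the registry; registerClientInformation is
// assumed to be the setter counterpart of clientInformation().
@Test
public void testRegistryHoldsClientInformation() {
    ChannelMetadataRegistry registry = new DefaultChannelMetadataRegistry();
    registry.registerClientInformation(new ClientInformation("apache-kafka-java", "3.0.0"));
    assertEquals("apache-kafka-java", registry.clientInformation().softwareName());
    assertEquals("3.0.0", registry.clientInformation().softwareVersion());
}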
Use of org.apache.kafka.common.network.DefaultChannelMetadataRegistry in the apache/kafka project: class SaslServerAuthenticatorTest, method testApiVersionsRequest.
private void testApiVersionsRequest(short version, String expectedSoftwareName,
                                    String expectedSoftwareVersion) throws IOException {
    TransportLayer transportLayer = mock(TransportLayer.class, Answers.RETURNS_DEEP_STUBS);
    Map<String, ?> configs = Collections.singletonMap(BrokerSecurityConfigs.SASL_ENABLED_MECHANISMS_CONFIG,
        Collections.singletonList(SCRAM_SHA_256.mechanismName()));
    ChannelMetadataRegistry metadataRegistry = new DefaultChannelMetadataRegistry();
    SaslServerAuthenticator authenticator = setupAuthenticator(configs, transportLayer,
        SCRAM_SHA_256.mechanismName(), metadataRegistry);

    RequestHeader header = new RequestHeader(ApiKeys.API_VERSIONS, version, "clientId", 0);
    ByteBuffer headerBuffer = RequestTestUtils.serializeRequestHeader(header);
    ApiVersionsRequest request = new ApiVersionsRequest.Builder().build(version);
    ByteBuffer requestBuffer = request.serialize();
    requestBuffer.rewind();

    when(transportLayer.socketChannel().socket().getInetAddress()).thenReturn(InetAddress.getLoopbackAddress());

    // The first read returns the 4-byte size prefix, the second returns the serialized header and request body.
    when(transportLayer.read(any(ByteBuffer.class))).then(invocation -> {
        invocation.<ByteBuffer>getArgument(0).putInt(headerBuffer.remaining() + requestBuffer.remaining());
        return 4;
    }).then(invocation -> {
        invocation.<ByteBuffer>getArgument(0).put(headerBuffer.duplicate()).put(requestBuffer.duplicate());
        return headerBuffer.remaining() + requestBuffer.remaining();
    });

    authenticator.authenticate();

    assertEquals(expectedSoftwareName, metadataRegistry.clientInformation().softwareName());
    assertEquals(expectedSoftwareVersion, metadataRegistry.clientInformation().softwareVersion());
    verify(transportLayer, times(2)).read(any(ByteBuffer.class));
}
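Since this helper is private and parameterized on the request version, concrete @Test methods drive it with the software name and version they expect the authenticator to have registered. A hedged example of such a caller; the expected values and the use of AppInfoParser.getVersion() are assumptions for illustration, not values taken from the test class:

// Illustrative caller; assumes a recent ApiVersionsRequest version advertises
// "apache-kafka-java" and the client library's own version string.
@Test
public void testLatestApiVersionsRequest() throws IOException {
    testApiVersionsRequest(ApiKeys.API_VERSIONS.latestVersion(),
        "apache-kafka-java", AppInfoParser.getVersion());
}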
Use of org.apache.kafka.common.network.DefaultChannelMetadataRegistry in the apache/kafka project: class SaslServerAuthenticatorTest, method testUnexpectedRequestType.
@Test
public void testUnexpectedRequestType() throws IOException {
    TransportLayer transportLayer = mock(TransportLayer.class);
    Map<String, ?> configs = Collections.singletonMap(BrokerSecurityConfigs.SASL_ENABLED_MECHANISMS_CONFIG,
        Collections.singletonList(SCRAM_SHA_256.mechanismName()));
    SaslServerAuthenticator authenticator = setupAuthenticator(configs, transportLayer,
        SCRAM_SHA_256.mechanismName(), new DefaultChannelMetadataRegistry());

    RequestHeader header = new RequestHeader(ApiKeys.METADATA, (short) 0, "clientId", 13243);
    ByteBuffer headerBuffer = RequestTestUtils.serializeRequestHeader(header);

    when(transportLayer.read(any(ByteBuffer.class))).then(invocation -> {
        invocation.<ByteBuffer>getArgument(0).putInt(headerBuffer.remaining());
        return 4;
    }).then(invocation -> {
        // Serialize only the request header; the authenticator should not parse beyond this.
        invocation.<ByteBuffer>getArgument(0).put(headerBuffer.duplicate());
        return headerBuffer.remaining();
    });

    try {
        authenticator.authenticate();
        fail("Expected authenticate() to raise an exception");
    } catch (IllegalSaslStateException e) {
        // expected exception
    }

    verify(transportLayer, times(2)).read(any(ByteBuffer.class));
}
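The try/catch above predates the assertThrows style used in testOversizeRequest; the same expectation could be written in one line (a sketch, with the rest of the method unchanged):

// Equivalent assertion using JUnit's assertThrows, as in testOversizeRequest.
assertThrows(IllegalSaslStateException.class, authenticator::authenticate);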