Use of org.apache.kafka.common.requests.ApiVersionsResponse in project kafka by apache: class SaslAuthenticatorTest, method startServerWithoutSaslAuthenticateHeader.
private NioEchoServer startServerWithoutSaslAuthenticateHeader(final SecurityProtocol securityProtocol, String saslMechanism) throws Exception {
    final ListenerName listenerName = ListenerName.forSecurityProtocol(securityProtocol);
    final Map<String, ?> configs = Collections.emptyMap();
    final JaasContext jaasContext = JaasContext.loadServerContext(listenerName, saslMechanism, configs);
    final Map<String, JaasContext> jaasContexts = Collections.singletonMap(saslMechanism, jaasContext);
    boolean isScram = ScramMechanism.isScram(saslMechanism);
    if (isScram)
        ScramCredentialUtils.createCache(credentialCache, Arrays.asList(saslMechanism));
    Supplier<ApiVersionsResponse> apiVersionSupplier = () -> {
        ApiVersionsResponse defaultApiVersionResponse =
            ApiVersionsResponse.defaultApiVersionsResponse(ApiMessageType.ListenerType.ZK_BROKER);
        ApiVersionCollection apiVersions = new ApiVersionCollection();
        for (ApiVersion apiVersion : defaultApiVersionResponse.data().apiKeys()) {
            if (apiVersion.apiKey() != ApiKeys.SASL_AUTHENTICATE.id) {
                // ApiVersion can NOT be reused in a second ApiVersionCollection
                // due to the internal pointers it contains.
                apiVersions.add(apiVersion.duplicate());
            }
        }
        ApiVersionsResponseData data = new ApiVersionsResponseData()
            .setErrorCode(Errors.NONE.code())
            .setThrottleTimeMs(0)
            .setApiKeys(apiVersions);
        return new ApiVersionsResponse(data);
    };
    SaslChannelBuilder serverChannelBuilder = new SaslChannelBuilder(Mode.SERVER, jaasContexts, securityProtocol,
        listenerName, false, saslMechanism, true, credentialCache, null, null, time, new LogContext(),
        apiVersionSupplier) {

        @Override
        protected SaslServerAuthenticator buildServerAuthenticator(Map<String, ?> configs,
                Map<String, AuthenticateCallbackHandler> callbackHandlers, String id, TransportLayer transportLayer,
                Map<String, Subject> subjects, Map<String, Long> connectionsMaxReauthMsByMechanism,
                ChannelMetadataRegistry metadataRegistry) {
            return new SaslServerAuthenticator(configs, callbackHandlers, id, subjects, null, listenerName,
                securityProtocol, transportLayer, connectionsMaxReauthMsByMechanism, metadataRegistry, time,
                apiVersionSupplier) {

                @Override
                protected void enableKafkaSaslAuthenticateHeaders(boolean flag) {
                    // Don't enable Kafka SASL_AUTHENTICATE headers
                }
            };
        }
    };
    serverChannelBuilder.configure(saslServerConfigs);
    server = new NioEchoServer(listenerName, securityProtocol, new TestSecurityConfig(saslServerConfigs),
        "localhost", serverChannelBuilder, credentialCache, time);
    server.start();
    return server;
}
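
A quick way to see the effect of the supplier above: a minimal, standalone sketch (not part of the Kafka test suite; the class name and main method are illustrative) that rebuilds the filtered response the same way and confirms SASL_AUTHENTICATE is no longer advertised.

import org.apache.kafka.common.message.ApiMessageType;
import org.apache.kafka.common.message.ApiVersionsResponseData;
import org.apache.kafka.common.message.ApiVersionsResponseData.ApiVersion;
import org.apache.kafka.common.message.ApiVersionsResponseData.ApiVersionCollection;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.requests.ApiVersionsResponse;

public class FilteredApiVersionsSketch {
    public static void main(String[] args) {
        ApiVersionsResponse defaults = ApiVersionsResponse.defaultApiVersionsResponse(ApiMessageType.ListenerType.ZK_BROKER);
        ApiVersionCollection filtered = new ApiVersionCollection();
        for (ApiVersion apiVersion : defaults.data().apiKeys()) {
            if (apiVersion.apiKey() != ApiKeys.SASL_AUTHENTICATE.id)
                filtered.add(apiVersion.duplicate()); // duplicate(): an entry cannot belong to two collections
        }
        ApiVersionsResponse response = new ApiVersionsResponse(new ApiVersionsResponseData()
            .setErrorCode(Errors.NONE.code())
            .setThrottleTimeMs(0)
            .setApiKeys(filtered));
        boolean advertised = false;
        for (ApiVersion apiVersion : response.data().apiKeys()) {
            if (apiVersion.apiKey() == ApiKeys.SASL_AUTHENTICATE.id)
                advertised = true;
        }
        System.out.println("SASL_AUTHENTICATE advertised: " + advertised); // expected: false
    }
}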

Use of org.apache.kafka.common.requests.ApiVersionsResponse in project kafka by apache: class SaslAuthenticatorTest, method createClientConnectionWithoutSaslAuthenticateHeader.
private void createClientConnectionWithoutSaslAuthenticateHeader(final SecurityProtocol securityProtocol, final String saslMechanism, String node) throws Exception {
    final ListenerName listenerName = ListenerName.forSecurityProtocol(securityProtocol);
    final Map<String, ?> configs = Collections.emptyMap();
    final JaasContext jaasContext = JaasContext.loadClientContext(configs);
    final Map<String, JaasContext> jaasContexts = Collections.singletonMap(saslMechanism, jaasContext);
    SaslChannelBuilder clientChannelBuilder = new SaslChannelBuilder(Mode.CLIENT, jaasContexts, securityProtocol,
        listenerName, false, saslMechanism, true, null, null, null, time, new LogContext(), null) {

        @Override
        protected SaslClientAuthenticator buildClientAuthenticator(Map<String, ?> configs,
                AuthenticateCallbackHandler callbackHandler, String id, String serverHost, String servicePrincipal,
                TransportLayer transportLayer, Subject subject) {
            return new SaslClientAuthenticator(configs, callbackHandler, id, subject, servicePrincipal, serverHost,
                saslMechanism, true, transportLayer, time, new LogContext()) {

                @Override
                protected SaslHandshakeRequest createSaslHandshakeRequest(short version) {
                    return buildSaslHandshakeRequest(saslMechanism, (short) 0);
                }

                @Override
                protected void setSaslAuthenticateAndHandshakeVersions(ApiVersionsResponse apiVersionsResponse) {
                    // Don't set version so that headers are disabled
                }
            };
        }
    };
    clientChannelBuilder.configure(saslClientConfigs);
    this.selector = NetworkTestUtils.createSelector(clientChannelBuilder, time);
    InetSocketAddress addr = new InetSocketAddress("localhost", server.port());
    selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE);
}
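
The two helpers are designed to be used together in interop-style tests: a server that never advertises SASL_AUTHENTICATE paired with a client that pins the handshake to version 0. The following is a hypothetical test method sketched against the SaslAuthenticatorTest fixture (configureMechanisms, selector and NetworkTestUtils come from that fixture); the test name and the message sizes passed to checkClientConnection are assumptions, not the actual Kafka test.

@Test
public void legacyClientAndServerWithoutSaslAuthenticateHeader() throws Exception {
    SecurityProtocol securityProtocol = SecurityProtocol.SASL_PLAINTEXT;
    configureMechanisms("PLAIN", Arrays.asList("PLAIN"));
    // Server side: the ApiVersions response omits SASL_AUTHENTICATE, so headers stay disabled.
    startServerWithoutSaslAuthenticateHeader(securityProtocol, "PLAIN");
    // Client side: handshake pinned to v0 and SASL_AUTHENTICATE headers never enabled.
    String node = "0";
    createClientConnectionWithoutSaslAuthenticateHeader(securityProtocol, "PLAIN", node);
    // Authentication should still succeed over the legacy, header-less exchange.
    NetworkTestUtils.checkClientConnection(selector, node, 100, 10);
}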

Use of org.apache.kafka.common.requests.ApiVersionsResponse in project kafka by apache: class SaslAuthenticatorTest, method sendVersionRequestReceiveResponse.
private ApiVersionsResponse sendVersionRequestReceiveResponse(String node) throws Exception {
    ApiVersionsRequest handshakeRequest = createApiVersionsRequestV0();
    ApiVersionsResponse response =
        (ApiVersionsResponse) sendKafkaRequestReceiveResponse(node, ApiKeys.API_VERSIONS, handshakeRequest);
    assertEquals(Errors.NONE.code(), response.data().errorCode());
    return response;
}
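
A caller typically uses the returned response to choose protocol versions before the SASL handshake. Below is a minimal, hypothetical usage sketch against the same fixture (the node id "0" and the println are illustrative); it relies only on the generated ApiVersionsResponseData accessors (apiKey, minVersion, maxVersion).

ApiVersionsResponse versionsResponse = sendVersionRequestReceiveResponse("0");
// Scan the advertised ranges, e.g. to decide which SaslHandshakeRequest version to send.
for (ApiVersionsResponseData.ApiVersion apiVersion : versionsResponse.data().apiKeys()) {
    if (apiVersion.apiKey() == ApiKeys.SASL_HANDSHAKE.id)
        System.out.println("SASL_HANDSHAKE versions " + apiVersion.minVersion() + " to " + apiVersion.maxVersion());
}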

Use of org.apache.kafka.common.requests.ApiVersionsResponse in project kafka by apache: class SaslAuthenticatorTest, method testValidApiVersionsRequest.
/**
 * Tests that a valid ApiVersionsRequest is handled correctly by the server and
 * returns a NONE error code.
 */
@Test
public void testValidApiVersionsRequest() throws Exception {
    short handshakeVersion = ApiKeys.SASL_HANDSHAKE.latestVersion();
    SecurityProtocol securityProtocol = SecurityProtocol.SASL_PLAINTEXT;
    configureMechanisms("PLAIN", Arrays.asList("PLAIN"));
    server = createEchoServer(securityProtocol);

    // Send ApiVersionsRequest with valid version and validate error response.
    String node = "1";
    short version = ApiKeys.API_VERSIONS.latestVersion();
    createClientConnection(SecurityProtocol.PLAINTEXT, node);
    RequestHeader header = new RequestHeader(ApiKeys.API_VERSIONS, version, "someclient", 1);
    ApiVersionsRequest request = new ApiVersionsRequest.Builder().build(version);
    selector.send(new NetworkSend(node, request.toSend(header)));
    ByteBuffer responseBuffer = waitForResponse();
    ResponseHeader.parse(responseBuffer, ApiKeys.API_VERSIONS.responseHeaderVersion(version));
    ApiVersionsResponse response = ApiVersionsResponse.parse(responseBuffer, version);
    assertEquals(Errors.NONE.code(), response.data().errorCode());

    // Test that client can authenticate successfully
    sendHandshakeRequestReceiveResponse(node, handshakeVersion);
    authenticateUsingSaslPlainAndCheckConnection(node, handshakeVersion > 0);
}
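
One detail worth calling out in the parse step above: the response header must be consumed from the buffer before ApiVersionsResponse.parse reads the body. Here is a standalone sketch of that serialize/parse round trip, using only calls that already appear in these tests (the correlation id 1 is arbitrary):

short version = ApiKeys.API_VERSIONS.latestVersion();
ApiVersionsResponse original = ApiVersionsResponse.defaultApiVersionsResponse(ApiMessageType.ListenerType.ZK_BROKER);
// Serialize the response together with a response header, as it would appear on the wire.
ByteBuffer buffer = RequestTestUtils.serializeResponseWithHeader(original, version, 1);
// Consume the header first; parse expects the buffer positioned at the start of the body.
ResponseHeader.parse(buffer, ApiKeys.API_VERSIONS.responseHeaderVersion(version));
ApiVersionsResponse parsed = ApiVersionsResponse.parse(buffer, version);
assertEquals(Errors.NONE.code(), parsed.data().errorCode());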

Use of org.apache.kafka.common.requests.ApiVersionsResponse in project kafka by apache: class SenderTest, method testQuotaMetrics.
/*
 * Send multiple requests. Verify that the client-side quota metrics have the right values.
 */
@SuppressWarnings("deprecation")
@Test
public void testQuotaMetrics() {
    MockSelector selector = new MockSelector(time);
    Sensor throttleTimeSensor = Sender.throttleTimeSensor(this.senderMetricsRegistry);
    Cluster cluster = TestUtils.singletonCluster("test", 1);
    Node node = cluster.nodes().get(0);
    NetworkClient client = new NetworkClient(selector, metadata, "mock", Integer.MAX_VALUE, 1000, 1000, 64 * 1024,
        64 * 1024, 1000, 10 * 1000, 127 * 1000, time, true, new ApiVersions(), throttleTimeSensor, logContext);

    ApiVersionsResponse apiVersionsResponse =
        ApiVersionsResponse.defaultApiVersionsResponse(400, ApiMessageType.ListenerType.ZK_BROKER);
    ByteBuffer buffer = RequestTestUtils.serializeResponseWithHeader(apiVersionsResponse,
        ApiKeys.API_VERSIONS.latestVersion(), 0);
    selector.delayedReceive(new DelayedReceive(node.idString(), new NetworkReceive(node.idString(), buffer)));
    while (!client.ready(node, time.milliseconds())) {
        client.poll(1, time.milliseconds());
        // If a throttled response is received, advance the time to ensure progress.
        time.sleep(client.throttleDelayMs(node, time.milliseconds()));
    }
    selector.clear();

    for (int i = 1; i <= 3; i++) {
        int throttleTimeMs = 100 * i;
        ProduceRequest.Builder builder = ProduceRequest.forCurrentMagic(new ProduceRequestData()
            .setTopicData(new ProduceRequestData.TopicProduceDataCollection())
            .setAcks((short) 1)
            .setTimeoutMs(1000));
        ClientRequest request = client.newClientRequest(node.idString(), builder, time.milliseconds(), true);
        client.send(request, time.milliseconds());
        client.poll(1, time.milliseconds());
        ProduceResponse response = produceResponse(tp0, i, Errors.NONE, throttleTimeMs);
        buffer = RequestTestUtils.serializeResponseWithHeader(response, ApiKeys.PRODUCE.latestVersion(),
            request.correlationId());
        selector.completeReceive(new NetworkReceive(node.idString(), buffer));
        client.poll(1, time.milliseconds());
        // If a throttled response is received, advance the time to ensure progress.
        time.sleep(client.throttleDelayMs(node, time.milliseconds()));
        selector.clear();
    }

    Map<MetricName, KafkaMetric> allMetrics = metrics.metrics();
    KafkaMetric avgMetric = allMetrics.get(this.senderMetricsRegistry.produceThrottleTimeAvg);
    KafkaMetric maxMetric = allMetrics.get(this.senderMetricsRegistry.produceThrottleTimeMax);
    // Throttle times are ApiVersions=400, Produce=(100, 200, 300)
    assertEquals(250, (Double) avgMetric.metricValue(), EPS);
    assertEquals(400, (Double) maxMetric.metricValue(), EPS);
    client.close();
}
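
The asserted values follow directly from the four throttle samples the client records: (400 + 100 + 200 + 300) / 4 = 250 ms on average, with a maximum of 400 ms. The standalone sketch below reproduces that arithmetic with the client metrics API directly; the sensor and metric names are illustrative, not the ones Sender actually registers.

import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Max;

public class ThrottleTimeArithmeticSketch {
    public static void main(String[] args) {
        try (Metrics metrics = new Metrics()) {
            Sensor throttleTime = metrics.sensor("throttle-time");
            MetricName avgName = metrics.metricName("throttle-time-avg", "sketch");
            MetricName maxName = metrics.metricName("throttle-time-max", "sketch");
            throttleTime.add(avgName, new Avg());
            throttleTime.add(maxName, new Max());
            // The four samples recorded by the test: ApiVersions=400, Produce=100, 200, 300.
            for (int sample : new int[] {400, 100, 200, 300})
                throttleTime.record(sample);
            System.out.println("avg = " + metrics.metric(avgName).metricValue()); // 250.0
            System.out.println("max = " + metrics.metric(maxName).metricValue()); // 400.0
        }
    }
}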