Usage of org.apache.kafka.common.protocol.ApiKeys in the Apache Kafka project.
Example from class SaslClientAuthenticator, method handleKafkaResponse:
/**
 * Processes a response received from the broker during SASL authentication.
 * The bytes are parsed as a Kafka response matching the given request header;
 * only a SaslHandshakeResponse is expected at this stage.
 *
 * @param requestHeader header of the request this response answers
 * @param responseBytes raw response payload from the broker
 * @throws AuthenticationException if the bytes cannot be parsed as a Kafka
 *         response (e.g. the server sent a raw GSSAPI token instead)
 * @throws IllegalStateException if the response is for an unexpected API key
 */
private void handleKafkaResponse(RequestHeader requestHeader, byte[] responseBytes) {
    AbstractResponse parsedResponse;
    ApiKeys requestApiKey;
    try {
        parsedResponse = NetworkClient.parseResponse(ByteBuffer.wrap(responseBytes), requestHeader);
        requestApiKey = ApiKeys.forId(requestHeader.apiKey());
    } catch (SchemaException | IllegalArgumentException e) {
        // Unparseable bytes here typically mean the server only speaks GSSAPI.
        LOG.debug("Invalid SASL mechanism response, server may be expecting only GSSAPI tokens");
        throw new AuthenticationException("Invalid SASL mechanism response", e);
    }
    // Only the handshake response is valid during this phase.
    if (requestApiKey == ApiKeys.SASL_HANDSHAKE)
        handleSaslHandshakeResponse((SaslHandshakeResponse) parsedResponse);
    else
        throw new IllegalStateException("Unexpected API key during handshake: " + requestApiKey);
}
Usage of org.apache.kafka.common.protocol.ApiKeys in the Apache Kafka project.
Example from class SaslServerAuthenticator, method handleKafkaRequest:
/**
 * Attempts to interpret the received bytes as a Kafka request during the SASL
 * handshake phase.
 *
 * Handles ApiVersions and SaslHandshake requests. If the bytes do not parse as
 * a Kafka request and the authenticator is still in the
 * GSSAPI_OR_HANDSHAKE_REQUEST state, the packet is assumed to be a raw GSSAPI
 * token from an old (0.9.0.x-era) client and GSSAPI is selected as the
 * mechanism, provided the server has it enabled.
 *
 * @param requestBytes raw bytes received from the client
 * @return true if the bytes were parsed as a Kafka request, false if they are
 *         being treated as a GSSAPI token
 * @throws AuthenticationException if the packet is invalid and GSSAPI fallback
 *         is not possible
 * @throws IOException propagated from response sending / SaslServer creation
 */
private boolean handleKafkaRequest(byte[] requestBytes) throws IOException, AuthenticationException {
boolean isKafkaRequest = false;
// Mechanism chosen by this packet, if any; triggers SaslServer creation below.
String clientMechanism = null;
try {
ByteBuffer requestBuffer = ByteBuffer.wrap(requestBytes);
// Throws SchemaException/IllegalArgumentException on non-Kafka bytes,
// which is caught below to drive the GSSAPI fallback.
RequestHeader requestHeader = RequestHeader.parse(requestBuffer);
ApiKeys apiKey = ApiKeys.forId(requestHeader.apiKey());
// A valid Kafka request header was received. SASL authentication tokens are now expected only
// following a SaslHandshakeRequest since this is not a GSSAPI client token from a Kafka 0.9.0.x client.
setSaslState(SaslState.HANDSHAKE_REQUEST);
isKafkaRequest = true;
if (!Protocol.apiVersionSupported(requestHeader.apiKey(), requestHeader.apiVersion())) {
// ApiVersions gets a graceful unsupported-version response; anything else fails.
if (apiKey == ApiKeys.API_VERSIONS)
sendKafkaResponse(ApiVersionsResponse.unsupportedVersionSend(node, requestHeader));
else
throw new UnsupportedVersionException("Version " + requestHeader.apiVersion() + " is not supported for apiKey " + apiKey);
} else {
AbstractRequest request = AbstractRequest.getRequest(requestHeader.apiKey(), requestHeader.apiVersion(), requestBuffer).request;
LOG.debug("Handle Kafka request {}", apiKey);
// Only these two request types are legal before authentication completes.
switch(apiKey) {
case API_VERSIONS:
handleApiVersionsRequest(requestHeader);
break;
case SASL_HANDSHAKE:
clientMechanism = handleHandshakeRequest(requestHeader, (SaslHandshakeRequest) request);
break;
default:
throw new IllegalSaslStateException("Unexpected Kafka request of type " + apiKey + " during SASL handshake.");
}
}
} catch (SchemaException | IllegalArgumentException e) {
// Parse failure: possibly a raw GSSAPI token, but only if no Kafka request
// has been seen yet on this connection.
if (saslState == SaslState.GSSAPI_OR_HANDSHAKE_REQUEST) {
// starting with 0x60, revert to GSSAPI for both these exceptions.
if (LOG.isDebugEnabled()) {
// Log at most the first ~10 bytes of the packet as hex for diagnostics.
StringBuilder tokenBuilder = new StringBuilder();
for (byte b : requestBytes) {
tokenBuilder.append(String.format("%02x", b));
if (tokenBuilder.length() >= 20)
break;
}
LOG.debug("Received client packet of length {} starting with bytes 0x{}, process as GSSAPI packet", requestBytes.length, tokenBuilder);
}
if (enabledMechanisms.contains(SaslConfigs.GSSAPI_MECHANISM)) {
LOG.debug("First client packet is not a SASL mechanism request, using default mechanism GSSAPI");
clientMechanism = SaslConfigs.GSSAPI_MECHANISM;
} else
throw new UnsupportedSaslMechanismException("Exception handling first SASL packet from client, GSSAPI is not supported by server", e);
} else
// A Kafka request was already seen; a malformed packet now is a real error.
throw e;
}
// A mechanism was selected (via handshake or GSSAPI fallback): move to AUTHENTICATE.
if (clientMechanism != null) {
createSaslServer(clientMechanism);
setSaslState(SaslState.AUTHENTICATE);
}
return isKafkaRequest;
}
Usage of org.apache.kafka.common.protocol.ApiKeys in the Apache Kafka project.
Example from class NodeApiVersionsTest, method testUnsupportedVersionsToString:
/**
 * Verifies that toString() marks every API key as UNSUPPORTED when the node
 * reported no versions at all.
 */
@Test
public void testUnsupportedVersionsToString() {
    NodeApiVersions versions = new NodeApiVersions(Collections.<ApiVersion>emptyList());
    // Build the expected string: "(Name(id): UNSUPPORTED, ...)" over all keys.
    ApiKeys[] allKeys = ApiKeys.values();
    StringBuilder expected = new StringBuilder("(");
    for (int i = 0; i < allKeys.length; i++) {
        if (i > 0)
            expected.append(", ");
        expected.append(allKeys[i].name).append("(").append(allKeys[i].id).append("): UNSUPPORTED");
    }
    expected.append(")");
    assertEquals(expected.toString(), versions.toString());
}
Usage of org.apache.kafka.common.protocol.ApiKeys in the Apache Kafka project.
Example from class NodeApiVersionsTest, method testVersionsToString:
/**
 * Verifies the toString() rendering of a full version list, including the
 * "node too old" and "node too new" unusable annotations.
 */
@Test
public void testVersionsToString() {
    // Advertise one version range per API key, forcing two keys out of range:
    // CONTROLLED_SHUTDOWN_KEY pinned at 0 (too old), DELETE_TOPICS at
    // 10000-10001 (too new); all others use their supported range.
    List<ApiVersion> apiVersions = new ArrayList<>();
    for (ApiKeys key : ApiKeys.values()) {
        ApiVersion entry;
        if (key == ApiKeys.CONTROLLED_SHUTDOWN_KEY)
            entry = new ApiVersion(key.id, (short) 0, (short) 0);
        else if (key == ApiKeys.DELETE_TOPICS)
            entry = new ApiVersion(key.id, (short) 10000, (short) 10001);
        else
            entry = new ApiVersion(key);
        apiVersions.add(entry);
    }
    NodeApiVersions nodeVersions = new NodeApiVersions(apiVersions);
    // Assemble the expected string key by key.
    StringBuilder expected = new StringBuilder();
    String separator = "(";
    for (ApiKeys key : ApiKeys.values()) {
        expected.append(separator);
        separator = ", ";
        if (key == ApiKeys.CONTROLLED_SHUTDOWN_KEY) {
            expected.append("ControlledShutdown(7): 0 [unusable: node too old]");
        } else if (key == ApiKeys.DELETE_TOPICS) {
            expected.append("DeleteTopics(20): 10000 to 10001 [unusable: node too new]");
        } else {
            short oldest = key.oldestVersion();
            short latest = key.latestVersion();
            expected.append(key.name).append("(").append(key.id).append("): ");
            // A single supported version prints alone; a range prints "lo to hi".
            if (oldest == latest)
                expected.append(oldest);
            else
                expected.append(oldest).append(" to ").append(latest);
            expected.append(" [usable: ").append(latest).append("]");
        }
    }
    expected.append(")");
    assertEquals(expected.toString(), nodeVersions.toString());
}
Usage of org.apache.kafka.common.protocol.ApiKeys in the Apache Kafka project.
Example from class NetworkClient, method parseResponse:
/**
 * Parses a raw response buffer into a typed AbstractResponse.
 *
 * @param responseBuffer buffer positioned at the start of the response header
 * @param requestHeader header of the request that produced this response;
 *        supplies the API key and version used to select the response schema
 * @return the parsed response
 */
public static AbstractResponse parseResponse(ByteBuffer responseBuffer, RequestHeader requestHeader) {
    ResponseHeader header = ResponseHeader.parse(responseBuffer);
    ApiKeys apiKey = ApiKeys.forId(requestHeader.apiKey());
    // Always expect the response version id to be the same as the request version id
    Schema schema = apiKey.responseSchema(requestHeader.apiVersion());
    Struct body = schema.read(responseBuffer);
    // Verify the correlation id matches before handing the body back.
    correlate(requestHeader, header);
    return AbstractResponse.getResponse(apiKey, body);
}
Aggregations