
Example 1 with SchemaException

Use of org.apache.kafka.common.protocol.types.SchemaException in project kafka by apache.

From class SaslClientAuthenticator, method handleKafkaResponse:

private void handleKafkaResponse(RequestHeader requestHeader, byte[] responseBytes) {
    AbstractResponse response;
    ApiKeys apiKey;
    try {
        response = NetworkClient.parseResponse(ByteBuffer.wrap(responseBytes), requestHeader);
        apiKey = ApiKeys.forId(requestHeader.apiKey());
    } catch (SchemaException | IllegalArgumentException e) {
        // A response that cannot be parsed as a Kafka protocol message usually means
        // the server is not speaking the Kafka SASL handshake protocol at all.
        LOG.debug("Invalid SASL mechanism response, server may be expecting only GSSAPI tokens");
        throw new AuthenticationException("Invalid SASL mechanism response", e);
    }
    switch (apiKey) {
        case SASL_HANDSHAKE:
            handleSaslHandshakeResponse((SaslHandshakeResponse) response);
            break;
        default:
            // Only a SASL_HANDSHAKE response is valid at this point in authentication.
            throw new IllegalStateException("Unexpected API key during handshake: " + apiKey);
    }
}
Also used : ApiKeys(org.apache.kafka.common.protocol.ApiKeys) SchemaException(org.apache.kafka.common.protocol.types.SchemaException) AbstractResponse(org.apache.kafka.common.requests.AbstractResponse) AuthenticationException(org.apache.kafka.common.errors.AuthenticationException)
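
The catch-and-wrap pattern above is easy to exercise in isolation. Below is a minimal sketch; the parseResponse helper is a hypothetical stand-in for NetworkClient.parseResponse, invented here so the snippet is self-contained:

import org.apache.kafka.common.errors.AuthenticationException;
import org.apache.kafka.common.protocol.types.SchemaException;

public class HandshakeParseDemo {

    // Hypothetical stand-in for NetworkClient.parseResponse(...): always fails,
    // simulating a server that replied with a non-Kafka token.
    private static Object parseResponse(byte[] responseBytes) {
        throw new SchemaException("Buffer underflow while reading response header");
    }

    public static void main(String[] args) {
        try {
            parseResponse(new byte[] { 0x00 });
        } catch (SchemaException | IllegalArgumentException e) {
            // Mirrors handleKafkaResponse: the parse failure becomes a fatal,
            // non-retriable AuthenticationException.
            AuthenticationException fatal = new AuthenticationException("Invalid SASL mechanism response", e);
            System.out.println(fatal.getMessage() + " (cause: " + e.getMessage() + ")");
        }
    }
}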

Example 2 with SchemaException

Use of org.apache.kafka.common.protocol.types.SchemaException in project apache-kafka-on-k8s by banzaicloud.

From class SaslClientAuthenticator, method receiveKafkaResponse:

private AbstractResponse receiveKafkaResponse() throws IOException {
    try {
        byte[] responseBytes = receiveResponseOrToken();
        if (responseBytes == null)
            return null;
        else {
            AbstractResponse response = NetworkClient.parseResponse(ByteBuffer.wrap(responseBytes), currentRequestHeader);
            // Clear the saved header once it has been matched against a response.
            currentRequestHeader = null;
            return response;
        }
    } catch (SchemaException | IllegalArgumentException e) {
        // An unparseable response is fatal: mark the connection FAILED before
        // surfacing a typed authentication error to the caller.
        LOG.debug("Invalid SASL mechanism response, server may be expecting only GSSAPI tokens");
        setSaslState(SaslState.FAILED);
        throw new IllegalSaslStateException("Invalid SASL mechanism response, server may be expecting a different protocol", e);
    }
}
Also used : SchemaException(org.apache.kafka.common.protocol.types.SchemaException) AbstractResponse(org.apache.kafka.common.requests.AbstractResponse) IllegalSaslStateException(org.apache.kafka.common.errors.IllegalSaslStateException)
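
Compared with Example 1, this variant marks the connection FAILED before throwing, and uses IllegalSaslStateException (a subclass of AuthenticationException) to signal a protocol-level mismatch. A minimal sketch of that order of operations, with a hypothetical SaslState enum standing in for the authenticator's real state machine:

import org.apache.kafka.common.errors.IllegalSaslStateException;
import org.apache.kafka.common.protocol.types.SchemaException;

public class SaslFailureDemo {

    // Hypothetical stand-in for the authenticator's internal state machine.
    enum SaslState { RECEIVE_HANDSHAKE_RESPONSE, FAILED }

    private static SaslState saslState = SaslState.RECEIVE_HANDSHAKE_RESPONSE;

    public static void main(String[] args) {
        try {
            // Simulate a response that does not match the expected schema.
            throw new SchemaException("Error reading SaslHandshake response");
        } catch (SchemaException | IllegalArgumentException e) {
            // Fail the state machine first so no further SASL tokens are sent,
            // then surface the typed exception.
            saslState = SaslState.FAILED;
            IllegalSaslStateException ex = new IllegalSaslStateException(
                "Invalid SASL mechanism response, server may be expecting a different protocol", e);
            System.out.println(saslState + ": " + ex.getMessage());
        }
    }
}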

Example 3 with SchemaException

Use of org.apache.kafka.common.protocol.types.SchemaException in project kafka by apache.

From class ConsumerProtocol, method deserializeAssignment:

public static Assignment deserializeAssignment(final ByteBuffer buffer, short version) {
    version = checkAssignmentVersion(version);
    try {
        ConsumerProtocolAssignment data = new ConsumerProtocolAssignment(new ByteBufferAccessor(buffer), version);
        // Flatten the per-topic partition lists into TopicPartition instances.
        List<TopicPartition> assignedPartitions = new ArrayList<>();
        for (ConsumerProtocolAssignment.TopicPartition tp : data.assignedPartitions()) {
            for (Integer partition : tp.partitions()) {
                assignedPartitions.add(new TopicPartition(tp.topic(), partition));
            }
        }
        return new Assignment(assignedPartitions, data.userData() != null ? data.userData().duplicate() : null);
    } catch (BufferUnderflowException e) {
        // Report a truncated buffer as a schema violation rather than a raw NIO error.
        throw new SchemaException("Buffer underflow while parsing consumer protocol's assignment", e);
    }
}
Also used : Assignment(org.apache.kafka.clients.consumer.ConsumerPartitionAssignor.Assignment) ConsumerProtocolAssignment(org.apache.kafka.common.message.ConsumerProtocolAssignment) SchemaException(org.apache.kafka.common.protocol.types.SchemaException) TopicPartition(org.apache.kafka.common.TopicPartition) ArrayList(java.util.ArrayList) ByteBufferAccessor(org.apache.kafka.common.protocol.ByteBufferAccessor) BufferUnderflowException(java.nio.BufferUnderflowException)
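
The method above pairs with ConsumerProtocol.serializeAssignment, so the SchemaException path can be demonstrated with a round trip plus a deliberately truncated buffer. A sketch, assuming kafka-clients is on the classpath (ConsumerProtocol lives in the internals package, and the topic name is arbitrary):

import java.nio.ByteBuffer;
import java.util.Arrays;

import org.apache.kafka.clients.consumer.ConsumerPartitionAssignor.Assignment;
import org.apache.kafka.clients.consumer.internals.ConsumerProtocol;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.protocol.types.SchemaException;

public class AssignmentRoundTrip {
    public static void main(String[] args) {
        Assignment original = new Assignment(
            Arrays.asList(new TopicPartition("orders", 0), new TopicPartition("orders", 1)));

        // A well-formed buffer round-trips cleanly.
        ByteBuffer buffer = ConsumerProtocol.serializeAssignment(original);
        System.out.println("Round trip: " + ConsumerProtocol.deserializeAssignment(buffer).partitions());

        // A truncated buffer (version header only, then one stray byte) underflows
        // during parsing and is rethrown as the SchemaException shown above.
        ByteBuffer truncated = ByteBuffer.wrap(new byte[] { 0, 1, 0 });
        try {
            ConsumerProtocol.deserializeAssignment(truncated);
        } catch (SchemaException e) {
            System.out.println("Caught: " + e.getMessage());
        }
    }
}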

Example 4 with SchemaException

Use of org.apache.kafka.common.protocol.types.SchemaException in project kafka by apache.

From class SaslClientAuthenticator, method receiveKafkaResponse:

private AbstractResponse receiveKafkaResponse() throws IOException {
    if (netInBuffer == null)
        netInBuffer = new NetworkReceive(node);
    NetworkReceive receive = netInBuffer;
    try {
        byte[] responseBytes = receiveResponseOrToken();
        if (responseBytes == null)
            return null;
        else {
            AbstractResponse response = NetworkClient.parseResponse(ByteBuffer.wrap(responseBytes), currentRequestHeader);
            currentRequestHeader = null;
            return response;
        }
    } catch (BufferUnderflowException | SchemaException | IllegalArgumentException e) {
        /*
         * Account for the fact that during re-authentication there may be responses
         * arriving for requests that were sent in the past.
         */
        if (reauthInfo.reauthenticating()) {
            /*
             * It didn't match the current request header, so it must be unrelated to
             * re-authentication. Save it so it can be processed later.
             */
            receive.payload().rewind();
            reauthInfo.pendingAuthenticatedReceives.add(receive);
            return null;
        }
        log.debug("Invalid SASL mechanism response, server may be expecting only GSSAPI tokens");
        setSaslState(SaslState.FAILED);
        throw new IllegalSaslStateException("Invalid SASL mechanism response, server may be expecting a different protocol", e);
    }
}
Also used : SchemaException(org.apache.kafka.common.protocol.types.SchemaException) AbstractResponse(org.apache.kafka.common.requests.AbstractResponse) NetworkReceive(org.apache.kafka.common.network.NetworkReceive) IllegalSaslStateException(org.apache.kafka.common.errors.IllegalSaslStateException) BufferUnderflowException(java.nio.BufferUnderflowException)
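
What distinguishes this version from Example 2 is the re-authentication branch: a response that fails to parse while re-authenticating is likely a late reply to an ordinary in-flight request, so it is buffered rather than treated as an error. A sketch of that branching, with ReauthInfo reduced to a hypothetical two-field stand-in:

import java.util.ArrayDeque;
import java.util.Deque;

import org.apache.kafka.common.protocol.types.SchemaException;

public class ReauthBufferingDemo {

    // Hypothetical stand-in for the authenticator's ReauthInfo bookkeeping.
    static class ReauthInfo {
        boolean reauthenticating = true;
        final Deque<byte[]> pendingAuthenticatedReceives = new ArrayDeque<>();
    }

    public static void main(String[] args) {
        ReauthInfo reauthInfo = new ReauthInfo();
        byte[] staleResponse = { 0x01, 0x02 };
        try {
            // Simulate a receive that does not match the current request header.
            throw new SchemaException("response does not match current request header");
        } catch (SchemaException | IllegalArgumentException e) {
            if (reauthInfo.reauthenticating) {
                // Save the payload so it can be processed after re-authentication,
                // instead of failing the connection.
                reauthInfo.pendingAuthenticatedReceives.add(staleResponse);
                System.out.println("Buffered " + reauthInfo.pendingAuthenticatedReceives.size() + " stale receive(s)");
            }
        }
    }
}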

Example 5 with SchemaException

Use of org.apache.kafka.common.protocol.types.SchemaException in project kafka by apache.

From class MirrorClient, method remoteConsumerOffsets:

/**
 * Translate a remote consumer group's offsets into corresponding local offsets. Topics are automatically
 * renamed according to the ReplicationPolicy.
 * @param consumerGroupId group ID of the remote consumer group
 * @param remoteClusterAlias alias of the remote cluster
 * @param timeout maximum time to spend consuming the checkpoints topic
 */
public Map<TopicPartition, OffsetAndMetadata> remoteConsumerOffsets(String consumerGroupId, String remoteClusterAlias, Duration timeout) {
    long deadline = System.currentTimeMillis() + timeout.toMillis();
    Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(consumerConfig, new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
        // checkpoint topics are not "remote topics", as they are not replicated. So we don't need
        // to use ReplicationPolicy to create the checkpoint topic here.
        String checkpointTopic = replicationPolicy.checkpointsTopic(remoteClusterAlias);
        List<TopicPartition> checkpointAssignment = Collections.singletonList(new TopicPartition(checkpointTopic, 0));
        consumer.assign(checkpointAssignment);
        consumer.seekToBeginning(checkpointAssignment);
        while (System.currentTimeMillis() < deadline && !endOfStream(consumer, checkpointAssignment)) {
            ConsumerRecords<byte[], byte[]> records = consumer.poll(timeout);
            for (ConsumerRecord<byte[], byte[]> record : records) {
                try {
                    Checkpoint checkpoint = Checkpoint.deserializeRecord(record);
                    if (checkpoint.consumerGroupId().equals(consumerGroupId)) {
                        offsets.put(checkpoint.topicPartition(), checkpoint.offsetAndMetadata());
                    }
                } catch (SchemaException e) {
                    log.info("Could not deserialize record. Skipping.", e);
                }
            }
        }
        log.info("Consumed {} checkpoint records for {} from {}.", offsets.size(), consumerGroupId, checkpointTopic);
    }
    return offsets;
}
Also used : SchemaException(org.apache.kafka.common.protocol.types.SchemaException) HashMap(java.util.HashMap) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) TopicPartition(org.apache.kafka.common.TopicPartition) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer)
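
From the caller's side, the SchemaException handling above is invisible: malformed checkpoint records are simply skipped. A hedged usage sketch; the bootstrap address, group ID, and cluster alias are placeholders for a real MirrorMaker 2 setup:

import java.time.Duration;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.connect.mirror.MirrorClient;

public class OffsetTranslationDemo {
    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        props.put("bootstrap.servers", "localhost:9092");
        MirrorClient client = new MirrorClient(props);
        try {
            // Offsets come back keyed by local (renamed) topic-partitions.
            Map<TopicPartition, OffsetAndMetadata> offsets =
                client.remoteConsumerOffsets("my-group", "primary", Duration.ofSeconds(30));
            offsets.forEach((tp, offset) -> System.out.println(tp + " -> " + offset.offset()));
        } finally {
            client.close();
        }
    }
}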

Aggregations

SchemaException (org.apache.kafka.common.protocol.types.SchemaException)6
BufferUnderflowException (java.nio.BufferUnderflowException)3
TopicPartition (org.apache.kafka.common.TopicPartition)3
AbstractResponse (org.apache.kafka.common.requests.AbstractResponse)3
ArrayList (java.util.ArrayList)2
IllegalSaslStateException (org.apache.kafka.common.errors.IllegalSaslStateException)2
ByteBufferAccessor (org.apache.kafka.common.protocol.ByteBufferAccessor)2
HashMap (java.util.HashMap)1
Assignment (org.apache.kafka.clients.consumer.ConsumerPartitionAssignor.Assignment)1
Subscription (org.apache.kafka.clients.consumer.ConsumerPartitionAssignor.Subscription)1
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer)1
OffsetAndMetadata (org.apache.kafka.clients.consumer.OffsetAndMetadata)1
AuthenticationException (org.apache.kafka.common.errors.AuthenticationException)1
ConsumerProtocolAssignment (org.apache.kafka.common.message.ConsumerProtocolAssignment)1
ConsumerProtocolSubscription (org.apache.kafka.common.message.ConsumerProtocolSubscription)1
NetworkReceive (org.apache.kafka.common.network.NetworkReceive)1
ApiKeys (org.apache.kafka.common.protocol.ApiKeys)1
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer)1