
Example 41 with KafkaException

Use of org.apache.kafka.common.KafkaException in the project apache-kafka-on-k8s by banzaicloud.

From the class FileRecords, the method writeTo:

@Override
public long writeTo(GatheringByteChannel destChannel, long offset, int length) throws IOException {
    long newSize = Math.min(channel.size(), end) - start;
    int oldSize = sizeInBytes();
    if (newSize < oldSize)
        throw new KafkaException(String.format("Size of FileRecords %s has been truncated during write: old size %d, new size %d", file.getAbsolutePath(), oldSize, newSize));
    long position = start + offset;
    int count = Math.min(length, oldSize);
    final long bytesTransferred;
    if (destChannel instanceof TransportLayer) {
        TransportLayer tl = (TransportLayer) destChannel;
        bytesTransferred = tl.transferFrom(channel, position, count);
    } else {
        bytesTransferred = channel.transferTo(position, count, destChannel);
    }
    return bytesTransferred;
}
Also used : TransportLayer(org.apache.kafka.common.network.TransportLayer) KafkaException(org.apache.kafka.common.KafkaException)
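
For context, here is a minimal sketch of driving writeTo from user code, assuming a Kafka 1.x-era classpath. The segment path and copy destination are hypothetical, and a FileChannel stands in for the socket channel the broker would normally pass:

import java.io.File;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;
import org.apache.kafka.common.record.FileRecords;

public class FileRecordsWriteToSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical segment file; any valid Kafka log segment would do.
        File segment = new File("00000000000000000000.log");
        try (FileRecords records = FileRecords.open(segment);
             FileChannel dest = FileChannel.open(new File("segment-copy.log").toPath(),
                     StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
            int total = records.sizeInBytes();
            long written = 0;
            // writeTo transfers at most `length` bytes starting at `offset`,
            // so loop on the returned count until everything has been copied.
            while (written < total)
                written += records.writeTo(dest, written, (int) (total - written));
        }
    }
}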

Example 42 with KafkaException

Use of org.apache.kafka.common.KafkaException in the project apache-kafka-on-k8s by banzaicloud.

From the class DefaultRecordBatch, the method compressedIterator:

private CloseableIterator<Record> compressedIterator(BufferSupplier bufferSupplier) {
    final ByteBuffer buffer = this.buffer.duplicate();
    buffer.position(RECORDS_OFFSET);
    final DataInputStream inputStream = new DataInputStream(compressionType().wrapForInput(buffer, magic(), bufferSupplier));
    return new RecordIterator() {

        @Override
        protected Record readNext(long baseOffset, long firstTimestamp, int baseSequence, Long logAppendTime) {
            try {
                return DefaultRecord.readFrom(inputStream, baseOffset, firstTimestamp, baseSequence, logAppendTime);
            } catch (EOFException e) {
                throw new InvalidRecordException("Incorrect declared batch size, premature EOF reached");
            } catch (IOException e) {
                throw new KafkaException("Failed to decompress record stream", e);
            }
        }

        @Override
        protected boolean ensureNoneRemaining() {
            try {
                return inputStream.read() == -1;
            } catch (IOException e) {
                throw new KafkaException("Error checking for remaining bytes after reading batch", e);
            }
        }

        @Override
        public void close() {
            try {
                inputStream.close();
            } catch (IOException e) {
                throw new KafkaException("Failed to close record stream", e);
            }
        }
    };
}
Also used : EOFException(java.io.EOFException) KafkaException(org.apache.kafka.common.KafkaException) IOException(java.io.IOException) DataInputStream(java.io.DataInputStream) ByteBuffer(java.nio.ByteBuffer)
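
compressedIterator itself is private; callers reach it through streamingIterator, which chooses the compressed or uncompressed path per batch. A minimal sketch, assuming `records` holds already-fetched MemoryRecords (the helper name dumpBatches is made up for illustration):

import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MutableRecordBatch;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.utils.BufferSupplier;
import org.apache.kafka.common.utils.CloseableIterator;

public class StreamingIteratorSketch {
    static void dumpBatches(MemoryRecords records) {
        // A shared supplier lets decompression reuse intermediate buffers.
        BufferSupplier supplier = BufferSupplier.create();
        for (MutableRecordBatch batch : records.batches()) {
            // For a compressed batch this delegates to compressedIterator above;
            // closing the iterator closes the underlying decompression stream.
            try (CloseableIterator<Record> it = batch.streamingIterator(supplier)) {
                while (it.hasNext()) {
                    Record record = it.next();
                    System.out.println(record.offset() + " @ " + record.timestamp());
                }
            }
        }
    }
}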

Example 43 with KafkaException

Use of org.apache.kafka.common.KafkaException in the project apache-kafka-on-k8s by banzaicloud.

From the class LegacyRecord, the method write:

private static void write(ByteBuffer buffer, byte magic, long timestamp, ByteBuffer key, ByteBuffer value, CompressionType compressionType, TimestampType timestampType) {
    try {
        DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
        write(out, magic, timestamp, key, value, compressionType, timestampType);
    } catch (IOException e) {
        throw new KafkaException(e);
    }
}
Also used : ByteBufferOutputStream(org.apache.kafka.common.utils.ByteBufferOutputStream) DataOutputStream(java.io.DataOutputStream) KafkaException(org.apache.kafka.common.KafkaException) IOException(java.io.IOException)
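
The private write above backs LegacyRecord's public factories. A minimal sketch using LegacyRecord.create with the v1 (magic 1) message format:

import org.apache.kafka.common.record.LegacyRecord;
import org.apache.kafka.common.record.RecordBatch;

public class LegacyRecordSketch {
    public static void main(String[] args) {
        // create(...) allocates a buffer and funnels into the write(...) path shown above.
        LegacyRecord record = LegacyRecord.create(RecordBatch.MAGIC_VALUE_V1,
                System.currentTimeMillis(), "key".getBytes(), "value".getBytes());
        System.out.println("checksum=" + record.checksum() + ", valid=" + record.isValid());
    }
}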

Example 44 with KafkaException

Use of org.apache.kafka.common.KafkaException in the project apache-kafka-on-k8s by banzaicloud.

From the class PlaintextChannelBuilder, the method buildChannel:

@Override
public KafkaChannel buildChannel(String id, SelectionKey key, int maxReceiveSize, MemoryPool memoryPool) throws KafkaException {
    try {
        PlaintextTransportLayer transportLayer = new PlaintextTransportLayer(key);
        PlaintextAuthenticator authenticator = new PlaintextAuthenticator(configs, transportLayer);
        return new KafkaChannel(id, transportLayer, authenticator, maxReceiveSize, memoryPool != null ? memoryPool : MemoryPool.NONE);
    } catch (Exception e) {
        log.warn("Failed to create channel due to ", e);
        throw new KafkaException(e);
    }
}
Also used : KafkaException(org.apache.kafka.common.KafkaException) IOException(java.io.IOException)
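
For reference, a sketch of the calling pattern, which is normally the Selector's job: configure the builder once, then wrap each connected channel's SelectionKey. Obtaining a registered SelectionKey is omitted here, and the no-arg constructor is an assumption about this Kafka 1.x-era fork:

import java.nio.channels.SelectionKey;
import java.util.HashMap;
import org.apache.kafka.common.memory.MemoryPool;
import org.apache.kafka.common.network.ChannelBuilder;
import org.apache.kafka.common.network.KafkaChannel;
import org.apache.kafka.common.network.PlaintextChannelBuilder;

public class PlaintextChannelSketch {
    // `key` must belong to a connected SocketChannel registered with a Selector.
    static KafkaChannel wrap(String connectionId, SelectionKey key) {
        ChannelBuilder builder = new PlaintextChannelBuilder(); // no-arg constructor assumed for this version
        builder.configure(new HashMap<String, Object>());
        // Any failure inside buildChannel surfaces as an unchecked KafkaException.
        return builder.buildChannel(connectionId, key, 512 * 1024, MemoryPool.NONE);
    }
}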

Example 45 with KafkaException

Use of org.apache.kafka.common.KafkaException in the project apache-kafka-on-k8s by banzaicloud.

From the class SaslChannelBuilder, the method configure:

@Override
public void configure(Map<String, ?> configs) throws KafkaException {
    try {
        this.configs = configs;
        boolean hasKerberos = jaasContexts.containsKey(SaslConfigs.GSSAPI_MECHANISM);
        if (hasKerberos) {
            String defaultRealm;
            try {
                defaultRealm = defaultKerberosRealm();
            } catch (Exception ke) {
                defaultRealm = "";
            }
            @SuppressWarnings("unchecked") List<String> principalToLocalRules = (List<String>) configs.get(BrokerSecurityConfigs.SASL_KERBEROS_PRINCIPAL_TO_LOCAL_RULES_CONFIG);
            if (principalToLocalRules != null)
                kerberosShortNamer = KerberosShortNamer.fromUnparsedRules(defaultRealm, principalToLocalRules);
        }
        for (Map.Entry<String, JaasContext> entry : jaasContexts.entrySet()) {
            String mechanism = entry.getKey();
            // With static JAAS configuration, use KerberosLogin if Kerberos is enabled. With dynamic JAAS configuration,
            // use KerberosLogin only for the LoginContext corresponding to GSSAPI
            LoginManager loginManager = LoginManager.acquireLoginManager(entry.getValue(), mechanism, hasKerberos, configs);
            loginManagers.put(mechanism, loginManager);
            subjects.put(mechanism, loginManager.subject());
        }
        if (this.securityProtocol == SecurityProtocol.SASL_SSL) {
            // Disable SSL client authentication as we are using SASL authentication
            this.sslFactory = new SslFactory(mode, "none", isInterBrokerListener);
            this.sslFactory.configure(configs);
        }
    } catch (Exception e) {
        close();
        throw new KafkaException(e);
    }
}
Also used : JaasContext(org.apache.kafka.common.security.JaasContext) LoginManager(org.apache.kafka.common.security.authenticator.LoginManager) List(java.util.List) KafkaException(org.apache.kafka.common.KafkaException) HashMap(java.util.HashMap) Map(java.util.Map) IOException(java.io.IOException) InvocationTargetException(java.lang.reflect.InvocationTargetException) SslFactory(org.apache.kafka.common.security.ssl.SslFactory)
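
configure() runs when the SASL network layer is built. A hedged sketch of client properties that exercise this path; with SASL_SSL the builder also creates an SslFactory with client auth set to "none", exactly as the code above shows:

import java.util.Properties;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.config.SaslConfigs;

public class SaslClientConfigSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
        // GSSAPI selects the Kerberos login path in configure() above.
        props.put(SaslConfigs.SASL_MECHANISM, "GSSAPI");
        props.put(SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "kafka");
        // These properties would be passed to a KafkaProducer or KafkaConsumer,
        // whose network client builds a SaslChannelBuilder and calls configure(props).
    }
}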

Aggregations

KafkaException (org.apache.kafka.common.KafkaException): 262
Test (org.junit.Test): 69
TopicPartition (org.apache.kafka.common.TopicPartition): 56
Test (org.junit.jupiter.api.Test): 47
HashMap (java.util.HashMap): 40
IOException (java.io.IOException): 39
StreamsException (org.apache.kafka.streams.errors.StreamsException): 34
Map (java.util.Map): 32
TimeoutException (org.apache.kafka.common.errors.TimeoutException): 28
ArrayList (java.util.ArrayList): 27
List (java.util.List): 21
ByteBuffer (java.nio.ByteBuffer): 19
ExecutionException (java.util.concurrent.ExecutionException): 19
ConfigException (org.apache.kafka.common.config.ConfigException): 16
TopicAuthorizationException (org.apache.kafka.common.errors.TopicAuthorizationException): 14
HashSet (java.util.HashSet): 13
Properties (java.util.Properties): 13
Set (java.util.Set): 11
Collectors (java.util.stream.Collectors): 11
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata): 11