
Example 1 with InternalSinkRecord

Use of org.apache.kafka.connect.runtime.InternalSinkRecord in the Apache Kafka project, taken from the report method of the WorkerErrantRecordReporter class.

@Override
public Future<Void> report(SinkRecord record, Throwable error) {
    ConsumerRecord<byte[], byte[]> consumerRecord;
    // Most records will be an InternalSinkRecord, but the task could potentially
    // report modified or new records, so handle both cases
    if (record instanceof InternalSinkRecord) {
        consumerRecord = ((InternalSinkRecord) record).originalRecord();
    } else {
        // Generate a new consumer record from the modified sink record. We prefer
        // to send the original consumer record (pre-transformed) to the DLQ,
        // but in this case we don't have one and send the potentially transformed
        // record instead
        String topic = record.topic();
        byte[] key = keyConverter.fromConnectData(topic, record.keySchema(), record.key());
        byte[] value = valueConverter.fromConnectData(topic, record.valueSchema(), record.value());
        RecordHeaders headers = new RecordHeaders();
        if (record.headers() != null) {
            for (Header header : record.headers()) {
                String headerKey = header.key();
                byte[] rawHeader = headerConverter.fromConnectHeader(topic, headerKey, header.schema(), header.value());
                headers.add(headerKey, rawHeader);
            }
        }
        int keyLength = key != null ? key.length : -1;
        int valLength = value != null ? value.length : -1;
        consumerRecord = new ConsumerRecord<>(record.topic(), record.kafkaPartition(), record.kafkaOffset(),
                record.timestamp(), record.timestampType(), keyLength, valLength, key, value, headers, Optional.empty());
    }
    Future<Void> future = retryWithToleranceOperator.executeFailed(Stage.TASK_PUT, SinkTask.class, consumerRecord, error);
    if (!future.isDone()) {
        TopicPartition partition = new TopicPartition(consumerRecord.topic(), consumerRecord.partition());
        futures.computeIfAbsent(partition, p -> new ArrayList<>()).add(future);
    }
    return future;
}
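
For context, here is a minimal sketch of the caller side of this API: a hypothetical sink task (ExampleSinkTask, with a made-up write method) that obtains the reporter via SinkTaskContext.errantRecordReporter(), which in a Connect worker is backed by the report method shown above. This is an illustrative sketch, not code from the Kafka project.

import java.util.Collection;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.sink.ErrantRecordReporter;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.sink.SinkTask;

public abstract class ExampleSinkTask extends SinkTask {

    @Override
    public void put(Collection<SinkRecord> records) {
        // errantRecordReporter() may return null if the Connect runtime or the
        // connector configuration does not support error reporting.
        ErrantRecordReporter reporter = context.errantRecordReporter();
        for (SinkRecord record : records) {
            try {
                write(record); // hypothetical delivery to the external system
            } catch (Exception e) {
                if (reporter != null) {
                    // Hands the failed record to the runtime's reporter
                    // (the report method shown above) for DLQ/log handling.
                    reporter.report(record, e);
                } else {
                    throw new ConnectException("Failed to write record", e);
                }
            }
        }
    }

    // Hypothetical method that writes one record to the target system.
    protected abstract void write(SinkRecord record) throws Exception;
}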
Also used : LoggerFactory (org.slf4j.LoggerFactory), TimeoutException (java.util.concurrent.TimeoutException), ArrayList (java.util.ArrayList), ConcurrentMap (java.util.concurrent.ConcurrentMap), RecordHeaders (org.apache.kafka.common.header.internals.RecordHeaders), Future (java.util.concurrent.Future), InternalSinkRecord (org.apache.kafka.connect.runtime.InternalSinkRecord), HeaderConverter (org.apache.kafka.connect.storage.HeaderConverter), Converter (org.apache.kafka.connect.storage.Converter), SinkTask (org.apache.kafka.connect.sink.SinkTask), TopicPartition (org.apache.kafka.common.TopicPartition), ErrantRecordReporter (org.apache.kafka.connect.sink.ErrantRecordReporter), Logger (org.slf4j.Logger), Header (org.apache.kafka.connect.header.Header), Collection (java.util.Collection), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata), Collectors (java.util.stream.Collectors), Objects (java.util.Objects), ExecutionException (java.util.concurrent.ExecutionException), TimeUnit (java.util.concurrent.TimeUnit), List (java.util.List), ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), ConnectException (org.apache.kafka.connect.errors.ConnectException), SinkRecord (org.apache.kafka.connect.sink.SinkRecord), Optional (java.util.Optional)
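
The fallback branch in the report method above rebuilds the raw key and value bytes with the task's configured converters. The following standalone sketch reproduces that serialization step, assuming JsonConverter as the value converter and using made-up topic and record contents purely for illustration; a worker would use whichever converters the connector is configured with.

import java.util.Map;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.sink.SinkRecord;

public class ConverterRoundTrip {
    public static void main(String[] args) {
        // Assumption: JsonConverter stands in for whatever value converter the
        // connector is actually configured with.
        JsonConverter valueConverter = new JsonConverter();
        valueConverter.configure(Map.of("schemas.enable", "false"), false); // false = value converter

        // Made-up record, analogous to a transformed SinkRecord handed to report().
        SinkRecord record = new SinkRecord("orders", 0,
                Schema.STRING_SCHEMA, "order-1",
                Schema.STRING_SCHEMA, "shipped",
                42L);

        // Same call the else branch uses to turn the Connect value back into bytes
        // before wrapping it in a ConsumerRecord for the DLQ.
        byte[] value = valueConverter.fromConnectData(record.topic(), record.valueSchema(), record.value());
        System.out.println(new String(value));
    }
}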

Aggregations

ArrayList (java.util.ArrayList) 1
Collection (java.util.Collection) 1
List (java.util.List) 1
Objects (java.util.Objects) 1
Optional (java.util.Optional) 1
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap) 1
ConcurrentMap (java.util.concurrent.ConcurrentMap) 1
ExecutionException (java.util.concurrent.ExecutionException) 1
Future (java.util.concurrent.Future) 1
TimeUnit (java.util.concurrent.TimeUnit) 1
TimeoutException (java.util.concurrent.TimeoutException) 1
Collectors (java.util.stream.Collectors) 1
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord) 1
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata) 1
TopicPartition (org.apache.kafka.common.TopicPartition) 1
RecordHeaders (org.apache.kafka.common.header.internals.RecordHeaders) 1
ConnectException (org.apache.kafka.connect.errors.ConnectException) 1
Header (org.apache.kafka.connect.header.Header) 1
InternalSinkRecord (org.apache.kafka.connect.runtime.InternalSinkRecord) 1
ErrantRecordReporter (org.apache.kafka.connect.sink.ErrantRecordReporter) 1