Example 1 with TokenTooLargeException

Use of org.apache.nifi.stream.io.exception.TokenTooLargeException in the Apache NiFi project (nifi).

From the class PublisherLease, method publish:

void publish(final FlowFile flowFile, final InputStream flowFileContent, final byte[] messageKey, final byte[] demarcatorBytes, final String topic) throws IOException {
    if (tracker == null) {
        tracker = new InFlightMessageTracker(logger);
    }
    try {
        byte[] messageContent;
        if (demarcatorBytes == null || demarcatorBytes.length == 0) {
            if (flowFile.getSize() > maxMessageSize) {
                tracker.fail(flowFile, new TokenTooLargeException("A message in the stream exceeds the maximum allowed message size of " + maxMessageSize + " bytes."));
                return;
            }
            // Send FlowFile content as it is, to support sending 0 byte message.
            messageContent = new byte[(int) flowFile.getSize()];
            StreamUtils.fillBuffer(flowFileContent, messageContent);
            publish(flowFile, messageKey, messageContent, topic, tracker);
            return;
        }
        try (final StreamDemarcator demarcator = new StreamDemarcator(flowFileContent, demarcatorBytes, maxMessageSize)) {
            while ((messageContent = demarcator.nextToken()) != null) {
                publish(flowFile, messageKey, messageContent, topic, tracker);
                if (tracker.isFailed(flowFile)) {
                    // If we have a failure, don't try to send anything else.
                    return;
                }
            }
        } catch (final TokenTooLargeException ttle) {
            tracker.fail(flowFile, ttle);
        }
    } catch (final Exception e) {
        tracker.fail(flowFile, e);
        poison();
        throw e;
    }
}
Also used: TokenTooLargeException (org.apache.nifi.stream.io.exception.TokenTooLargeException), StreamDemarcator (org.apache.nifi.stream.io.util.StreamDemarcator), TimeoutException (java.util.concurrent.TimeoutException), SchemaNotFoundException (org.apache.nifi.schema.access.SchemaNotFoundException), IOException (java.io.IOException)
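
The following is a minimal, self-contained sketch, not part of the NiFi sources above, of how the demarcated path behaves: StreamDemarcator splits the content on the demarcator bytes and, when the data for a single token exceeds the configured maximum, nextToken() signals this with a TokenTooLargeException, which the publish method above records via tracker.fail. The class name DemarcatorSketch and the sample data are illustrative assumptions.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.nifi.stream.io.exception.TokenTooLargeException;
import org.apache.nifi.stream.io.util.StreamDemarcator;

public class DemarcatorSketch {

    public static void main(final String[] args) throws IOException {
        final byte[] demarcatorBytes = "\n".getBytes(StandardCharsets.UTF_8);
        final byte[] content = "ok\nthis-line-is-well-over-the-limit\n".getBytes(StandardCharsets.UTF_8);
        // Deliberately tiny limit so the oversized line triggers the exception.
        final int maxMessageSize = 8;

        try (final StreamDemarcator demarcator = new StreamDemarcator(
                new ByteArrayInputStream(content), demarcatorBytes, maxMessageSize)) {
            byte[] token;
            // Iterate demarcated tokens; a token that cannot fit within maxMessageSize
            // causes nextToken() to throw TokenTooLargeException instead of returning it.
            while ((token = demarcator.nextToken()) != null) {
                System.out.println("token: " + new String(token, StandardCharsets.UTF_8));
            }
        } catch (final TokenTooLargeException ttle) {
            // PublisherLease.publish handles this by recording the failure:
            // tracker.fail(flowFile, ttle)
            System.out.println("rejected: " + ttle.getMessage());
        }
    }
}

This mirrors the catch block around the demarcator loop in Example 1.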

Example 2 with TokenTooLargeException

Use of org.apache.nifi.stream.io.exception.TokenTooLargeException in the Apache NiFi project (nifi).

From the class PublisherLease, method publish (record-oriented overload):

void publish(final FlowFile flowFile, final RecordSet recordSet, final RecordSetWriterFactory writerFactory, final RecordSchema schema, final String messageKeyField, final String topic) throws IOException {
    if (tracker == null) {
        tracker = new InFlightMessageTracker(logger);
    }
    final ByteArrayOutputStream baos = new ByteArrayOutputStream(1024);
    Record record;
    int recordCount = 0;
    try {
        while ((record = recordSet.next()) != null) {
            recordCount++;
            baos.reset();
            Map<String, String> additionalAttributes = Collections.emptyMap();
            try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
                final WriteResult writeResult = writer.write(record);
                additionalAttributes = writeResult.getAttributes();
                writer.flush();
            }
            final byte[] messageContent = baos.toByteArray();
            final String key = messageKeyField == null ? null : record.getAsString(messageKeyField);
            final byte[] messageKey = (key == null) ? null : key.getBytes(StandardCharsets.UTF_8);
            publish(flowFile, additionalAttributes, messageKey, messageContent, topic, tracker);
            if (tracker.isFailed(flowFile)) {
                // If we have a failure, don't try to send anything else.
                return;
            }
        }
        if (recordCount == 0) {
            tracker.trackEmpty(flowFile);
        }
    } catch (final TokenTooLargeException ttle) {
        tracker.fail(flowFile, ttle);
    } catch (final SchemaNotFoundException snfe) {
        throw new IOException(snfe);
    } catch (final Exception e) {
        tracker.fail(flowFile, e);
        poison();
        throw e;
    }
}
Also used: ByteArrayOutputStream (java.io.ByteArrayOutputStream), IOException (java.io.IOException), RecordSetWriter (org.apache.nifi.serialization.RecordSetWriter), WriteResult (org.apache.nifi.serialization.WriteResult), Record (org.apache.nifi.serialization.record.Record), ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord), TimeoutException (java.util.concurrent.TimeoutException), SchemaNotFoundException (org.apache.nifi.schema.access.SchemaNotFoundException), TokenTooLargeException (org.apache.nifi.stream.io.exception.TokenTooLargeException)
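
The aggregations below show that PublisherLease also references org.apache.kafka.clients.producer.ProducerRecord. The following is a hypothetical sketch, not NiFi code, of what the internal publish(flowFile, additionalAttributes, messageKey, messageContent, topic, tracker) call plausibly does with each serialized record: wrap the key and content byte arrays in a ProducerRecord and hand it to a Producer<byte[], byte[]> asynchronously. The class RecordPublishSketch and its constructor-injected producer are assumptions.

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

final class RecordPublishSketch {

    private final Producer<byte[], byte[]> producer;

    RecordPublishSketch(final Producer<byte[], byte[]> producer) {
        this.producer = producer;
    }

    void send(final String topic, final byte[] messageKey, final byte[] messageContent) {
        final ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(topic, messageKey, messageContent);
        // In PublisherLease the per-FlowFile outcome is recorded through InFlightMessageTracker,
        // which is what tracker.isFailed(flowFile) consults in the loop above; this sketch only logs.
        producer.send(record, (metadata, exception) -> {
            if (exception != null) {
                System.err.println("send failed for topic " + topic + ": " + exception.getMessage());
            }
        });
    }
}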

Aggregations

IOException (java.io.IOException): 2
TimeoutException (java.util.concurrent.TimeoutException): 2
SchemaNotFoundException (org.apache.nifi.schema.access.SchemaNotFoundException): 2
TokenTooLargeException (org.apache.nifi.stream.io.exception.TokenTooLargeException): 2
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 1
RecordSetWriter (org.apache.nifi.serialization.RecordSetWriter): 1
WriteResult (org.apache.nifi.serialization.WriteResult): 1
Record (org.apache.nifi.serialization.record.Record): 1
StreamDemarcator (org.apache.nifi.stream.io.util.StreamDemarcator): 1