
Example 26 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class FetchAzureBlobStorage, method onTrigger.

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final long startNanos = System.nanoTime();
    String containerName = context.getProperty(AzureStorageUtils.CONTAINER).evaluateAttributeExpressions(flowFile).getValue();
    String blobPath = context.getProperty(BLOB).evaluateAttributeExpressions(flowFile).getValue();
    AtomicReference<Exception> storedException = new AtomicReference<>();
    try {
        CloudBlobClient blobClient = AzureStorageUtils.createCloudBlobClient(context, getLogger(), flowFile);
        CloudBlobContainer container = blobClient.getContainerReference(containerName);
        final Map<String, String> attributes = new HashMap<>();
        final CloudBlob blob = container.getBlockBlobReference(blobPath);
        // TODO - we may be able to do fancier things with ranges and
        // distribution of download over threads, investigate
        flowFile = session.write(flowFile, os -> {
            try {
                blob.download(os);
            } catch (StorageException e) {
                storedException.set(e);
                throw new IOException(e);
            }
        });
        long length = blob.getProperties().getLength();
        attributes.put("azure.length", String.valueOf(length));
        if (!attributes.isEmpty()) {
            flowFile = session.putAllAttributes(flowFile, attributes);
        }
        session.transfer(flowFile, REL_SUCCESS);
        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
        session.getProvenanceReporter().fetch(flowFile, blob.getSnapshotQualifiedUri().toString(), transferMillis);
    } catch (IllegalArgumentException | URISyntaxException | StorageException | ProcessException e) {
        if (e instanceof ProcessException && storedException.get() == null) {
            throw (ProcessException) e;
        } else {
            Exception failureException = Optional.ofNullable(storedException.get()).orElse(e);
            getLogger().error("Failure to fetch Azure blob {}", new Object[] { blobPath }, failureException);
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_FAILURE);
        }
    }
}
Also used : CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) FlowFile(org.apache.nifi.flowfile.FlowFile) URISyntaxException(java.net.URISyntaxException) CloudBlobClient(com.microsoft.azure.storage.blob.CloudBlobClient) ProcessContext(org.apache.nifi.processor.ProcessContext) IOException(java.io.IOException) HashMap(java.util.HashMap) ProcessSession(org.apache.nifi.processor.ProcessSession) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) SeeAlso(org.apache.nifi.annotation.documentation.SeeAlso) AtomicReference(java.util.concurrent.atomic.AtomicReference) ProcessException(org.apache.nifi.processor.exception.ProcessException) TimeUnit(java.util.concurrent.TimeUnit) StorageException(com.microsoft.azure.storage.StorageException) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) CloudBlobContainer(com.microsoft.azure.storage.blob.CloudBlobContainer) Map(java.util.Map) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) Optional(java.util.Optional) Tags(org.apache.nifi.annotation.documentation.Tags) AbstractAzureBlobProcessor(org.apache.nifi.processors.azure.AbstractAzureBlobProcessor) AzureStorageUtils(org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils) CloudBlob(com.microsoft.azure.storage.blob.CloudBlob)
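
Both Azure examples (26 and 27) lean on the same idiom for getting a checked SDK exception out of a session callback that only permits IOException: store it in an AtomicReference, rethrow it wrapped in an IOException, then prefer the stored cause when routing to failure. The following is a minimal, self-contained sketch of that idiom; StorageLikeException, WriteCallback, and runCallback are illustrative stand-ins rather than NiFi or Azure APIs, and NiFi's session.write additionally wraps the callback's IOException in a ProcessException, which this simplified sketch omits.

import java.io.IOException;
import java.io.OutputStream;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;

public class ExceptionTunnelSketch {

    // Stand-in for a checked SDK exception such as StorageException
    static class StorageLikeException extends Exception {
        StorageLikeException(String msg) { super(msg); }
    }

    // Stand-in for a callback-style API that only allows IOException
    interface WriteCallback {
        void process(OutputStream os) throws IOException;
    }

    static void runCallback(WriteCallback cb, OutputStream os) throws IOException {
        cb.process(os);
    }

    public static void main(String[] args) {
        AtomicReference<Exception> storedException = new AtomicReference<>();
        try {
            runCallback(os -> {
                try {
                    throw new StorageLikeException("simulated SDK failure");
                } catch (StorageLikeException e) {
                    // Remember the real cause, then tunnel it out as an IOException
                    storedException.set(e);
                    throw new IOException(e);
                }
            }, OutputStream.nullOutputStream());
        } catch (IOException e) {
            // Prefer the stored SDK exception over the wrapper, as the processors do
            Exception failureException = Optional.ofNullable(storedException.get()).orElse(e);
            System.err.println("Routing to failure due to: " + failureException);
        }
    }
}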

Example 27 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class PutAzureBlobStorage, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final long startNanos = System.nanoTime();
    String containerName = context.getProperty(AzureStorageUtils.CONTAINER).evaluateAttributeExpressions(flowFile).getValue();
    String blobPath = context.getProperty(BLOB).evaluateAttributeExpressions(flowFile).getValue();
    AtomicReference<Exception> storedException = new AtomicReference<>();
    try {
        CloudBlobClient blobClient = AzureStorageUtils.createCloudBlobClient(context, getLogger(), flowFile);
        CloudBlobContainer container = blobClient.getContainerReference(containerName);
        CloudBlob blob = container.getBlockBlobReference(blobPath);
        final Map<String, String> attributes = new HashMap<>();
        long length = flowFile.getSize();
        session.read(flowFile, rawIn -> {
            InputStream in = rawIn;
            if (!(in instanceof BufferedInputStream)) {
                // do not double-wrap
                in = new BufferedInputStream(rawIn);
            }
            try {
                blob.upload(in, length);
                BlobProperties properties = blob.getProperties();
                attributes.put("azure.container", containerName);
                attributes.put("azure.primaryUri", blob.getSnapshotQualifiedUri().toString());
                attributes.put("azure.etag", properties.getEtag());
                attributes.put("azure.length", String.valueOf(length));
                attributes.put("azure.timestamp", String.valueOf(properties.getLastModified()));
            } catch (StorageException | URISyntaxException e) {
                storedException.set(e);
                throw new IOException(e);
            }
        });
        if (!attributes.isEmpty()) {
            flowFile = session.putAllAttributes(flowFile, attributes);
        }
        session.transfer(flowFile, REL_SUCCESS);
        final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
        session.getProvenanceReporter().send(flowFile, blob.getSnapshotQualifiedUri().toString(), transferMillis);
    } catch (IllegalArgumentException | URISyntaxException | StorageException | ProcessException e) {
        if (e instanceof ProcessException && storedException.get() == null) {
            throw (ProcessException) e;
        } else {
            Exception failureException = Optional.ofNullable(storedException.get()).orElse(e);
            getLogger().error("Failed to put Azure blob {}", new Object[] { blobPath }, failureException);
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_FAILURE);
        }
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) CloudBlobClient(com.microsoft.azure.storage.blob.CloudBlobClient) HashMap(java.util.HashMap) BufferedInputStream(java.io.BufferedInputStream) InputStream(java.io.InputStream) AtomicReference(java.util.concurrent.atomic.AtomicReference) URISyntaxException(java.net.URISyntaxException) IOException(java.io.IOException) ProcessException(org.apache.nifi.processor.exception.ProcessException) StorageException(com.microsoft.azure.storage.StorageException) CloudBlob(com.microsoft.azure.storage.blob.CloudBlob) BlobProperties(com.microsoft.azure.storage.blob.BlobProperties) CloudBlobContainer(com.microsoft.azure.storage.blob.CloudBlobContainer)
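
The upload path wraps the raw stream in a BufferedInputStream only when it is not already buffered, avoiding a pointless second buffer. A tiny sketch of that guard as a reusable helper; the name ensureBuffered is mine, not NiFi's.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.InputStream;

public class BufferingSketch {

    // Wrap in a BufferedInputStream only if not already buffered (illustrative helper)
    static InputStream ensureBuffered(InputStream in) {
        return (in instanceof BufferedInputStream) ? in : new BufferedInputStream(in);
    }

    public static void main(String[] args) {
        InputStream raw = new ByteArrayInputStream("payload".getBytes());
        InputStream once = ensureBuffered(raw);   // wrapped
        InputStream twice = ensureBuffered(once); // unchanged: no double-wrap
        System.out.println(once == twice);        // prints true
    }
}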

Example 28 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class ConvertAvroToJSON, method onTrigger.

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String containerOption = context.getProperty(CONTAINER_OPTIONS).getValue();
    final boolean useContainer = containerOption.equals(CONTAINER_ARRAY);
    // Wrap a single record (including the zero-record case) only when a container is being used
    final boolean wrapSingleRecord = context.getProperty(WRAP_SINGLE_RECORD).asBoolean() && useContainer;
    final String stringSchema = context.getProperty(SCHEMA).getValue();
    final boolean schemaLess = stringSchema != null;
    try {
        flowFile = session.write(flowFile, new StreamCallback() {

            @Override
            public void process(final InputStream rawIn, final OutputStream rawOut) throws IOException {
                final GenericData genericData = GenericData.get();
                if (schemaLess) {
                    if (schema == null) {
                        schema = new Schema.Parser().parse(stringSchema);
                    }
                    try (final InputStream in = new BufferedInputStream(rawIn);
                        final OutputStream out = new BufferedOutputStream(rawOut)) {
                        final DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
                        final BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(in, null);
                        final GenericRecord record = reader.read(null, decoder);
                        // Schemaless input is a single record, so both useContainer and wrapSingleRecord must be true before we wrap it in an array
                        if (useContainer && wrapSingleRecord) {
                            out.write('[');
                        }
                        final byte[] outputBytes = (record == null) ? EMPTY_JSON_OBJECT : genericData.toString(record).getBytes(StandardCharsets.UTF_8);
                        out.write(outputBytes);
                        if (useContainer && wrapSingleRecord) {
                            out.write(']');
                        }
                    }
                } else {
                    try (final InputStream in = new BufferedInputStream(rawIn);
                        final OutputStream out = new BufferedOutputStream(rawOut);
                        final DataFileStream<GenericRecord> reader = new DataFileStream<>(in, new GenericDatumReader<GenericRecord>())) {
                        int recordCount = 0;
                        GenericRecord currRecord = null;
                        if (reader.hasNext()) {
                            currRecord = reader.next();
                            recordCount++;
                        }
                        // Open the array if more records follow in a container, or if configured to wrap a single record
                        if (reader.hasNext() && useContainer || wrapSingleRecord) {
                            out.write('[');
                        }
                        // Determine the initial output record; write an empty JSON object if the Avro record set is empty
                        final byte[] outputBytes = (currRecord == null) ? EMPTY_JSON_OBJECT : genericData.toString(currRecord).getBytes(StandardCharsets.UTF_8);
                        out.write(outputBytes);
                        while (reader.hasNext()) {
                            if (useContainer) {
                                out.write(',');
                            } else {
                                out.write('\n');
                            }
                            currRecord = reader.next(currRecord);
                            out.write(genericData.toString(currRecord).getBytes(StandardCharsets.UTF_8));
                            recordCount++;
                        }
                        // Close the array if multiple records were written in a container, or if configured to wrap a single record
                        if (recordCount > 1 && useContainer || wrapSingleRecord) {
                            out.write(']');
                        }
                    }
                }
            }
        });
    } catch (final ProcessException pe) {
        getLogger().error("Failed to convert {} from Avro to JSON due to {}; transferring to failure", new Object[] { flowFile, pe });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json");
    session.transfer(flowFile, REL_SUCCESS);
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) BufferedInputStream(java.io.BufferedInputStream) InputStream(java.io.InputStream) GenericDatumReader(org.apache.avro.generic.GenericDatumReader) BufferedOutputStream(java.io.BufferedOutputStream) OutputStream(java.io.OutputStream) DataFileStream(org.apache.avro.file.DataFileStream) GenericData(org.apache.avro.generic.GenericData) StreamCallback(org.apache.nifi.processor.io.StreamCallback) BinaryDecoder(org.apache.avro.io.BinaryDecoder) ProcessException(org.apache.nifi.processor.exception.ProcessException) GenericRecord(org.apache.avro.generic.GenericRecord)
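
The bracket and separator logic above is easy to misread because wrapSingleRecord is pre-masked by useContainer at the top of onTrigger. This sketch isolates just the framing decisions over plain strings, with my own method and variable names; "{}" stands in for EMPTY_JSON_OBJECT, and wrapSingleRecord is assumed to already imply useContainer, as in the processor.

import java.util.Iterator;
import java.util.List;

public class JsonFramingSketch {

    // Frame records as ConvertAvroToJSON does: '[' ... ',' ... ']' in a container,
    // newline-separated otherwise
    static String frame(List<String> records, boolean useContainer, boolean wrapSingleRecord) {
        StringBuilder out = new StringBuilder();
        Iterator<String> it = records.iterator();
        String curr = it.hasNext() ? it.next() : null;
        int count = (curr == null) ? 0 : 1;
        if (it.hasNext() && useContainer || wrapSingleRecord) {
            out.append('[');
        }
        out.append(curr == null ? "{}" : curr);
        while (it.hasNext()) {
            out.append(useContainer ? ',' : '\n');
            out.append(it.next());
            count++;
        }
        if (count > 1 && useContainer || wrapSingleRecord) {
            out.append(']');
        }
        return out.toString();
    }

    public static void main(String[] args) {
        System.out.println(frame(List.of("{\"a\":1}"), true, true));                // [{"a":1}]
        System.out.println(frame(List.of("{\"a\":1}", "{\"b\":2}"), true, false));  // [{"a":1},{"b":2}]
        System.out.println(frame(List.of("{\"a\":1}", "{\"b\":2}"), false, false)); // one record per line
    }
}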

Example 29 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class PutCloudWatchMetric, method onTrigger.

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    MetricDatum datum = new MetricDatum();
    try {
        datum.setMetricName(context.getProperty(METRIC_NAME).evaluateAttributeExpressions(flowFile).getValue());
        final String valueString = context.getProperty(VALUE).evaluateAttributeExpressions(flowFile).getValue();
        if (valueString != null) {
            datum.setValue(Double.parseDouble(valueString));
        } else {
            StatisticSet statisticSet = new StatisticSet();
            statisticSet.setMaximum(Double.parseDouble(context.getProperty(MAXIMUM).evaluateAttributeExpressions(flowFile).getValue()));
            statisticSet.setMinimum(Double.parseDouble(context.getProperty(MINIMUM).evaluateAttributeExpressions(flowFile).getValue()));
            statisticSet.setSampleCount(Double.parseDouble(context.getProperty(SAMPLECOUNT).evaluateAttributeExpressions(flowFile).getValue()));
            statisticSet.setSum(Double.parseDouble(context.getProperty(SUM).evaluateAttributeExpressions(flowFile).getValue()));
            datum.setStatisticValues(statisticSet);
        }
        final String timestamp = context.getProperty(TIMESTAMP).evaluateAttributeExpressions(flowFile).getValue();
        if (timestamp != null) {
            datum.setTimestamp(new Date(Long.parseLong(timestamp)));
        }
        final String unit = context.getProperty(UNIT).evaluateAttributeExpressions(flowFile).getValue();
        if (unit != null) {
            datum.setUnit(unit);
        }
        // add dynamic properties as dimensions
        if (!dynamicPropertyNames.isEmpty()) {
            final List<Dimension> dimensions = new ArrayList<>(dynamicPropertyNames.size());
            for (String propertyName : dynamicPropertyNames) {
                final String propertyValue = context.getProperty(propertyName).evaluateAttributeExpressions(flowFile).getValue();
                if (StringUtils.isNotBlank(propertyValue)) {
                    dimensions.add(new Dimension().withName(propertyName).withValue(propertyValue));
                }
            }
            datum.withDimensions(dimensions);
        }
        final PutMetricDataRequest metricDataRequest = new PutMetricDataRequest().withNamespace(context.getProperty(NAMESPACE).evaluateAttributeExpressions(flowFile).getValue()).withMetricData(datum);
        putMetricData(metricDataRequest);
        session.transfer(flowFile, REL_SUCCESS);
        getLogger().info("Successfully published cloudwatch metric for {}", new Object[] { flowFile });
    } catch (final Exception e) {
        getLogger().error("Failed to publish cloudwatch metric for {} due to {}", new Object[] { flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) PutMetricDataRequest(com.amazonaws.services.cloudwatch.model.PutMetricDataRequest) ArrayList(java.util.ArrayList) MetricDatum(com.amazonaws.services.cloudwatch.model.MetricDatum) Dimension(com.amazonaws.services.cloudwatch.model.Dimension) StatisticSet(com.amazonaws.services.cloudwatch.model.StatisticSet) Date(java.util.Date) ProcessException(org.apache.nifi.processor.exception.ProcessException) AmazonClientException(com.amazonaws.AmazonClientException)
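
The CloudWatch example publishes either a single Value or a pre-aggregated StatisticSet on a datum, never both on the same datum. Below is a minimal hedged sketch of constructing each variant with the AWS SDK v1 classes the processor uses; the metric name, namespace, dimension, and numbers are made up for illustration.

import com.amazonaws.services.cloudwatch.model.Dimension;
import com.amazonaws.services.cloudwatch.model.MetricDatum;
import com.amazonaws.services.cloudwatch.model.PutMetricDataRequest;
import com.amazonaws.services.cloudwatch.model.StandardUnit;
import com.amazonaws.services.cloudwatch.model.StatisticSet;

public class CloudWatchDatumSketch {

    public static void main(String[] args) {
        // Variant 1: a single observed value with a dimension
        MetricDatum single = new MetricDatum()
                .withMetricName("queue.depth")
                .withValue(42.0)
                .withUnit(StandardUnit.Count)
                .withDimensions(new Dimension().withName("env").withValue("dev"));

        // Variant 2: a pre-aggregated statistic set (min/max/sum/sample count),
        // the branch the processor takes when VALUE is not set
        StatisticSet stats = new StatisticSet()
                .withMinimum(1.0)
                .withMaximum(9.0)
                .withSum(25.0)
                .withSampleCount(5.0);
        MetricDatum aggregated = new MetricDatum()
                .withMetricName("queue.depth")
                .withStatisticValues(stats);

        // Both datums can ride in one request under a namespace
        PutMetricDataRequest request = new PutMetricDataRequest()
                .withNamespace("example/namespace")
                .withMetricData(single, aggregated);
        System.out.println(request);
    }
}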

Example 30 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class PutDruidRecord, method processFlowFile.

/**
 * Parses the record(s), converts each to a Map, and sends via Tranquility to the Druid Indexing Service
 *
 * @param context The process context
 * @param session The process session
 */
@SuppressWarnings("unchecked")
private void processFlowFile(ProcessContext context, final ProcessSession session) {
    final ComponentLog log = getLogger();
    // Get handle on Druid Tranquility session
    DruidTranquilityService tranquilityController = context.getProperty(DRUID_TRANQUILITY_SERVICE).asControllerService(DruidTranquilityService.class);
    Tranquilizer<Map<String, Object>> tranquilizer = tranquilityController.getTranquilizer();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    // Create the outgoing flow files and output streams
    FlowFile droppedFlowFile = session.create(flowFile);
    final AtomicInteger droppedFlowFileCount = new AtomicInteger(0);
    FlowFile failedFlowFile = session.create(flowFile);
    final AtomicInteger failedFlowFileCount = new AtomicInteger(0);
    FlowFile successfulFlowFile = session.create(flowFile);
    final AtomicInteger successfulFlowFileCount = new AtomicInteger(0);
    final AtomicInteger recordWriteErrors = new AtomicInteger(0);
    int recordCount = 0;
    final OutputStream droppedOutputStream = session.write(droppedFlowFile);
    final RecordSetWriter droppedRecordWriter;
    final OutputStream failedOutputStream = session.write(failedFlowFile);
    final RecordSetWriter failedRecordWriter;
    final OutputStream successfulOutputStream = session.write(successfulFlowFile);
    final RecordSetWriter successfulRecordWriter;
    try (final InputStream in = session.read(flowFile)) {
        final RecordReaderFactory recordParserFactory = context.getProperty(RECORD_READER_FACTORY).asControllerService(RecordReaderFactory.class);
        final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER_FACTORY).asControllerService(RecordSetWriterFactory.class);
        final Map<String, String> attributes = flowFile.getAttributes();
        final RecordReader reader = recordParserFactory.createRecordReader(flowFile, in, getLogger());
        final RecordSchema outSchema = writerFactory.getSchema(attributes, reader.getSchema());
        droppedRecordWriter = writerFactory.createWriter(log, outSchema, droppedOutputStream);
        droppedRecordWriter.beginRecordSet();
        failedRecordWriter = writerFactory.createWriter(log, outSchema, failedOutputStream);
        failedRecordWriter.beginRecordSet();
        successfulRecordWriter = writerFactory.createWriter(log, outSchema, successfulOutputStream);
        successfulRecordWriter.beginRecordSet();
        Record r;
        while ((r = reader.nextRecord()) != null) {
            final Record record = r;
            recordCount++;
            // Convert each Record to HashMap and send to Druid
            Map<String, Object> contentMap = (Map<String, Object>) DataTypeUtils.convertRecordFieldtoObject(r, RecordFieldType.RECORD.getRecordDataType(r.getSchema()));
            log.debug("Tranquilizer Status: {}", new Object[] { tranquilizer.status().toString() });
            // Send data element to Druid asynchronously
            Future<BoxedUnit> future = tranquilizer.send(contentMap);
            log.debug("Sent Payload to Druid: {}", new Object[] { contentMap });
            // Wait for Druid to call back with status
            future.addEventListener(new FutureEventListener<Object>() {

                @Override
                public void onFailure(Throwable cause) {
                    if (cause instanceof MessageDroppedException) {
                        // This happens when event timestamp targets a Druid Indexing task that has closed (Late Arriving Data)
                        log.debug("Record Dropped due to MessageDroppedException: {}, transferring record to dropped.", new Object[] { cause.getMessage() }, cause);
                        try {
                            synchronized (droppedRecordWriter) {
                                droppedRecordWriter.write(record);
                                droppedRecordWriter.flush();
                                droppedFlowFileCount.incrementAndGet();
                            }
                        } catch (final IOException ioe) {
                            log.error("Error transferring record to dropped, this may result in data loss.", new Object[] { ioe.getMessage() }, ioe);
                            recordWriteErrors.incrementAndGet();
                        }
                    } else {
                        log.error("FlowFile Processing Failed due to: {}", new Object[] { cause.getMessage() }, cause);
                        try {
                            synchronized (failedRecordWriter) {
                                failedRecordWriter.write(record);
                                failedRecordWriter.flush();
                                failedFlowFileCount.incrementAndGet();
                            }
                        } catch (final IOException ioe) {
                            log.error("Error transferring record to failure, this may result in data loss.", new Object[] { ioe.getMessage() }, ioe);
                            recordWriteErrors.incrementAndGet();
                        }
                    }
                }

                @Override
                public void onSuccess(Object value) {
                    log.debug(" FlowFile Processing Success: {}", new Object[] { value.toString() });
                    try {
                        synchronized (successfulRecordWriter) {
                            successfulRecordWriter.write(record);
                            successfulRecordWriter.flush();
                            successfulFlowFileCount.incrementAndGet();
                        }
                    } catch (final IOException ioe) {
                        log.error("Error transferring record to success, this may result in data loss. " + "However the record was successfully processed by Druid", new Object[] { ioe.getMessage() }, ioe);
                        recordWriteErrors.incrementAndGet();
                    }
                }
            });
        }
    } catch (IOException | SchemaNotFoundException | MalformedRecordException e) {
        log.error("FlowFile Processing Failed due to: {}", new Object[] { e.getMessage() }, e);
        // Route the original FlowFile to failure and discard the three per-relationship outputs
        flowFile = session.putAttribute(flowFile, RECORD_COUNT, Integer.toString(recordCount));
        session.transfer(flowFile, REL_FAILURE);
        try {
            droppedOutputStream.close();
            session.remove(droppedFlowFile);
        } catch (IOException ioe) {
            log.error("Error closing output stream for FlowFile with dropped records.", ioe);
        }
        try {
            failedOutputStream.close();
            session.remove(failedFlowFile);
        } catch (IOException ioe) {
            log.error("Error closing output stream for FlowFile with failed records.", ioe);
        }
        try {
            successfulOutputStream.close();
            session.remove(successfulFlowFile);
        } catch (IOException ioe) {
            log.error("Error closing output stream for FlowFile with successful records.", ioe);
        }
        session.commit();
        return;
    }
    if (recordCount == 0) {
        // Send original (empty) flow file to success, remove the rest
        flowFile = session.putAttribute(flowFile, RECORD_COUNT, "0");
        session.transfer(flowFile, REL_SUCCESS);
        try {
            droppedOutputStream.close();
            session.remove(droppedFlowFile);
        } catch (IOException ioe) {
            log.error("Error closing output stream for FlowFile with dropped records.", ioe);
        }
        try {
            failedOutputStream.close();
            session.remove(failedFlowFile);
        } catch (IOException ioe) {
            log.error("Error closing output stream for FlowFile with failed records.", ioe);
        }
        try {
            successfulOutputStream.close();
            session.remove(successfulFlowFile);
        } catch (IOException ioe) {
            log.error("Error closing output stream for FlowFile with successful records.", ioe);
        }
    } else {
        // Wait for all the records to finish processing
        while (recordCount != (droppedFlowFileCount.get() + failedFlowFileCount.get() + successfulFlowFileCount.get() + recordWriteErrors.get())) {
            Thread.yield();
        }
        try {
            droppedRecordWriter.finishRecordSet();
            droppedRecordWriter.close();
        } catch (IOException ioe) {
            log.error("Error closing FlowFile with dropped records: {}", new Object[] { ioe.getMessage() }, ioe);
            session.rollback();
            throw new ProcessException(ioe);
        }
        if (droppedFlowFileCount.get() > 0) {
            droppedFlowFile = session.putAttribute(droppedFlowFile, RECORD_COUNT, Integer.toString(droppedFlowFileCount.get()));
            session.transfer(droppedFlowFile, REL_DROPPED);
        } else {
            session.remove(droppedFlowFile);
        }
        try {
            failedRecordWriter.finishRecordSet();
            failedRecordWriter.close();
        } catch (IOException ioe) {
            log.error("Error closing FlowFile with failed records: {}", new Object[] { ioe.getMessage() }, ioe);
            session.rollback();
            throw new ProcessException(ioe);
        }
        if (failedFlowFileCount.get() > 0) {
            failedFlowFile = session.putAttribute(failedFlowFile, RECORD_COUNT, Integer.toString(failedFlowFileCount.get()));
            session.transfer(failedFlowFile, REL_FAILURE);
        } else {
            session.remove(failedFlowFile);
        }
        try {
            successfulRecordWriter.finishRecordSet();
            successfulRecordWriter.close();
        } catch (IOException ioe) {
            log.error("Error closing FlowFile with successful records: {}", new Object[] { ioe.getMessage() }, ioe);
            session.rollback();
            throw new ProcessException(ioe);
        }
        if (successfulFlowFileCount.get() > 0) {
            successfulFlowFile = session.putAttribute(successfulFlowFile, RECORD_COUNT, Integer.toString(successfulFlowFileCount.get()));
            session.transfer(successfulFlowFile, REL_SUCCESS);
            session.getProvenanceReporter().send(successfulFlowFile, tranquilityController.getTransitUri());
        } else {
            session.remove(successfulFlowFile);
        }
        session.remove(flowFile);
    }
    session.commit();
}
Also used : MessageDroppedException(com.metamx.tranquility.tranquilizer.MessageDroppedException) OutputStream(java.io.OutputStream) RecordReader(org.apache.nifi.serialization.RecordReader) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) DruidTranquilityService(org.apache.nifi.controller.api.druid.DruidTranquilityService) Record(org.apache.nifi.serialization.record.Record) BoxedUnit(scala.runtime.BoxedUnit) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) ProcessException(org.apache.nifi.processor.exception.ProcessException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) Map(java.util.Map)
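
PutDruidRecord waits for its asynchronous Tranquility callbacks by spinning on Thread.yield() until the success, failure, dropped, and write-error counters add up to recordCount. Here is a self-contained sketch of that rendezvous with made-up names, using CompletableFuture in place of Tranquility's Future; production code would more commonly block on a CountDownLatch than busy-wait.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;

public class AsyncCountRendezvousSketch {

    public static void main(String[] args) {
        final int recordCount = 100;
        final AtomicInteger succeeded = new AtomicInteger(0);
        final AtomicInteger failed = new AtomicInteger(0);

        for (int i = 0; i < recordCount; i++) {
            // Stand-in for tranquilizer.send(...): the callback fires on another thread
            CompletableFuture.runAsync(() -> {
                if (ThreadLocalRandom.current().nextBoolean()) {
                    succeeded.incrementAndGet();
                } else {
                    failed.incrementAndGet();
                }
            });
        }

        // Busy-wait until every callback has been counted, as the processor does
        while (recordCount != (succeeded.get() + failed.get())) {
            Thread.yield();
        }
        System.out.println("succeeded=" + succeeded.get() + " failed=" + failed.get());
    }
}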

Aggregations

ProcessException (org.apache.nifi.processor.exception.ProcessException): 274
FlowFile (org.apache.nifi.flowfile.FlowFile): 169
IOException (java.io.IOException): 162
InputStream (java.io.InputStream): 79
HashMap (java.util.HashMap): 78
ComponentLog (org.apache.nifi.logging.ComponentLog): 78
OutputStream (java.io.OutputStream): 62
ArrayList (java.util.ArrayList): 55
Map (java.util.Map): 52
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 39
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 38
StopWatch (org.apache.nifi.util.StopWatch): 37
HashSet (java.util.HashSet): 36
ProcessSession (org.apache.nifi.processor.ProcessSession): 35
Relationship (org.apache.nifi.processor.Relationship): 33
List (java.util.List): 31
OutputStreamCallback (org.apache.nifi.processor.io.OutputStreamCallback): 29
AtomicReference (java.util.concurrent.atomic.AtomicReference): 28
Set (java.util.Set): 26
ProcessContext (org.apache.nifi.processor.ProcessContext): 25