Example 21 with Relationship

use of org.apache.nifi.processor.Relationship in project nifi by apache.

the class ScanAttribute method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final List<FlowFile> flowFiles = session.get(50);
    if (flowFiles.isEmpty()) {
        return;
    }
    final ComponentLog logger = getLogger();
    try {
        if (fileWatcher.checkAndReset()) {
            this.dictionaryTerms = createDictionary(context);
        }
    } catch (final IOException e) {
        logger.error("Unable to reload dictionary due to {}", e);
    }
    final boolean matchAll = context.getProperty(MATCHING_CRITERIA).getValue().equals(MATCH_CRITERIA_ALL);
    for (final FlowFile flowFile : flowFiles) {
        final boolean matched = matchAll ? allMatch(flowFile, attributePattern, dictionaryTerms) : anyMatch(flowFile, attributePattern, dictionaryTerms);
        final Relationship relationship = matched ? REL_MATCHED : REL_UNMATCHED;
        session.getProvenanceReporter().route(flowFile, relationship);
        session.transfer(flowFile, relationship);
        logger.info("Transferred {} to {}", new Object[] { flowFile, relationship });
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) Relationship(org.apache.nifi.processor.Relationship) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog)
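
The REL_MATCHED and REL_UNMATCHED constants routed to above follow NiFi's usual pattern of declaring each relationship once as a static field built with Relationship.Builder. A minimal sketch of such declarations (the descriptions are illustrative, not copied from ScanAttribute):

// Declared as static fields on the processor class; descriptions are illustrative.
public static final Relationship REL_MATCHED = new Relationship.Builder()
    .name("matched")
    .description("FlowFiles whose attributes match the configured dictionary")
    .build();

public static final Relationship REL_UNMATCHED = new Relationship.Builder()
    .name("unmatched")
    .description("FlowFiles whose attributes do not match the configured dictionary")
    .build();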

Example 22 with Relationship

use of org.apache.nifi.processor.Relationship in project nifi by apache.

the class ListenUDPRecord method handleParseFailure.

private void handleParseFailure(final StandardEvent event, final ProcessSession session, final Exception cause, final String message) {
    // If we are unable to parse the data, we need to transfer it to 'parse failure' relationship
    final Map<String, String> attributes = getAttributes(event.getSender());
    FlowFile failureFlowFile = session.create();
    failureFlowFile = session.write(failureFlowFile, out -> out.write(event.getData()));
    failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);
    final String transitUri = getTransitUri(event.getSender());
    session.getProvenanceReporter().receive(failureFlowFile, transitUri);
    session.transfer(failureFlowFile, REL_PARSE_FAILURE);
    if (cause == null) {
        getLogger().error(message);
    } else {
        getLogger().error(message, cause);
    }
    session.adjustCounter("Parse Failures", 1, false);
}
Also used : StandardValidators(org.apache.nifi.processor.util.StandardValidators) Arrays(java.util.Arrays) StringUtils(org.apache.commons.lang3.StringUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ByteBuffer(java.nio.ByteBuffer) InetAddress(java.net.InetAddress) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) ByteArrayInputStream(java.io.ByteArrayInputStream) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) RecordReader(org.apache.nifi.serialization.RecordReader) Map(java.util.Map) EventFactory(org.apache.nifi.processor.util.listen.event.EventFactory) FlowFile(org.apache.nifi.flowfile.FlowFile) WriteResult(org.apache.nifi.serialization.WriteResult) Collection(java.util.Collection) BlockingQueue(java.util.concurrent.BlockingQueue) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) IOUtils(org.apache.commons.io.IOUtils) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) List(java.util.List) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) Tags(org.apache.nifi.annotation.documentation.Tags) DataUnit(org.apache.nifi.processor.DataUnit) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ValidationContext(org.apache.nifi.components.ValidationContext) DatagramChannelDispatcher(org.apache.nifi.processor.util.listen.dispatcher.DatagramChannelDispatcher) HashMap(java.util.HashMap) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) Relationship(org.apache.nifi.processor.Relationship) StandardEvent(org.apache.nifi.processor.util.listen.event.StandardEvent) ValidationResult(org.apache.nifi.components.ValidationResult) Record(org.apache.nifi.serialization.record.Record) OutputStream(java.io.OutputStream) Validator(org.apache.nifi.components.Validator) ChannelDispatcher(org.apache.nifi.processor.util.listen.dispatcher.ChannelDispatcher) ProcessContext(org.apache.nifi.processor.ProcessContext) ProcessSession(org.apache.nifi.processor.ProcessSession) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) IOException(java.io.IOException) AbstractListenEventProcessor(org.apache.nifi.processor.util.listen.AbstractListenEventProcessor) UnknownHostException(java.net.UnknownHostException) TimeUnit(java.util.concurrent.TimeUnit) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) SupportsBatching(org.apache.nifi.annotation.behavior.SupportsBatching) StandardEventFactory(org.apache.nifi.processor.util.listen.event.StandardEventFactory) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) Collections(java.util.Collections) InputStream(java.io.InputStream)
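
Routing bad input to a dedicated parse-failure relationship like this is usually verified with NiFi's TestRunner. A minimal sketch, using a hypothetical processor class and relationship constant (ListenUDPRecord itself listens on a socket, so a plain enqueue-and-run test like this would not apply to it directly):

import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

// MyRecordProcessor and its REL_PARSE_FAILURE constant are hypothetical stand-ins.
final TestRunner runner = TestRunners.newTestRunner(MyRecordProcessor.class);
runner.enqueue("not-a-valid-record".getBytes());
runner.run();
// Assert the malformed input was routed to the parse-failure relationship.
runner.assertAllFlowFilesTransferred(MyRecordProcessor.REL_PARSE_FAILURE, 1);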

Example 23 with Relationship

use of org.apache.nifi.processor.Relationship in project nifi by apache.

the class Notify method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final PropertyValue signalIdProperty = context.getProperty(RELEASE_SIGNAL_IDENTIFIER);
    final PropertyValue counterNameProperty = context.getProperty(SIGNAL_COUNTER_NAME);
    final PropertyValue deltaProperty = context.getProperty(SIGNAL_COUNTER_DELTA);
    final String attributeCacheRegex = context.getProperty(ATTRIBUTE_CACHE_REGEX).getValue();
    final Integer bufferCount = context.getProperty(SIGNAL_BUFFER_COUNT).asInteger();
    // the cache client used to interact with the distributed cache.
    final AtomicDistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(AtomicDistributedMapCacheClient.class);
    final WaitNotifyProtocol protocol = new WaitNotifyProtocol(cache);
    final Map<String, SignalBuffer> signalBuffers = new HashMap<>();
    for (int i = 0; i < bufferCount; i++) {
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            break;
        }
        // Signal id is computed from attribute 'RELEASE_SIGNAL_IDENTIFIER' with expression language support
        final String signalId = signalIdProperty.evaluateAttributeExpressions(flowFile).getValue();
        // if the computed value is blank (null or empty), transfer the FlowFile to the failure relationship
        if (StringUtils.isBlank(signalId)) {
            logger.error("FlowFile {} has no attribute for given Release Signal Identifier", new Object[] { flowFile });
            // set 'notified' attribute
            session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
            continue;
        }
        String counterName = counterNameProperty.evaluateAttributeExpressions(flowFile).getValue();
        if (StringUtils.isEmpty(counterName)) {
            counterName = WaitNotifyProtocol.DEFAULT_COUNT_NAME;
        }
        int delta = 1;
        if (deltaProperty.isSet()) {
            final String deltaStr = deltaProperty.evaluateAttributeExpressions(flowFile).getValue();
            try {
                delta = Integer.parseInt(deltaStr);
            } catch (final NumberFormatException e) {
                logger.error("Failed to calculate delta for FlowFile {} due to {}", new Object[] { flowFile, e }, e);
                session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
                continue;
            }
        }
        if (!signalBuffers.containsKey(signalId)) {
            signalBuffers.put(signalId, new SignalBuffer());
        }
        final SignalBuffer signalBuffer = signalBuffers.get(signalId);
        if (StringUtils.isNotEmpty(attributeCacheRegex)) {
            flowFile.getAttributes().entrySet().stream().filter(e -> (!e.getKey().equals("uuid") && e.getKey().matches(attributeCacheRegex))).forEach(e -> signalBuffer.attributesToCache.put(e.getKey(), e.getValue()));
        }
        signalBuffer.incrementDelta(counterName, delta);
        signalBuffer.flowFiles.add(flowFile);
        if (logger.isDebugEnabled()) {
            logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", new Object[] { signalId, counterName, flowFile });
        }
    }
    signalBuffers.forEach((signalId, signalBuffer) -> {
        // In case of an Exception, just throw it so that the processor can retry after yielding for a while.
        try {
            protocol.notify(signalId, signalBuffer.deltas, signalBuffer.attributesToCache);
            signalBuffer.flowFiles.forEach(flowFile -> session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(true)), REL_SUCCESS));
        } catch (IOException e) {
            throw new RuntimeException(String.format("Unable to communicate with cache when processing %s due to %s", signalId, e), e);
        }
    });
}
Also used : StandardValidators(org.apache.nifi.processor.util.StandardValidators) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ResultType(org.apache.nifi.expression.AttributeExpression.ResultType) HashMap(java.util.HashMap) EventDriven(org.apache.nifi.annotation.behavior.EventDriven) ComponentLog(org.apache.nifi.logging.ComponentLog) StringUtils(org.apache.commons.lang3.StringUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) PropertyValue(org.apache.nifi.components.PropertyValue) HashSet(java.util.HashSet) Relationship(org.apache.nifi.processor.Relationship) Map(java.util.Map) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) AtomicDistributedMapCacheClient(org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient) FlowFile(org.apache.nifi.flowfile.FlowFile) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) IOException(java.io.IOException) ProcessSession(org.apache.nifi.processor.ProcessSession) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) SeeAlso(org.apache.nifi.annotation.documentation.SeeAlso) List(java.util.List) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) SupportsBatching(org.apache.nifi.annotation.behavior.SupportsBatching) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Tags(org.apache.nifi.annotation.documentation.Tags) Collections(java.util.Collections)
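
The SignalBuffer used above appears only through its fields and its incrementDelta method, but its shape can be inferred from those usages. A plausible minimal sketch (the actual inner class in Notify may differ):

// Inferred from the usages in onTrigger above; not the actual inner class.
private static class SignalBuffer {

    // Counter name -> accumulated delta, flushed in a single notify() call.
    final Map<String, Integer> deltas = new HashMap<>();

    // Attributes matching ATTRIBUTE_CACHE_REGEX, cached along with the signal.
    final Map<String, String> attributesToCache = new HashMap<>();

    // FlowFiles to transfer to success once the cache update succeeds.
    final List<FlowFile> flowFiles = new ArrayList<>();

    void incrementDelta(final String counterName, final int delta) {
        deltas.merge(counterName, delta, Integer::sum);
    }
}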

Example 24 with Relationship

use of org.apache.nifi.processor.Relationship in project nifi by apache.

the class TestInvokeGroovy method testScriptDefinedRelationship.

/**
 * Tests a script that has a Groovy Processor that reads the first line of text from the FlowFile's content and
 * stores the value in an attribute of the outgoing FlowFile.
 *
 * @throws Exception Any error encountered while testing
 */
@Test
public void testScriptDefinedRelationship() throws Exception {
    InvokeScriptedProcessor processor = new InvokeScriptedProcessor();
    MockProcessContext context = new MockProcessContext(processor);
    MockProcessorInitializationContext initContext = new MockProcessorInitializationContext(processor, context);
    processor.initialize(initContext);
    context.setProperty(scriptingComponent.getScriptingComponentHelper().SCRIPT_ENGINE, "Groovy");
    context.setProperty(ScriptingComponentUtils.SCRIPT_FILE, "target/test/resources/groovy/test_reader.groovy");
    // State Manager is unused, and a null reference is specified
    processor.customValidate(new MockValidationContext(context));
    processor.setup(context);
    Set<Relationship> relationships = processor.getRelationships();
    assertNotNull(relationships);
    assertTrue(relationships.size() > 0);
    boolean found = false;
    for (Relationship relationship : relationships) {
        if (relationship.getName().equals("test")) {
            found = true;
            break;
        }
    }
    assertTrue(found);
}
Also used : MockProcessorInitializationContext(org.apache.nifi.util.MockProcessorInitializationContext) Relationship(org.apache.nifi.processor.Relationship) MockValidationContext(org.apache.nifi.util.MockValidationContext) MockProcessContext(org.apache.nifi.util.MockProcessContext) Test(org.junit.Test)
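
For the assertion to pass, the script at target/test/resources/groovy/test_reader.groovy must expose a relationship named "test" from its getRelationships() implementation. In Java terms, the scripted processor has to do something equivalent to the following sketch (not the script's actual contents; only the "test" name is fixed by the assertion above):

// Equivalent of what the Groovy script must expose; the description is illustrative.
public static final Relationship REL_TEST = new Relationship.Builder()
    .name("test")
    .description("Script-defined relationship")
    .build();

@Override
public Set<Relationship> getRelationships() {
    return Collections.singleton(REL_TEST);
}

The containment check in the test could equally be written as relationships.stream().anyMatch(r -> "test".equals(r.getName())).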

Example 25 with Relationship

use of org.apache.nifi.processor.Relationship in project nifi by apache.

the class AbstractRouteRecord method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final T flowFileContext;
    try {
        flowFileContext = getFlowFileContext(flowFile, context);
    } catch (final Exception e) {
        getLogger().error("Failed to process {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final AtomicInteger numRecords = new AtomicInteger(0);
    final Map<Relationship, Tuple<FlowFile, RecordSetWriter>> writers = new HashMap<>();
    final FlowFile original = flowFile;
    final Map<String, String> originalAttributes = original.getAttributes();
    try {
        session.read(flowFile, new InputStreamCallback() {

            @Override
            public void process(final InputStream in) throws IOException {
                try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
                    final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
                    Record record;
                    while ((record = reader.nextRecord()) != null) {
                        final Set<Relationship> relationships = route(record, writeSchema, original, context, flowFileContext);
                        numRecords.incrementAndGet();
                        for (final Relationship relationship : relationships) {
                            final RecordSetWriter recordSetWriter;
                            Tuple<FlowFile, RecordSetWriter> tuple = writers.get(relationship);
                            if (tuple == null) {
                                FlowFile outFlowFile = session.create(original);
                                final OutputStream out = session.write(outFlowFile);
                                recordSetWriter = writerFactory.createWriter(getLogger(), writeSchema, out);
                                recordSetWriter.beginRecordSet();
                                tuple = new Tuple<>(outFlowFile, recordSetWriter);
                                writers.put(relationship, tuple);
                            } else {
                                recordSetWriter = tuple.getValue();
                            }
                            recordSetWriter.write(record);
                        }
                    }
                } catch (final SchemaNotFoundException | MalformedRecordException e) {
                    throw new ProcessException("Could not parse incoming data", e);
                }
            }
        });
        for (final Map.Entry<Relationship, Tuple<FlowFile, RecordSetWriter>> entry : writers.entrySet()) {
            final Relationship relationship = entry.getKey();
            final Tuple<FlowFile, RecordSetWriter> tuple = entry.getValue();
            final RecordSetWriter writer = tuple.getValue();
            FlowFile childFlowFile = tuple.getKey();
            final WriteResult writeResult = writer.finishRecordSet();
            try {
                writer.close();
            } catch (final IOException ioe) {
                getLogger().warn("Failed to close Writer for {}", new Object[] { childFlowFile });
            }
            final Map<String, String> attributes = new HashMap<>();
            attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
            childFlowFile = session.putAllAttributes(childFlowFile, attributes);
            session.transfer(childFlowFile, relationship);
            session.adjustCounter("Records Processed", writeResult.getRecordCount(), false);
            session.adjustCounter("Records Routed to " + relationship.getName(), writeResult.getRecordCount(), false);
            session.getProvenanceReporter().route(childFlowFile, relationship);
        }
    } catch (final Exception e) {
        getLogger().error("Failed to process {}", new Object[] { flowFile, e });
        for (final Tuple<FlowFile, RecordSetWriter> tuple : writers.values()) {
            try {
                tuple.getValue().close();
            } catch (final Exception e1) {
                getLogger().warn("Failed to close Writer for {}; some resources may not be cleaned up appropriately", new Object[] { tuple.getKey() });
            }
            session.remove(tuple.getKey());
        }
        session.transfer(flowFile, REL_FAILURE);
        return;
    } finally {
        for (final Tuple<FlowFile, RecordSetWriter> tuple : writers.values()) {
            final RecordSetWriter writer = tuple.getValue();
            try {
                writer.close();
            } catch (final Exception e) {
                getLogger().warn("Failed to close Record Writer for {}; some resources may not be properly cleaned up", new Object[] { tuple.getKey(), e });
            }
        }
    }
    if (isRouteOriginal()) {
        flowFile = session.putAttribute(flowFile, "record.count", String.valueOf(numRecords));
        session.transfer(flowFile, REL_ORIGINAL);
    } else {
        session.remove(flowFile);
    }
    getLogger().info("Successfully processed {}, creating {} derivative FlowFiles and processing {} records", new Object[] { flowFile, writers.size(), numRecords });
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) HashMap(java.util.HashMap) RecordReader(org.apache.nifi.serialization.RecordReader) OutputStream(java.io.OutputStream) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) Record(org.apache.nifi.serialization.record.Record) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) IOException(java.io.IOException) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) ProcessException(org.apache.nifi.processor.exception.ProcessException) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) WriteResult(org.apache.nifi.serialization.WriteResult) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Relationship(org.apache.nifi.processor.Relationship) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) Map(java.util.Map) Tuple(org.apache.nifi.util.Tuple)
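
Subclasses of AbstractRouteRecord drive all of the above through the abstract route(...) method, which returns the set of relationships each record belongs to. A minimal hypothetical implementation that routes on a single record field (the field name, the relationship constants, and the String context type are illustrative; the class's other abstract methods are omitted):

// Inside a hypothetical subclass extending AbstractRouteRecord<String>.
@Override
protected Set<Relationship> route(final Record record, final RecordSchema writeSchema,
        final FlowFile flowFile, final ProcessContext context, final String flowFileContext) {
    // Route each record by the value of its "status" field.
    final String status = record.getAsString("status");
    return "ok".equals(status)
        ? Collections.singleton(REL_VALID)
        : Collections.singleton(REL_INVALID);
}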

Aggregations (co-occurring classes across all matched sources, with usage counts)

Relationship (org.apache.nifi.processor.Relationship): 106
ArrayList (java.util.ArrayList): 41
HashSet (java.util.HashSet): 40
HashMap (java.util.HashMap): 32
FlowFile (org.apache.nifi.flowfile.FlowFile): 32
Map (java.util.Map): 31
IOException (java.io.IOException): 26
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 26
Test (org.junit.Test): 23
List (java.util.List): 20
Set (java.util.Set): 19
Connection (org.apache.nifi.connectable.Connection): 18
TestRunner (org.apache.nifi.util.TestRunner): 18
ProcessException (org.apache.nifi.processor.exception.ProcessException): 17
ProcessSession (org.apache.nifi.processor.ProcessSession): 15
InputStream (java.io.InputStream): 14
DynamicRelationship (org.apache.nifi.annotation.behavior.DynamicRelationship): 12
Processor (org.apache.nifi.processor.Processor): 12
Collections (java.util.Collections): 11
AtomicLong (java.util.concurrent.atomic.AtomicLong): 10