Example 51 with FlowFile

use of org.apache.nifi.flowfile.FlowFile in project nifi by apache.

the class PutCassandraQL method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    ComponentLog logger = getLogger();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final long startNanos = System.nanoTime();
    final long statementTimeout = context.getProperty(STATEMENT_TIMEOUT).evaluateAttributeExpressions(flowFile).asTimePeriod(TimeUnit.MILLISECONDS);
    final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());
    // The documentation for the driver recommends the session remain open the entire time the processor is running
    // and states that it is thread-safe. This is why connectionSession is not in a try-with-resources.
    final Session connectionSession = cassandraSession.get();
    String cql = getCQL(session, flowFile, charset);
    try {
        PreparedStatement statement = connectionSession.prepare(cql);
        BoundStatement boundStatement = statement.bind();
        Map<String, String> attributes = flowFile.getAttributes();
        for (final Map.Entry<String, String> entry : attributes.entrySet()) {
            final String key = entry.getKey();
            final Matcher matcher = CQL_TYPE_ATTRIBUTE_PATTERN.matcher(key);
            if (matcher.matches()) {
                final int parameterIndex = Integer.parseInt(matcher.group(1));
                String paramType = entry.getValue();
                if (StringUtils.isEmpty(paramType)) {
                    throw new ProcessException("Value of the " + key + " attribute is null or empty, it must contain a valid value");
                }
                paramType = paramType.trim();
                final String valueAttrName = "cql.args." + parameterIndex + ".value";
                final String parameterValue = attributes.get(valueAttrName);
                try {
                    setStatementObject(boundStatement, parameterIndex - 1, valueAttrName, parameterValue, paramType);
                } catch (final InvalidTypeException | IllegalArgumentException e) {
                    throw new ProcessException("The value of the " + valueAttrName + " is '" + parameterValue + "', which cannot be converted into the necessary data type: " + paramType, e);
                }
            }
        }
        try {
            ResultSetFuture future = connectionSession.executeAsync(boundStatement);
            if (statementTimeout > 0) {
                future.getUninterruptibly(statementTimeout, TimeUnit.MILLISECONDS);
            } else {
                future.getUninterruptibly();
            }
            // Emit a Provenance SEND event
            final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
            // This isn't a real URI but since Cassandra is distributed we just use the cluster name
            String transitUri = "cassandra://" + connectionSession.getCluster().getMetadata().getClusterName();
            session.getProvenanceReporter().send(flowFile, transitUri, transmissionMillis, true);
            session.transfer(flowFile, REL_SUCCESS);
        } catch (final TimeoutException e) {
            throw new ProcessException(e);
        }
    } catch (final NoHostAvailableException nhae) {
        getLogger().error("No host in the Cassandra cluster can be contacted successfully to execute this statement", nhae);
        // Log up to 10 error messages. Otherwise if a 1000-node cluster was specified but there was no connectivity,
        // a thousand error messages would be logged. However we would like information from Cassandra itself, so
        // cap the error limit at 10, format the messages, and don't include the stack trace (it is displayed by the
        // logger message above).
        getLogger().error(nhae.getCustomMessage(10, true, false));
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_RETRY);
    } catch (final QueryExecutionException qee) {
        logger.error("Cannot execute the statement with the requested consistency level successfully", qee);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_RETRY);
    } catch (final QueryValidationException qve) {
        logger.error("The CQL statement {} is invalid due to syntax error, authorization issue, or another " + "validation problem; routing {} to failure", new Object[] { cql, flowFile }, qve);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    } catch (final ProcessException e) {
        logger.error("Unable to execute CQL select statement {} for {} due to {}; routing to failure", new Object[] { cql, flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used: FlowFile(org.apache.nifi.flowfile.FlowFile) ResultSetFuture(com.datastax.driver.core.ResultSetFuture) Matcher(java.util.regex.Matcher) Charset(java.nio.charset.Charset) PreparedStatement(com.datastax.driver.core.PreparedStatement) ComponentLog(org.apache.nifi.logging.ComponentLog) ProcessException(org.apache.nifi.processor.exception.ProcessException) QueryExecutionException(com.datastax.driver.core.exceptions.QueryExecutionException) QueryValidationException(com.datastax.driver.core.exceptions.QueryValidationException) NoHostAvailableException(com.datastax.driver.core.exceptions.NoHostAvailableException) BoundStatement(com.datastax.driver.core.BoundStatement) Map(java.util.Map) Session(com.datastax.driver.core.Session) ProcessSession(org.apache.nifi.processor.ProcessSession) InvalidTypeException(com.datastax.driver.core.exceptions.InvalidTypeException) TimeoutException(java.util.concurrent.TimeoutException)
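
The parameter-binding loop above relies on a FlowFile attribute convention: each bind variable is described by a pair of attributes, cql.args.N.type and cql.args.N.value, where N is the 1-based position of the parameter in the statement. The standalone sketch below shows how that regex-driven resolution works; the pattern literal is an assumption mirroring CQL_TYPE_ATTRIBUTE_PATTERN from the processor's base class, and the attribute values are purely illustrative.

import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class CqlArgsDemo {

    // Assumption: mirrors CQL_TYPE_ATTRIBUTE_PATTERN from the processor's base class;
    // matches keys such as "cql.args.1.type" and captures the 1-based parameter index.
    private static final Pattern CQL_TYPE_ATTRIBUTE_PATTERN = Pattern.compile("cql\\.args\\.(\\d+)\\.type");

    public static void main(String[] args) {
        // Illustrative attributes, as an upstream processor might set them
        Map<String, String> attributes = Map.of(
                "cql.args.1.type", "text",
                "cql.args.1.value", "nifi",
                "filename", "example.txt");
        for (Map.Entry<String, String> entry : attributes.entrySet()) {
            Matcher matcher = CQL_TYPE_ATTRIBUTE_PATTERN.matcher(entry.getKey());
            if (matcher.matches()) {
                int parameterIndex = Integer.parseInt(matcher.group(1));
                String value = attributes.get("cql.args." + parameterIndex + ".value");
                // The processor binds this value at zero-based position parameterIndex - 1
                System.out.println("bind position " + (parameterIndex - 1) + " as " + entry.getValue() + ": " + value);
            }
        }
    }
}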

Example 52 with FlowFile

use of org.apache.nifi.flowfile.FlowFile in project nifi by apache.

the class ExtractCCDAAttributes method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    // stores CDA attributes
    Map<String, String> attributes = new TreeMap<String, String>();
    getLogger().info("Processing CCDA");
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    if (processMap.isEmpty()) {
        getLogger().error("Process Mapping is not loaded");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final Boolean skipValidation = context.getProperty(SKIP_VALIDATION).asBoolean();
    final StopWatch stopWatch = new StopWatch(true);
    ClinicalDocument cd = null;
    try {
        // Load and optionally validate CDA document
        cd = loadDocument(session.read(flowFile), skipValidation);
    } catch (ProcessException e) {
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    getLogger().debug("Loaded document for {} in {}", new Object[] { flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS) });
    getLogger().debug("Processing elements");
    // Process CDA element using mapping data
    processElement(null, cd, attributes);
    flowFile = session.putAllAttributes(flowFile, attributes);
    stopWatch.stop();
    getLogger().debug("Successfully processed {} in {}", new Object[] { flowFile, stopWatch.getDuration(TimeUnit.MILLISECONDS) });
    if (getLogger().isDebugEnabled()) {
        for (Entry<String, String> entry : attributes.entrySet()) {
            getLogger().debug("Attribute: {}={}", new Object[] { entry.getKey(), entry.getValue() });
        }
    }
    session.transfer(flowFile, REL_SUCCESS);
}
Also used: FlowFile(org.apache.nifi.flowfile.FlowFile) ProcessException(org.apache.nifi.processor.exception.ProcessException) ClinicalDocument(org.openhealthtools.mdht.uml.cda.ClinicalDocument) TreeMap(java.util.TreeMap) StopWatch(org.apache.nifi.util.StopWatch)
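
The two timing calls in this example differ in a way that is easy to miss: getElapsed reads the still-running watch (used for the "Loaded document" debug line), while getDuration reports the total only after stop(). A minimal standalone sketch of that behavior, assuming org.apache.nifi.util.StopWatch on the classpath and sleeps standing in for the real work:

import java.util.concurrent.TimeUnit;
import org.apache.nifi.util.StopWatch;

public class StopWatchDemo {

    public static void main(String[] args) throws InterruptedException {
        StopWatch stopWatch = new StopWatch(true); // true = start immediately
        Thread.sleep(50);                          // stand-in for loadDocument(...)
        // Elapsed time so far; the watch keeps running
        System.out.println("loaded in " + stopWatch.getElapsed(TimeUnit.MILLISECONDS) + " ms");
        Thread.sleep(50);                          // stand-in for processElement(...)
        stopWatch.stop();
        // Total wall-clock time between start and stop
        System.out.println("processed in " + stopWatch.getDuration(TimeUnit.MILLISECONDS) + " ms");
    }
}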

Example 53 with FlowFile

use of org.apache.nifi.flowfile.FlowFile in project nifi by apache.

the class DDLEventWriter method writeEvent.

@Override
public long writeEvent(ProcessSession session, String transitUri, DDLEventInfo eventInfo, long currentSequenceId, Relationship relationship) {
    FlowFile flowFile = session.create();
    flowFile = session.write(flowFile, (outputStream) -> {
        super.startJson(outputStream, eventInfo);
        super.writeJson(eventInfo);
        jsonGenerator.writeStringField("query", eventInfo.getQuery());
        super.endJson();
    });
    flowFile = session.putAllAttributes(flowFile, getCommonAttributes(currentSequenceId, eventInfo));
    session.transfer(flowFile, relationship);
    session.getProvenanceReporter().receive(flowFile, transitUri);
    return currentSequenceId + 1;
}
Also used: DDLEventInfo(org.apache.nifi.cdc.mysql.event.DDLEventInfo) FlowFile(org.apache.nifi.flowfile.FlowFile) Relationship(org.apache.nifi.processor.Relationship) ProcessSession(org.apache.nifi.processor.ProcessSession)
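
The writer follows a fixed session lifecycle: create a FlowFile, stream the serialized event into its content, attach attributes, transfer it, and record a provenance RECEIVE event (receive rather than send, because the change event originated outside NiFi). A minimal sketch of that lifecycle; emitEvent is a hypothetical helper, and the cdc.sequence.id key is illustrative rather than necessarily what getCommonAttributes produces.

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;

public class EventEmitSketch {

    // Hypothetical helper showing the create/write/transfer/provenance sequence
    static long emitEvent(ProcessSession session, String transitUri, String json, long currentSequenceId, Relationship relationship) {
        FlowFile flowFile = session.create();
        // OutputStreamCallback written as a lambda: serialize the event into the content
        flowFile = session.write(flowFile, out -> out.write(json.getBytes(StandardCharsets.UTF_8)));
        // Illustrative attribute key; the real writers use getCommonAttributes(...)
        flowFile = session.putAllAttributes(flowFile, Collections.singletonMap("cdc.sequence.id", String.valueOf(currentSequenceId)));
        session.transfer(flowFile, relationship);
        // RECEIVE, not SEND: the event was ingested from an external source
        session.getProvenanceReporter().receive(flowFile, transitUri);
        return currentSequenceId + 1;
    }
}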

Example 54 with FlowFile

use of org.apache.nifi.flowfile.FlowFile in project nifi by apache.

the class UpdateRowsWriter method writeEvent.

/**
 * Creates and transfers one new flow file per row in the specified event, with the JSON-serialized row as the contents and the current sequence ID set as an attribute.
 *
 * @param session   A reference to a ProcessSession from which the flow file(s) will be created and transferred
 * @param eventInfo An event whose rows will become the contents of the flow files
 * @return The next available CDC sequence ID for use by the CDC processor
 */
@Override
public long writeEvent(final ProcessSession session, String transitUri, final UpdateRowsEventInfo eventInfo, final long currentSequenceId, Relationship relationship) {
    final AtomicLong seqId = new AtomicLong(currentSequenceId);
    for (Map.Entry<Serializable[], Serializable[]> row : eventInfo.getRows()) {
        FlowFile flowFile = session.create();
        flowFile = session.write(flowFile, outputStream -> {
            super.startJson(outputStream, eventInfo);
            super.writeJson(eventInfo);
            final BitSet bitSet = eventInfo.getIncludedColumns();
            writeRow(eventInfo, row, bitSet);
            super.endJson();
        });
        flowFile = session.putAllAttributes(flowFile, getCommonAttributes(seqId.get(), eventInfo));
        session.transfer(flowFile, relationship);
        session.getProvenanceReporter().receive(flowFile, transitUri);
        seqId.getAndIncrement();
    }
    return seqId.get();
}
Also used: AtomicLong(java.util.concurrent.atomic.AtomicLong) FlowFile(org.apache.nifi.flowfile.FlowFile) Relationship(org.apache.nifi.processor.Relationship) Map(java.util.Map) ProcessSession(org.apache.nifi.processor.ProcessSession) IOException(java.io.IOException) BitSet(java.util.BitSet) ColumnDefinition(org.apache.nifi.cdc.event.ColumnDefinition) UpdateRowsEventInfo(org.apache.nifi.cdc.mysql.event.UpdateRowsEventInfo) MySQLCDCUtils(org.apache.nifi.cdc.mysql.event.MySQLCDCUtils) Serializable(java.io.Serializable)
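
Note that eventInfo.getIncludedColumns() returns a BitSet flagging which columns the binlog event actually carries, and writeRow presumably consults it to skip absent columns. A standalone sketch of the usual nextSetBit iteration idiom (the column names are illustrative):

import java.util.BitSet;

public class IncludedColumnsDemo {

    public static void main(String[] args) {
        String[] columns = {"id", "name", "updated_at"};
        BitSet included = new BitSet(columns.length);
        included.set(0); // id
        included.set(2); // updated_at
        // nextSetBit visits only the set bits, skipping columns absent from the event
        for (int i = included.nextSetBit(0); i >= 0; i = included.nextSetBit(i + 1)) {
            System.out.println("column present in event: " + columns[i]);
        }
    }
}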

Example 55 with FlowFile

use of org.apache.nifi.flowfile.FlowFile in project nifi by apache.

the class PutCouchbaseKey method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final byte[] content = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {

        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, content, true);
        }
    });
    String docId = flowFile.getAttribute(CoreAttributes.UUID.key());
    if (!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())) {
        docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
    }
    try {
        Document<?> doc = null;
        final DocumentType documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
        switch(documentType) {
            case Json:
                {
                    doc = RawJsonDocument.create(docId, new String(content, StandardCharsets.UTF_8));
                    break;
                }
            case Binary:
                {
                    final ByteBuf buf = Unpooled.copiedBuffer(content);
                    doc = BinaryDocument.create(docId, buf);
                    break;
                }
        }
        final PersistTo persistTo = PersistTo.valueOf(context.getProperty(PERSIST_TO).getValue());
        final ReplicateTo replicateTo = ReplicateTo.valueOf(context.getProperty(REPLICATE_TO).getValue());
        doc = openBucket(context).upsert(doc, persistTo, replicateTo);
        final Map<String, String> updatedAttrs = new HashMap<>();
        updatedAttrs.put(CouchbaseAttributes.Cluster.key(), context.getProperty(COUCHBASE_CLUSTER_SERVICE).getValue());
        updatedAttrs.put(CouchbaseAttributes.Bucket.key(), context.getProperty(BUCKET_NAME).getValue());
        updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
        updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
        updatedAttrs.put(CouchbaseAttributes.Expiry.key(), String.valueOf(doc.expiry()));
        flowFile = session.putAllAttributes(flowFile, updatedAttrs);
        session.getProvenanceReporter().send(flowFile, getTransitUrl(context, docId));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final CouchbaseException e) {
        String errMsg = String.format("Writing document %s to Couchbase Server using %s failed due to %s", docId, flowFile, e);
        handleCouchbaseException(context, session, logger, flowFile, e, errMsg);
    }
}
Also used: FlowFile(org.apache.nifi.flowfile.FlowFile) HashMap(java.util.HashMap) InputStream(java.io.InputStream) IOException(java.io.IOException) ByteBuf(com.couchbase.client.deps.io.netty.buffer.ByteBuf) ComponentLog(org.apache.nifi.logging.ComponentLog) ReplicateTo(com.couchbase.client.java.ReplicateTo) CouchbaseException(com.couchbase.client.core.CouchbaseException) PersistTo(com.couchbase.client.java.PersistTo) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback)
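
Because InputStreamCallback declares a single method, the anonymous class in this example can be collapsed to a lambda with identical behavior; fillBuffer's final argument asks it to fail rather than return a partially filled buffer. A minimal sketch, factored into a hypothetical readFully helper:

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.stream.io.StreamUtils;

public class ReadContentSketch {

    // Hypothetical helper: reads the complete FlowFile content into a byte array
    static byte[] readFully(ProcessSession session, FlowFile flowFile) {
        final byte[] content = new byte[(int) flowFile.getSize()];
        // true = throw rather than return a partially filled buffer
        session.read(flowFile, in -> StreamUtils.fillBuffer(in, content, true));
        return content;
    }
}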

Aggregations

FlowFile (org.apache.nifi.flowfile.FlowFile): 500
IOException (java.io.IOException): 236
ProcessException (org.apache.nifi.processor.exception.ProcessException): 193
HashMap (java.util.HashMap): 160
InputStream (java.io.InputStream): 145
OutputStream (java.io.OutputStream): 131
ComponentLog (org.apache.nifi.logging.ComponentLog): 119
Test (org.junit.Test): 116
ArrayList (java.util.ArrayList): 113
Map (java.util.Map): 105
MockFlowFile (org.apache.nifi.util.MockFlowFile): 103
ProcessSession (org.apache.nifi.processor.ProcessSession): 99
OutputStreamCallback (org.apache.nifi.processor.io.OutputStreamCallback): 83
Relationship (org.apache.nifi.processor.Relationship): 78
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 78
HashSet (java.util.HashSet): 75
List (java.util.List): 67
StopWatch (org.apache.nifi.util.StopWatch): 59
Set (java.util.Set): 56
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 55