
Example 1 with ComponentLog

Use of org.apache.nifi.logging.ComponentLog in the apache/nifi project.

From the class ReflectionUtilsTest, the method validateNoFailureIfQuiatelyIfOneOfArgumentsWrongTypeAndProcessLog:

@Test
public void validateNoFailureIfQuiatelyIfOneOfArgumentsWrongTypeAndProcessLog() throws Exception {
    ComponentLog pl = mock(ComponentLog.class);
    // The trailing arguments (3, "hjk", "hjk") do not match the signature of E's @OnStopped
    // method; the "quiet" invocation must log the failure rather than throw.
    ReflectionUtils.quietlyInvokeMethodsWithAnnotation(OnStopped.class, new E(), pl, 3, "hjk", "hjk");
    verify(pl, Mockito.atMost(1)).error(Mockito.anyString());
}
Also used: ComponentLog (org.apache.nifi.logging.ComponentLog), Test (org.junit.Test)
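
For contrast, here is a minimal happy-path sketch. It is hypothetical (MatchingComponent is not part of the NiFi test suite): when the annotated method's parameters are satisfied, quietlyInvokeMethodsWithAnnotation invokes it and no error reaches the ComponentLog.

public static class MatchingComponent {

    @OnStopped
    public void stop() {
        // normal cleanup; nothing to log
    }
}

@Test
public void validateNoErrorLoggedWhenInvocationSucceeds() throws Exception {
    final ComponentLog pl = mock(ComponentLog.class);
    // The no-arg @OnStopped method is invoked successfully, so no error should be logged.
    ReflectionUtils.quietlyInvokeMethodsWithAnnotation(OnStopped.class, new MatchingComponent(), pl);
    verify(pl, Mockito.never()).error(Mockito.anyString());
}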

Example 2 with ComponentLog

Use of org.apache.nifi.logging.ComponentLog in the apache/nifi project.

From the class ProcessorInitializer, the method teardown:

@Override
public void teardown(ConfigurableComponent component) {
    Processor processor = (Processor) component;
    // Run the lifecycle callbacks under the component's own NAR class loader so that
    // classes referenced by the @OnShutdown methods resolve correctly.
    try (NarCloseable narCloseable = NarCloseable.withComponentNarLoader(component.getClass(), component.getIdentifier())) {
        final ComponentLog logger = new MockComponentLogger();
        final MockProcessContext context = new MockProcessContext();
        // Invoke every @OnShutdown-annotated method, logging failures instead of propagating them.
        ReflectionUtils.quietlyInvokeMethodsWithAnnotation(OnShutdown.class, processor, logger, context);
    } finally {
        ExtensionManager.removeInstanceClassLoader(component.getIdentifier());
    }
}
Also used: NarCloseable (org.apache.nifi.nar.NarCloseable), Processor (org.apache.nifi.processor.Processor), MockComponentLogger (org.apache.nifi.mock.MockComponentLogger), ComponentLog (org.apache.nifi.logging.ComponentLog), MockProcessContext (org.apache.nifi.mock.MockProcessContext)
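
For reference, a hypothetical sketch of the kind of method the reflective call above targets: any method on the processor annotated with @OnShutdown. The class name and body are assumed for illustration, not taken from the NiFi codebase.

public class ExampleProcessor extends AbstractProcessor {

    @OnShutdown
    public void cleanUpOnShutdown() {
        // Release connections, stop background threads, flush caches, etc.
        getLogger().info("Shutting down; releasing resources");
    }

    @Override
    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
        // omitted for this sketch
    }
}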

Example 3 with ComponentLog

Use of org.apache.nifi.logging.ComponentLog in the apache/nifi project.

From the class PutCassandraQL, the method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    ComponentLog logger = getLogger();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final long startNanos = System.nanoTime();
    final long statementTimeout = context.getProperty(STATEMENT_TIMEOUT).evaluateAttributeExpressions(flowFile).asTimePeriod(TimeUnit.MILLISECONDS);
    final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());
    // The documentation for the driver recommends the session remain open the entire time the processor is running
    // and states that it is thread-safe. This is why connectionSession is not in a try-with-resources.
    final Session connectionSession = cassandraSession.get();
    String cql = getCQL(session, flowFile, charset);
    try {
        PreparedStatement statement = connectionSession.prepare(cql);
        BoundStatement boundStatement = statement.bind();
        Map<String, String> attributes = flowFile.getAttributes();
        // Bind statement parameters from flow file attributes matching CQL_TYPE_ATTRIBUTE_PATTERN,
        // pairing each type attribute with the corresponding cql.args.<N>.value attribute.
        for (final Map.Entry<String, String> entry : attributes.entrySet()) {
            final String key = entry.getKey();
            final Matcher matcher = CQL_TYPE_ATTRIBUTE_PATTERN.matcher(key);
            if (matcher.matches()) {
                final int parameterIndex = Integer.parseInt(matcher.group(1));
                String paramType = entry.getValue();
                if (StringUtils.isEmpty(paramType)) {
                    throw new ProcessException("Value of the " + key + " attribute is null or empty, it must contain a valid value");
                }
                paramType = paramType.trim();
                final String valueAttrName = "cql.args." + parameterIndex + ".value";
                final String parameterValue = attributes.get(valueAttrName);
                try {
                    setStatementObject(boundStatement, parameterIndex - 1, valueAttrName, parameterValue, paramType);
                } catch (final InvalidTypeException | IllegalArgumentException e) {
                    throw new ProcessException("The value of the " + valueAttrName + " is '" + parameterValue + "', which cannot be converted into the necessary data type: " + paramType, e);
                }
            }
        }
        try {
            ResultSetFuture future = connectionSession.executeAsync(boundStatement);
            if (statementTimeout > 0) {
                future.getUninterruptibly(statementTimeout, TimeUnit.MILLISECONDS);
            } else {
                future.getUninterruptibly();
            }
            // Emit a Provenance SEND event
            final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
            // This isn't a real URI but since Cassandra is distributed we just use the cluster name
            String transitUri = "cassandra://" + connectionSession.getCluster().getMetadata().getClusterName();
            session.getProvenanceReporter().send(flowFile, transitUri, transmissionMillis, true);
            session.transfer(flowFile, REL_SUCCESS);
        } catch (final TimeoutException e) {
            throw new ProcessException(e);
        }
    } catch (final NoHostAvailableException nhae) {
        getLogger().error("No host in the Cassandra cluster can be contacted successfully to execute this statement", nhae);
        // Log up to 10 error messages. Otherwise if a 1000-node cluster was specified but there was no connectivity,
        // a thousand error messages would be logged. However we would like information from Cassandra itself, so
        // cap the error limit at 10, format the messages, and don't include the stack trace (it is displayed by the
        // logger message above).
        getLogger().error(nhae.getCustomMessage(10, true, false));
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_RETRY);
    } catch (final QueryExecutionException qee) {
        logger.error("Cannot execute the statement with the requested consistency level successfully", qee);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_RETRY);
    } catch (final QueryValidationException qve) {
        logger.error("The CQL statement {} is invalid due to syntax error, authorization issue, or another validation problem; routing {} to failure", new Object[] { cql, flowFile }, qve);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    } catch (final ProcessException e) {
        logger.error("Unable to execute CQL statement {} for {} due to {}; routing to failure", new Object[] { cql, flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }
    }
}
Also used: FlowFile (org.apache.nifi.flowfile.FlowFile), ResultSetFuture (com.datastax.driver.core.ResultSetFuture), Matcher (java.util.regex.Matcher), Charset (java.nio.charset.Charset), PreparedStatement (com.datastax.driver.core.PreparedStatement), ComponentLog (org.apache.nifi.logging.ComponentLog), ProcessException (org.apache.nifi.processor.exception.ProcessException), QueryExecutionException (com.datastax.driver.core.exceptions.QueryExecutionException), QueryValidationException (com.datastax.driver.core.exceptions.QueryValidationException), NoHostAvailableException (com.datastax.driver.core.exceptions.NoHostAvailableException), BoundStatement (com.datastax.driver.core.BoundStatement), Map (java.util.Map), Session (com.datastax.driver.core.Session), ProcessSession (org.apache.nifi.processor.ProcessSession), InvalidTypeException (com.datastax.driver.core.exceptions.InvalidTypeException), TimeoutException (java.util.concurrent.TimeoutException)
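
The parameter-binding loop above relies on a flow file attribute convention. A hedged sketch of what those attributes look like, assuming the type attributes follow the cql.args.<N>.type form implied by the construction of valueAttrName:

// cql.args.<N>.type names the CQL type of the Nth placeholder (N is 1-based);
// cql.args.<N>.value carries the value bound at index N - 1 on the BoundStatement.
final Map<String, String> attrs = new HashMap<>();
attrs.put("cql.args.1.type", "text");
attrs.put("cql.args.1.value", "nifi");
attrs.put("cql.args.2.type", "int");
attrs.put("cql.args.2.value", "42");
// The flow file content carries the statement itself, e.g.
// INSERT INTO users (name, age) VALUES (?, ?)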

Example 4 with ComponentLog

Use of org.apache.nifi.logging.ComponentLog in the apache/nifi project.

From the class QueryCassandra, the method onScheduled:

@OnScheduled
public void onScheduled(final ProcessContext context) {
    ComponentLog log = getLogger();
    try {
        connectToCassandra(context);
        final int fetchSize = context.getProperty(FETCH_SIZE).evaluateAttributeExpressions().asInteger();
        if (fetchSize > 0) {
            // The fetch size lives on the shared cluster configuration, so the update is
            // synchronized on the cluster reference.
            synchronized (cluster.get()) {
                cluster.get().getConfiguration().getQueryOptions().setFetchSize(fetchSize);
            }
        }
    } catch (final NoHostAvailableException nhae) {
        log.error("No host in the Cassandra cluster can be contacted successfully to execute this query", nhae);
        // Log up to 10 error messages. Otherwise if a 1000-node cluster was specified but there was no connectivity,
        // a thousand error messages would be logged. However we would like information from Cassandra itself, so
        // cap the error limit at 10, format the messages, and don't include the stack trace (it is displayed by the
        // logger message above).
        log.error(nhae.getCustomMessage(10, true, false));
        throw new ProcessException(nhae);
    } catch (final AuthenticationException ae) {
        log.error("Invalid username/password combination", ae);
        throw new ProcessException(ae);
    }
}
Also used: ProcessException (org.apache.nifi.processor.exception.ProcessException), NoHostAvailableException (com.datastax.driver.core.exceptions.NoHostAvailableException), AuthenticationException (com.datastax.driver.core.exceptions.AuthenticationException), ComponentLog (org.apache.nifi.logging.ComponentLog), OnScheduled (org.apache.nifi.annotation.lifecycle.OnScheduled)
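
Both Cassandra examples use ComponentLog's SLF4J-style API. A minimal sketch of the two overload shapes they rely on (variable names assumed for illustration):

final ComponentLog log = getLogger();
// Placeholders ({}) are filled from the Object[] in order; a trailing Throwable appends the stack trace.
log.error("The CQL statement {} is invalid; routing {} to failure", new Object[] { cql, flowFile }, qve);
// Without a Throwable, the array supplies the placeholder values only.
log.info("Executed {} in {} ms", new Object[] { flowFile, elapsedMillis });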

Example 5 with ComponentLog

Use of org.apache.nifi.logging.ComponentLog in the apache/nifi project.

From the class PutCouchbaseKey, the method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final byte[] content = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {

        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, content, true);
        }
    });
    String docId = flowFile.getAttribute(CoreAttributes.UUID.key());
    if (!StringUtils.isEmpty(context.getProperty(DOC_ID).getValue())) {
        docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
    }
    try {
        Document<?> doc = null;
        final DocumentType documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
        switch (documentType) {
            case Json: {
                doc = RawJsonDocument.create(docId, new String(content, StandardCharsets.UTF_8));
                break;
            }
            case Binary: {
                final ByteBuf buf = Unpooled.copiedBuffer(content);
                doc = BinaryDocument.create(docId, buf);
                break;
            }
        }
        final PersistTo persistTo = PersistTo.valueOf(context.getProperty(PERSIST_TO).getValue());
        final ReplicateTo replicateTo = ReplicateTo.valueOf(context.getProperty(REPLICATE_TO).getValue());
        doc = openBucket(context).upsert(doc, persistTo, replicateTo);
        final Map<String, String> updatedAttrs = new HashMap<>();
        updatedAttrs.put(CouchbaseAttributes.Cluster.key(), context.getProperty(COUCHBASE_CLUSTER_SERVICE).getValue());
        updatedAttrs.put(CouchbaseAttributes.Bucket.key(), context.getProperty(BUCKET_NAME).getValue());
        updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
        updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
        updatedAttrs.put(CouchbaseAttributes.Expiry.key(), String.valueOf(doc.expiry()));
        flowFile = session.putAllAttributes(flowFile, updatedAttrs);
        session.getProvenanceReporter().send(flowFile, getTransitUrl(context, docId));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final CouchbaseException e) {
        String errMsg = String.format("Writing document %s to Couchbase Server using %s failed due to %s", docId, flowFile, e);
        handleCouchbaseException(context, session, logger, flowFile, e, errMsg);
    }
}
Also used: FlowFile (org.apache.nifi.flowfile.FlowFile), HashMap (java.util.HashMap), InputStream (java.io.InputStream), IOException (java.io.IOException), ByteBuf (com.couchbase.client.deps.io.netty.buffer.ByteBuf), ComponentLog (org.apache.nifi.logging.ComponentLog), ReplicateTo (com.couchbase.client.java.ReplicateTo), CouchbaseException (com.couchbase.client.core.CouchbaseException), PersistTo (com.couchbase.client.java.PersistTo), InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback)
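
Since InputStreamCallback declares a single abstract method, the session.read call above can also be written as a lambda on Java 8+; the behavior is identical:

final byte[] content = new byte[(int) flowFile.getSize()];
// Same semantics as the anonymous-class form above: fill the buffer from the flow file content.
session.read(flowFile, in -> StreamUtils.fillBuffer(in, content, true));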

Aggregations

ComponentLog (org.apache.nifi.logging.ComponentLog): 211 usages
FlowFile (org.apache.nifi.flowfile.FlowFile): 111
ProcessException (org.apache.nifi.processor.exception.ProcessException): 95
IOException (java.io.IOException): 94
HashMap (java.util.HashMap): 51
Map (java.util.Map): 47
InputStream (java.io.InputStream): 46
ArrayList (java.util.ArrayList): 44
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 40
HashSet (java.util.HashSet): 33
ProcessSession (org.apache.nifi.processor.ProcessSession): 32
List (java.util.List): 28
ProcessContext (org.apache.nifi.processor.ProcessContext): 28
Relationship (org.apache.nifi.processor.Relationship): 28
StopWatch (org.apache.nifi.util.StopWatch): 28
OutputStream (java.io.OutputStream): 27
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 27
Set (java.util.Set): 23
Collections (java.util.Collections): 21
AtomicReference (java.util.concurrent.atomic.AtomicReference): 21