
Example 66 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class JoltTransformJSON, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    final StopWatch stopWatch = new StopWatch(true);
    final Object inputJson;
    try (final InputStream in = session.read(original)) {
        inputJson = JsonUtils.jsonToObject(in);
    } catch (final Exception e) {
        logger.error("Failed to transform {}; routing to failure", new Object[] { original, e });
        session.transfer(original, REL_FAILURE);
        return;
    }
    final String jsonString;
    final ClassLoader originalContextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        final JoltTransform transform = getTransform(context, original);
        // Swap in the custom class loader (if one was configured) so the Jolt transform
        // can resolve classes from user-supplied modules.
        if (customClassLoader != null) {
            Thread.currentThread().setContextClassLoader(customClassLoader);
        }
        final Object transformedJson = TransformUtils.transform(transform, inputJson);
        jsonString = JsonUtils.toJsonString(transformedJson);
    } catch (final Exception ex) {
        logger.error("Unable to transform {} due to {}", new Object[] { original, ex.toString(), ex });
        session.transfer(original, REL_FAILURE);
        return;
    } finally {
        // Always restore the original context class loader.
        if (customClassLoader != null && originalContextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(originalContextClassLoader);
        }
    }
    FlowFile transformed = session.write(original, new OutputStreamCallback() {

        @Override
        public void process(OutputStream out) throws IOException {
            out.write(jsonString.getBytes(DEFAULT_CHARSET));
        }
    });
    final String transformType = context.getProperty(JOLT_TRANSFORM).getValue();
    transformed = session.putAttribute(transformed, CoreAttributes.MIME_TYPE.key(), "application/json");
    session.transfer(transformed, REL_SUCCESS);
    session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transformType, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
    logger.info("Transformed {}", new Object[] { original });
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) JoltTransform(com.bazaarvoice.jolt.JoltTransform) OutputStream(java.io.OutputStream) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog) ProcessException(org.apache.nifi.processor.exception.ProcessException) StopWatch(org.apache.nifi.util.StopWatch) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback)
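
The success path above rewrites the content, sets mime.type, and records a provenance event. The sketch below exercises this onTrigger through NiFi's mock framework; it is a minimal example, assuming the JOLT_TRANSFORM/JOLT_SPEC descriptors and the SHIFTR allowable value that the processor exposes, plus an illustrative shift spec.

import java.nio.charset.StandardCharsets;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

public class JoltTransformJSONSketchTest {

    @Test
    public void testShiftSpecRoutesToSuccess() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(JoltTransformJSON.class);
        // Illustrative shift spec: copy the input field "rating" to the output field "quality".
        runner.setProperty(JoltTransformJSON.JOLT_TRANSFORM, JoltTransformJSON.SHIFTR.getValue());
        runner.setProperty(JoltTransformJSON.JOLT_SPEC, "{ \"rating\" : \"quality\" }");
        runner.enqueue("{ \"rating\" : 3 }".getBytes(StandardCharsets.UTF_8));
        runner.run();

        runner.assertAllFlowFilesTransferred(JoltTransformJSON.REL_SUCCESS, 1);
        final MockFlowFile out = runner.getFlowFilesForRelationship(JoltTransformJSON.REL_SUCCESS).get(0);
        out.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/json");
    }
}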

Example 67 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class ListDatabaseTables, method onTrigger:

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final String catalog = context.getProperty(CATALOG).getValue();
    final String schemaPattern = context.getProperty(SCHEMA_PATTERN).getValue();
    final String tableNamePattern = context.getProperty(TABLE_NAME_PATTERN).getValue();
    final String[] tableTypes = context.getProperty(TABLE_TYPES).isSet() ? context.getProperty(TABLE_TYPES).getValue().split("\\s*,\\s*") : null;
    final boolean includeCount = context.getProperty(INCLUDE_COUNT).asBoolean();
    final long refreshInterval = context.getProperty(REFRESH_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);
    final StateManager stateManager = context.getStateManager();
    final StateMap stateMap;
    final Map<String, String> stateMapProperties;
    try {
        stateMap = stateManager.getState(Scope.CLUSTER);
        stateMapProperties = new HashMap<>(stateMap.toMap());
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
    try (final Connection con = dbcpService.getConnection()) {
        DatabaseMetaData dbMetaData = con.getMetaData();
        ResultSet rs = dbMetaData.getTables(catalog, schemaPattern, tableNamePattern, tableTypes);
        while (rs.next()) {
            final String tableCatalog = rs.getString(1);
            final String tableSchema = rs.getString(2);
            final String tableName = rs.getString(3);
            final String tableType = rs.getString(4);
            final String tableRemarks = rs.getString(5);
            // Build fully-qualified name
            String fqn = Stream.of(tableCatalog, tableSchema, tableName).filter(segment -> !StringUtils.isEmpty(segment)).collect(Collectors.joining("."));
            String lastTimestampForTable = stateMapProperties.get(fqn);
            boolean refreshTable = true;
            try {
                // Refresh state if the interval has elapsed
                long lastRefreshed = -1;
                final long currentTime = System.currentTimeMillis();
                if (!StringUtils.isEmpty(lastTimestampForTable)) {
                    lastRefreshed = Long.parseLong(lastTimestampForTable);
                }
                if (lastRefreshed == -1 || (refreshInterval > 0 && currentTime >= (lastRefreshed + refreshInterval))) {
                    // The interval has elapsed (or the table was never listed), so drop the stale
                    // entry and list the table again. The map is keyed by fully-qualified name,
                    // so remove by fqn; removing by the timestamp value would be a no-op.
                    stateMapProperties.remove(fqn);
                } else {
                    refreshTable = false;
                }
            } catch (final NumberFormatException nfe) {
                getLogger().error("Failed to retrieve observed last table fetches from the State Manager. Will not perform " + "query until this is accomplished.", nfe);
                context.yield();
                return;
            }
            if (refreshTable) {
                FlowFile flowFile = session.create();
                logger.info("Found {}: {}", new Object[] { tableType, fqn });
                if (includeCount) {
                    try (Statement st = con.createStatement()) {
                        final String countQuery = "SELECT COUNT(1) FROM " + fqn;
                        logger.debug("Executing query: {}", new Object[] { countQuery });
                        ResultSet countResult = st.executeQuery(countQuery);
                        if (countResult.next()) {
                            flowFile = session.putAttribute(flowFile, DB_TABLE_COUNT, Long.toString(countResult.getLong(1)));
                        }
                    } catch (SQLException se) {
                        logger.error("Couldn't get row count for {}", new Object[] { fqn, se });
                        session.remove(flowFile);
                        continue;
                    }
                }
                if (tableCatalog != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_CATALOG, tableCatalog);
                }
                if (tableSchema != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_SCHEMA, tableSchema);
                }
                flowFile = session.putAttribute(flowFile, DB_TABLE_NAME, tableName);
                flowFile = session.putAttribute(flowFile, DB_TABLE_FULLNAME, fqn);
                flowFile = session.putAttribute(flowFile, DB_TABLE_TYPE, tableType);
                if (tableRemarks != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_REMARKS, tableRemarks);
                }
                String transitUri;
                try {
                    transitUri = dbMetaData.getURL();
                } catch (SQLException sqle) {
                    transitUri = "<unknown>";
                }
                session.getProvenanceReporter().receive(flowFile, transitUri);
                session.transfer(flowFile, REL_SUCCESS);
                stateMapProperties.put(fqn, Long.toString(System.currentTimeMillis()));
            }
        }
        // Update the timestamps for listed tables
        if (stateMap.getVersion() == -1) {
            stateManager.setState(stateMapProperties, Scope.CLUSTER);
        } else {
            stateManager.replace(stateMap, stateMapProperties, Scope.CLUSTER);
        }
    } catch (final SQLException | IOException e) {
        throw new ProcessException(e);
    }
}
Also used : StandardValidators(org.apache.nifi.processor.util.StandardValidators) Connection(java.sql.Connection) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) DatabaseMetaData(java.sql.DatabaseMetaData) HashMap(java.util.HashMap) ComponentLog(org.apache.nifi.logging.ComponentLog) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) SQLException(java.sql.SQLException) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) Scope(org.apache.nifi.components.state.Scope) Relationship(org.apache.nifi.processor.Relationship) ResultSet(java.sql.ResultSet) Map(java.util.Map) TriggerSerially(org.apache.nifi.annotation.behavior.TriggerSerially) Validator(org.apache.nifi.components.Validator) FlowFile(org.apache.nifi.flowfile.FlowFile) StateManager(org.apache.nifi.components.state.StateManager) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) ProcessSession(org.apache.nifi.processor.ProcessSession) IOException(java.io.IOException) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) StringUtils(org.apache.nifi.util.StringUtils) Collectors(java.util.stream.Collectors) StateMap(org.apache.nifi.components.state.StateMap) TimeUnit(java.util.concurrent.TimeUnit) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) Stateful(org.apache.nifi.annotation.behavior.Stateful) List(java.util.List) Stream(java.util.stream.Stream) Statement(java.sql.Statement) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Tags(org.apache.nifi.annotation.documentation.Tags) DBCPService(org.apache.nifi.dbcp.DBCPService) Collections(java.util.Collections)
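
The interesting part of this example is the cluster-state bookkeeping: a table is re-listed only when it has never been seen or when the refresh interval has elapsed since its stored timestamp. Distilled into a standalone helper (hypothetical names, not part of the processor), the decision reads:

import java.util.Map;

final class RefreshCheck {

    // Hypothetical distillation of the refresh decision above: re-list a table when it
    // has no stored timestamp, or when refreshIntervalMillis has elapsed since it.
    static boolean shouldRefresh(final Map<String, String> state, final String fqn,
                                 final long refreshIntervalMillis, final long nowMillis) {
        final String lastListed = state.get(fqn);
        if (lastListed == null || lastListed.isEmpty()) {
            return true; // never listed before
        }
        final long lastRefreshed = Long.parseLong(lastListed);
        return refreshIntervalMillis > 0 && nowMillis >= lastRefreshed + refreshIntervalMillis;
    }
}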

Example 68 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class ListenSyslog, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // Poll the queue with a small timeout to avoid unnecessarily yielding below.
    RawSyslogEvent rawSyslogEvent = getMessage(true, true, session);
    // If nothing was received, return right away; the poll above already waited briefly,
    // which avoids throttling even when no data is available.
    if (rawSyslogEvent == null) {
        return;
    }
    final int maxBatchSize = context.getProperty(MAX_BATCH_SIZE).asInteger();
    final String port = context.getProperty(PORT).evaluateAttributeExpressions().getValue();
    final String protocol = context.getProperty(PROTOCOL).getValue();
    final Map<String, String> defaultAttributes = new HashMap<>(4);
    defaultAttributes.put(SyslogAttributes.PROTOCOL.key(), protocol);
    defaultAttributes.put(SyslogAttributes.PORT.key(), port);
    defaultAttributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
    final int numAttributes = SyslogAttributes.values().length + 2;
    final boolean shouldParse = context.getProperty(PARSE_MESSAGES).asBoolean();
    final Map<String, FlowFile> flowFilePerSender = new HashMap<>();
    final SyslogParser parser = getParser();
    for (int i = 0; i < maxBatchSize; i++) {
        SyslogEvent event = null;
        // If this is our first iteration, we have already polled our queues. Otherwise, poll on each iteration.
        if (i > 0) {
            rawSyslogEvent = getMessage(true, false, session);
            if (rawSyslogEvent == null) {
                break;
            }
        }
        final String sender = rawSyslogEvent.getSender();
        FlowFile flowFile = flowFilePerSender.computeIfAbsent(sender, k -> session.create());
        if (shouldParse) {
            boolean valid = true;
            try {
                event = parser.parseEvent(rawSyslogEvent.getData(), sender);
            } catch (final ProcessException pe) {
                getLogger().warn("Failed to parse Syslog event; routing to invalid");
                valid = false;
            }
            // If the event is invalid, route the raw bytes to a brand-new FlowFile instead of
            // reusing 'flowFile', because the 'flowFile' object may already have data written to it.
            if (!valid || event == null || !event.isValid()) {
                FlowFile invalidFlowFile = session.create();
                invalidFlowFile = session.putAllAttributes(invalidFlowFile, defaultAttributes);
                if (sender != null) {
                    invalidFlowFile = session.putAttribute(invalidFlowFile, SyslogAttributes.SENDER.key(), sender);
                }
                try {
                    final byte[] rawBytes = rawSyslogEvent.getData();
                    invalidFlowFile = session.write(invalidFlowFile, new OutputStreamCallback() {

                        @Override
                        public void process(final OutputStream out) throws IOException {
                            out.write(rawBytes);
                        }
                    });
                } catch (final Exception e) {
                    getLogger().error("Failed to write contents of Syslog message to FlowFile due to {}; will re-queue message and try again", e);
                    errorEvents.offer(rawSyslogEvent);
                    session.remove(invalidFlowFile);
                    break;
                }
                session.transfer(invalidFlowFile, REL_INVALID);
                break;
            }
            getLogger().trace(event.getFullMessage());
            final Map<String, String> attributes = new HashMap<>(numAttributes);
            attributes.put(SyslogAttributes.PRIORITY.key(), event.getPriority());
            attributes.put(SyslogAttributes.SEVERITY.key(), event.getSeverity());
            attributes.put(SyslogAttributes.FACILITY.key(), event.getFacility());
            attributes.put(SyslogAttributes.VERSION.key(), event.getVersion());
            attributes.put(SyslogAttributes.TIMESTAMP.key(), event.getTimeStamp());
            attributes.put(SyslogAttributes.HOSTNAME.key(), event.getHostName());
            attributes.put(SyslogAttributes.BODY.key(), event.getMsgBody());
            attributes.put(SyslogAttributes.VALID.key(), String.valueOf(event.isValid()));
            flowFile = session.putAllAttributes(flowFile, attributes);
        }
        // Demarcate every message after the first in this batch.
        final boolean writeDemarcator = (i > 0);
        try {
            // Figure out whether to write the bytes from the raw event or the parsed event,
            // then write them as the FlowFile content.
            final byte[] rawMessage = (event == null) ? rawSyslogEvent.getData() : event.getRawMessage();
            flowFile = session.append(flowFile, new OutputStreamCallback() {

                @Override
                public void process(final OutputStream out) throws IOException {
                    if (writeDemarcator) {
                        out.write(messageDemarcatorBytes);
                    }
                    out.write(rawMessage);
                }
            });
        } catch (final Exception e) {
            getLogger().error("Failed to write contents of Syslog message to FlowFile due to {}; will re-queue message and try again", e);
            errorEvents.offer(rawSyslogEvent);
            break;
        }
        flowFilePerSender.put(sender, flowFile);
    }
    for (final Map.Entry<String, FlowFile> entry : flowFilePerSender.entrySet()) {
        final String sender = entry.getKey();
        FlowFile flowFile = entry.getValue();
        if (flowFile.getSize() == 0L) {
            session.remove(flowFile);
            getLogger().debug("No data written to FlowFile from Sender {}; removing FlowFile", new Object[] { sender });
            continue;
        }
        final Map<String, String> newAttributes = new HashMap<>(defaultAttributes.size() + 1);
        newAttributes.putAll(defaultAttributes);
        newAttributes.put(SyslogAttributes.SENDER.key(), sender);
        flowFile = session.putAllAttributes(flowFile, newAttributes);
        getLogger().debug("Transferring {} to success", new Object[] { flowFile });
        session.transfer(flowFile, REL_SUCCESS);
        session.adjustCounter("FlowFiles Transferred to Success", 1L, false);
        // The sender may carry a leading '/' (as InetAddress.toString() produces); strip it for the transit URI.
        final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1) : sender;
        final String transitUri = new StringBuilder().append(protocol.toLowerCase()).append("://").append(senderHost).append(":").append(port).toString();
        session.getProvenanceReporter().receive(flowFile, transitUri);
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) HashMap(java.util.HashMap) OutputStream(java.io.OutputStream) ProcessException(org.apache.nifi.processor.exception.ProcessException) IOException(java.io.IOException) SyslogEvent(org.apache.nifi.processors.standard.syslog.SyslogEvent) SyslogParser(org.apache.nifi.processors.standard.syslog.SyslogParser) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) Map(java.util.Map)
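
The batching loop above keeps one FlowFile per sender and joins messages with a demarcator. The same accumulation shape, sketched with plain collections (hypothetical names; note this sketch demarcates per sender, whereas the code above writes the demarcator on every iteration after the first):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class PerSenderBatcher {

    // Accumulates raw messages per sender, inserting the demarcator between messages
    // within the same sender's batch, mirroring the session.append(...) calls above.
    static Map<String, byte[]> batch(final List<String> senders, final List<byte[]> messages,
                                     final byte[] demarcator) throws IOException {
        final Map<String, ByteArrayOutputStream> buffers = new HashMap<>();
        for (int i = 0; i < senders.size(); i++) {
            final ByteArrayOutputStream buf = buffers.computeIfAbsent(senders.get(i), k -> new ByteArrayOutputStream());
            if (buf.size() > 0) {
                buf.write(demarcator); // demarcate subsequent messages for this sender
            }
            buf.write(messages.get(i));
        }
        final Map<String, byte[]> out = new HashMap<>();
        buffers.forEach((sender, buf) -> out.put(sender, buf.toByteArray()));
        return out;
    }
}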

Example 69 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class LookupAttribute, method onTrigger:

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final LookupService lookupService = context.getProperty(LOOKUP_SERVICE).asControllerService(LookupService.class);
    final boolean includeEmptyValues = context.getProperty(INCLUDE_EMPTY_VALUES).asBoolean();
    for (FlowFile flowFile : session.get(50)) {
        try {
            onTrigger(logger, lookupService, includeEmptyValues, flowFile, session);
        } catch (final IOException e) {
            throw new ProcessException(e.getMessage(), e);
        }
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) ProcessException(org.apache.nifi.processor.exception.ProcessException) StringLookupService(org.apache.nifi.lookup.StringLookupService) LookupService(org.apache.nifi.lookup.LookupService) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog)
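
Pulling up to 50 FlowFiles per invocation amortizes per-trigger overhead while keeping failure handling per FlowFile. A minimal sketch of that batch-and-delegate shape (handleOne is a hypothetical stand-in for the private onTrigger overload):

import java.io.IOException;
import java.util.List;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;

abstract class BatchDelegate {

    // Hypothetical per-FlowFile handler, standing in for the private onTrigger overload.
    abstract void handleOne(FlowFile flowFile, ProcessSession session) throws IOException;

    // Fetch a bounded batch and delegate; checked IO failures become ProcessException
    // so the framework can roll the session back.
    void processBatch(final ProcessSession session) {
        final List<FlowFile> batch = session.get(50);
        for (final FlowFile flowFile : batch) {
            try {
                handleOne(flowFile, session);
            } catch (final IOException e) {
                throw new ProcessException(e.getMessage(), e);
            }
        }
    }
}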

Example 70 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class MergeRecord, method binFlowFile:

private void binFlowFile(final ProcessContext context, final FlowFile flowFile, final ProcessSession session, final RecordBinManager binManager, final boolean block) {
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    try (final InputStream in = session.read(flowFile);
        final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
        final RecordSchema schema = reader.getSchema();
        final String groupId = getGroupId(context, flowFile, schema, session);
        getLogger().debug("Got Group ID {} for {}", new Object[] { groupId, flowFile });
        binManager.add(groupId, flowFile, reader, session, block);
    } catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
        throw new ProcessException(e);
    }
}
Also used : ProcessException(org.apache.nifi.processor.exception.ProcessException) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) IOException(java.io.IOException) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException)
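
The try-with-resources above closes both the content stream and the record reader even when reader construction fails, and the multi-catch normalizes every checked failure to ProcessException. The same shape, as a sketch that merely counts records (hypothetical helper, same reader-factory API):

import java.io.IOException;
import java.io.InputStream;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.schema.access.SchemaNotFoundException;
import org.apache.nifi.serialization.MalformedRecordException;
import org.apache.nifi.serialization.RecordReader;
import org.apache.nifi.serialization.RecordReaderFactory;

final class RecordCounter {

    // Hypothetical helper mirroring binFlowFile's resource pattern: the content stream
    // and the reader share one try-with-resources, and checked failures become
    // ProcessException.
    static long countRecords(final ProcessSession session, final FlowFile flowFile,
                             final RecordReaderFactory factory, final ComponentLog logger) {
        try (final InputStream in = session.read(flowFile);
             final RecordReader reader = factory.createRecordReader(flowFile, in, logger)) {
            long count = 0;
            while (reader.nextRecord() != null) {
                count++;
            }
            return count;
        } catch (final MalformedRecordException | IOException | SchemaNotFoundException e) {
            throw new ProcessException(e);
        }
    }
}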

Aggregations

ProcessException (org.apache.nifi.processor.exception.ProcessException)274 FlowFile (org.apache.nifi.flowfile.FlowFile)169 IOException (java.io.IOException)162 InputStream (java.io.InputStream)79 HashMap (java.util.HashMap)78 ComponentLog (org.apache.nifi.logging.ComponentLog)78 OutputStream (java.io.OutputStream)62 ArrayList (java.util.ArrayList)55 Map (java.util.Map)52 PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor)39 InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback)38 StopWatch (org.apache.nifi.util.StopWatch)37 HashSet (java.util.HashSet)36 ProcessSession (org.apache.nifi.processor.ProcessSession)35 Relationship (org.apache.nifi.processor.Relationship)33 List (java.util.List)31 OutputStreamCallback (org.apache.nifi.processor.io.OutputStreamCallback)29 AtomicReference (java.util.concurrent.atomic.AtomicReference)28 Set (java.util.Set)26 ProcessContext (org.apache.nifi.processor.ProcessContext)25