Search in sources:

Example 86 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class AbstractDatabaseFetchProcessor, method setup.

public void setup(final ProcessContext context, boolean shouldCleanCache, FlowFile flowFile) {
    synchronized (setupComplete) {
        setupComplete.set(false);
        final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES).evaluateAttributeExpressions(flowFile).getValue();
        // If there are no max-value column names specified, we don't need to perform this processing
        if (StringUtils.isEmpty(maxValueColumnNames)) {
            setupComplete.set(true);
            return;
        }
        // Try to fill the columnTypeMap with the types of the desired max-value columns
        final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
        final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue());
        try (final Connection con = dbcpService.getConnection();
            final Statement st = con.createStatement()) {
            // Try a query that returns no rows, for the purpose of getting metadata about the columns. It is
            // possible to use DatabaseMetaData.getColumns(), but not all drivers support it, notably those that
            // take a schema-on-read approach, such as Apache Drill.
            String query = dbAdapter.getSelectStatement(tableName, maxValueColumnNames, "1 = 0", null, null, null);
            ResultSet resultSet = st.executeQuery(query);
            ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
            int numCols = resultSetMetaData.getColumnCount();
            if (numCols > 0) {
                if (shouldCleanCache) {
                    columnTypeMap.clear();
                }
                for (int i = 1; i <= numCols; i++) {
                    String colName = resultSetMetaData.getColumnName(i).toLowerCase();
                    String colKey = getStateKey(tableName, colName);
                    int colType = resultSetMetaData.getColumnType(i);
                    columnTypeMap.putIfAbsent(colKey, colType);
                }
            } else {
                throw new ProcessException("No columns found in table from those specified: " + maxValueColumnNames);
            }
        } catch (SQLException e) {
            throw new ProcessException("Unable to communicate with database in order to determine column types", e);
        }
        setupComplete.set(true);
    }
}
Also used: ResultSetMetaData(java.sql.ResultSetMetaData), ProcessException(org.apache.nifi.processor.exception.ProcessException), SQLException(java.sql.SQLException), Statement(java.sql.Statement), DBCPService(org.apache.nifi.dbcp.DBCPService), Connection(java.sql.Connection), ResultSet(java.sql.ResultSet), DatabaseAdapter(org.apache.nifi.processors.standard.db.DatabaseAdapter)
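
The setup() method above issues a query that can never return rows ("1 = 0") purely to obtain ResultSetMetaData, because not every driver supports DatabaseMetaData.getColumns(). A minimal standalone sketch of that probe technique, assuming a javax.sql.DataSource named dataSource; the class name, table name, and column list are illustrative, not taken from the NiFi code:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;

public class ColumnTypeProbe {

    // Returns a map of lower-cased column name to its java.sql.Types constant.
    static Map<String, Integer> probeColumnTypes(final DataSource dataSource, final String tableName, final String columnList) throws SQLException {
        final Map<String, Integer> types = new HashMap<>();
        // "WHERE 1 = 0" matches no rows but still yields complete column metadata.
        final String query = "SELECT " + columnList + " FROM " + tableName + " WHERE 1 = 0";
        try (final Connection con = dataSource.getConnection();
            final Statement st = con.createStatement();
            final ResultSet rs = st.executeQuery(query)) {
            final ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                types.put(meta.getColumnName(i).toLowerCase(), meta.getColumnType(i));
            }
        }
        return types;
    }
}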

Example 87 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class AbstractRecordProcessor, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final Map<String, String> attributes = new HashMap<>();
    final AtomicInteger recordCount = new AtomicInteger();
    final FlowFile original = flowFile;
    final Map<String, String> originalAttributes = flowFile.getAttributes();
    try {
        flowFile = session.write(flowFile, new StreamCallback() {

            @Override
            public void process(final InputStream in, final OutputStream out) throws IOException {
                try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
                    final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
                    try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) {
                        writer.beginRecordSet();
                        Record record;
                        while ((record = reader.nextRecord()) != null) {
                            final Record processed = AbstractRecordProcessor.this.process(record, writeSchema, original, context);
                            writer.write(processed);
                        }
                        final WriteResult writeResult = writer.finishRecordSet();
                        attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
                        attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
                        attributes.putAll(writeResult.getAttributes());
                        recordCount.set(writeResult.getRecordCount());
                    }
                } catch (final SchemaNotFoundException e) {
                    throw new ProcessException(e.getLocalizedMessage(), e);
                } catch (final MalformedRecordException e) {
                    throw new ProcessException("Could not parse incoming data", e);
                }
            }
        });
    } catch (final Exception e) {
        getLogger().error("Failed to process {}; will route to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    flowFile = session.putAllAttributes(flowFile, attributes);
    session.transfer(flowFile, REL_SUCCESS);
    final int count = recordCount.get();
    session.adjustCounter("Records Processed", count, false);
    getLogger().info("Successfully converted {} records for {}", new Object[] { count, flowFile });
}
Also used: FlowFile(org.apache.nifi.flowfile.FlowFile), HashMap(java.util.HashMap), InputStream(java.io.InputStream), OutputStream(java.io.OutputStream), RecordReader(org.apache.nifi.serialization.RecordReader), RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter), StreamCallback(org.apache.nifi.processor.io.StreamCallback), SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException), ProcessException(org.apache.nifi.processor.exception.ProcessException), MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException), IOException(java.io.IOException), RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory), WriteResult(org.apache.nifi.serialization.WriteResult), RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory), AtomicInteger(java.util.concurrent.atomic.AtomicInteger), Record(org.apache.nifi.serialization.record.Record), RecordSchema(org.apache.nifi.serialization.record.RecordSchema)
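
Concrete subclasses plug into this template via a per-record process(...) hook, judging from the call AbstractRecordProcessor.this.process(record, writeSchema, original, context) above. A sketch of a minimal subclass that upper-cases a hypothetical "name" field; the hook signature is inferred from that call site and the field name is illustrative:

import java.util.HashMap;
import java.util.Map;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordSchema;

public class UpperCaseNameField extends AbstractRecordProcessor {

    @Override
    protected Record process(final Record record, final RecordSchema writeSchema, final FlowFile flowFile, final ProcessContext context) {
        // Copy the record's values, modify the hypothetical "name" field, and rebuild the record.
        final Map<String, Object> values = new HashMap<>(record.toMap());
        final Object name = values.get("name");
        if (name instanceof String) {
            values.put("name", ((String) name).toUpperCase());
        }
        return new MapRecord(record.getSchema(), values);
    }
}

Because the framework handles reading, writing, attribute bookkeeping, and failure routing, the subclass only has to transform one Record at a time.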

Example 88 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class AbstractRouteRecord, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final T flowFileContext;
    try {
        flowFileContext = getFlowFileContext(flowFile, context);
    } catch (final Exception e) {
        getLogger().error("Failed to process {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final AtomicInteger numRecords = new AtomicInteger(0);
    final Map<Relationship, Tuple<FlowFile, RecordSetWriter>> writers = new HashMap<>();
    final FlowFile original = flowFile;
    final Map<String, String> originalAttributes = original.getAttributes();
    try {
        session.read(flowFile, new InputStreamCallback() {

            @Override
            public void process(final InputStream in) throws IOException {
                try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
                    final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
                    Record record;
                    while ((record = reader.nextRecord()) != null) {
                        final Set<Relationship> relationships = route(record, writeSchema, original, context, flowFileContext);
                        numRecords.incrementAndGet();
                        for (final Relationship relationship : relationships) {
                            final RecordSetWriter recordSetWriter;
                            Tuple<FlowFile, RecordSetWriter> tuple = writers.get(relationship);
                            if (tuple == null) {
                                FlowFile outFlowFile = session.create(original);
                                final OutputStream out = session.write(outFlowFile);
                                recordSetWriter = writerFactory.createWriter(getLogger(), writeSchema, out);
                                recordSetWriter.beginRecordSet();
                                tuple = new Tuple<>(outFlowFile, recordSetWriter);
                                writers.put(relationship, tuple);
                            } else {
                                recordSetWriter = tuple.getValue();
                            }
                            recordSetWriter.write(record);
                        }
                    }
                } catch (final SchemaNotFoundException | MalformedRecordException e) {
                    throw new ProcessException("Could not parse incoming data", e);
                }
            }
        });
        for (final Map.Entry<Relationship, Tuple<FlowFile, RecordSetWriter>> entry : writers.entrySet()) {
            final Relationship relationship = entry.getKey();
            final Tuple<FlowFile, RecordSetWriter> tuple = entry.getValue();
            final RecordSetWriter writer = tuple.getValue();
            FlowFile childFlowFile = tuple.getKey();
            final WriteResult writeResult = writer.finishRecordSet();
            try {
                writer.close();
            } catch (final IOException ioe) {
                getLogger().warn("Failed to close Writer for {}", new Object[] { childFlowFile });
            }
            final Map<String, String> attributes = new HashMap<>();
            attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
            childFlowFile = session.putAllAttributes(childFlowFile, attributes);
            session.transfer(childFlowFile, relationship);
            session.adjustCounter("Records Processed", writeResult.getRecordCount(), false);
            session.adjustCounter("Records Routed to " + relationship.getName(), writeResult.getRecordCount(), false);
            session.getProvenanceReporter().route(childFlowFile, relationship);
        }
    } catch (final Exception e) {
        getLogger().error("Failed to process {}", new Object[] { flowFile, e });
        for (final Tuple<FlowFile, RecordSetWriter> tuple : writers.values()) {
            try {
                tuple.getValue().close();
            } catch (final Exception e1) {
                getLogger().warn("Failed to close Writer for {}; some resources may not be cleaned up appropriately", new Object[] { tuple.getKey() });
            }
            session.remove(tuple.getKey());
        }
        session.transfer(flowFile, REL_FAILURE);
        return;
    } finally {
        for (final Tuple<FlowFile, RecordSetWriter> tuple : writers.values()) {
            final RecordSetWriter writer = tuple.getValue();
            try {
                writer.close();
            } catch (final Exception e) {
                getLogger().warn("Failed to close Record Writer for {}; some resources may not be properly cleaned up", new Object[] { tuple.getKey(), e });
            }
        }
    }
    if (isRouteOriginal()) {
        flowFile = session.putAttribute(flowFile, "record.count", String.valueOf(numRecords));
        session.transfer(flowFile, REL_ORIGINAL);
    } else {
        session.remove(flowFile);
    }
    getLogger().info("Successfully processed {}, creating {} derivative FlowFiles and processing {} records", new Object[] { flowFile, writers.size(), numRecords });
}
Also used: HashSet(java.util.HashSet), Set(java.util.Set), HashMap(java.util.HashMap), RecordReader(org.apache.nifi.serialization.RecordReader), OutputStream(java.io.OutputStream), RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter), RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory), Record(org.apache.nifi.serialization.record.Record), RecordSchema(org.apache.nifi.serialization.record.RecordSchema), FlowFile(org.apache.nifi.flowfile.FlowFile), InputStream(java.io.InputStream), IOException(java.io.IOException), SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException), ProcessException(org.apache.nifi.processor.exception.ProcessException), MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException), RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory), WriteResult(org.apache.nifi.serialization.WriteResult), AtomicInteger(java.util.concurrent.atomic.AtomicInteger), Relationship(org.apache.nifi.processor.Relationship), InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback), Map(java.util.Map), Tuple(org.apache.nifi.util.Tuple)
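
Here the abstract hooks appear to be getFlowFileContext(...), route(...), and isRouteOriginal(), inferred from the calls above. A sketch of a subclass that routes each record by comparing a hypothetical "status" field against a FlowFile attribute; the relationship names, attribute name, and hook signatures are assumptions drawn from this excerpt:

import java.util.Collections;
import java.util.Set;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordSchema;

public class RouteOnStatus extends AbstractRouteRecord<String> {

    static final Relationship REL_MATCHED = new Relationship.Builder().name("matched").build();
    static final Relationship REL_UNMATCHED = new Relationship.Builder().name("unmatched").build();

    @Override
    protected String getFlowFileContext(final FlowFile flowFile, final ProcessContext context) {
        // Computed once per FlowFile and passed to every route() call; here it is
        // simply the value of an illustrative attribute.
        return flowFile.getAttribute("match.status");
    }

    @Override
    protected Set<Relationship> route(final Record record, final RecordSchema writeSchema, final FlowFile flowFile,
            final ProcessContext context, final String matchStatus) {
        final String status = record.getAsString("status");
        final boolean matched = matchStatus != null && matchStatus.equals(status);
        return Collections.singleton(matched ? REL_MATCHED : REL_UNMATCHED);
    }

    @Override
    protected boolean isRouteOriginal() {
        // Keep the incoming FlowFile and transfer it to REL_ORIGINAL.
        return true;
    }
}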

Example 89 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class Base64EncodeContent, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
    try {
        final StopWatch stopWatch = new StopWatch(true);
        if (encode) {
            flowFile = session.write(flowFile, new StreamCallback() {

                @Override
                public void process(InputStream in, OutputStream out) throws IOException {
                    try (Base64OutputStream bos = new Base64OutputStream(out)) {
                        int len = -1;
                        byte[] buf = new byte[8192];
                        while ((len = in.read(buf)) > 0) {
                            bos.write(buf, 0, len);
                        }
                        bos.flush();
                    }
                }
            });
        } else {
            flowFile = session.write(flowFile, new StreamCallback() {

                @Override
                public void process(InputStream in, OutputStream out) throws IOException {
                    try (Base64InputStream bis = new Base64InputStream(new ValidatingBase64InputStream(in))) {
                        int len = -1;
                        byte[] buf = new byte[8192];
                        while ((len = bis.read(buf)) > 0) {
                            out.write(buf, 0, len);
                        }
                        out.flush();
                    }
                }
            });
        }
        logger.info("Successfully {} {}", new Object[] { encode ? "encoded" : "decoded", flowFile });
        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (ProcessException e) {
        logger.error("Failed to {} {} due to {}", new Object[] { encode ? "encode" : "decode", flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used: FlowFile(org.apache.nifi.flowfile.FlowFile), ValidatingBase64InputStream(org.apache.nifi.processors.standard.util.ValidatingBase64InputStream), Base64InputStream(org.apache.commons.codec.binary.Base64InputStream), InputStream(java.io.InputStream), Base64OutputStream(org.apache.commons.codec.binary.Base64OutputStream), OutputStream(java.io.OutputStream), ComponentLog(org.apache.nifi.logging.ComponentLog), StreamCallback(org.apache.nifi.processor.io.StreamCallback), StopWatch(org.apache.nifi.util.StopWatch), ProcessException(org.apache.nifi.processor.exception.ProcessException)
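
Both branches stream through commons-codec's Base64 wrappers with an 8 KB copy loop, so content of any size is encoded or decoded without being buffered in memory. A minimal standalone sketch of the encode path; the class name and sample input are illustrative:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64OutputStream;

public class Base64StreamDemo {

    public static void main(final String[] args) throws IOException {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Wrapping the destination stream Base64-encodes everything written through it.
        try (final InputStream in = new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8));
            final Base64OutputStream bos = new Base64OutputStream(out)) {
            final byte[] buf = new byte[8192];
            int len;
            while ((len = in.read(buf)) > 0) {
                bos.write(buf, 0, len);
            }
        }
        // Prints "aGVsbG8=" (the default codec may append a trailing line separator).
        System.out.println(out.toString(StandardCharsets.UTF_8.name()));
    }
}

Closing the Base64OutputStream inside try-with-resources flushes the final Base64 block before the buffer is read.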

Example 90 with ProcessException

Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

The class ConvertJSONToSQL, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final boolean translateFieldNames = context.getProperty(TRANSLATE_FIELD_NAMES).asBoolean();
    final boolean ignoreUnmappedFields = IGNORE_UNMATCHED_FIELD.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_FIELD_BEHAVIOR).getValue());
    final String statementType = context.getProperty(STATEMENT_TYPE).getValue();
    final String updateKeys = context.getProperty(UPDATE_KEY).evaluateAttributeExpressions(flowFile).getValue();
    final String catalog = context.getProperty(CATALOG_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final SchemaKey schemaKey = new SchemaKey(catalog, tableName);
    final boolean includePrimaryKeys = UPDATE_TYPE.equals(statementType) && updateKeys == null;
    // Is the unmatched column behavior fail or warning?
    final boolean failUnmappedColumns = FAIL_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
    final boolean warningUnmappedColumns = WARNING_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
    // Escape column names?
    final boolean escapeColumnNames = context.getProperty(QUOTED_IDENTIFIERS).asBoolean();
    // Quote table name?
    final boolean quoteTableName = context.getProperty(QUOTED_TABLE_IDENTIFIER).asBoolean();
    // Attribute prefix
    final String attributePrefix = context.getProperty(SQL_PARAM_ATTR_PREFIX).evaluateAttributeExpressions(flowFile).getValue();
    // Get the database schema from the cache, if one exists. We do this in a synchronized block, rather than
    // using a ConcurrentMap, because the Map we are using is a LinkedHashMap with a bounded capacity: if the
    // Map grows beyond that capacity, the oldest elements are evicted. This avoids filling the Java heap when
    // many different SQL statements are generated that reference different tables.
    TableSchema schema;
    synchronized (this) {
        schema = schemaCache.get(schemaKey);
        if (schema == null) {
            // No schema exists for this table yet. Query the database to determine the schema and put it into the cache.
            final DBCPService dbcpService = context.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class);
            try (final Connection conn = dbcpService.getConnection()) {
                schema = TableSchema.from(conn, catalog, schemaName, tableName, translateFieldNames, includePrimaryKeys);
                schemaCache.put(schemaKey, schema);
            } catch (final SQLException e) {
                getLogger().error("Failed to convert {} into a SQL statement due to {}; routing to failure", new Object[] { flowFile, e.toString() }, e);
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
        }
    }
    // Parse the JSON document
    final ObjectMapper mapper = new ObjectMapper();
    final AtomicReference<JsonNode> rootNodeRef = new AtomicReference<>(null);
    try {
        session.read(flowFile, new InputStreamCallback() {

            @Override
            public void process(final InputStream in) throws IOException {
                try (final InputStream bufferedIn = new BufferedInputStream(in)) {
                    rootNodeRef.set(mapper.readTree(bufferedIn));
                }
            }
        });
    } catch (final ProcessException pe) {
        getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[] { flowFile, pe.toString() }, pe);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final JsonNode rootNode = rootNodeRef.get();
    // The root node may or may not be a JSON array. If it isn't, wrap it in an ArrayNode containing just the
    // root node, so that the loop below can iterate over a single array in either case rather than duplicating
    // the per-node logic.
    final ArrayNode arrayNode;
    if (rootNode.isArray()) {
        arrayNode = (ArrayNode) rootNode;
    } else {
        final JsonNodeFactory nodeFactory = JsonNodeFactory.instance;
        arrayNode = new ArrayNode(nodeFactory);
        arrayNode.add(rootNode);
    }
    final String fragmentIdentifier = UUID.randomUUID().toString();
    final Set<FlowFile> created = new HashSet<>();
    for (int i = 0; i < arrayNode.size(); i++) {
        final JsonNode jsonNode = arrayNode.get(i);
        final String sql;
        final Map<String, String> attributes = new HashMap<>();
        try {
            // build the fully qualified table name
            final StringBuilder tableNameBuilder = new StringBuilder();
            if (catalog != null) {
                tableNameBuilder.append(catalog).append(".");
            }
            if (schemaName != null) {
                tableNameBuilder.append(schemaName).append(".");
            }
            tableNameBuilder.append(tableName);
            final String fqTableName = tableNameBuilder.toString();
            if (INSERT_TYPE.equals(statementType)) {
                sql = generateInsert(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
            } else if (UPDATE_TYPE.equals(statementType)) {
                sql = generateUpdate(jsonNode, attributes, fqTableName, updateKeys, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
            } else {
                sql = generateDelete(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
            }
        } catch (final ProcessException pe) {
            getLogger().error("Failed to convert {} to a SQL {} statement due to {}; routing to failure", new Object[] { flowFile, statementType, pe.toString() }, pe);
            session.remove(created);
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
        FlowFile sqlFlowFile = session.create(flowFile);
        created.add(sqlFlowFile);
        sqlFlowFile = session.write(sqlFlowFile, new OutputStreamCallback() {

            @Override
            public void process(final OutputStream out) throws IOException {
                out.write(sql.getBytes(StandardCharsets.UTF_8));
            }
        });
        attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
        attributes.put(attributePrefix + ".table", tableName);
        attributes.put(FRAGMENT_ID.key(), fragmentIdentifier);
        attributes.put(FRAGMENT_COUNT.key(), String.valueOf(arrayNode.size()));
        attributes.put(FRAGMENT_INDEX.key(), String.valueOf(i));
        if (catalog != null) {
            attributes.put(attributePrefix + ".catalog", catalog);
        }
        sqlFlowFile = session.putAllAttributes(sqlFlowFile, attributes);
        session.transfer(sqlFlowFile, REL_SQL);
    }
    flowFile = copyAttributesToOriginal(session, flowFile, fragmentIdentifier, arrayNode.size());
    session.transfer(flowFile, REL_ORIGINAL);
}
Also used: SQLException(java.sql.SQLException), HashMap(java.util.HashMap), LinkedHashMap(java.util.LinkedHashMap), OutputStream(java.io.OutputStream), JsonNode(org.codehaus.jackson.JsonNode), BufferedInputStream(java.io.BufferedInputStream), ArrayNode(org.codehaus.jackson.node.ArrayNode), OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback), ObjectMapper(org.codehaus.jackson.map.ObjectMapper), HashSet(java.util.HashSet), FlowFile(org.apache.nifi.flowfile.FlowFile), InputStream(java.io.InputStream), Connection(java.sql.Connection), AtomicReference(java.util.concurrent.atomic.AtomicReference), IOException(java.io.IOException), JsonNodeFactory(org.codehaus.jackson.node.JsonNodeFactory), ProcessException(org.apache.nifi.processor.exception.ProcessException), DBCPService(org.apache.nifi.dbcp.DBCPService), InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback)
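
The schema cache described in the comments above is a LinkedHashMap whose bounded capacity keeps cached table schemas from filling the heap. A minimal sketch of that eviction pattern using removeEldestEntry; the class name and capacity are illustrative, and the exact construction this processor uses is not shown in the excerpt:

import java.util.LinkedHashMap;
import java.util.Map;

public class BoundedCache<K, V> extends LinkedHashMap<K, V> {

    private final int capacity;

    public BoundedCache(final int capacity) {
        this.capacity = capacity;
    }

    @Override
    protected boolean removeEldestEntry(final Map.Entry<K, V> eldest) {
        // Evict the oldest entry once the map outgrows its capacity.
        return size() > capacity;
    }
}

A cache such as new BoundedCache<SchemaKey, TableSchema>(100) would then behave as described; since LinkedHashMap is not thread-safe, access still needs external locking, which is why the processor guards the cache with a synchronized block.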

Aggregations

ProcessException (org.apache.nifi.processor.exception.ProcessException): 274
FlowFile (org.apache.nifi.flowfile.FlowFile): 169
IOException (java.io.IOException): 162
InputStream (java.io.InputStream): 79
HashMap (java.util.HashMap): 78
ComponentLog (org.apache.nifi.logging.ComponentLog): 78
OutputStream (java.io.OutputStream): 62
ArrayList (java.util.ArrayList): 55
Map (java.util.Map): 52
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 39
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 38
StopWatch (org.apache.nifi.util.StopWatch): 37
HashSet (java.util.HashSet): 36
ProcessSession (org.apache.nifi.processor.ProcessSession): 35
Relationship (org.apache.nifi.processor.Relationship): 33
List (java.util.List): 31
OutputStreamCallback (org.apache.nifi.processor.io.OutputStreamCallback): 29
AtomicReference (java.util.concurrent.atomic.AtomicReference): 28
Set (java.util.Set): 26
ProcessContext (org.apache.nifi.processor.ProcessContext): 25