Example 6 with RecordSet

Use of org.apache.nifi.serialization.record.RecordSet in project nifi by apache.

From class TestWriteCSVResult, method testDataTypes:

@Test
public void testDataTypes() throws IOException {
    final CSVFormat csvFormat = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withRecordSeparator("\n");
    final StringBuilder headerBuilder = new StringBuilder();
    final List<RecordField> fields = new ArrayList<>();
    for (final RecordFieldType fieldType : RecordFieldType.values()) {
        if (fieldType == RecordFieldType.CHOICE) {
            final List<DataType> possibleTypes = new ArrayList<>();
            possibleTypes.add(RecordFieldType.INT.getDataType());
            possibleTypes.add(RecordFieldType.LONG.getDataType());
            fields.add(new RecordField(fieldType.name().toLowerCase(), fieldType.getChoiceDataType(possibleTypes)));
        } else {
            fields.add(new RecordField(fieldType.name().toLowerCase(), fieldType.getDataType()));
        }
        headerBuilder.append('"').append(fieldType.name().toLowerCase()).append('"').append(",");
    }
    final RecordSchema schema = new SimpleRecordSchema(fields);
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final long now = System.currentTimeMillis();
    try (final WriteCSVResult result = new WriteCSVResult(csvFormat, schema, new SchemaNameAsAttribute(), baos, RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(), RecordFieldType.TIMESTAMP.getDefaultFormat(), true, "UTF-8")) {
        final Map<String, Object> valueMap = new HashMap<>();
        valueMap.put("string", "a孟bc李12儒3");
        valueMap.put("boolean", true);
        valueMap.put("byte", (byte) 1);
        valueMap.put("char", 'c');
        valueMap.put("short", (short) 8);
        valueMap.put("int", 9);
        valueMap.put("bigint", BigInteger.valueOf(8L));
        valueMap.put("long", 8L);
        valueMap.put("float", 8.0F);
        valueMap.put("double", 8.0D);
        valueMap.put("date", new Date(now));
        valueMap.put("time", new Time(now));
        valueMap.put("timestamp", new Timestamp(now));
        valueMap.put("record", null);
        valueMap.put("choice", 48L);
        valueMap.put("array", null);
        final Record record = new MapRecord(schema, valueMap);
        final RecordSet rs = RecordSet.of(schema, record);
        result.write(rs);
    }
    final String output = new String(baos.toByteArray(), StandardCharsets.UTF_8);
    headerBuilder.deleteCharAt(headerBuilder.length() - 1);
    final String headerLine = headerBuilder.toString();
    final String[] splits = output.split("\n");
    assertEquals(2, splits.length);
    assertEquals(headerLine, splits[0]);
    final String values = splits[1];
    final StringBuilder expectedBuilder = new StringBuilder();
    expectedBuilder.append("\"a孟bc李12儒3\",\"true\",\"1\",\"c\",\"8\",\"9\",\"8\",\"8\",\"8.0\",\"8.0\",");
    final String dateValue = getDateFormat(RecordFieldType.DATE.getDefaultFormat()).format(now);
    final String timeValue = getDateFormat(RecordFieldType.TIME.getDefaultFormat()).format(now);
    final String timestampValue = getDateFormat(RecordFieldType.TIMESTAMP.getDefaultFormat()).format(now);
    expectedBuilder.append('"').append(dateValue).append('"').append(',');
    expectedBuilder.append('"').append(timeValue).append('"').append(',');
    expectedBuilder.append('"').append(timestampValue).append('"').append(',');
    expectedBuilder.append(",\"48\",,");
    final String expectedValues = expectedBuilder.toString();
    assertEquals(expectedValues, values);
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) ArrayList(java.util.ArrayList) Time(java.sql.Time) Timestamp(java.sql.Timestamp) DataType(org.apache.nifi.serialization.record.DataType) Record(org.apache.nifi.serialization.record.Record) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordSet(org.apache.nifi.serialization.record.RecordSet) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) SchemaNameAsAttribute(org.apache.nifi.schema.access.SchemaNameAsAttribute) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Date(java.sql.Date) CSVFormat(org.apache.commons.csv.CSVFormat) RecordFieldType(org.apache.nifi.serialization.record.RecordFieldType) Test(org.junit.Test)
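
RecordSet.of(schema, record) above builds a simple in-memory RecordSet. The entire contract the writer relies on is two methods: getSchema(), and next(), which returns null once the set is exhausted. A minimal hand-rolled equivalent (a sketch of that contract only; needs java.util.Arrays and java.util.Iterator in addition to the imports above):

static RecordSet inMemoryRecordSet(final RecordSchema schema, final Record... records) {
    // Equivalent in spirit to RecordSet.of(schema, records)
    final Iterator<Record> itr = Arrays.asList(records).iterator();
    return new RecordSet() {

        @Override
        public RecordSchema getSchema() {
            return schema;
        }

        @Override
        public Record next() {
            // Hand out each Record in turn; null signals the end of the set
            return itr.hasNext() ? itr.next() : null;
        }
    };
}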

Example 7 with RecordSet

Use of org.apache.nifi.serialization.record.RecordSet in project nifi by apache.

From class PublishKafkaRecord_0_10, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(1, DataUnit.MB, 500));
    if (flowFiles.isEmpty()) {
        return;
    }
    final PublisherPool pool = getPublisherPool(context);
    if (pool == null) {
        context.yield();
        return;
    }
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final long startTime = System.nanoTime();
    try (final PublisherLease lease = pool.obtainPublisher()) {
        // Send each FlowFile to Kafka asynchronously.
        for (final FlowFile flowFile : flowFiles) {
            if (!isScheduled()) {
                // If stopped, re-queue FlowFile instead of sending it
                session.transfer(flowFile);
                continue;
            }
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(flowFile).getValue();
            final String messageKeyField = context.getProperty(MESSAGE_KEY_FIELD).evaluateAttributeExpressions(flowFile).getValue();
            final Map<String, String> attributes = flowFile.getAttributes();
            try {
                session.read(flowFile, new InputStreamCallback() {

                    @Override
                    public void process(final InputStream rawIn) throws IOException {
                        try (final InputStream in = new BufferedInputStream(rawIn)) {
                            final RecordReader reader = readerFactory.createRecordReader(attributes, in, getLogger());
                            final RecordSet recordSet = reader.createRecordSet();
                            final RecordSchema schema = writerFactory.getSchema(attributes, recordSet.getSchema());
                            lease.publish(flowFile, recordSet, writerFactory, schema, messageKeyField, topic);
                        } catch (final SchemaNotFoundException | MalformedRecordException e) {
                            throw new ProcessException(e);
                        }
                    }
                });
            } catch (final Exception e) {
                // The FlowFile will be obtained and the error logged below, when calling publishResult.getFailedFlowFiles()
                lease.getTracker().fail(flowFile, e);
                continue;
            }
        }
        // Complete the send
        final PublishResult publishResult = lease.complete();
        // Transfer any successful FlowFiles.
        final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
        for (FlowFile success : publishResult.getSuccessfulFlowFiles()) {
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(success).getValue();
            final int msgCount = publishResult.getSuccessfulMessageCount(success);
            success = session.putAttribute(success, MSG_COUNT, String.valueOf(msgCount));
            session.adjustCounter("Messages Sent", msgCount, true);
            final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, topic);
            session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
            session.transfer(success, REL_SUCCESS);
        }
        // Transfer any failures.
        for (final FlowFile failure : publishResult.getFailedFlowFiles()) {
            final int successCount = publishResult.getSuccessfulMessageCount(failure);
            if (successCount > 0) {
                getLogger().error("Failed to send some messages for {} to Kafka, but {} messages were acknowledged by Kafka. Routing to failure due to {}", new Object[] { failure, successCount, publishResult.getReasonForFailure(failure) });
            } else {
                getLogger().error("Failed to send all message for {} to Kafka; routing to failure due to {}", new Object[] { failure, publishResult.getReasonForFailure(failure) });
            }
            session.transfer(failure, REL_FAILURE);
        }
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) BufferedInputStream(java.io.BufferedInputStream) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) IOException(java.io.IOException) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) ProcessException(org.apache.nifi.processor.exception.ProcessException) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) RecordSet(org.apache.nifi.serialization.record.RecordSet) RecordSchema(org.apache.nifi.serialization.record.RecordSchema)
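
Here the RecordSet is handed to lease.publish(...), which drains it one record at a time so that large FlowFiles stream to Kafka without being buffered whole. A rough sketch of that consumption pattern, assuming the createWriter(logger, schema, out) variant of the RecordSetWriterFactory API from this NiFi version (an assumption, since the listing only shows getSchema); sendMessage is a hypothetical stand-in for the Kafka producer call, and Record, RecordSetWriter, ComponentLog, and ByteArrayOutputStream are needed beyond the imports listed:

private void drainToKafka(final RecordSet recordSet, final RecordSetWriterFactory writerFactory,
        final RecordSchema schema, final ComponentLog logger) throws IOException, SchemaNotFoundException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
        Record record;
        while ((record = recordSet.next()) != null) {
            baos.reset();
            // Serialize a single record; each one becomes one Kafka message
            writer.write(record);
            writer.flush();
            sendMessage(baos.toByteArray()); // hypothetical helper wrapping the producer
        }
    }
}

The real PublisherLease additionally extracts the message key field and tracks per-record failures, which is what the lease.getTracker().fail(...) call above hooks into.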

Example 8 with RecordSet

Use of org.apache.nifi.serialization.record.RecordSet in project nifi by apache.

From class AbstractPutHDFSRecord, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // do this before getting a flow file so that we always get a chance to attempt Kerberos relogin
    final FileSystem fileSystem = getFileSystem();
    final Configuration configuration = getConfiguration();
    final UserGroupInformation ugi = getUserGroupInformation();
    if (configuration == null || fileSystem == null || ugi == null) {
        getLogger().error("Processor not configured properly because Configuration, FileSystem, or UserGroupInformation was null");
        context.yield();
        return;
    }
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        context.yield();
        return;
    }
    ugi.doAs((PrivilegedAction<Object>) () -> {
        Path tempDotCopyFile = null;
        FlowFile putFlowFile = flowFile;
        try {
            // TODO codec extension
            final String filenameValue = putFlowFile.getAttribute(CoreAttributes.FILENAME.key());
            final String directoryValue = context.getProperty(DIRECTORY).evaluateAttributeExpressions(putFlowFile).getValue();
            // create the directory if it doesn't exist
            final Path directoryPath = new Path(directoryValue);
            createDirectory(fileSystem, directoryPath, remoteOwner, remoteGroup);
            // write to tempFile first and on success rename to destFile
            final Path tempFile = new Path(directoryPath, "." + filenameValue);
            final Path destFile = new Path(directoryPath, filenameValue);
            final boolean destinationExists = fileSystem.exists(destFile) || fileSystem.exists(tempFile);
            final boolean shouldOverwrite = context.getProperty(OVERWRITE).asBoolean();
            // if the tempFile or destFile already exist, and overwrite is set to false, then transfer to failure
            if (destinationExists && !shouldOverwrite) {
                session.transfer(session.penalize(putFlowFile), REL_FAILURE);
                getLogger().warn("penalizing {} and routing to failure because file with same name already exists", new Object[] { putFlowFile });
                return null;
            }
            final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
            final AtomicReference<WriteResult> writeResult = new AtomicReference<>();
            final RecordReaderFactory recordReaderFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
            final FlowFile flowFileIn = putFlowFile;
            final StopWatch stopWatch = new StopWatch(true);
            // Read records from the incoming FlowFile and write them to the tempFile
            session.read(putFlowFile, (final InputStream rawIn) -> {
                RecordReader recordReader = null;
                HDFSRecordWriter recordWriter = null;
                try (final BufferedInputStream in = new BufferedInputStream(rawIn)) {
                    // handle this separately from the other IOExceptions which normally route to retry
                    try {
                        recordReader = recordReaderFactory.createRecordReader(flowFileIn, in, getLogger());
                    } catch (Exception e) {
                        final RecordReaderFactoryException rrfe = new RecordReaderFactoryException("Unable to create RecordReader", e);
                        exceptionHolder.set(rrfe);
                        return;
                    }
                    final RecordSet recordSet = recordReader.createRecordSet();
                    recordWriter = createHDFSRecordWriter(context, flowFile, configuration, tempFile, recordReader.getSchema());
                    writeResult.set(recordWriter.write(recordSet));
                } catch (Exception e) {
                    exceptionHolder.set(e);
                } finally {
                    IOUtils.closeQuietly(recordReader);
                    IOUtils.closeQuietly(recordWriter);
                }
            });
            stopWatch.stop();
            final String dataRate = stopWatch.calculateDataRate(putFlowFile.getSize());
            final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
            tempDotCopyFile = tempFile;
            // if any exception was captured while reading or writing the records, re-throw it
            // so that it is routed into one of the appropriate catch blocks below
            if (exceptionHolder.get() != null) {
                throw exceptionHolder.get();
            }
            // Attempt to rename from the tempFile to destFile, and change owner if successfully renamed
            rename(fileSystem, tempFile, destFile);
            changeOwner(fileSystem, destFile, remoteOwner, remoteGroup);
            getLogger().info("Wrote {} to {} in {} milliseconds at a rate of {}", new Object[] { putFlowFile, destFile, millis, dataRate });
            putFlowFile = postProcess(context, session, putFlowFile, destFile);
            final String newFilename = destFile.getName();
            final String hdfsPath = destFile.getParent().toString();
            // Update the filename and absolute path attributes
            final Map<String, String> attributes = new HashMap<>(writeResult.get().getAttributes());
            attributes.put(CoreAttributes.FILENAME.key(), newFilename);
            attributes.put(ABSOLUTE_HDFS_PATH_ATTRIBUTE, hdfsPath);
            attributes.put(RECORD_COUNT_ATTR, String.valueOf(writeResult.get().getRecordCount()));
            putFlowFile = session.putAllAttributes(putFlowFile, attributes);
            // Send a provenance event and transfer to success
            final Path qualifiedPath = destFile.makeQualified(fileSystem.getUri(), fileSystem.getWorkingDirectory());
            session.getProvenanceReporter().send(putFlowFile, qualifiedPath.toString());
            session.transfer(putFlowFile, REL_SUCCESS);
        } catch (IOException | FlowFileAccessException e) {
            deleteQuietly(fileSystem, tempDotCopyFile);
            getLogger().error("Failed to write due to {}", new Object[] { e });
            session.transfer(session.penalize(putFlowFile), REL_RETRY);
            context.yield();
        } catch (Throwable t) {
            deleteQuietly(fileSystem, tempDotCopyFile);
            getLogger().error("Failed to write due to {}", new Object[] { t });
            session.transfer(putFlowFile, REL_FAILURE);
        }
        return null;
    });
}
Also used : Path(org.apache.hadoop.fs.Path) FlowFile(org.apache.nifi.flowfile.FlowFile) Configuration(org.apache.hadoop.conf.Configuration) BufferedInputStream(java.io.BufferedInputStream) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) HDFSRecordWriter(org.apache.nifi.processors.hadoop.record.HDFSRecordWriter) AtomicReference(java.util.concurrent.atomic.AtomicReference) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) ProcessException(org.apache.nifi.processor.exception.ProcessException) RecordReaderFactoryException(org.apache.nifi.processors.hadoop.exception.RecordReaderFactoryException) FlowFileAccessException(org.apache.nifi.processor.exception.FlowFileAccessException) FailureException(org.apache.nifi.processors.hadoop.exception.FailureException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) StopWatch(org.apache.nifi.util.StopWatch) FileSystem(org.apache.hadoop.fs.FileSystem) RecordSet(org.apache.nifi.serialization.record.RecordSet) HashMap(java.util.HashMap) Map(java.util.Map) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
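
A pattern worth noting in this example: checked exceptions such as SchemaNotFoundException cannot escape the session.read(...) stream callback, so they are captured in an AtomicReference and re-thrown once the read returns. Condensed to its essentials (a sketch; the re-throw of a Throwable compiles because, as in the method above, the enclosing try handles Throwable):

final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
session.read(flowFile, (final InputStream rawIn) -> {
    try {
        // ... create the RecordReader, build the RecordSet, write the records ...
    } catch (final Exception e) {
        exceptionHolder.set(e); // stash it; the callback itself may only throw IOException
    }
});
if (exceptionHolder.get() != null) {
    // re-throw outside the callback so the catch blocks can route to retry or failure
    throw exceptionHolder.get();
}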

Example 9 with RecordSet

Use of org.apache.nifi.serialization.record.RecordSet in project nifi by apache.

From class PutParquetTest, method testIOExceptionFromReaderShouldRouteToRetry:

@Test
public void testIOExceptionFromReaderShouldRouteToRetry() throws InitializationException, IOException, MalformedRecordException, SchemaNotFoundException {
    configure(proc, 10);
    final RecordSet recordSet = Mockito.mock(RecordSet.class);
    when(recordSet.next()).thenThrow(new IOException("ERROR"));
    final RecordReader recordReader = Mockito.mock(RecordReader.class);
    when(recordReader.createRecordSet()).thenReturn(recordSet);
    when(recordReader.getSchema()).thenReturn(AvroTypeUtil.createSchema(schema));
    final RecordReaderFactory readerFactory = Mockito.mock(RecordReaderFactory.class);
    when(readerFactory.getIdentifier()).thenReturn("mock-reader-factory");
    when(readerFactory.createRecordReader(any(FlowFile.class), any(InputStream.class), any(ComponentLog.class))).thenReturn(recordReader);
    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");
    final String filename = "testMalformedRecordExceptionShouldRouteToFailure-" + System.currentTimeMillis();
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.FILENAME.key(), filename);
    testRunner.enqueue("trigger", flowFileAttributes);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(PutParquet.REL_RETRY, 1);
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) HashMap(java.util.HashMap) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) RecordSet(org.apache.nifi.serialization.record.RecordSet) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) Test(org.junit.Test)
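
The same Mockito setup extends naturally to happy-path tests. A sketch of a mock RecordSet that yields one record and then signals end-of-set by returning null (next() declares IOException, so the test method must declare it as well):

final Record record = Mockito.mock(Record.class);
final RecordSet recordSet = Mockito.mock(RecordSet.class);
// First call to next() returns the record, the second returns null (end of set)
when(recordSet.next()).thenReturn(record, (Record) null);
final RecordReader recordReader = Mockito.mock(RecordReader.class);
when(recordReader.createRecordSet()).thenReturn(recordSet);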

Example 10 with RecordSet

Use of org.apache.nifi.serialization.record.RecordSet in project nifi by apache.

From class AbstractKudu, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    try {
        if (flowFile == null) {
            return;
        }
        final Map<String, String> attributes = new HashMap<String, String>();
        final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
        final RecordReaderFactory recordReaderFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
        final KuduSession kuduSession = this.getKuduSession(kuduClient);
        session.read(flowFile, (final InputStream rawIn) -> {
            RecordReader recordReader = null;
            try (final BufferedInputStream in = new BufferedInputStream(rawIn)) {
                try {
                    recordReader = recordReaderFactory.createRecordReader(flowFile, in, getLogger());
                } catch (Exception ex) {
                    final RecordReaderFactoryException rrfe = new RecordReaderFactoryException("Unable to create RecordReader", ex);
                    exceptionHolder.set(rrfe);
                    return;
                }
                List<String> fieldNames = recordReader.getSchema().getFieldNames();
                final RecordSet recordSet = recordReader.createRecordSet();
                if (skipHeadLine) {
                    recordSet.next();
                }
                int numOfAddedRecord = 0;
                Record record = recordSet.next();
                while (record != null) {
                    org.apache.kudu.client.Operation oper = null;
                    if (operationType == OperationType.UPSERT) {
                        oper = upsertRecordToKudu(kuduTable, record, fieldNames);
                    } else {
                        oper = insertRecordToKudu(kuduTable, record, fieldNames);
                    }
                    kuduSession.apply(oper);
                    numOfAddedRecord++;
                    record = recordSet.next();
                }
                getLogger().info("KUDU: number of inserted records: " + numOfAddedRecord);
                attributes.put(RECORD_COUNT_ATTR, String.valueOf(numOfAddedRecord));
            } catch (KuduException ex) {
                getLogger().error("Exception occurred while interacting with Kudu due to " + ex.getMessage(), ex);
                exceptionHolder.set(ex);
            } catch (Exception e) {
                exceptionHolder.set(e);
            } finally {
                IOUtils.closeQuietly(recordReader);
            }
        });
        kuduSession.close();
        if (exceptionHolder.get() != null) {
            throw exceptionHolder.get();
        }
        // Update the FlowFile's attributes after the ingestion; FlowFiles are immutable,
        // so capture the new reference returned by putAllAttributes before transferring
        final FlowFile updatedFlowFile = session.putAllAttributes(flowFile, attributes);
        session.transfer(updatedFlowFile, REL_SUCCESS);
        session.getProvenanceReporter().send(updatedFlowFile, "Successfully added flowfile to kudu");
    } catch (IOException | FlowFileAccessException e) {
        getLogger().error("Failed to write due to {}", new Object[] { e });
        session.transfer(flowFile, REL_FAILURE);
    } catch (Throwable t) {
        getLogger().error("Failed to write due to {}", new Object[] { t });
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used : KuduSession(org.apache.kudu.client.KuduSession) HashMap(java.util.HashMap) RecordReader(org.apache.nifi.serialization.RecordReader) KuduException(org.apache.kudu.client.KuduException) BufferedInputStream(java.io.BufferedInputStream) RecordReaderFactoryException(org.apache.nifi.processors.hadoop.exception.RecordReaderFactoryException) Record(org.apache.nifi.serialization.record.Record) RecordSet(org.apache.nifi.serialization.record.RecordSet) FlowFile(org.apache.nifi.flowfile.FlowFile) FlowFileAccessException(org.apache.nifi.processor.exception.FlowFileAccessException) InputStream(java.io.InputStream) AtomicReference(java.util.concurrent.atomic.AtomicReference) IOException(java.io.IOException) ProcessException(org.apache.nifi.processor.exception.ProcessException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory)
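
One detail the Kudu example depends on: FlowFiles are immutable, so every ProcessSession mutation returns a new FlowFile reference that must be captured and used from then on (this is why the result of putAllAttributes is assigned before the transfer above). In miniature:

// Each attribute update returns a fresh FlowFile; keep and transfer the most
// recent reference, or the added attributes are silently lost
FlowFile updated = session.putAllAttributes(flowFile, attributes);
updated = session.putAttribute(updated, "record.count", "42"); // illustrative key/value
session.transfer(updated, REL_SUCCESS);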

Aggregations

RecordSet (org.apache.nifi.serialization.record.RecordSet): 14 uses
RecordSchema (org.apache.nifi.serialization.record.RecordSchema): 10 uses
IOException (java.io.IOException): 9 uses
FlowFile (org.apache.nifi.flowfile.FlowFile): 9 uses
HashMap (java.util.HashMap): 8 uses
RecordReader (org.apache.nifi.serialization.RecordReader): 8 uses
Record (org.apache.nifi.serialization.record.Record): 8 uses
InputStream (java.io.InputStream): 7 uses
ProcessException (org.apache.nifi.processor.exception.ProcessException): 7 uses
RecordReaderFactory (org.apache.nifi.serialization.RecordReaderFactory): 7 uses
ArrayList (java.util.ArrayList): 6 uses
RecordSetWriterFactory (org.apache.nifi.serialization.RecordSetWriterFactory): 6 uses
Test (org.junit.Test): 6 uses
BufferedInputStream (java.io.BufferedInputStream): 5 uses
ComponentLog (org.apache.nifi.logging.ComponentLog): 5 uses
SchemaNotFoundException (org.apache.nifi.schema.access.SchemaNotFoundException): 5 uses
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4 uses
LinkedHashMap (java.util.LinkedHashMap): 4 uses
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 4 uses
RecordSetWriter (org.apache.nifi.serialization.RecordSetWriter): 4 uses