Search in sources :

Example 26 with MalformedRecordException

use of org.apache.nifi.serialization.MalformedRecordException in project nifi by apache.

The following example is from the class CommaSeparatedRecordReader, method createRecordReader.

@Override
public RecordReader createRecordReader(Map<String, String> variables, final InputStream in, final ComponentLog logger) throws IOException, SchemaNotFoundException {
    // Use an explicit charset so parsing does not depend on the platform default encoding.
    final BufferedReader reader = new BufferedReader(new InputStreamReader(in, java.nio.charset.StandardCharsets.UTF_8));

    // The first line is the header; each comma-separated token becomes a STRING field.
    // Previously a completely empty stream caused a NullPointerException here; fail with a
    // descriptive IOException instead (IOException is already declared by this method).
    final String headerLine = reader.readLine();
    if (headerLine == null) {
        throw new IOException("Cannot create Record Reader because the input stream is empty; expected a header line");
    }

    final List<RecordField> fields = new ArrayList<>();
    for (final String colName : headerLine.split(",")) {
        fields.add(new RecordField(colName.trim(), RecordFieldType.STRING.getDataType()));
    }

    return new RecordReader() {

        @Override
        public void close() throws IOException {
            reader.close();
        }

        /**
         * Returns the next CSV line as a Record, or null at end of stream.
         * Throws MalformedRecordException intentionally once {@code failAfterN}
         * records have been read (unit-test failure injection).
         */
        @Override
        public Record nextRecord(final boolean coerceTypes, final boolean dropUnknown) throws IOException, MalformedRecordException {
            // failAfterN / recordCount are fields of the enclosing reader; failAfterN < 0 disables injection.
            if (failAfterN > -1 && recordCount >= failAfterN) {
                throw new MalformedRecordException("Intentional Unit Test Exception because " + recordCount + " records have been read");
            }

            final String nextLine = reader.readLine();
            if (nextLine == null) {
                // End of input.
                return null;
            }
            recordCount++;

            final String[] values = nextLine.split(",");
            final Map<String, Object> valueMap = new HashMap<>();
            int i = 0;
            for (final RecordField field : fields) {
                // Rows with fewer columns than the header previously threw
                // ArrayIndexOutOfBoundsException; map missing columns to null instead.
                final String value = i < values.length ? values[i].trim() : null;
                valueMap.put(field.getFieldName(), value);
                i++;
            }

            return new MapRecord(new SimpleRecordSchema(fields), valueMap);
        }

        @Override
        public RecordSchema getSchema() {
            return new SimpleRecordSchema(fields);
        }
    };
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) InputStreamReader(java.io.InputStreamReader) HashMap(java.util.HashMap) RecordReader(org.apache.nifi.serialization.RecordReader) ArrayList(java.util.ArrayList) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) BufferedReader(java.io.BufferedReader)

Example 27 with MalformedRecordException

use of org.apache.nifi.serialization.MalformedRecordException in project nifi by apache.

The following example is from the class PutDatabaseRecord, method executeSQL.

private void executeSQL(ProcessContext context, ProcessSession session, FlowFile flowFile, FunctionContext functionContext, RoutingResult result, Connection con, RecordReader recordParser) throws IllegalArgumentException, MalformedRecordException, IOException, SQLException {
    final RecordSchema recordSchema = recordParser.getSchema();

    // Determine which record field is expected to carry the SQL statement text.
    final String sqlField = context.getProperty(FIELD_CONTAINING_SQL).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(sqlField)) {
        throw new IllegalArgumentException(format("SQL specified as Statement Type but no Field Containing SQL was found, FlowFile %s", flowFile));
    }

    // The configured field must actually exist in the incoming record schema.
    final boolean sqlFieldPresent = recordSchema.getFields().stream().anyMatch(field -> sqlField.equals(field.getFieldName()));
    if (!sqlFieldPresent) {
        throw new IllegalArgumentException(format("Record schema does not contain Field Containing SQL: %s, FlowFile %s", sqlField, flowFile));
    }

    try (Statement statement = con.createStatement()) {
        try {
            // Query timeout is expressed in seconds.
            statement.setQueryTimeout(functionContext.queryTimeout);
        } catch (SQLException se) {
            // Some drivers do not support query timeouts; treat that as "infinite",
            // but only when the user asked for a timeout of zero.
            if (functionContext.queryTimeout > 0) {
                throw se;
            }
        }

        Record record;
        while ((record = recordParser.nextRecord()) != null) {
            final Object sql = record.getValue(sqlField);
            if (sql == null || StringUtils.isEmpty((String) sql)) {
                throw new MalformedRecordException(format("Record had no (or null) value for Field Containing SQL: %s, FlowFile %s", sqlField, flowFile));
            }
            // Run the statement exactly as provided by the record.
            statement.execute((String) sql);
        }

        result.routeTo(flowFile, REL_SUCCESS);
        session.getProvenanceReporter().send(flowFile, functionContext.jdbcUrl);
    }
}
Also used : SQLException(java.sql.SQLException) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) Record(org.apache.nifi.serialization.record.Record) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException)

Example 28 with MalformedRecordException

use of org.apache.nifi.serialization.MalformedRecordException in project nifi by apache.

The following example is from the class PutParquetTest, method testMalformedRecordExceptionFromReaderShouldRouteToFailure.

@Test
public void testMalformedRecordExceptionFromReaderShouldRouteToFailure() throws InitializationException, IOException, MalformedRecordException, SchemaNotFoundException {
    configure(proc, 10);

    // Mock a reader whose nextRecord() always fails with MalformedRecordException.
    final RecordReader failingReader = Mockito.mock(RecordReader.class);
    when(failingReader.nextRecord()).thenThrow(new MalformedRecordException("ERROR"));

    // Register a mocked factory that hands out the failing reader.
    final RecordReaderFactory readerFactory = Mockito.mock(RecordReaderFactory.class);
    when(readerFactory.getIdentifier()).thenReturn("mock-reader-factory");
    when(readerFactory.createRecordReader(any(FlowFile.class), any(InputStream.class), any(ComponentLog.class))).thenReturn(failingReader);

    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");

    // Enqueue a trigger FlowFile with a unique filename attribute.
    final String filename = "testMalformedRecordExceptionShouldRouteToFailure-" + System.currentTimeMillis();
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.FILENAME.key(), filename);
    testRunner.enqueue("trigger", flowFileAttributes);
    testRunner.run();

    // The malformed record must route the FlowFile to the failure relationship.
    testRunner.assertAllFlowFilesTransferred(PutParquet.REL_FAILURE, 1);
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) HashMap(java.util.HashMap) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) ComponentLog(org.apache.nifi.logging.ComponentLog) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) Test(org.junit.Test)

Example 29 with MalformedRecordException

use of org.apache.nifi.serialization.MalformedRecordException in project nifi by apache.

The following example is from the class TestPutKudu, method testMalformedRecordExceptionFromReaderShouldRouteToFailure.

@Test
public void testMalformedRecordExceptionFromReaderShouldRouteToFailure() throws InitializationException, IOException, MalformedRecordException, SchemaNotFoundException {
    createRecordReader(10);

    // Mock a reader whose nextRecord() always fails with MalformedRecordException.
    final RecordReader failingReader = Mockito.mock(RecordReader.class);
    when(failingReader.nextRecord()).thenThrow(new MalformedRecordException("ERROR"));

    // Register a mocked factory that hands out the failing reader.
    final RecordReaderFactory readerFactory = Mockito.mock(RecordReaderFactory.class);
    when(readerFactory.getIdentifier()).thenReturn("mock-reader-factory");
    when(readerFactory.createRecordReader(any(FlowFile.class), any(InputStream.class), any(ComponentLog.class))).thenReturn(failingReader);

    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutKudu.RECORD_READER, "mock-reader-factory");

    // Enqueue a trigger FlowFile with a unique filename attribute.
    final String filename = "testMalformedRecordExceptionShouldRouteToFailure-" + System.currentTimeMillis();
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.FILENAME.key(), filename);
    testRunner.enqueue("trigger", flowFileAttributes);
    testRunner.run();

    // The malformed record must route the FlowFile to the failure relationship.
    testRunner.assertAllFlowFilesTransferred(PutKudu.REL_FAILURE, 1);
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) HashMap(java.util.HashMap) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) ComponentLog(org.apache.nifi.logging.ComponentLog) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) Test(org.junit.Test)

Example 30 with MalformedRecordException

use of org.apache.nifi.serialization.MalformedRecordException in project nifi by apache.

The following example is from the class PutMongoRecord, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final RecordReaderFactory recordParserFactory = context.getProperty(RECORD_READER_FACTORY).asControllerService(RecordReaderFactory.class);
    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);

    List<Document> inserts = new ArrayList<>();
    // Maximum number of documents to send to Mongo in a single insertMany call.
    final int ceiling = context.getProperty(INSERT_COUNT).asInteger();
    int added = 0;
    boolean error = false;

    try (final InputStream inStream = session.read(flowFile);
        final RecordReader reader = recordParserFactory.createRecordReader(flowFile, inStream, getLogger())) {
        final RecordSchema schema = reader.getSchema();
        Record record;
        while ((record = reader.nextRecord()) != null) {
            // Convert each Record to a Map and copy only the schema's named fields into the document.
            final Map<String, Object> contentMap = (Map<String, Object>) DataTypeUtils.convertRecordFieldtoObject(record, RecordFieldType.RECORD.getRecordDataType(record.getSchema()));
            final Document document = new Document();
            for (final String name : schema.getFieldNames()) {
                document.put(name, contentMap.get(name));
            }
            inserts.add(document);

            // Flush a full batch.
            if (inserts.size() == ceiling) {
                collection.insertMany(inserts);
                added += inserts.size();
                inserts = new ArrayList<>();
            }
        }

        // Flush the trailing partial batch. BUG FIX: previously these documents were
        // inserted but never counted, so the provenance message underreported the total.
        if (inserts.size() > 0) {
            collection.insertMany(inserts);
            added += inserts.size();
        }
    } catch (SchemaNotFoundException | IOException | MalformedRecordException e) {
        getLogger().error("PutMongoRecord failed with error:", e);
        session.transfer(flowFile, REL_FAILURE);
        error = true;
    } finally {
        if (!error) {
            session.getProvenanceReporter().send(flowFile, context.getProperty(URI).evaluateAttributeExpressions().getValue(), String.format("Added %d documents to MongoDB.", added));
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("Inserted {} records into MongoDB", new Object[] { added });
        }
    }
    session.commit();
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Document(org.bson.Document) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) WriteConcern(com.mongodb.WriteConcern) Record(org.apache.nifi.serialization.record.Record) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) Map(java.util.Map)

Aggregations

MalformedRecordException (org.apache.nifi.serialization.MalformedRecordException)39 IOException (java.io.IOException)30 InputStream (java.io.InputStream)28 RecordSchema (org.apache.nifi.serialization.record.RecordSchema)28 Record (org.apache.nifi.serialization.record.Record)24 SimpleRecordSchema (org.apache.nifi.serialization.SimpleRecordSchema)21 ComponentLog (org.apache.nifi.logging.ComponentLog)20 RecordField (org.apache.nifi.serialization.record.RecordField)20 ArrayList (java.util.ArrayList)19 Test (org.junit.Test)19 FileInputStream (java.io.FileInputStream)17 File (java.io.File)16 Arrays (java.util.Arrays)16 HashMap (java.util.HashMap)16 List (java.util.List)16 Collectors (java.util.stream.Collectors)16 RecordReader (org.apache.nifi.serialization.RecordReader)16 DataType (org.apache.nifi.serialization.record.DataType)16 RecordFieldType (org.apache.nifi.serialization.record.RecordFieldType)16 Assert.assertEquals (org.junit.Assert.assertEquals)16