Example 11 with RecordSchema

Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.

The class SplitRecord, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile original = session.get();
    if (original == null) {
        return;
    }
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final int maxRecords = context.getProperty(RECORDS_PER_SPLIT).evaluateAttributeExpressions(original).asInteger();
    final List<FlowFile> splits = new ArrayList<>();
    final Map<String, String> originalAttributes = original.getAttributes();
    try {
        session.read(original, new InputStreamCallback() {

            @Override
            public void process(final InputStream in) throws IOException {
                try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
                    final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema());
                    final RecordSet recordSet = reader.createRecordSet();
                    // PushBackRecordSet lets us peek ahead: isAnotherRecord() reports whether
                    // another record exists without consuming it, so a split FlowFile is only
                    // created when there is something to write into it.
                    final PushBackRecordSet pushbackSet = new PushBackRecordSet(recordSet);
                    while (pushbackSet.isAnotherRecord()) {
                        FlowFile split = session.create(original);
                        try {
                            final Map<String, String> attributes = new HashMap<>();
                            final WriteResult writeResult;
                            try (final OutputStream out = session.write(split);
                                final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) {
                                if (maxRecords == 1) {
                                    // Exactly one record per split: write the single record directly.
                                    final Record record = pushbackSet.next();
                                    writeResult = writer.write(record);
                                } else {
                                    // Otherwise write a view of the record set capped at maxRecords.
                                    final RecordSet limitedSet = pushbackSet.limit(maxRecords);
                                    writeResult = writer.write(limitedSet);
                                }
                                attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
                                attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
                                attributes.putAll(writeResult.getAttributes());
                                session.adjustCounter("Records Split", writeResult.getRecordCount(), false);
                            }
                            split = session.putAllAttributes(split, attributes);
                        } finally {
                            // Track the split even if writing failed so that the failure path
                            // below can remove every FlowFile that was created.
                            splits.add(split);
                        }
                    }
                } catch (final SchemaNotFoundException | MalformedRecordException e) {
                    throw new ProcessException("Failed to parse incoming data", e);
                }
            }
        });
    } catch (final ProcessException pe) {
        getLogger().error("Failed to split {}", new Object[] { original, pe });
        session.remove(splits);
        session.transfer(original, REL_FAILURE);
        return;
    }
    session.transfer(original, REL_ORIGINAL);
    session.transfer(splits, REL_SPLITS);
    getLogger().info("Successfully split {} into {} FlowFiles, each containing up to {} records", new Object[] { original, splits.size(), maxRecords });
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) OutputStream(java.io.OutputStream) ArrayList(java.util.ArrayList) IOException(java.io.IOException) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) ProcessException(org.apache.nifi.processor.exception.ProcessException) WriteResult(org.apache.nifi.serialization.WriteResult) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) Record(org.apache.nifi.serialization.record.Record) PushBackRecordSet(org.apache.nifi.serialization.record.PushBackRecordSet) RecordSet(org.apache.nifi.serialization.record.RecordSet) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) HashMap(java.util.HashMap) Map(java.util.Map)
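
The onTrigger method above can be exercised end to end with NiFi's mock framework. The following is a minimal test sketch, assuming the MockRecordParser and MockRecordWriter services from the nifi-mock-record-utils test module; the schema fields and record values are illustrative.

// Minimal test sketch for SplitRecord (inside a test method that declares
// throws InitializationException). The mock parser supplies the records, so
// the enqueued FlowFile content is irrelevant.
final TestRunner runner = TestRunners.newTestRunner(SplitRecord.class);

final MockRecordParser readerService = new MockRecordParser();
readerService.addSchemaField("name", RecordFieldType.STRING);
readerService.addSchemaField("age", RecordFieldType.INT);
readerService.addRecord("John", 30);
readerService.addRecord("Jane", 29);
readerService.addRecord("Jake", 1);

final MockRecordWriter writerService = new MockRecordWriter("header", false);

runner.addControllerService("reader", readerService);
runner.enableControllerService(readerService);
runner.addControllerService("writer", writerService);
runner.enableControllerService(writerService);

runner.setProperty(SplitRecord.RECORD_READER, "reader");
runner.setProperty(SplitRecord.RECORD_WRITER, "writer");
runner.setProperty(SplitRecord.RECORDS_PER_SPLIT, "1");

runner.enqueue("");
runner.run();

// Three records with RECORDS_PER_SPLIT=1 should yield three splits plus the original.
runner.assertTransferCount(SplitRecord.REL_SPLITS, 3);
runner.assertTransferCount(SplitRecord.REL_ORIGINAL, 1);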

Example 12 with RecordSchema

Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.

The class MergeRecord, method binFlowFile:

private void binFlowFile(final ProcessContext context, final FlowFile flowFile, final ProcessSession session, final RecordBinManager binManager, final boolean block) {
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    try (final InputStream in = session.read(flowFile);
        final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
        final RecordSchema schema = reader.getSchema();
        final String groupId = getGroupId(context, flowFile, schema, session);
        getLogger().debug("Got Group ID {} for {}", new Object[] { groupId, flowFile });
        binManager.add(groupId, flowFile, reader, session, block);
    } catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
        throw new ProcessException(e);
    }
}
Also used : ProcessException(org.apache.nifi.processor.exception.ProcessException) InputStream(java.io.InputStream) RecordReader(org.apache.nifi.serialization.RecordReader) IOException(java.io.IOException) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException)
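
binFlowFile hands the open RecordReader to the RecordBinManager rather than consuming it itself. For reference, the same open-read-inspect pattern, consumed locally, looks like this minimal sketch; nextRecord() and getSchema() are the standard RecordReader calls used throughout these examples.

// Open the FlowFile content, create a reader, and iterate the records.
try (final InputStream in = session.read(flowFile);
    final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
    // The schema is what drives bin selection in binFlowFile; here we just log it.
    final RecordSchema schema = reader.getSchema();
    Record record;
    while ((record = reader.nextRecord()) != null) {
        getLogger().debug("Read {} with schema {}", new Object[] { record, schema });
    }
} catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
    throw new ProcessException(e);
}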

Example 13 with RecordSchema

Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.

The class UpdateRecord, method updateRecord:

private Record updateRecord(final List<FieldValue> destinationFields, final List<FieldValue> selectedFields, final Record record) {
    if (destinationFields.size() == 1 && !destinationFields.get(0).getParentRecord().isPresent()) {
        final Object replacement = getReplacementObject(selectedFields);
        if (replacement == null) {
            return record;
        }
        if (replacement instanceof Record) {
            return (Record) replacement;
        }
        final List<RecordField> fields = selectedFields.stream().map(FieldValue::getField).collect(Collectors.toList());
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final Record mapRecord = new MapRecord(schema, new HashMap<>());
        for (final FieldValue selectedField : selectedFields) {
            mapRecord.setValue(selectedField.getField().getFieldName(), selectedField.getValue());
        }
        return mapRecord;
    } else {
        for (final FieldValue fieldVal : destinationFields) {
            fieldVal.updateValue(getReplacementObject(selectedFields));
        }
        return record;
    }
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) Record(org.apache.nifi.serialization.record.Record) FieldValue(org.apache.nifi.record.path.FieldValue) RecordSchema(org.apache.nifi.serialization.record.RecordSchema)
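
The first branch builds a fresh schema and record on the fly. The same construction works standalone; a short sketch using the classes imported above plus RecordFieldType (the field names and values are illustrative):

// Build a RecordSchema and a MapRecord by hand, mirroring the replacement-record
// construction in updateRecord. RecordFieldType supplies the DataType instances.
final List<RecordField> fields = new ArrayList<>();
fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
fields.add(new RecordField("age", RecordFieldType.INT.getDataType()));

final RecordSchema schema = new SimpleRecordSchema(fields);
final Record record = new MapRecord(schema, new HashMap<>());

record.setValue("name", "John Doe");
record.setValue("age", 48);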

Example 14 with RecordSchema

Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.

The class ValidateRecord, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final boolean allowExtraFields = context.getProperty(ALLOW_EXTRA_FIELDS).asBoolean();
    final boolean strictTypeChecking = context.getProperty(STRICT_TYPE_CHECKING).asBoolean();
    RecordSetWriter validWriter = null;
    RecordSetWriter invalidWriter = null;
    FlowFile validFlowFile = null;
    FlowFile invalidFlowFile = null;
    try (final InputStream in = session.read(flowFile);
        final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
        final RecordSchema validationSchema = getValidationSchema(context, flowFile, reader);
        final SchemaValidationContext validationContext = new SchemaValidationContext(validationSchema, allowExtraFields, strictTypeChecking);
        final RecordSchemaValidator validator = new StandardSchemaValidator(validationContext);
        int recordCount = 0;
        int validCount = 0;
        int invalidCount = 0;
        final Set<String> extraFields = new HashSet<>();
        final Set<String> missingFields = new HashSet<>();
        final Set<String> invalidFields = new HashSet<>();
        final Set<String> otherProblems = new HashSet<>();
        try {
            Record record;
            // nextRecord(false, false): do not coerce values to the schema's types and do
            // not drop unknown fields, so the validator sees the record exactly as read.
            while ((record = reader.nextRecord(false, false)) != null) {
                final SchemaValidationResult result = validator.validate(record);
                recordCount++;
                RecordSetWriter writer;
                if (result.isValid()) {
                    validCount++;
                    if (validFlowFile == null) {
                        validFlowFile = session.create(flowFile);
                    }
                    validWriter = writer = createIfNecessary(validWriter, writerFactory, session, validFlowFile, record.getSchema());
                } else {
                    invalidCount++;
                    logValidationErrors(flowFile, recordCount, result);
                    if (invalidFlowFile == null) {
                        invalidFlowFile = session.create(flowFile);
                    }
                    invalidWriter = writer = createIfNecessary(invalidWriter, writerFactory, session, invalidFlowFile, record.getSchema());
                    // Collect the problem field names into sets so the summary built below
                    // reports each field once; including every individual error would make
                    // the Provenance Event so large that it is too noisy to be useful.
                    for (final ValidationError validationError : result.getValidationErrors()) {
                        final Optional<String> fieldName = validationError.getFieldName();
                        switch(validationError.getType()) {
                            case EXTRA_FIELD:
                                if (fieldName.isPresent()) {
                                    extraFields.add(fieldName.get());
                                } else {
                                    otherProblems.add(validationError.getExplanation());
                                }
                                break;
                            case MISSING_FIELD:
                                if (fieldName.isPresent()) {
                                    missingFields.add(fieldName.get());
                                } else {
                                    otherProblems.add(validationError.getExplanation());
                                }
                                break;
                            case INVALID_FIELD:
                                if (fieldName.isPresent()) {
                                    invalidFields.add(fieldName.get());
                                } else {
                                    otherProblems.add(validationError.getExplanation());
                                }
                                break;
                            case OTHER:
                                otherProblems.add(validationError.getExplanation());
                                break;
                        }
                    }
                }
                if (writer instanceof RawRecordWriter) {
                    // A RawRecordWriter can write the record as it was read, including any
                    // fields that are not part of the schema.
                    ((RawRecordWriter) writer).writeRawRecord(record);
                } else {
                    writer.write(record);
                }
            }
            if (validWriter != null) {
                completeFlowFile(session, validFlowFile, validWriter, REL_VALID, null);
            }
            if (invalidWriter != null) {
                // Build up a String that explains why the records were invalid, so that we can add this to the Provenance Event.
                final StringBuilder errorBuilder = new StringBuilder();
                errorBuilder.append("Records in this FlowFile were invalid for the following reasons: ");
                if (!missingFields.isEmpty()) {
                    errorBuilder.append("The following ").append(missingFields.size()).append(" fields were missing: ").append(missingFields.toString());
                }
                if (!extraFields.isEmpty()) {
                    if (errorBuilder.length() > 0) {
                        errorBuilder.append("; ");
                    }
                    errorBuilder.append("The following ").append(extraFields.size()).append(" fields were present in the Record but not in the schema: ").append(extraFields.toString());
                }
                if (!invalidFields.isEmpty()) {
                    if (errorBuilder.length() > 0) {
                        errorBuilder.append("; ");
                    }
                    errorBuilder.append("The following ").append(invalidFields.size()).append(" fields had values whose type did not match the schema: ").append(invalidFields.toString());
                }
                if (!otherProblems.isEmpty()) {
                    if (errorBuilder.length() > 0) {
                        errorBuilder.append("; ");
                    }
                    errorBuilder.append("The following ").append(otherProblems.size()).append(" additional problems were encountered: ").append(otherProblems.toString());
                }
                final String validationErrorString = errorBuilder.toString();
                completeFlowFile(session, invalidFlowFile, invalidWriter, REL_INVALID, validationErrorString);
            }
        } finally {
            closeQuietly(validWriter);
            closeQuietly(invalidWriter);
        }
        session.adjustCounter("Records Validated", recordCount, false);
        session.adjustCounter("Records Found Valid", validCount, false);
        session.adjustCounter("Records Found Invalid", invalidCount, false);
    } catch (final IOException | MalformedRecordException | SchemaNotFoundException e) {
        getLogger().error("Failed to process {}; will route to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        if (validFlowFile != null) {
            session.remove(validFlowFile);
        }
        if (invalidFlowFile != null) {
            session.remove(invalidFlowFile);
        }
        return;
    }
    session.remove(flowFile);
}
Also used : RecordReader(org.apache.nifi.serialization.RecordReader) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) StandardSchemaValidator(org.apache.nifi.schema.validation.StandardSchemaValidator) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) RecordSchemaValidator(org.apache.nifi.serialization.record.validation.RecordSchemaValidator) SchemaValidationResult(org.apache.nifi.serialization.record.validation.SchemaValidationResult) Record(org.apache.nifi.serialization.record.Record) ValidationError(org.apache.nifi.serialization.record.validation.ValidationError) RawRecordWriter(org.apache.nifi.serialization.record.RawRecordWriter) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) HashSet(java.util.HashSet) SchemaValidationContext(org.apache.nifi.schema.validation.SchemaValidationContext) FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) IOException(java.io.IOException) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) SchemaNotFoundException(org.apache.nifi.schema.access.SchemaNotFoundException)
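
The validation step itself has no processor dependencies, so it can be sketched in isolation with the same three classes the snippet wires together. A minimal sketch, assuming a RecordSchema named validationSchema and a Record named record are already in scope:

// Validate a single record and print the problems it reports.
final SchemaValidationContext validationContext =
    new SchemaValidationContext(validationSchema, /* allowExtraFields */ false, /* strictTypeChecking */ true);
final RecordSchemaValidator validator = new StandardSchemaValidator(validationContext);

// validate() reports all problems with the record, not just the first one found.
final SchemaValidationResult result = validator.validate(record);
if (!result.isValid()) {
    for (final ValidationError error : result.getValidationErrors()) {
        System.out.println(error.getType() + ": " + error.getExplanation());
    }
}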

Example 15 with RecordSchema

Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.

The class ValidateRecord, method getValidationSchema:

protected RecordSchema getValidationSchema(final ProcessContext context, final FlowFile flowFile, final RecordReader reader) throws MalformedRecordException, IOException, SchemaNotFoundException {
    final String schemaAccessStrategy = context.getProperty(SCHEMA_ACCESS_STRATEGY).getValue();
    if (schemaAccessStrategy.equals(READER_SCHEMA.getValue())) {
        return reader.getSchema();
    } else if (schemaAccessStrategy.equals(SCHEMA_NAME_PROPERTY.getValue())) {
        final SchemaRegistry schemaRegistry = context.getProperty(SCHEMA_REGISTRY).asControllerService(SchemaRegistry.class);
        final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final SchemaIdentifier schemaIdentifier = SchemaIdentifier.builder().name(schemaName).build();
        return schemaRegistry.retrieveSchema(schemaIdentifier);
    } else if (schemaAccessStrategy.equals(SCHEMA_TEXT_PROPERTY.getValue())) {
        final String schemaText = context.getProperty(SCHEMA_TEXT).evaluateAttributeExpressions(flowFile).getValue();
        final Parser parser = new Schema.Parser();
        final Schema avroSchema = parser.parse(schemaText);
        return AvroTypeUtil.createSchema(avroSchema);
    } else {
        throw new ProcessException("Invalid Schema Access Strategy: " + schemaAccessStrategy);
    }
}
Also used : ProcessException(org.apache.nifi.processor.exception.ProcessException) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) Schema(org.apache.avro.Schema) SchemaIdentifier(org.apache.nifi.serialization.record.SchemaIdentifier) SchemaRegistry(org.apache.nifi.schemaregistry.services.SchemaRegistry) Parser(org.apache.avro.Schema.Parser)
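
The SCHEMA_TEXT branch is easy to try on its own: parse an Avro schema from text, then convert it with AvroTypeUtil. A small sketch with an inline schema (the schema definition is illustrative):

// Parse an Avro schema from text and convert it to a NiFi RecordSchema,
// the same two calls the SCHEMA_TEXT branch above makes.
final String schemaText = "{ \"type\": \"record\", \"name\": \"person\", \"fields\": [ "
    + "{ \"name\": \"name\", \"type\": \"string\" }, "
    + "{ \"name\": \"age\", \"type\": \"int\" } ] }";

final Schema avroSchema = new Schema.Parser().parse(schemaText);
final RecordSchema recordSchema = AvroTypeUtil.createSchema(avroSchema);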

Aggregations

RecordSchema (org.apache.nifi.serialization.record.RecordSchema): 243
SimpleRecordSchema (org.apache.nifi.serialization.SimpleRecordSchema): 178
Test (org.junit.Test): 168
Record (org.apache.nifi.serialization.record.Record): 147
RecordField (org.apache.nifi.serialization.record.RecordField): 138
ArrayList (java.util.ArrayList): 107
MapRecord (org.apache.nifi.serialization.record.MapRecord): 94
HashMap (java.util.HashMap): 88
InputStream (java.io.InputStream): 79
ByteArrayInputStream (java.io.ByteArrayInputStream): 64
FileInputStream (java.io.FileInputStream): 56
ComponentLog (org.apache.nifi.logging.ComponentLog): 54
IOException (java.io.IOException): 44
LinkedHashMap (java.util.LinkedHashMap): 36
DataType (org.apache.nifi.serialization.record.DataType): 36
File (java.io.File): 31
Schema (org.apache.avro.Schema): 29
SchemaIdentifier (org.apache.nifi.serialization.record.SchemaIdentifier): 29
MalformedRecordException (org.apache.nifi.serialization.MalformedRecordException): 28
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 26