Search in sources :

Example 6 with RecordField

use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.

The class PutDatabaseRecord, method generateDelete.

/**
 * Generates a parameterized DELETE statement for the given table, matching on every record
 * field that maps to a table column, and returns the SQL together with the record field
 * indexes whose values must be bound to the statement's parameters.
 *
 * @param recordSchema schema of the incoming record
 * @param tableName    target table name (appended verbatim, optionally quoted)
 * @param tableSchema  database-side schema used to resolve and validate column names
 * @param settings     DML options (name translation, quoting, unmapped-field handling)
 * @return the DELETE SQL and the list of record field indexes included as parameters
 * @throws MalformedRecordException if a required column has no record field and failUnmappedColumns is set
 * @throws SQLDataException if a field cannot be mapped to a column (and ignoreUnmappedFields is false),
 *         or if no field maps to any column at all
 */
SqlAndIncludedColumns generateDelete(final RecordSchema recordSchema, final String tableName, final TableSchema tableSchema, final DMLSettings settings) throws IllegalArgumentException, MalformedRecordException, SQLDataException {
    final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames);
    // Verify every required column has a corresponding record field, honoring the configured
    // fail/warn policy for missing required columns.
    for (final String requiredColName : tableSchema.getRequiredColumnNames()) {
        final String normalizedColName = normalizeColumnName(requiredColName, settings.translateFieldNames);
        if (!normalizedFieldNames.contains(normalizedColName)) {
            String missingColMessage = "Record does not have a value for the Required column '" + requiredColName + "'";
            if (settings.failUnmappedColumns) {
                getLogger().error(missingColMessage);
                throw new MalformedRecordException(missingColMessage);
            } else if (settings.warningUnmappedColumns) {
                getLogger().warn(missingColMessage);
            }
        }
    }
    final StringBuilder sqlBuilder = new StringBuilder();
    sqlBuilder.append("DELETE FROM ");
    if (settings.quoteTableName) {
        sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(tableName).append(tableSchema.getQuotedIdentifierString());
    } else {
        sqlBuilder.append(tableName);
    }
    // iterate over all of the fields in the record, building the SQL statement by adding the column names
    List<String> fieldNames = recordSchema.getFieldNames();
    final List<Integer> includedColumns = new ArrayList<>();
    if (fieldNames != null) {
        sqlBuilder.append(" WHERE ");
        int fieldCount = fieldNames.size();
        // Plain int counter: this loop is single-threaded, so the original AtomicInteger was unnecessary.
        int fieldsFound = 0;
        for (int i = 0; i < fieldCount; i++) {
            RecordField field = recordSchema.getField(i);
            String fieldName = field.getFieldName();
            final ColumnDescription desc = tableSchema.getColumns().get(normalizeColumnName(fieldName, settings.translateFieldNames));
            if (desc == null && !settings.ignoreUnmappedFields) {
                throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database");
            }
            if (desc != null) {
                if (fieldsFound++ > 0) {
                    sqlBuilder.append(" AND ");
                }
                String columnName;
                if (settings.escapeColumnNames) {
                    columnName = tableSchema.getQuotedIdentifierString() + desc.getColumnName() + tableSchema.getQuotedIdentifierString();
                } else {
                    columnName = desc.getColumnName();
                }
                // Need to build a null-safe construct for the WHERE clause, since we are using PreparedStatement and won't know if the values are null. If they are null,
                // then the filter should be "column IS null" vs "column = null". Since we don't know whether the value is null, we can use the following construct (from NIFI-3742):
                // (column = ? OR (column is null AND ? is null))
                sqlBuilder.append("(");
                sqlBuilder.append(columnName);
                sqlBuilder.append(" = ? OR (");
                sqlBuilder.append(columnName);
                sqlBuilder.append(" is null AND ? is null))");
                includedColumns.add(i);
            }
        }
        if (fieldsFound == 0) {
            throw new SQLDataException("None of the fields in the record map to the columns defined by the " + tableName + " table");
        }
    }
    return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
}
Also used : RecordField(org.apache.nifi.serialization.record.RecordField) ArrayList(java.util.ArrayList) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) SQLDataException(java.sql.SQLDataException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger)

Example 7 with RecordField

use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.

The class UpdateRecord, method updateRecord.

/**
 * Writes the selected field values into the destination fields of the given record.
 * If the destination is the root of the record itself (a single destination with no parent),
 * the whole record is replaced: a null replacement leaves the record untouched, a Record
 * replacement is returned as-is, and anything else is wrapped in a new MapRecord built
 * from the selected fields.
 *
 * @param destinationFields the field references to update
 * @param selectedFields    the fields providing the replacement value(s)
 * @param record            the record being updated
 * @return the updated (or replacement) record
 */
private Record updateRecord(final List<FieldValue> destinationFields, final List<FieldValue> selectedFields, final Record record) {
    // A single destination with no parent record means the root record itself is being replaced.
    final boolean replaceRoot = destinationFields.size() == 1 && !destinationFields.get(0).getParentRecord().isPresent();
    if (!replaceRoot) {
        // Normal case: update each destination field in place and hand back the same record.
        for (final FieldValue destination : destinationFields) {
            destination.updateValue(getReplacementObject(selectedFields));
        }
        return record;
    }
    final Object replacement = getReplacementObject(selectedFields);
    if (replacement == null) {
        return record;
    }
    if (replacement instanceof Record) {
        return (Record) replacement;
    }
    // Replacement is not itself a record: assemble a fresh record from the selected fields.
    final List<RecordField> recordFields = selectedFields.stream().map(FieldValue::getField).collect(Collectors.toList());
    final Record assembled = new MapRecord(new SimpleRecordSchema(recordFields), new HashMap<>());
    for (final FieldValue selected : selectedFields) {
        assembled.setValue(selected.getField().getFieldName(), selected.getValue());
    }
    return assembled;
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) Record(org.apache.nifi.serialization.record.Record) MapRecord(org.apache.nifi.serialization.record.MapRecord) FieldValue(org.apache.nifi.record.path.FieldValue) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema)

Example 8 with RecordField

use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.

The class SolrUtils, method solrDocumentsToRecordSet.

/**
 * Converts each SolrDocument into a Record (fields absent from a document are omitted
 * from its record) and wraps the results in a ListRecordSet sharing the given schema.
 *
 * @param docs   the Solr documents to convert
 * @param schema the record schema whose fields are read from each document
 * @return a record set containing one record per document
 */
public static RecordSet solrDocumentsToRecordSet(final List<SolrDocument> docs, final RecordSchema schema) {
    final List<Record> records = new ArrayList<Record>();
    for (final SolrDocument document : docs) {
        final Map<String, Object> values = new LinkedHashMap<>();
        for (final RecordField recordField : schema.getFields()) {
            final String name = recordField.getFieldName();
            final Object value = document.getFieldValue(name);
            if (value == null) {
                // Absent fields are simply left out of the record.
                continue;
            }
            // ARRAY-typed fields arrive from Solr as a List and are stored as an Object[].
            final boolean isArray = recordField.getDataType().getFieldType().equals(RecordFieldType.ARRAY);
            values.put(name, isArray ? ((List<Object>) value).toArray() : value);
        }
        records.add(new MapRecord(schema, values));
    }
    return new ListRecordSet(schema, records);
}
Also used : MapRecord(org.apache.nifi.serialization.record.MapRecord) SolrDocument(org.apache.solr.common.SolrDocument) RecordField(org.apache.nifi.serialization.record.RecordField) ListRecordSet(org.apache.nifi.serialization.record.ListRecordSet) ArrayList(java.util.ArrayList) Record(org.apache.nifi.serialization.record.Record) MapRecord(org.apache.nifi.serialization.record.MapRecord) LinkedHashMap(java.util.LinkedHashMap)

Example 9 with RecordField

use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.

The class HBase_1_1_2_RecordLookupService, method lookup.

/**
 * Looks up a single HBase row by the row key supplied in the coordinates and returns it
 * as a Record whose fields are the returned column qualifiers (all typed as STRING).
 *
 * @param coordinates must contain a non-blank value under {@code ROW_KEY_KEY}
 * @return the row as a record, or {@link Optional#empty()} when the key is missing/blank
 *         or the scan returns no cells
 * @throws LookupFailureException if the HBase scan fails
 */
@Override
public Optional<Record> lookup(Map<String, Object> coordinates) throws LookupFailureException {
    if (coordinates.get(ROW_KEY_KEY) == null) {
        return Optional.empty();
    }
    final String rowKey = coordinates.get(ROW_KEY_KEY).toString();
    if (StringUtils.isBlank(rowKey)) {
        return Optional.empty();
    }
    final byte[] rowKeyBytes = rowKey.getBytes(StandardCharsets.UTF_8);
    try {
        final Map<String, Object> values = new HashMap<>();
        // Scan exactly one row (start == stop == rowKey), collecting each cell as qualifier -> value.
        hBaseClientService.scan(tableName, rowKeyBytes, rowKeyBytes, columns, (byte[] row, ResultCell[] resultCells) -> {
            for (final ResultCell cell : resultCells) {
                final byte[] qualifier = Arrays.copyOfRange(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierOffset() + cell.getQualifierLength());
                final byte[] value = Arrays.copyOfRange(cell.getValueArray(), cell.getValueOffset(), cell.getValueOffset() + cell.getValueLength());
                values.put(new String(qualifier, charset), new String(value, charset));
            }
        });
        if (values.isEmpty()) {
            return Optional.empty();
        }
        // Build an all-STRING schema from the qualifiers that were actually returned.
        final List<RecordField> fields = new ArrayList<>();
        for (final String key : values.keySet()) {
            fields.add(new RecordField(key, RecordFieldType.STRING.getDataType()));
        }
        final RecordSchema schema = new SimpleRecordSchema(fields);
        // The MapRecord is never null here, so Optional.of is the correct factory.
        return Optional.of(new MapRecord(schema, values));
    } catch (IOException e) {
        // Log with the same ROW_KEY_KEY constant used above (was a hard-coded "rowKey" literal,
        // which would log null if the constant's value ever differed).
        getLogger().error("Error occurred loading {}", new Object[] { coordinates.get(ROW_KEY_KEY) }, e);
        throw new LookupFailureException(e);
    }
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ResultCell(org.apache.nifi.hbase.scan.ResultCell) IOException(java.io.IOException) LookupFailureException(org.apache.nifi.lookup.LookupFailureException) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema)

Example 10 with RecordField

use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.

The class CSVRecordLookupService, method loadCache.

/**
 * (Re)loads the lookup cache from the configured CSV file, keyed by {@code lookupKeyColumn}.
 * All values are stored as STRING records whose schema is derived from the first data row.
 * If another thread already holds the lock, the reload is skipped.
 *
 * @throws IllegalStateException if a lookup key is blank, or duplicated while duplicates are not ignored
 * @throws IOException if the CSV file cannot be opened or parsed
 */
private void loadCache() throws IllegalStateException, IOException {
    if (lock.tryLock()) {
        try {
            final ComponentLog logger = getLogger();
            if (logger.isDebugEnabled()) {
                logger.debug("Loading lookup table from file: " + csvFile);
            }
            ConcurrentHashMap<String, Record> cache = new ConcurrentHashMap<>();
            RecordSchema lookupRecordSchema = null;
            // try-with-resources: the reader/parser were previously never closed, leaking a file
            // handle on every reload. Closing the parser also closes the underlying reader.
            // NOTE(review): FileReader uses the platform default charset — confirm the file's encoding.
            try (final FileReader reader = new FileReader(csvFile);
                 final CSVParser records = csvFormat.withFirstRecordAsHeader().parse(reader)) {
                for (final CSVRecord record : records) {
                    final String key = record.get(lookupKeyColumn);
                    if (StringUtils.isBlank(key)) {
                        throw new IllegalStateException("Empty lookup key encountered in: " + csvFile);
                    } else if (!ignoreDuplicates && cache.containsKey(key)) {
                        throw new IllegalStateException("Duplicate lookup key encountered: " + key + " in " + csvFile);
                    } else if (ignoreDuplicates && cache.containsKey(key)) {
                        logger.warn("Duplicate lookup key encountered: {} in {}", new Object[] { key, csvFile });
                    }
                    // Put each key/value pair (except the lookup key itself) into the properties
                    final Map<String, Object> properties = new HashMap<>();
                    record.toMap().forEach((k, v) -> {
                        if (!lookupKeyColumn.equals(k)) {
                            properties.put(k, v);
                        }
                    });
                    // Derive the (all-STRING) record schema from the first data row.
                    if (lookupRecordSchema == null) {
                        List<RecordField> recordFields = new ArrayList<>(properties.size());
                        properties.forEach((k, v) -> recordFields.add(new RecordField(k, RecordFieldType.STRING.getDataType())));
                        lookupRecordSchema = new SimpleRecordSchema(recordFields);
                    }
                    cache.put(key, new MapRecord(lookupRecordSchema, properties));
                }
            }
            this.cache = cache;
            if (cache.isEmpty()) {
                logger.warn("Lookup table is empty after reading file: " + csvFile);
            }
        } finally {
            lock.unlock();
        }
    }
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ArrayList(java.util.ArrayList) ComponentLog(org.apache.nifi.logging.ComponentLog) CSVParser(org.apache.commons.csv.CSVParser) FileReader(java.io.FileReader) CSVRecord(org.apache.commons.csv.CSVRecord) Record(org.apache.nifi.serialization.record.Record) MapRecord(org.apache.nifi.serialization.record.MapRecord) CSVRecord(org.apache.commons.csv.CSVRecord) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema)

Aggregations

RecordField (org.apache.nifi.serialization.record.RecordField)173 SimpleRecordSchema (org.apache.nifi.serialization.SimpleRecordSchema)133 RecordSchema (org.apache.nifi.serialization.record.RecordSchema)130 ArrayList (java.util.ArrayList)116 Test (org.junit.Test)108 Record (org.apache.nifi.serialization.record.Record)97 MapRecord (org.apache.nifi.serialization.record.MapRecord)73 HashMap (java.util.HashMap)52 InputStream (java.io.InputStream)48 FileInputStream (java.io.FileInputStream)44 ByteArrayInputStream (java.io.ByteArrayInputStream)43 ComponentLog (org.apache.nifi.logging.ComponentLog)39 DataType (org.apache.nifi.serialization.record.DataType)37 LinkedHashMap (java.util.LinkedHashMap)36 File (java.io.File)21 ByteArrayOutputStream (java.io.ByteArrayOutputStream)20 SchemaNameAsAttribute (org.apache.nifi.schema.access.SchemaNameAsAttribute)17 RecordDataType (org.apache.nifi.serialization.record.type.RecordDataType)17 Schema (org.apache.avro.Schema)16 RecordFieldType (org.apache.nifi.serialization.record.RecordFieldType)16