Search in sources :

Example 1 with RecordField

Use of org.apache.nifi.serialization.record.RecordField in the Apache NiFi project.

From class TestConsumeAzureEventHub, method toRecord.

/**
 * Wraps the given string in a single-field {@link MapRecord} whose schema
 * contains one STRING field named "value".
 *
 * @param value the string to store under the "value" field
 * @return a Record with schema {value: STRING} holding the given value
 */
private Record toRecord(String value) {
    // Schema with exactly one field: "value" of type STRING.
    final RecordField valueField = new RecordField("value", RecordFieldType.STRING.getDataType());
    final SimpleRecordSchema schema = new SimpleRecordSchema(Collections.singletonList(valueField));

    final Map<String, Object> values = new HashMap<>();
    values.put("value", value);
    return new MapRecord(schema, values);
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) HashMap(java.util.HashMap)

Example 2 with RecordField

Use of org.apache.nifi.serialization.record.RecordField in the Apache NiFi project.

From class PutElasticsearchHttpRecord, method writeRecord.

/**
 * Serializes the given Record as a JSON object via the supplied generator.
 * Each field of the record's own schema is written in schema order; null
 * values are either emitted as explicit JSON nulls or suppressed, depending
 * on the configured {@code nullSuppression} strategy.
 *
 * @param record      the record to serialize
 * @param writeSchema the target write schema (not consulted in this method as
 *                    written; the record's own schema drives iteration — kept
 *                    for interface compatibility)
 * @param generator   the Jackson generator to write JSON to
 * @throws IOException if the generator fails to write
 */
private void writeRecord(final Record record, final RecordSchema writeSchema, final JsonGenerator generator) throws IOException {
    RecordSchema schema = record.getSchema();
    generator.writeStartObject();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        final RecordField field = schema.getField(i);
        final String fieldName = field.getFieldName();
        final Object value = record.getValue(field);
        if (value == null) {
            // Emit an explicit JSON null when nulls are never suppressed, or when
            // only *missing* fields are suppressed and this field is actually
            // present in the raw record. (The original condition's parentheses
            // were misleading; && binds tighter than ||, so this grouping is the
            // behavior it already had — now stated explicitly.)
            final boolean neverSuppress = nullSuppression.equals(NEVER_SUPPRESS.getValue());
            final boolean suppressMissingOnly = nullSuppression.equals(SUPPRESS_MISSING.getValue());
            if (neverSuppress || (suppressMissingOnly && record.getRawFieldNames().contains(fieldName))) {
                generator.writeNullField(fieldName);
            }
            continue;
        }
        generator.writeFieldName(fieldName);
        // Schema is the record's own schema, so the field name must resolve here.
        final DataType dataType = schema.getDataType(fieldName).get();
        writeValue(generator, value, fieldName, dataType);
    }
    generator.writeEndObject();
}
Also used : RecordField(org.apache.nifi.serialization.record.RecordField) DataType(org.apache.nifi.serialization.record.DataType) ChoiceDataType(org.apache.nifi.serialization.record.type.ChoiceDataType) MapDataType(org.apache.nifi.serialization.record.type.MapDataType) ArrayDataType(org.apache.nifi.serialization.record.type.ArrayDataType) RecordDataType(org.apache.nifi.serialization.record.type.RecordDataType) RecordSchema(org.apache.nifi.serialization.record.RecordSchema)

Example 3 with RecordField

Use of org.apache.nifi.serialization.record.RecordField in the Apache NiFi project.

From class MockRecordParser, method createRecordReader.

/**
 * Creates a RecordReader that parses comma-separated lines from the given
 * stream into MapRecords using the parser's configured {@code fields}.
 * If {@code failAfterN} is non-negative, the reader throws a
 * MalformedRecordException once that many records have been read, to let
 * tests exercise failure handling.
 *
 * @param variables flow-file variables (unused by this mock)
 * @param in        the stream to read lines from
 * @param logger    component logger (unused by this mock)
 * @return a line-oriented mock RecordReader
 */
@Override
public RecordReader createRecordReader(Map<String, String> variables, InputStream in, ComponentLog logger) throws IOException, SchemaNotFoundException {
    // NOTE(review): platform default charset; consider UTF_8 explicitly — confirm test expectations.
    final BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    return new RecordReader() {

        private int recordCount = 0;

        @Override
        public void close() throws IOException {
        }

        @Override
        public Record nextRecord(boolean coerceTypes, boolean dropUnknown) throws IOException, MalformedRecordException, SchemaValidationException {
            // Fail only once the configured number of records has actually been read.
            // The original check (failAfterN >= recordCount) threw on the very first
            // call for any non-negative failAfterN — before reading anything — which
            // contradicts the exception message below.
            if (failAfterN >= 0 && recordCount >= failAfterN) {
                throw new MalformedRecordException("Intentional Unit Test Exception because " + recordCount + " records have been read");
            }
            final String line = reader.readLine();
            if (line == null) {
                // End of input.
                return null;
            }
            recordCount++;
            final String[] values = line.split(",");
            final Map<String, Object> valueMap = new HashMap<>();
            // Map each configured field, positionally, to the trimmed CSV value.
            int i = 0;
            for (final RecordField field : fields) {
                final String fieldName = field.getFieldName();
                valueMap.put(fieldName, values[i++].trim());
            }
            return new MapRecord(new SimpleRecordSchema(fields), valueMap);
        }

        @Override
        public RecordSchema getSchema() {
            return new SimpleRecordSchema(fields);
        }
    };
}
Also used : SimpleRecordSchema(org.apache.nifi.serialization.SimpleRecordSchema) MapRecord(org.apache.nifi.serialization.record.MapRecord) RecordField(org.apache.nifi.serialization.record.RecordField) InputStreamReader(java.io.InputStreamReader) HashMap(java.util.HashMap) RecordReader(org.apache.nifi.serialization.RecordReader) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) BufferedReader(java.io.BufferedReader)

Example 4 with RecordField

Use of org.apache.nifi.serialization.record.RecordField in the Apache NiFi project.

From class PutParquetTest, method configure.

/**
 * Sets up the TestRunner for the given PutParquet processor and wires in a
 * mock record reader whose schema mirrors the Avro {@code schema} under test,
 * pre-loaded with {@code numUsers} synthetic user records.
 *
 * @param putParquet the processor instance to run
 * @param numUsers   how many synthetic records to seed the reader with
 * @throws InitializationException if the controller service cannot be added
 */
private void configure(final PutParquet putParquet, final int numUsers) throws InitializationException {
    testRunner = TestRunners.newTestRunner(putParquet);
    testRunner.setProperty(PutParquet.HADOOP_CONFIGURATION_RESOURCES, TEST_CONF_PATH);
    testRunner.setProperty(PutParquet.DIRECTORY, DIRECTORY);

    // Mirror the Avro schema's fields into the mock reader's schema.
    readerFactory = new MockRecordParser();
    final RecordSchema recordSchema = AvroTypeUtil.createSchema(schema);
    recordSchema.getFields()
            .forEach(f -> readerFactory.addSchemaField(f.getFieldName(), f.getDataType().getFieldType()));

    // Seed one synthetic (name, id, favorite-color) record per requested user.
    for (int user = 0; user < numUsers; user++) {
        readerFactory.addRecord("name" + user, user, "blue" + user);
    }

    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");
}
Also used : RecordField(org.apache.nifi.serialization.record.RecordField) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) MockRecordParser(org.apache.nifi.serialization.record.MockRecordParser)

Example 5 with RecordField

Use of org.apache.nifi.serialization.record.RecordField in the Apache NiFi project.

From class PutDatabaseRecord, method generateUpdate.

/**
 * Builds a parameterized UPDATE statement for the given record schema against
 * the given table. Columns that are NOT update keys go into the SET clause;
 * columns that ARE update keys go into the WHERE clause. The returned object
 * carries the SQL text plus the record-field indices (in clause order) that
 * supply the statement's ? parameters.
 *
 * @param recordSchema schema of the incoming record
 * @param tableName    target table name
 * @param updateKeys   comma-separated update-key column names, or null to use
 *                     the table's primary key columns
 * @param tableSchema  metadata for the target table
 * @param settings     DML options (quoting, escaping, unmatched-column policy)
 * @return the UPDATE SQL and the included record-field indices
 * @throws SQLIntegrityConstraintViolationException if no update keys can be determined
 * @throws MalformedRecordException if a required key column is missing and failUnmappedColumns is set
 * @throws SQLDataException if a record field maps to no column and ignoreUnmappedFields is unset
 */
SqlAndIncludedColumns generateUpdate(final RecordSchema recordSchema, final String tableName, final String updateKeys, final TableSchema tableSchema, final DMLSettings settings) throws IllegalArgumentException, MalformedRecordException, SQLException {
    // Determine the update-key column names: explicit property, else the table's PK.
    final Set<String> updateKeyNames;
    if (updateKeys == null) {
        updateKeyNames = tableSchema.getPrimaryKeyColumnNames();
    } else {
        updateKeyNames = new HashSet<>();
        for (final String updateKey : updateKeys.split(",")) {
            updateKeyNames.add(updateKey.trim());
        }
    }
    if (updateKeyNames.isEmpty()) {
        throw new SQLIntegrityConstraintViolationException("Table '" + tableName + "' does not have a Primary Key and no Update Keys were specified");
    }
    final StringBuilder sqlBuilder = new StringBuilder();
    sqlBuilder.append("UPDATE ");
    if (settings.quoteTableName) {
        sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(tableName).append(tableSchema.getQuotedIdentifierString());
    } else {
        sqlBuilder.append(tableName);
    }
    // Create a Set of all normalized Update Key names, and ensure that there is a field in the record
    // for each of the Update Key fields.
    final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames);
    final Set<String> normalizedUpdateNames = new HashSet<>();
    for (final String uk : updateKeyNames) {
        final String normalizedUK = normalizeColumnName(uk, settings.translateFieldNames);
        normalizedUpdateNames.add(normalizedUK);
        if (!normalizedFieldNames.contains(normalizedUK)) {
            // Fixed message spacing: previously rendered as "PrimaryKey column" / "UpdateKey column".
            String missingColMessage = "Record does not have a value for the " + (updateKeys == null ? "Primary" : "Update") + " Key column '" + uk + "'";
            if (settings.failUnmappedColumns) {
                getLogger().error(missingColMessage);
                throw new MalformedRecordException(missingColMessage);
            } else if (settings.warningUnmappedColumns) {
                getLogger().warn(missingColMessage);
            }
        }
    }
    // Iterate over all of the fields in the record, building the SQL statement by adding the column names.
    List<String> fieldNames = recordSchema.getFieldNames();
    final List<Integer> includedColumns = new ArrayList<>();
    if (fieldNames != null) {
        sqlBuilder.append(" SET ");
        int fieldCount = fieldNames.size();
        AtomicInteger fieldsFound = new AtomicInteger(0);
        for (int i = 0; i < fieldCount; i++) {
            RecordField field = recordSchema.getField(i);
            String fieldName = field.getFieldName();
            // Normalize once and reuse (was computed twice per iteration).
            final String normalizedColName = normalizeColumnName(fieldName, settings.translateFieldNames);
            final ColumnDescription desc = tableSchema.getColumns().get(normalizedColName);
            if (desc == null) {
                if (!settings.ignoreUnmappedFields) {
                    throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database");
                } else {
                    continue;
                }
            }
            // Update-key columns are skipped here; they are emitted in the WHERE
            // clause after the SET clause is finished.
            if (!normalizedUpdateNames.contains(normalizedColName)) {
                if (fieldsFound.getAndIncrement() > 0) {
                    sqlBuilder.append(", ");
                }
                if (settings.escapeColumnNames) {
                    sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(desc.getColumnName()).append(tableSchema.getQuotedIdentifierString());
                } else {
                    sqlBuilder.append(desc.getColumnName());
                }
                sqlBuilder.append(" = ?");
                includedColumns.add(i);
            }
        }
        // Set the WHERE clause based on the Update Key values.
        sqlBuilder.append(" WHERE ");
        AtomicInteger whereFieldCount = new AtomicInteger(0);
        for (int i = 0; i < fieldCount; i++) {
            RecordField field = recordSchema.getField(i);
            String fieldName = field.getFieldName();
            final String normalizedColName = normalizeColumnName(fieldName, settings.translateFieldNames);
            final ColumnDescription desc = tableSchema.getColumns().get(normalizedColName);
            if (desc != null) {
                // Check if this column is an Update Key. If so, add it to the WHERE clause.
                if (normalizedUpdateNames.contains(normalizedColName)) {
                    if (whereFieldCount.getAndIncrement() > 0) {
                        sqlBuilder.append(" AND ");
                    }
                    if (settings.escapeColumnNames) {
                        // NOTE(review): WHERE quotes the normalized name while SET quotes
                        // desc.getColumnName() — preserved as-is; confirm this asymmetry is intended.
                        sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(normalizedColName).append(tableSchema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(normalizedColName);
                    }
                    sqlBuilder.append(" = ?");
                    includedColumns.add(i);
                }
            }
        }
    }
    return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
}
Also used : RecordField(org.apache.nifi.serialization.record.RecordField) ArrayList(java.util.ArrayList) MalformedRecordException(org.apache.nifi.serialization.MalformedRecordException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) SQLDataException(java.sql.SQLDataException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) SQLIntegrityConstraintViolationException(java.sql.SQLIntegrityConstraintViolationException) HashSet(java.util.HashSet)

Aggregations

RecordField (org.apache.nifi.serialization.record.RecordField)173 SimpleRecordSchema (org.apache.nifi.serialization.SimpleRecordSchema)133 RecordSchema (org.apache.nifi.serialization.record.RecordSchema)130 ArrayList (java.util.ArrayList)116 Test (org.junit.Test)108 Record (org.apache.nifi.serialization.record.Record)97 MapRecord (org.apache.nifi.serialization.record.MapRecord)73 HashMap (java.util.HashMap)52 InputStream (java.io.InputStream)48 FileInputStream (java.io.FileInputStream)44 ByteArrayInputStream (java.io.ByteArrayInputStream)43 ComponentLog (org.apache.nifi.logging.ComponentLog)39 DataType (org.apache.nifi.serialization.record.DataType)37 LinkedHashMap (java.util.LinkedHashMap)36 File (java.io.File)21 ByteArrayOutputStream (java.io.ByteArrayOutputStream)20 SchemaNameAsAttribute (org.apache.nifi.schema.access.SchemaNameAsAttribute)17 RecordDataType (org.apache.nifi.serialization.record.type.RecordDataType)17 Schema (org.apache.avro.Schema)16 RecordFieldType (org.apache.nifi.serialization.record.RecordFieldType)16