Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
The class TestConsumeAzureEventHub, method toRecord.
private Record toRecord(String value) {
    final Map<String, Object> map = new HashMap<>();
    map.put("value", value);
    return new MapRecord(new SimpleRecordSchema(
            Collections.singletonList(new RecordField("value", RecordFieldType.STRING.getDataType()))), map);
}
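For reference, a minimal sketch of what calling this helper yields; the assertion-style usage below is illustrative and not taken from the original test:

// Illustrative only: the helper returns a one-field MapRecord whose schema
// declares a single STRING field named "value".
final Record record = toRecord("hello");
assertEquals("hello", record.getValue("value"));       // the value round-trips
assertEquals(1, record.getSchema().getFieldCount());   // schema has exactly one field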
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
The class PutElasticsearchHttpRecord, method writeRecord.
private void writeRecord(final Record record, final RecordSchema writeSchema, final JsonGenerator generator) throws IOException {
    final RecordSchema schema = record.getSchema();

    generator.writeStartObject();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        final RecordField field = schema.getField(i);
        final String fieldName = field.getFieldName();
        final Object value = record.getValue(field);
        if (value == null) {
            // Write an explicit null when nulls are never suppressed, or when only
            // missing fields are suppressed and this field was present (but null)
            // in the raw record; otherwise omit the field entirely.
            if (nullSuppression.equals(NEVER_SUPPRESS.getValue())
                    || (nullSuppression.equals(SUPPRESS_MISSING.getValue()) && record.getRawFieldNames().contains(fieldName))) {
                generator.writeNullField(fieldName);
            }
            continue;
        }

        generator.writeFieldName(fieldName);
        final DataType dataType = schema.getDataType(fieldName).get();
        writeValue(generator, value, fieldName, dataType);
    }
    generator.writeEndObject();
}
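The branching on nullSuppression is easy to misread, so here is a hedged, standalone restatement of the decision it encodes; the literal property-value strings are assumptions for illustration, not confirmed constants from the NiFi source:

// Sketch of the null-handling rule above (value strings assumed):
// "never-suppress"   -> always write an explicit JSON null for a null field;
// "suppress-missing" -> write null only if the raw record contained the field
//                       name, i.e. the field was present but null rather than absent;
// anything else      -> omit the field from the generated JSON entirely.
static boolean shouldWriteExplicitNull(final String nullSuppression,
                                       final boolean presentInRawRecord) {
    return "never-suppress".equals(nullSuppression)
            || ("suppress-missing".equals(nullSuppression) && presentInRawRecord);
}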
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
The class MockRecordParser, method createRecordReader.
@Override
public RecordReader createRecordReader(Map<String, String> variables, InputStream in, ComponentLog logger) throws IOException, SchemaNotFoundException {
    final BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    return new RecordReader() {
        private int recordCount = 0;

        @Override
        public void close() throws IOException {
        }

        @Override
        public Record nextRecord(boolean coerceTypes, boolean dropUnknown) throws IOException, MalformedRecordException, SchemaValidationException {
            // Simulate a parse failure once the configured number of records has been read
            // (a negative failAfterN means "never fail").
            if (failAfterN >= 0 && recordCount >= failAfterN) {
                throw new MalformedRecordException("Intentional Unit Test Exception because " + recordCount + " records have been read");
            }

            final String line = reader.readLine();
            if (line == null) {
                return null;
            }

            recordCount++;

            final String[] values = line.split(",");
            final Map<String, Object> valueMap = new HashMap<>();
            int i = 0;
            for (final RecordField field : fields) {
                final String fieldName = field.getFieldName();
                valueMap.put(fieldName, values[i++].trim());
            }

            return new MapRecord(new SimpleRecordSchema(fields), valueMap);
        }

        @Override
        public RecordSchema getSchema() {
            return new SimpleRecordSchema(fields);
        }
    };
}
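A hedged sketch of driving this reader directly; the two-field schema, the input lines, and the null logger are illustrative assumptions (addSchemaField mirrors how the parser is configured in the PutParquet test below), and it assumes the parser's default is not to fail:

// Illustrative usage: each line of input becomes one MapRecord, with
// comma-separated values assigned to the declared fields in order.
final MockRecordParser parser = new MockRecordParser();
parser.addSchemaField("name", RecordFieldType.STRING);
parser.addSchemaField("age", RecordFieldType.STRING);

final InputStream in = new ByteArrayInputStream("john,30\njane,25".getBytes(StandardCharsets.UTF_8));
final RecordReader reader = parser.createRecordReader(Collections.emptyMap(), in, null);

Record record;
while ((record = reader.nextRecord(true, false)) != null) {
    System.out.println(record.getValue("name") + " / " + record.getValue("age"));
}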
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
The class PutParquetTest, method configure.
private void configure(final PutParquet putParquet, final int numUsers) throws InitializationException {
    testRunner = TestRunners.newTestRunner(putParquet);
    testRunner.setProperty(PutParquet.HADOOP_CONFIGURATION_RESOURCES, TEST_CONF_PATH);
    testRunner.setProperty(PutParquet.DIRECTORY, DIRECTORY);

    readerFactory = new MockRecordParser();
    final RecordSchema recordSchema = AvroTypeUtil.createSchema(schema);
    for (final RecordField recordField : recordSchema.getFields()) {
        readerFactory.addSchemaField(recordField.getFieldName(), recordField.getDataType().getFieldType());
    }

    for (int i = 0; i < numUsers; i++) {
        readerFactory.addRecord("name" + i, i, "blue" + i);
    }

    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");
}
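For orientation, a hedged sketch of how a test method might use configure(...); the trigger flow file and the relationship assertion are assumptions about the surrounding test class, not code from PutParquetTest:

// Hypothetical caller (names assumed):
configure(processor, 10);                 // mock reader will supply 10 user records
testRunner.enqueue(new byte[0]);          // content is ignored; records come from the reader
testRunner.run();
testRunner.assertAllFlowFilesTransferred(PutParquet.REL_SUCCESS, 1);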
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
The class PutDatabaseRecord, method generateUpdate.
SqlAndIncludedColumns generateUpdate(final RecordSchema recordSchema, final String tableName, final String updateKeys,
                                     final TableSchema tableSchema, final DMLSettings settings)
        throws IllegalArgumentException, MalformedRecordException, SQLException {

    final Set<String> updateKeyNames;
    if (updateKeys == null) {
        updateKeyNames = tableSchema.getPrimaryKeyColumnNames();
    } else {
        updateKeyNames = new HashSet<>();
        for (final String updateKey : updateKeys.split(",")) {
            updateKeyNames.add(updateKey.trim());
        }
    }

    if (updateKeyNames.isEmpty()) {
        throw new SQLIntegrityConstraintViolationException("Table '" + tableName + "' does not have a Primary Key and no Update Keys were specified");
    }

    final StringBuilder sqlBuilder = new StringBuilder();
    sqlBuilder.append("UPDATE ");
    if (settings.quoteTableName) {
        sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(tableName).append(tableSchema.getQuotedIdentifierString());
    } else {
        sqlBuilder.append(tableName);
    }

    // Create a Set of all normalized Update Key names, and ensure that there is a field in the record
    // for each of the Update Key fields.
    final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames);
    final Set<String> normalizedUpdateNames = new HashSet<>();
    for (final String uk : updateKeyNames) {
        final String normalizedUK = normalizeColumnName(uk, settings.translateFieldNames);
        normalizedUpdateNames.add(normalizedUK);

        if (!normalizedFieldNames.contains(normalizedUK)) {
            String missingColMessage = "Record does not have a value for the " + (updateKeys == null ? "Primary" : "Update") + " Key column '" + uk + "'";
            if (settings.failUnmappedColumns) {
                getLogger().error(missingColMessage);
                throw new MalformedRecordException(missingColMessage);
            } else if (settings.warningUnmappedColumns) {
                getLogger().warn(missingColMessage);
            }
        }
    }

    // Iterate over all of the fields in the record, building the SQL statement by adding the column names
    List<String> fieldNames = recordSchema.getFieldNames();
    final List<Integer> includedColumns = new ArrayList<>();
    if (fieldNames != null) {
        sqlBuilder.append(" SET ");

        int fieldCount = fieldNames.size();
        AtomicInteger fieldsFound = new AtomicInteger(0);

        for (int i = 0; i < fieldCount; i++) {
            RecordField field = recordSchema.getField(i);
            String fieldName = field.getFieldName();

            final String normalizedColName = normalizeColumnName(fieldName, settings.translateFieldNames);
            final ColumnDescription desc = tableSchema.getColumns().get(normalizedColName);
            if (desc == null) {
                if (!settings.ignoreUnmappedFields) {
                    throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database");
                } else {
                    continue;
                }
            }

            // Check if this column is an Update Key. If so, skip it for now; we will come
            // back to it after we finish the SET clause.
            if (!normalizedUpdateNames.contains(normalizedColName)) {
                if (fieldsFound.getAndIncrement() > 0) {
                    sqlBuilder.append(", ");
                }

                if (settings.escapeColumnNames) {
                    sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(desc.getColumnName()).append(tableSchema.getQuotedIdentifierString());
                } else {
                    sqlBuilder.append(desc.getColumnName());
                }

                sqlBuilder.append(" = ?");
                includedColumns.add(i);
            }
        }

        // Set the WHERE clause based on the Update Key values
        sqlBuilder.append(" WHERE ");
        AtomicInteger whereFieldCount = new AtomicInteger(0);

        for (int i = 0; i < fieldCount; i++) {
            RecordField field = recordSchema.getField(i);
            String fieldName = field.getFieldName();

            final String normalizedColName = normalizeColumnName(fieldName, settings.translateFieldNames);
            final ColumnDescription desc = tableSchema.getColumns().get(normalizedColName);
            if (desc != null) {
                // Check if this column is an Update Key. If so, add it to the WHERE clause
                if (normalizedUpdateNames.contains(normalizedColName)) {
                    if (whereFieldCount.getAndIncrement() > 0) {
                        sqlBuilder.append(" AND ");
                    }

                    if (settings.escapeColumnNames) {
                        sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(normalizedColName).append(tableSchema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(normalizedColName);
                    }
                    sqlBuilder.append(" = ?");
                    includedColumns.add(i);
                }
            }
        }
    }
    return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
}
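To make the contract concrete, here is a hedged example of the output for a three-field record; the table name, column names, and the accessor names on SqlAndIncludedColumns are assumptions for illustration:

// Hypothetical invocation for a record schema (id, name, favorite_color)
// against table "users" with update key "id" and default settings:
final SqlAndIncludedColumns result = generateUpdate(recordSchema, "users", "id", tableSchema, settings);

// Expected shape of the result (sketch):
//   SQL:              UPDATE users SET name = ?, favorite_color = ? WHERE id = ?
//   included columns: [1, 2, 0]  -- SET column indexes first, then the update-key
//                                   index, matching the '?' placeholder order for binding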