Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
From class PutDatabaseRecord, method generateDelete:
SqlAndIncludedColumns generateDelete(final RecordSchema recordSchema, final String tableName, final TableSchema tableSchema, final DMLSettings settings)
        throws IllegalArgumentException, MalformedRecordException, SQLDataException {
    final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames);
    for (final String requiredColName : tableSchema.getRequiredColumnNames()) {
        final String normalizedColName = normalizeColumnName(requiredColName, settings.translateFieldNames);
        if (!normalizedFieldNames.contains(normalizedColName)) {
            String missingColMessage = "Record does not have a value for the Required column '" + requiredColName + "'";
            if (settings.failUnmappedColumns) {
                getLogger().error(missingColMessage);
                throw new MalformedRecordException(missingColMessage);
            } else if (settings.warningUnmappedColumns) {
                getLogger().warn(missingColMessage);
            }
        }
    }
    final StringBuilder sqlBuilder = new StringBuilder();
    sqlBuilder.append("DELETE FROM ");
    if (settings.quoteTableName) {
        sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(tableName).append(tableSchema.getQuotedIdentifierString());
    } else {
        sqlBuilder.append(tableName);
    }
    // Iterate over all of the fields in the record, building the SQL statement by adding the column names
    List<String> fieldNames = recordSchema.getFieldNames();
    final List<Integer> includedColumns = new ArrayList<>();
    if (fieldNames != null) {
        sqlBuilder.append(" WHERE ");
        int fieldCount = fieldNames.size();
        AtomicInteger fieldsFound = new AtomicInteger(0);
        for (int i = 0; i < fieldCount; i++) {
            RecordField field = recordSchema.getField(i);
            String fieldName = field.getFieldName();
            final ColumnDescription desc = tableSchema.getColumns().get(normalizeColumnName(fieldName, settings.translateFieldNames));
            if (desc == null && !settings.ignoreUnmappedFields) {
                throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database");
            }
            if (desc != null) {
                if (fieldsFound.getAndIncrement() > 0) {
                    sqlBuilder.append(" AND ");
                }
                String columnName;
                if (settings.escapeColumnNames) {
                    columnName = tableSchema.getQuotedIdentifierString() + desc.getColumnName() + tableSchema.getQuotedIdentifierString();
                } else {
                    columnName = desc.getColumnName();
                }
                // Need a null-safe construct for the WHERE clause, since we are using a PreparedStatement and won't know whether the values are null.
                // If a value is null, the filter must be "column IS NULL" rather than "column = null", so we use the following construct (from NIFI-3742):
                // (column = ? OR (column is null AND ? is null))
                sqlBuilder.append("(");
                sqlBuilder.append(columnName);
                sqlBuilder.append(" = ? OR (");
                sqlBuilder.append(columnName);
                sqlBuilder.append(" is null AND ? is null))");
                includedColumns.add(i);
            }
        }
        if (fieldsFound.get() == 0) {
            throw new SQLDataException("None of the fields in the record map to the columns defined by the " + tableName + " table");
        }
    }
    return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
}
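The null-safe WHERE construct is the interesting part, so here is a minimal, self-contained sketch of the SQL it produces. The NullSafeDeleteSketch class and its buildNullSafePredicate helper are illustrative only, not NiFi API, and the users table with id and name columns is made up:

// Illustrative sketch of the null-safe predicate from NIFI-3742; not NiFi code.
public class NullSafeDeleteSketch {

    static String buildNullSafePredicate(final String columnName) {
        // "col = ?" alone misses NULLs, because "col = NULL" is never true in SQL.
        // Each column therefore gets a second, null-matching branch and a second bind parameter.
        return "(" + columnName + " = ? OR (" + columnName + " is null AND ? is null))";
    }

    public static void main(String[] args) {
        final String sql = "DELETE FROM users WHERE "
                + buildNullSafePredicate("id")
                + " AND "
                + buildNullSafePredicate("name");
        System.out.println(sql);
        // DELETE FROM users WHERE (id = ? OR (id is null AND ? is null))
        //                     AND (name = ? OR (name is null AND ? is null))
    }
}

Note that each record field contributes two bind parameters, which is why the method also returns the list of included column indices: the caller must set every value twice when populating the PreparedStatement.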
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
From class UpdateRecord, method updateRecord:
private Record updateRecord(final List<FieldValue> destinationFields, final List<FieldValue> selectedFields, final Record record) {
    if (destinationFields.size() == 1 && !destinationFields.get(0).getParentRecord().isPresent()) {
        final Object replacement = getReplacementObject(selectedFields);
        if (replacement == null) {
            return record;
        }
        if (replacement instanceof Record) {
            return (Record) replacement;
        }
        final List<RecordField> fields = selectedFields.stream().map(FieldValue::getField).collect(Collectors.toList());
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final Record mapRecord = new MapRecord(schema, new HashMap<>());
        for (final FieldValue selectedField : selectedFields) {
            mapRecord.setValue(selectedField.getField().getFieldName(), selectedField.getValue());
        }
        return mapRecord;
    } else {
        for (final FieldValue fieldVal : destinationFields) {
            fieldVal.updateValue(getReplacementObject(selectedFields));
        }
        return record;
    }
}
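The first branch assembles a brand-new record from the selected fields. As a minimal sketch of that assembly pattern using the public record API (the id and name fields and their values are made up for illustration):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;

public class MapRecordSketch {
    public static void main(String[] args) {
        // Two hypothetical fields standing in for the selected FieldValues.
        final List<RecordField> fields = Arrays.asList(
                new RecordField("id", RecordFieldType.INT.getDataType()),
                new RecordField("name", RecordFieldType.STRING.getDataType()));
        final RecordSchema schema = new SimpleRecordSchema(fields);

        // Start from an empty backing map, then set values field by field,
        // just as updateRecord does for each selected field.
        final Record record = new MapRecord(schema, new HashMap<>());
        record.setValue("id", 42);
        record.setValue("name", "example");
        System.out.println(record.getAsString("name")); // example
    }
}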
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
From class SolrUtils, method solrDocumentsToRecordSet:
/**
 * Writes each SolrDocument to a Record, copying only the fields that are present in the given schema.
 */
public static RecordSet solrDocumentsToRecordSet(final List<SolrDocument> docs, final RecordSchema schema) {
    final List<Record> records = new ArrayList<>();
    for (SolrDocument doc : docs) {
        final Map<String, Object> recordValues = new LinkedHashMap<>();
        for (RecordField field : schema.getFields()) {
            final Object fieldValue = doc.getFieldValue(field.getFieldName());
            if (fieldValue != null) {
                if (field.getDataType().getFieldType().equals(RecordFieldType.ARRAY)) {
                    // Solr returns multi-valued fields as a List; the record API expects an array
                    recordValues.put(field.getFieldName(), ((List<Object>) fieldValue).toArray());
                } else {
                    recordValues.put(field.getFieldName(), fieldValue);
                }
            }
        }
        records.add(new MapRecord(schema, recordValues));
    }
    return new ListRecordSet(schema, records);
}
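A minimal caller-side sketch, assuming SolrUtils from the NiFi Solr bundle (org.apache.nifi.processors.solr) and the SolrJ client are on the classpath. The document fields and schema here are made up; in practice the documents come from a SolrJ QueryResponse:

import java.util.Arrays;

import org.apache.nifi.processors.solr.SolrUtils; // assumed package for the NiFi Solr bundle
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.RecordSet;
import org.apache.solr.common.SolrDocument;

public class SolrToRecordSketch {
    public static void main(String[] args) throws Exception {
        // A hypothetical document standing in for real query results.
        final SolrDocument doc = new SolrDocument();
        doc.setField("title", "example");
        doc.setField("tags", Arrays.asList("a", "b"));

        // Only fields named in the schema are copied into the record;
        // the multi-valued "tags" field is converted from List to array.
        final RecordSchema schema = new SimpleRecordSchema(Arrays.asList(
                new RecordField("title", RecordFieldType.STRING.getDataType()),
                new RecordField("tags", RecordFieldType.ARRAY.getDataType())));

        final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(Arrays.asList(doc), schema);
        System.out.println(recordSet.next().getValue("title")); // example
    }
}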
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
From class HBase_1_1_2_RecordLookupService, method lookup:
@Override
public Optional<Record> lookup(Map<String, Object> coordinates) throws LookupFailureException {
    if (coordinates.get(ROW_KEY_KEY) == null) {
        return Optional.empty();
    }
    final String rowKey = coordinates.get(ROW_KEY_KEY).toString();
    if (StringUtils.isBlank(rowKey)) {
        return Optional.empty();
    }
    final byte[] rowKeyBytes = rowKey.getBytes(StandardCharsets.UTF_8);
    try {
        final Map<String, Object> values = new HashMap<>();
        // Scan exactly one row: the start and end row keys are identical
        hBaseClientService.scan(tableName, rowKeyBytes, rowKeyBytes, columns, (byte[] row, ResultCell[] resultCells) -> {
            for (final ResultCell cell : resultCells) {
                final byte[] qualifier = Arrays.copyOfRange(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierOffset() + cell.getQualifierLength());
                final byte[] value = Arrays.copyOfRange(cell.getValueArray(), cell.getValueOffset(), cell.getValueOffset() + cell.getValueLength());
                values.put(new String(qualifier, charset), new String(value, charset));
            }
        });
        if (values.size() > 0) {
            // Build a schema on the fly: every column qualifier becomes a string field
            final List<RecordField> fields = new ArrayList<>();
            for (String key : values.keySet()) {
                fields.add(new RecordField(key, RecordFieldType.STRING.getDataType()));
            }
            final RecordSchema schema = new SimpleRecordSchema(fields);
            return Optional.ofNullable(new MapRecord(schema, values));
        } else {
            return Optional.empty();
        }
    } catch (IOException e) {
        getLogger().error("Error occurred loading {}", new Object[] { coordinates.get(ROW_KEY_KEY) }, e);
        throw new LookupFailureException(e);
    }
}
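A caller-side sketch of the lookup contract, assuming the LookupService interface in this NiFi version accepts Map<String, Object> coordinates (as the method above does) and that the ROW_KEY_KEY constant is the string "rowKey". The lookupRow helper is hypothetical:

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import org.apache.nifi.lookup.LookupFailureException;
import org.apache.nifi.lookup.LookupService;
import org.apache.nifi.serialization.record.Record;

public class HBaseLookupSketch {

    // Illustrative helper: lookupService stands in for a configured
    // HBase_1_1_2_RecordLookupService obtained from the controller-service registry.
    static Optional<Record> lookupRow(final LookupService<Record> lookupService, final String rowKey)
            throws LookupFailureException {
        final Map<String, Object> coordinates = new HashMap<>();
        coordinates.put("rowKey", rowKey); // ROW_KEY_KEY is assumed to be "rowKey"
        return lookupService.lookup(coordinates);
    }
}

A null or blank row key yields Optional.empty() rather than an exception; only an IOException from the underlying scan surfaces as a LookupFailureException.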
Use of org.apache.nifi.serialization.record.RecordField in project nifi by apache.
From class CSVRecordLookupService, method loadCache:
private void loadCache() throws IllegalStateException, IOException {
    if (lock.tryLock()) {
        try {
            final ComponentLog logger = getLogger();
            if (logger.isDebugEnabled()) {
                logger.debug("Loading lookup table from file: " + csvFile);
            }
            final ConcurrentHashMap<String, Record> cache = new ConcurrentHashMap<>();
            // Close the reader and parser even if parsing fails part-way through
            try (final FileReader reader = new FileReader(csvFile);
                 final CSVParser records = csvFormat.withFirstRecordAsHeader().parse(reader)) {
                RecordSchema lookupRecordSchema = null;
                for (final CSVRecord record : records) {
                    final String key = record.get(lookupKeyColumn);
                    if (StringUtils.isBlank(key)) {
                        throw new IllegalStateException("Empty lookup key encountered in: " + csvFile);
                    } else if (!ignoreDuplicates && cache.containsKey(key)) {
                        throw new IllegalStateException("Duplicate lookup key encountered: " + key + " in " + csvFile);
                    } else if (ignoreDuplicates && cache.containsKey(key)) {
                        logger.warn("Duplicate lookup key encountered: {} in {}", new Object[] { key, csvFile });
                    }
                    // Put each key/value pair (except the lookup key itself) into the record's properties
                    final Map<String, Object> properties = new HashMap<>();
                    record.toMap().forEach((k, v) -> {
                        if (!lookupKeyColumn.equals(k)) {
                            properties.put(k, v);
                        }
                    });
                    // Derive the record schema from the first row; every column is treated as a string
                    if (lookupRecordSchema == null) {
                        final List<RecordField> recordFields = new ArrayList<>(properties.size());
                        properties.forEach((k, v) -> recordFields.add(new RecordField(k, RecordFieldType.STRING.getDataType())));
                        lookupRecordSchema = new SimpleRecordSchema(recordFields);
                    }
                    cache.put(key, new MapRecord(lookupRecordSchema, properties));
                }
            }
            this.cache = cache;
            if (cache.isEmpty()) {
                logger.warn("Lookup table is empty after reading file: " + csvFile);
            }
        } finally {
            lock.unlock();
        }
    }
}
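The same parse-and-cache pattern can be exercised outside NiFi with nothing but commons-csv. This is a standalone sketch; the inline CSV, the "key" column name, and the CsvCacheSketch class are all made up for illustration:

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvCacheSketch {
    public static void main(String[] args) throws IOException {
        // Inline CSV stands in for the service's configured csvFile.
        final String csv = "key,city,zip\nfoo,Rome,00100\nbar,Oslo,0150\n";
        final Map<String, Map<String, String>> cache = new HashMap<>();
        try (Reader in = new StringReader(csv);
             CSVParser parser = CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(in)) {
            for (final CSVRecord record : parser) {
                final Map<String, String> row = new HashMap<>(record.toMap());
                final String key = row.remove("key"); // drop the lookup column, keep the rest
                cache.put(key, row);
            }
        }
        System.out.println(cache.get("foo").get("city")); // Rome
    }
}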