Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project:
class TestConsumeAzureEventHub, method toRecord.
/**
 * Builds a single-field Record holding the given string under the key "value".
 * The schema is {value: STRING}.
 */
private Record toRecord(String value) {
    final RecordField valueField = new RecordField("value", RecordFieldType.STRING.getDataType());
    final SimpleRecordSchema schema = new SimpleRecordSchema(Collections.singletonList(valueField));
    final Map<String, Object> values = new HashMap<>();
    values.put("value", value);
    return new MapRecord(schema, values);
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project:
class PutElasticsearchHttpRecordIT, method testMissingRecord.
/**
 * Verifies SUPPRESS_MISSING behavior: a field that is absent from a record is
 * suppressed in the generated document, while a field explicitly present with a
 * null value is kept (as null).
 *
 * Note: the original used double-brace initialization (anonymous HashMap
 * subclasses), which captures a reference to the enclosing test instance and
 * breaks equals()/serialization — replaced with plain map population.
 */
@Test
public void testMissingRecord() throws Exception {
    // Record 1: "sport" is missing entirely.
    final Map<String, Object> personWithoutSport = new HashMap<>();
    personWithoutSport.put("name", "John Doe");
    personWithoutSport.put("age", 48);
    recordReader.addRecord(1, new MapRecord(personSchema, personWithoutSport));

    // Record 2: "sport" is present but explicitly null.
    final Map<String, Object> personWithNullSport = new HashMap<>();
    personWithNullSport.put("name", "John Doe");
    personWithNullSport.put("age", 48);
    personWithNullSport.put("sport", null);
    recordReader.addRecord(2, new MapRecord(personSchema, personWithNullSport));

    runner.setProperty(PutElasticsearchHttpRecord.SUPPRESS_NULLS, PutElasticsearchHttpRecord.SUPPRESS_MISSING);
    sharedSuppressTest((p1, p2) -> {
        // Missing field must be suppressed; explicit null must survive as null.
        Assert.assertFalse(p1.containsKey("sport"));
        Assert.assertTrue(p2.containsKey("sport"));
        Assert.assertNull(p2.get("sport"));
    });
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project:
class MockRecordParser, method createRecordReader.
/**
 * Creates a mock RecordReader that parses comma-separated lines from the given
 * stream, mapping the i-th value on each line to the i-th configured field.
 *
 * Fault injection: if {@code failAfterN} is non-negative, the reader throws a
 * MalformedRecordException once that many records have been read.
 *
 * @param variables unused flow-file attributes
 * @param in        stream of comma-separated lines, one record per line
 * @param logger    unused component logger
 * @return a RecordReader over the stream
 */
@Override
public RecordReader createRecordReader(Map<String, String> variables, InputStream in, ComponentLog logger) throws IOException, SchemaNotFoundException {
    // NOTE(review): no explicit charset — uses the platform default; confirm
    // whether tests rely on UTF-8 before changing.
    final BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    return new RecordReader() {
        private int recordCount = 0;

        @Override
        public void close() throws IOException {
            // Close the wrapped reader so the underlying InputStream is released
            // (the original left this empty, leaking the stream).
            reader.close();
        }

        @Override
        public Record nextRecord(boolean coerceTypes, boolean dropUnknown) throws IOException, MalformedRecordException, SchemaValidationException {
            // Fail only AFTER failAfterN records have been read; failAfterN < 0
            // disables fault injection. The original condition
            // (failAfterN >= recordCount) fired before any record was read,
            // contradicting the exception message below.
            if (failAfterN > -1 && recordCount >= failAfterN) {
                throw new MalformedRecordException("Intentional Unit Test Exception because " + recordCount + " records have been read");
            }
            final String line = reader.readLine();
            if (line == null) {
                return null;
            }
            recordCount++;
            // One CSV line per record; field order must match the configured schema.
            // NOTE(review): a short line throws ArrayIndexOutOfBoundsException —
            // kept as-is since tests may depend on that.
            final String[] values = line.split(",");
            final Map<String, Object> valueMap = new HashMap<>();
            int i = 0;
            for (final RecordField field : fields) {
                final String fieldName = field.getFieldName();
                valueMap.put(fieldName, values[i++].trim());
            }
            return new MapRecord(new SimpleRecordSchema(fields), valueMap);
        }

        @Override
        public RecordSchema getSchema() {
            return new SimpleRecordSchema(fields);
        }
    };
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project:
class UpdateRecord, method updateRecord.
/**
 * Applies the selected field values to the destination path(s) of the record.
 *
 * If the destination is the record root itself (a single destination with no
 * parent record), the replacement either swaps in a whole Record, or a fresh
 * MapRecord is built from the selected fields. Otherwise each destination
 * field is updated in place and the original record is returned.
 */
private Record updateRecord(final List<FieldValue> destinationFields, final List<FieldValue> selectedFields, final Record record) {
    final boolean replacingRoot = destinationFields.size() == 1
        && !destinationFields.get(0).getParentRecord().isPresent();

    if (!replacingRoot) {
        // In-place update: the replacement is recomputed per destination,
        // matching the original behavior.
        for (final FieldValue destination : destinationFields) {
            destination.updateValue(getReplacementObject(selectedFields));
        }
        return record;
    }

    final Object replacement = getReplacementObject(selectedFields);
    if (replacement == null) {
        // Nothing to replace with — keep the record unchanged.
        return record;
    }
    if (replacement instanceof Record) {
        // The replacement is already a full record; use it directly.
        return (Record) replacement;
    }

    // Scalar replacement at the root: synthesize a record whose schema is
    // derived from the selected fields.
    final List<RecordField> recordFields = selectedFields.stream()
        .map(FieldValue::getField)
        .collect(Collectors.toList());
    final Record synthesized = new MapRecord(new SimpleRecordSchema(recordFields), new HashMap<>());
    for (final FieldValue selected : selectedFields) {
        synthesized.setValue(selected.getField().getFieldName(), selected.getValue());
    }
    return synthesized;
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project:
class SolrUtils, method solrDocumentsToRecordSet.
/**
 * Converts a list of SolrDocuments into a RecordSet conforming to the given schema.
 *
 * Only fields declared in the schema are copied; fields absent from a document
 * are omitted from its record. ARRAY-typed fields are converted from the List
 * that Solr returns into an Object[].
 *
 * @param docs   the Solr documents to convert
 * @param schema the record schema to project each document onto
 * @return a ListRecordSet containing one record per document
 */
public static RecordSet solrDocumentsToRecordSet(final List<SolrDocument> docs, final RecordSchema schema) {
    final List<Record> records = new ArrayList<Record>();
    for (final SolrDocument document : docs) {
        final Map<String, Object> values = new LinkedHashMap<>();
        for (final RecordField field : schema.getFields()) {
            final String fieldName = field.getFieldName();
            final Object value = document.getFieldValue(fieldName);
            if (value == null) {
                // Absent field: leave it out of the record entirely.
                continue;
            }
            if (RecordFieldType.ARRAY.equals(field.getDataType().getFieldType())) {
                values.put(fieldName, ((List<Object>) value).toArray());
            } else {
                values.put(fieldName, value);
            }
        }
        records.add(new MapRecord(schema, values));
    }
    return new ListRecordSet(schema, records);
}
Aggregations