Usage example of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project, taken from the class TestLookupRecord, method testAddFieldsToNonRecordField.
/**
 * If the output fields are added to a non-record field, then the result should be that the field
 * becomes a UNION that does allow the Record and the value is set to a Record.
 */
@Test
public void testAddFieldsToNonRecordField() throws InitializationException {
    // Register and enable a lookup service whose lookup result is a Record.
    final RecordLookup lookupService = new RecordLookup();
    runner.addControllerService("lookup", lookupService);
    runner.enableControllerService(lookupService);

    // Build the Record that the lookup will return: two String fields.
    final List<RecordField> sportFields = new ArrayList<>();
    sportFields.add(new RecordField("favorite", RecordFieldType.STRING.getDataType()));
    sportFields.add(new RecordField("least", RecordFieldType.STRING.getDataType()));
    final RecordSchema sportsSchema = new SimpleRecordSchema(sportFields);

    final Record sportsRecord = new MapRecord(sportsSchema, new HashMap<String, Object>());
    sportsRecord.setValue("favorite", "basketball");
    sportsRecord.setValue("least", "soccer");
    lookupService.addValue("John Doe", sportsRecord);

    // Incoming record has a plain String "sport" field (initially null) that the
    // lookup result will be written into.
    recordReader = new MockRecordParser();
    recordReader.addSchemaField("name", RecordFieldType.STRING);
    recordReader.addSchemaField("age", RecordFieldType.INT);
    recordReader.addSchemaField("sport", RecordFieldType.STRING);
    recordReader.addRecord("John Doe", 48, null);

    runner.addControllerService("reader", recordReader);
    runner.enableControllerService(recordReader);

    // Look up by /name and write the result's record fields into /sport.
    runner.setProperty("lookup", "/name");
    runner.setProperty(LookupRecord.RESULT_RECORD_PATH, "/sport");
    runner.setProperty(LookupRecord.RESULT_CONTENTS, LookupRecord.RESULT_RECORD_FIELDS);

    runner.enqueue("");
    runner.run();

    final MockFlowFile out = runner.getFlowFilesForRelationship(LookupRecord.REL_MATCHED).get(0);

    // We can't be sure of the order of the fields in the record, so we allow either 'least' or 'favorite' to be first
    final String outputContents = new String(out.toByteArray());
    assertTrue(outputContents.equals("John Doe,48,MapRecord[{favorite=basketball, least=soccer}]\n") || outputContents.equals("John Doe,48,MapRecord[{least=soccer, favorite=basketball}]\n"));
}
Usage example of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project, taken from the class IPLookupService, method createRecord.
/**
 * Builds a Record conforming to {@code CitySchema.GEO_SCHEMA} from a city lookup response.
 * Nested country and subdivision values are converted recursively via the sibling
 * createRecord overloads.
 *
 * @param city the city response to convert; may be null
 * @return the populated Record, or null when no response was given
 */
private Record createRecord(final CityResponse city) {
    if (city == null) {
        return null;
    }

    final Map<String, Object> fieldValues = new HashMap<>();
    fieldValues.put(CitySchema.CITY.getFieldName(), city.getCity().getName());

    final Location cityLocation = city.getLocation();
    fieldValues.put(CitySchema.ACCURACY.getFieldName(), cityLocation.getAccuracyRadius());
    fieldValues.put(CitySchema.METRO_CODE.getFieldName(), cityLocation.getMetroCode());
    fieldValues.put(CitySchema.TIMEZONE.getFieldName(), cityLocation.getTimeZone());
    fieldValues.put(CitySchema.LATITUDE.getFieldName(), cityLocation.getLatitude());
    fieldValues.put(CitySchema.LONGITUDE.getFieldName(), cityLocation.getLongitude());

    fieldValues.put(CitySchema.CONTINENT.getFieldName(), city.getContinent().getName());
    fieldValues.put(CitySchema.POSTALCODE.getFieldName(), city.getPostal().getCode());
    fieldValues.put(CitySchema.COUNTRY.getFieldName(), createRecord(city.getCountry()));

    // Convert each subdivision into its own nested Record.
    final Object[] subdivisionRecords = new Object[city.getSubdivisions().size()];
    int index = 0;
    for (final Subdivision subdivision : city.getSubdivisions()) {
        subdivisionRecords[index++] = createRecord(subdivision);
    }
    fieldValues.put(CitySchema.SUBDIVISIONS.getFieldName(), subdivisionRecords);

    return new MapRecord(CitySchema.GEO_SCHEMA, fieldValues);
}
Usage example of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project, taken from the class AvroRecordReader, method nextRecord.
@Override
public Record nextRecord(final boolean coerceTypes, final boolean dropUnknownFields) throws IOException, MalformedRecordException {
    // Pull the next raw Avro record; null signals end of input.
    final GenericRecord avroRecord = nextAvroRecord();
    if (avroRecord == null) {
        return null;
    }

    // Translate the Avro record into a field-name -> value map and wrap it as a MapRecord.
    final RecordSchema recordSchema = getSchema();
    final Map<String, Object> fieldValues = AvroTypeUtil.convertAvroRecordToMap(avroRecord, recordSchema);
    return new MapRecord(recordSchema, fieldValues);
}
Usage example of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project, taken from the class JacksonCSVRecordReader, method nextRecord.
/**
 * Returns the next CSV row as a Record, or null when the stream is exhausted.
 * Lazily captures the header row (or falls back to the schema's field names),
 * skips empty lines, and maps each column to a value — coercing to the schema's
 * type when requested. Columns beyond the known field names are kept under a
 * synthetic "unknown_field_index_N" name.
 */
@Override
public Record nextRecord(final boolean coerceTypes, final boolean dropUnknownFields) throws IOException, MalformedRecordException {
    final RecordSchema schema = getSchema();

    if (recordStream.hasNext()) {
        String[] csvRecord = recordStream.next();

        // If the first record is the header names (and we're using them), store those off for use in creating the value map on the next iterations
        if (rawFieldNames == null) {
            if (!hasHeader || ignoreHeader) {
                rawFieldNames = schema.getFieldNames();
            } else {
                rawFieldNames = Arrays.asList(csvRecord);

                // Advance the stream to keep the record count correct
                if (recordStream.hasNext()) {
                    csvRecord = recordStream.next();
                } else {
                    return null;
                }
            }
        }

        // Check for empty lines and ignore them
        boolean foundRecord = true;
        if (csvRecord == null || (csvRecord.length == 1 && StringUtils.isEmpty(csvRecord[0]))) {
            foundRecord = false;
            while (recordStream.hasNext()) {
                csvRecord = recordStream.next();

                if (csvRecord != null && !(csvRecord.length == 1 && StringUtils.isEmpty(csvRecord[0]))) {
                    // This is a non-empty record/row, so continue processing
                    foundRecord = true;
                    break;
                }
            }
        }

        // If we didn't find a record, then the end of the file was comprised of empty lines, so we have no record to return
        if (!foundRecord) {
            return null;
        }

        final Map<String, Object> values = new HashMap<>(rawFieldNames.size() * 2);
        final int numFieldNames = rawFieldNames.size();
        for (int i = 0; i < csvRecord.length; i++) {
            final String rawFieldName = numFieldNames <= i ? "unknown_field_index_" + i : rawFieldNames.get(i);
            // Fix: the original guarded this with "(i >= csvRecord.length) ? null : csvRecord[i]",
            // but the loop condition already guarantees i < csvRecord.length, so the guard was dead code.
            final String rawValue = csvRecord[i];

            final Optional<DataType> dataTypeOption = schema.getDataType(rawFieldName);
            if (!dataTypeOption.isPresent() && dropUnknownFields) {
                continue;
            }

            final Object value;
            if (coerceTypes && dataTypeOption.isPresent()) {
                value = convert(rawValue, dataTypeOption.get(), rawFieldName);
            } else if (dataTypeOption.isPresent()) {
                // The CSV Reader is going to return all fields as Strings, because CSV doesn't have any way to
                // dictate a field type. As a result, we will use the schema that we have to attempt to convert
                // the value into the desired type if it's a simple type.
                value = convertSimpleIfPossible(rawValue, dataTypeOption.get(), rawFieldName);
            } else {
                value = rawValue;
            }

            values.put(rawFieldName, value);
        }

        return new MapRecord(schema, values, coerceTypes, dropUnknownFields);
    }

    return null;
}
Usage example of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project, taken from the class JsonPathRowRecordReader, method convert.
/**
 * Coerces a raw JSON-path value into the requested DataType. Lists are converted
 * element-by-element into an Object array; Maps destined for RECORD types become
 * nested MapRecords (keys absent from the child schema are dropped); everything
 * else is delegated to DataTypeUtils.
 *
 * @param value        the raw value to convert; null yields defaultValue
 * @param dataType     the target type
 * @param fieldName    dotted path of the field, used in error messages and recursion
 * @param defaultValue returned when value is null
 */
@SuppressWarnings("unchecked")
protected Object convert(final Object value, final DataType dataType, final String fieldName, final Object defaultValue) {
    if (value == null) {
        return defaultValue;
    }

    if (value instanceof List) {
        // A JSON array may only be coerced into an ARRAY type.
        if (dataType.getFieldType() != RecordFieldType.ARRAY) {
            throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type Array to " + dataType);
        }

        final ArrayDataType arrayType = (ArrayDataType) dataType;
        final List<?> elements = (List<?>) value;
        final Object[] converted = new Object[elements.size()];
        int index = 0;
        for (final Object element : elements) {
            converted[index++] = convert(element, arrayType.getElementType(), fieldName, null);
        }
        return converted;
    }

    if (dataType.getFieldType() == RecordFieldType.RECORD && value instanceof Map) {
        final RecordDataType recordDataType = (RecordDataType) dataType;
        final RecordSchema childSchema = recordDataType.getChildSchema();
        final Map<String, Object> rawValues = (Map<String, Object>) value;
        final Map<String, Object> childValues = new HashMap<>();

        // Only keys declared in the child schema are carried over; others are dropped.
        for (final Map.Entry<String, Object> entry : rawValues.entrySet()) {
            final String childName = entry.getKey();
            final Optional<DataType> childType = childSchema.getDataType(childName);
            if (!childType.isPresent()) {
                continue;
            }

            final Optional<RecordField> childField = childSchema.getField(childName);
            final Object childDefault = childField.isPresent() ? childField.get().getDefaultValue() : null;
            childValues.put(childName, convert(entry.getValue(), childType.get(), fieldName + "." + childName, childDefault));
        }

        return new MapRecord(childSchema, childValues);
    }

    return DataTypeUtils.convertType(value, dataType, LAZY_DATE_FORMAT, LAZY_TIME_FORMAT, LAZY_TIMESTAMP_FORMAT, fieldName);
}
Aggregated usage examples of MapRecord in the Apache NiFi project.