Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project: class CSVRecordReader, method nextRecord().
@Override
public Record nextRecord(final boolean coerceTypes, final boolean dropUnknownFields) throws IOException, MalformedRecordException {
    final RecordSchema schema = getSchema();
    final List<RecordField> recordFields = getRecordFields();
    final int fieldCount = recordFields.size();

    for (final CSVRecord csvRecord : csvParser) {
        final Map<String, Object> rowValues = new LinkedHashMap<>(recordFields.size() * 2);

        for (int index = 0; index < csvRecord.size(); index++) {
            final String rawValue = csvRecord.get(index);

            // Columns beyond the declared schema have no field name; keep them under a
            // synthetic key unless the caller asked for unknown fields to be dropped.
            if (index >= fieldCount) {
                if (!dropUnknownFields) {
                    rowValues.put("unknown_field_index_" + index, rawValue);
                }
                continue;
            }

            final RecordField recordField = recordFields.get(index);
            final String fieldName = recordField.getFieldName();
            final DataType dataType = recordField.getDataType();

            // The CSV Reader is going to return all fields as Strings, because CSV doesn't have any way to
            // dictate a field type. As a result, we will use the schema that we have to attempt to convert
            // the value into the desired type if it's a simple type.
            final Object fieldValue = coerceTypes
                ? convert(rawValue, dataType, fieldName)
                : convertSimpleIfPossible(rawValue, dataType, fieldName);

            rowValues.put(fieldName, fieldValue);
        }

        // One record per call: return as soon as the first CSV row is consumed.
        return new MapRecord(schema, rowValues, coerceTypes, dropUnknownFields);
    }

    // Parser exhausted: no more records.
    return null;
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project: class GrokRecordReader, method createRecord().
/**
 * Builds a Record from the values captured by Grok.
 * <p>
 * Each raw value is normalized to a String (or a String[] for multi-valued captures),
 * then optionally coerced to the type declared in the schema. If configured to append,
 * any trailing non-matching text is attached to the last field that actually received
 * a value. The stack trace and raw message are always stored under their reserved
 * column names.
 *
 * @param valueMap     raw capture-name to value mapping produced by Grok
 * @param trailingText text that followed the matched portion of the line
 * @param stackTrace   stack-trace text associated with this record, or null
 * @param raw          the original raw message text
 * @param coerceTypes  whether to coerce values to the schema's declared types
 * @param dropUnknown  whether unknown fields should be dropped (not consulted here;
 *                     kept for signature parity with the reader's contract)
 * @return the assembled MapRecord
 */
private Record createRecord(final Map<String, Object> valueMap, final StringBuilder trailingText, final String stackTrace, final String raw, final boolean coerceTypes, final boolean dropUnknown) {
    final Map<String, Object> converted = new HashMap<>();
    for (final Map.Entry<String, Object> entry : valueMap.entrySet()) {
        final String fieldName = entry.getKey();
        final Object rawValue = entry.getValue();

        // Normalize: multi-valued captures become String[], everything else a String.
        final Object normalizedValue;
        if (rawValue instanceof List) {
            final List<?> list = (List<?>) rawValue;
            final String[] array = new String[list.size()];
            for (int i = 0; i < list.size(); i++) {
                final Object rawObject = list.get(i);
                array[i] = rawObject == null ? null : rawObject.toString();
            }
            normalizedValue = array;
        } else {
            normalizedValue = rawValue == null ? null : rawValue.toString();
        }

        final Optional<RecordField> optionalRecordField = schema.getField(fieldName);

        final Object coercedValue;
        if (coerceTypes && optionalRecordField.isPresent()) {
            final RecordField field = optionalRecordField.get();
            final DataType fieldType = field.getDataType();
            coercedValue = convert(fieldType, normalizedValue, fieldName);
        } else {
            coercedValue = normalizedValue;
        }

        converted.put(fieldName, coercedValue);
    }

    // Find the last populated field (scanning the Grok schema from the end)
    // and then append the trailing text to it.
    if (append && trailingText.length() > 0) {
        String lastPopulatedFieldName = null;
        final List<RecordField> schemaFields = schemaFromGrok.getFields();
        outer:
        for (int i = schemaFields.size() - 1; i >= 0; i--) {
            final RecordField field = schemaFields.get(i);

            Object value = converted.get(field.getFieldName());
            if (value != null) {
                lastPopulatedFieldName = field.getFieldName();
                break;
            }

            for (final String alias : field.getAliases()) {
                value = converted.get(alias);
                if (value != null) {
                    lastPopulatedFieldName = alias;
                    // Bug fix: stop the entire scan once a populated alias is found.
                    // Previously only the inner loop broke, so an earlier field's alias
                    // could overwrite this match, defeating the "last populated" intent.
                    break outer;
                }
            }
        }

        if (lastPopulatedFieldName != null) {
            final Object value = converted.get(lastPopulatedFieldName);
            if (value == null) {
                converted.put(lastPopulatedFieldName, trailingText.toString());
            } else if (value instanceof String) {
                // if not a String it is a List and we will just drop the trailing text
                converted.put(lastPopulatedFieldName, (String) value + trailingText.toString());
            }
        }
    }

    converted.put(STACK_TRACE_COLUMN_NAME, stackTrace);
    converted.put(RAW_MESSAGE_NAME, raw);
    return new MapRecord(schema, converted);
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project: class AbstractJsonRowRecordReader, method getRawNodeValue().
/**
 * Converts a Jackson JsonNode into a raw Java value, guided (where available) by the
 * expected DataType: scalars map to Number/byte[]/Boolean/String, arrays to Object[],
 * and objects to MapRecord instances built against the record's child schema.
 *
 * @param fieldNode the JSON node to convert; may be null
 * @param dataType  the expected type of this node, or null if unknown
 * @return the converted value, or null if the node is null/missing or of an
 *         unrecognized node type
 * @throws IOException if binary content cannot be read from the node
 */
protected Object getRawNodeValue(final JsonNode fieldNode, final DataType dataType) throws IOException {
    if (fieldNode == null || fieldNode.isNull()) {
        return null;
    }

    if (fieldNode.isNumber()) {
        return fieldNode.getNumberValue();
    }

    if (fieldNode.isBinary()) {
        return fieldNode.getBinaryValue();
    }

    if (fieldNode.isBoolean()) {
        return fieldNode.getBooleanValue();
    }

    if (fieldNode.isTextual()) {
        return fieldNode.getTextValue();
    }

    if (fieldNode.isArray()) {
        final ArrayNode arrayNode = (ArrayNode) fieldNode;
        final int numElements = arrayNode.size();
        final Object[] arrayElements = new Object[numElements];
        int count = 0;

        // If the expected type is an ARRAY, recurse with its element type; otherwise
        // the element types are unknown.
        final DataType elementDataType;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.ARRAY) {
            final ArrayDataType arrayDataType = (ArrayDataType) dataType;
            elementDataType = arrayDataType.getElementType();
        } else {
            elementDataType = null;
        }

        for (final JsonNode node : arrayNode) {
            final Object value = getRawNodeValue(node, elementDataType);
            arrayElements[count++] = value;
        }

        return arrayElements;
    }

    if (fieldNode.isObject()) {
        RecordSchema childSchema;
        if (dataType != null && RecordFieldType.RECORD == dataType.getFieldType()) {
            final RecordDataType recordDataType = (RecordDataType) dataType;
            childSchema = recordDataType.getChildSchema();
        } else {
            childSchema = null;
        }

        if (childSchema == null) {
            childSchema = new SimpleRecordSchema(Collections.emptyList());
        }

        final Iterator<String> fieldNames = fieldNode.getFieldNames();
        final Map<String, Object> childValues = new HashMap<>();
        while (fieldNames.hasNext()) {
            final String childFieldName = fieldNames.next();

            // Bug fix: resolve the child field's declared type from the child schema
            // instead of passing the parent's RECORD DataType down, which caused nested
            // arrays and records to be converted without type information.
            final DataType childDataType = childSchema.getField(childFieldName)
                .map(field -> field.getDataType())
                .orElse(null);

            final Object childValue = getRawNodeValue(fieldNode.get(childFieldName), childDataType);
            childValues.put(childFieldName, childValue);
        }

        final MapRecord record = new MapRecord(childSchema, childValues);
        return record;
    }

    return null;
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project: class JsonPathRowRecordReader, method convert().
/**
 * Recursively converts a JsonPath-produced value into record-API form: Lists become
 * Object[] (elements converted with the array's element type), and Maps whose values
 * contain unparsed JsonNodes become MapRecords built against the child schema. Maps
 * with no JsonNode values, and all other values, are returned unchanged.
 *
 * @param value    the value to convert; may be null
 * @param dataType the expected type of the value, or null if unknown
 * @return the converted value (null in, null out)
 */
@SuppressWarnings("unchecked")
protected Object convert(final Object value, final DataType dataType) {
    if (value == null) {
        return null;
    }

    if (value instanceof List) {
        final List<?> list = (List<?>) value;
        final Object[] array = new Object[list.size()];

        // If the expected type is an ARRAY, recurse with its element type.
        final DataType elementDataType;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.ARRAY) {
            elementDataType = ((ArrayDataType) dataType).getElementType();
        } else {
            elementDataType = null;
        }

        int i = 0;
        for (final Object val : list) {
            array[i++] = convert(val, elementDataType);
        }

        return array;
    }

    if (value instanceof Map) {
        final Map<String, ?> map = (Map<String, ?>) value;

        // Only maps that still contain raw JsonNode values need to be rebuilt as a
        // Record; stop scanning as soon as one is found (previously the loop always
        // examined every value).
        boolean record = false;
        for (final Object obj : map.values()) {
            if (obj instanceof JsonNode) {
                record = true;
                break;
            }
        }

        if (!record) {
            return value;
        }

        RecordSchema childSchema = null;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.RECORD) {
            childSchema = ((RecordDataType) dataType).getChildSchema();
        }
        if (childSchema == null) {
            childSchema = new SimpleRecordSchema(Collections.emptyList());
        }

        final Map<String, Object> values = new HashMap<>();
        for (final Map.Entry<String, ?> entry : map.entrySet()) {
            final String key = entry.getKey();
            final Object childValue = entry.getValue();

            // Recurse with the child field's declared type, if the schema knows it.
            final RecordField recordField = childSchema.getField(key).orElse(null);
            final DataType childDataType = recordField == null ? null : recordField.getDataType();

            values.put(key, convert(childValue, childDataType));
        }

        return new MapRecord(childSchema, values);
    }

    return value;
}
Example usage of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project: class TestWriteCSVResult, method testMissingAndExtraFieldWriteRecord().
@Test
public void testMissingAndExtraFieldWriteRecord() throws IOException {
    final CSVFormat format = CSVFormat.DEFAULT.withEscape('\\').withQuoteMode(QuoteMode.NONE).withRecordSeparator("\n");

    // Schema declares "id" and "name".
    final List<RecordField> schemaFields = new ArrayList<>();
    schemaFields.add(new RecordField("id", RecordFieldType.STRING.getDataType()));
    schemaFields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
    final RecordSchema schema = new SimpleRecordSchema(schemaFields);

    // The record is missing "name" and carries an extra field "dob".
    final Map<String, Object> recordValues = new LinkedHashMap<>();
    recordValues.put("id", "1");
    recordValues.put("dob", "1/1/1970");
    final Record record = new MapRecord(schema, recordValues);

    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final String written;
    try (final WriteCSVResult writer = new WriteCSVResult(format, schema, new SchemaNameAsAttribute(), baos,
            RecordFieldType.DATE.getDefaultFormat(), RecordFieldType.TIME.getDefaultFormat(),
            RecordFieldType.TIMESTAMP.getDefaultFormat(), true, "ASCII")) {
        writer.beginRecordSet();
        writer.writeRecord(record);
        writer.finishRecordSet();
        writer.flush();
        written = baos.toString();
    }

    // Extra field is dropped; the missing schema field is emitted as an empty column.
    assertEquals("id,name\n1,\n", written);
}
Aggregations