Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.
The class GrokReader, method createRecordSchema:
static RecordSchema createRecordSchema(final Grok grok) {
    final List<RecordField> fields = new ArrayList<>();
    String grokExpression = grok.getOriginalGrokPattern();
    populateSchemaFieldNames(grok, grokExpression, fields);

    fields.add(new RecordField(GrokRecordReader.STACK_TRACE_COLUMN_NAME, RecordFieldType.STRING.getDataType(), true));
    fields.add(new RecordField(GrokRecordReader.RAW_MESSAGE_NAME, RecordFieldType.STRING.getDataType(), true));

    final RecordSchema schema = new SimpleRecordSchema(fields);
    return schema;
}
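For reference, here is a minimal, self-contained sketch of the schema shape this method produces, assuming a Grok expression with two named captures; the class name and the literal field names standing in for the GrokRecordReader constants are illustrative only.

import java.util.ArrayList;
import java.util.List;

import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;

public class GrokSchemaSketch {
    public static void main(final String[] args) {
        // Mirrors what createRecordSchema builds for a pattern such as
        // "%{IP:client} %{WORD:method}": one nullable STRING field per named
        // capture, plus two trailing STRING fields ("stackTrace" and "_raw"
        // are stand-ins for the GrokRecordReader constants used above).
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("client", RecordFieldType.STRING.getDataType(), true));
        fields.add(new RecordField("method", RecordFieldType.STRING.getDataType(), true));
        fields.add(new RecordField("stackTrace", RecordFieldType.STRING.getDataType(), true));
        fields.add(new RecordField("_raw", RecordFieldType.STRING.getDataType(), true));

        final RecordSchema schema = new SimpleRecordSchema(fields);
        for (final String name : schema.getFieldNames()) {
            System.out.println(name + " -> " + schema.getDataType(name).orElse(null));
        }
    }
}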
Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.
The class AbstractJsonRowRecordReader, method getRawNodeValue:
protected Object getRawNodeValue(final JsonNode fieldNode, final DataType dataType) throws IOException {
    if (fieldNode == null || fieldNode.isNull()) {
        return null;
    }

    if (fieldNode.isNumber()) {
        return fieldNode.getNumberValue();
    }

    if (fieldNode.isBinary()) {
        return fieldNode.getBinaryValue();
    }

    if (fieldNode.isBoolean()) {
        return fieldNode.getBooleanValue();
    }

    if (fieldNode.isTextual()) {
        return fieldNode.getTextValue();
    }

    if (fieldNode.isArray()) {
        final ArrayNode arrayNode = (ArrayNode) fieldNode;
        final int numElements = arrayNode.size();
        final Object[] arrayElements = new Object[numElements];
        int count = 0;

        final DataType elementDataType;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.ARRAY) {
            final ArrayDataType arrayDataType = (ArrayDataType) dataType;
            elementDataType = arrayDataType.getElementType();
        } else {
            elementDataType = null;
        }

        for (final JsonNode node : arrayNode) {
            final Object value = getRawNodeValue(node, elementDataType);
            arrayElements[count++] = value;
        }

        return arrayElements;
    }

    if (fieldNode.isObject()) {
        RecordSchema childSchema;
        if (dataType != null && RecordFieldType.RECORD == dataType.getFieldType()) {
            final RecordDataType recordDataType = (RecordDataType) dataType;
            childSchema = recordDataType.getChildSchema();
        } else {
            childSchema = null;
        }

        if (childSchema == null) {
            childSchema = new SimpleRecordSchema(Collections.emptyList());
        }

        final Iterator<String> fieldNames = fieldNode.getFieldNames();
        final Map<String, Object> childValues = new HashMap<>();
        while (fieldNames.hasNext()) {
            final String childFieldName = fieldNames.next();
            final Object childValue = getRawNodeValue(fieldNode.get(childFieldName), dataType);
            childValues.put(childFieldName, childValue);
        }

        final MapRecord record = new MapRecord(childSchema, childValues);
        return record;
    }

    return null;
}
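In short: scalar nodes come back as their raw Java values, arrays as Object[], and objects as a MapRecord over a (possibly empty) child schema. A minimal sketch of the object case, using only the NiFi record API with illustrative data and assuming no RECORD data type is available:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordSchema;

public class RawNodeValueSketch {
    public static void main(final String[] args) {
        // For a JSON object such as {"name":"nifi","port":8080}, the method
        // collects the raw child values and wraps them in a MapRecord. With no
        // RECORD data type supplied, the child schema is simply empty.
        final Map<String, Object> childValues = new HashMap<>();
        childValues.put("name", "nifi");
        childValues.put("port", 8080);

        final RecordSchema childSchema = new SimpleRecordSchema(Collections.emptyList());
        final Record record = new MapRecord(childSchema, childValues);

        System.out.println(record.getValue("name")); // nifi
        System.out.println(record.getValue("port")); // 8080
    }
}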
Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.
The class AbstractJsonRowRecordReader, method nextRecord:
@Override
public Record nextRecord(final boolean coerceTypes, final boolean dropUnknownFields) throws IOException, MalformedRecordException {
    if (firstObjectConsumed && !array) {
        return null;
    }

    final JsonNode nextNode = getNextJsonNode();
    final RecordSchema schema = getSchema();
    try {
        return convertJsonNodeToRecord(nextNode, schema, coerceTypes, dropUnknownFields);
    } catch (final MalformedRecordException mre) {
        throw mre;
    } catch (final IOException ioe) {
        throw ioe;
    } catch (final Exception e) {
        logger.debug("Failed to convert JSON Element {} into a Record object using schema {} due to {}", new Object[] { nextNode, schema, e.toString(), e });
        throw new MalformedRecordException("Successfully parsed a JSON object from input but failed to convert into a Record object with the given schema", e);
    }
}
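From the caller's side, the null return (after the single root object of a non-array input has been consumed, or at the end of a JSON array) terminates the usual read loop. A minimal sketch, assuming reader is any already-configured JSON RecordReader; the helper name is hypothetical:

import org.apache.nifi.serialization.RecordReader;
import org.apache.nifi.serialization.record.Record;

public class ReadLoopSketch {
    // Hypothetical helper: drains a reader, requesting raw values
    // (coerceTypes = false) and keeping unknown fields (dropUnknownFields = false).
    static void readAll(final RecordReader reader) throws Exception {
        Record record;
        while ((record = reader.nextRecord(false, false)) != null) {
            System.out.println(record.toMap());
        }
    }
}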
Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.
The class JsonPathRowRecordReader, method convert:
@SuppressWarnings("unchecked")
protected Object convert(final Object value, final DataType dataType) {
    if (value == null) {
        return null;
    }

    if (value instanceof List) {
        final List<?> list = (List<?>) value;
        final Object[] array = new Object[list.size()];

        final DataType elementDataType;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.ARRAY) {
            elementDataType = ((ArrayDataType) dataType).getElementType();
        } else {
            elementDataType = null;
        }

        int i = 0;
        for (final Object val : list) {
            array[i++] = convert(val, elementDataType);
        }

        return array;
    }

    if (value instanceof Map) {
        final Map<String, ?> map = (Map<String, ?>) value;

        boolean record = false;
        for (final Object obj : map.values()) {
            if (obj instanceof JsonNode) {
                record = true;
            }
        }

        if (!record) {
            return value;
        }

        RecordSchema childSchema = null;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.RECORD) {
            childSchema = ((RecordDataType) dataType).getChildSchema();
        }
        if (childSchema == null) {
            childSchema = new SimpleRecordSchema(Collections.emptyList());
        }

        final Map<String, Object> values = new HashMap<>();
        for (final Map.Entry<String, ?> entry : map.entrySet()) {
            final String key = entry.getKey();
            final Object childValue = entry.getValue();

            final RecordField recordField = childSchema.getField(key).orElse(null);
            final DataType childDataType = recordField == null ? null : recordField.getDataType();

            values.put(key, convert(childValue, childDataType));
        }

        return new MapRecord(childSchema, values);
    }

    return value;
}
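A minimal sketch of the data types this method inspects, with illustrative values standing in for a real JsonPath result: an ARRAY data type supplies the element type that is recursed into, and a List is flattened into an Object[] of the same length, exactly as above.

import java.util.Arrays;
import java.util.List;

import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.type.ArrayDataType;

public class ConvertSketch {
    public static void main(final String[] args) {
        // An ARRAY data type wraps the element type that convert() passes down
        // for each list element.
        final DataType arrayOfInts = RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.INT.getDataType());
        final DataType elementType = ((ArrayDataType) arrayOfInts).getElementType();

        // A JsonPath List result such as [1, 2, 3] becomes an Object[] of the
        // same length, with each element converted against elementType.
        final List<Integer> jsonPathResult = Arrays.asList(1, 2, 3);
        final Object[] converted = jsonPathResult.toArray(new Object[0]);

        System.out.println(elementType.getFieldType());  // INT
        System.out.println(Arrays.toString(converted));  // [1, 2, 3]
    }
}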
Use of org.apache.nifi.serialization.record.RecordSchema in project nifi by apache.
The class WriteJsonResult, method writeValue:
@SuppressWarnings("unchecked")
private void writeValue(final JsonGenerator generator, final Object value, final String fieldName, final DataType dataType) throws JsonGenerationException, IOException {
    if (value == null) {
        generator.writeNull();
        return;
    }

    final DataType chosenDataType = dataType.getFieldType() == RecordFieldType.CHOICE ? DataTypeUtils.chooseDataType(value, (ChoiceDataType) dataType) : dataType;
    final Object coercedValue = DataTypeUtils.convertType(value, chosenDataType, LAZY_DATE_FORMAT, LAZY_TIME_FORMAT, LAZY_TIMESTAMP_FORMAT, fieldName);
    if (coercedValue == null) {
        generator.writeNull();
        return;
    }

    switch (chosenDataType.getFieldType()) {
        case DATE: {
            final String stringValue = DataTypeUtils.toString(coercedValue, LAZY_DATE_FORMAT);
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case TIME: {
            final String stringValue = DataTypeUtils.toString(coercedValue, LAZY_TIME_FORMAT);
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case TIMESTAMP: {
            final String stringValue = DataTypeUtils.toString(coercedValue, LAZY_TIMESTAMP_FORMAT);
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case DOUBLE:
            generator.writeNumber(DataTypeUtils.toDouble(coercedValue, fieldName));
            break;
        case FLOAT:
            generator.writeNumber(DataTypeUtils.toFloat(coercedValue, fieldName));
            break;
        case LONG:
            generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            break;
        case INT:
        case BYTE:
        case SHORT:
            generator.writeNumber(DataTypeUtils.toInteger(coercedValue, fieldName));
            break;
        case CHAR:
        case STRING:
            generator.writeString(coercedValue.toString());
            break;
        case BIGINT:
            if (coercedValue instanceof Long) {
                generator.writeNumber(((Long) coercedValue).longValue());
            } else {
                generator.writeNumber((BigInteger) coercedValue);
            }
            break;
        case BOOLEAN:
            final String stringValue = coercedValue.toString();
            if ("true".equalsIgnoreCase(stringValue)) {
                generator.writeBoolean(true);
            } else if ("false".equalsIgnoreCase(stringValue)) {
                generator.writeBoolean(false);
            } else {
                generator.writeString(stringValue);
            }
            break;
        case RECORD: {
            final Record record = (Record) coercedValue;
            final RecordDataType recordDataType = (RecordDataType) chosenDataType;
            final RecordSchema childSchema = recordDataType.getChildSchema();
            writeRecord(record, childSchema, generator, gen -> gen.writeStartObject(), gen -> gen.writeEndObject(), true);
            break;
        }
        case MAP: {
            final MapDataType mapDataType = (MapDataType) chosenDataType;
            final DataType valueDataType = mapDataType.getValueType();
            final Map<String, ?> map = (Map<String, ?>) coercedValue;

            generator.writeStartObject();
            for (final Map.Entry<String, ?> entry : map.entrySet()) {
                final String mapKey = entry.getKey();
                final Object mapValue = entry.getValue();
                generator.writeFieldName(mapKey);
                writeValue(generator, mapValue, fieldName + "." + mapKey, valueDataType);
            }
            generator.writeEndObject();
            break;
        }
        case ARRAY:
        default:
            if (coercedValue instanceof Object[]) {
                final Object[] values = (Object[]) coercedValue;
                final ArrayDataType arrayDataType = (ArrayDataType) dataType;
                final DataType elementType = arrayDataType.getElementType();
                writeArray(values, fieldName, generator, elementType);
            } else {
                generator.writeString(coercedValue.toString());
            }
            break;
    }
}
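The generator used here appears to be the legacy org.codehaus.jackson JsonGenerator, consistent with the reader snippets above. A minimal, self-contained sketch of the raw generator calls that writeValue ultimately makes for a simple record; the field names and values are illustrative only.

import java.io.StringWriter;

import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class GeneratorSketch {
    public static void main(final String[] args) throws Exception {
        final StringWriter out = new StringWriter();
        final JsonGenerator generator = new JsonFactory().createJsonGenerator(out);

        generator.writeStartObject();
        generator.writeFieldName("name");
        generator.writeString("nifi");    // CHAR / STRING branch
        generator.writeFieldName("port");
        generator.writeNumber(8080);      // INT / BYTE / SHORT branch
        generator.writeFieldName("secure");
        generator.writeBoolean(true);     // BOOLEAN branch
        generator.writeEndObject();
        generator.close();

        System.out.println(out); // {"name":"nifi","port":8080,"secure":true}
    }
}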