Use of org.apache.nifi.serialization.record.type.RecordDataType in project nifi by apache.
Class PutElasticsearchHttpRecord, method writeValue:
@SuppressWarnings("unchecked")
private void writeValue(final JsonGenerator generator, final Object value, final String fieldName, final DataType dataType) throws IOException {
    if (value == null) {
        if (nullSuppression.equals(NEVER_SUPPRESS.getValue()) || ((nullSuppression.equals(SUPPRESS_MISSING.getValue())) && fieldName != null && !fieldName.equals(""))) {
            generator.writeNullField(fieldName);
        }
        return;
    }
    final DataType chosenDataType = dataType.getFieldType() == RecordFieldType.CHOICE ? DataTypeUtils.chooseDataType(value, (ChoiceDataType) dataType) : dataType;
    final Object coercedValue = DataTypeUtils.convertType(value, chosenDataType, fieldName);
    if (coercedValue == null) {
        generator.writeNull();
        return;
    }
    switch (chosenDataType.getFieldType()) {
        case DATE: {
            final String stringValue = DataTypeUtils.toString(coercedValue, () -> DataTypeUtils.getDateFormat(RecordFieldType.DATE.getDefaultFormat()));
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case TIME: {
            final String stringValue = DataTypeUtils.toString(coercedValue, () -> DataTypeUtils.getDateFormat(RecordFieldType.TIME.getDefaultFormat()));
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case TIMESTAMP: {
            final String stringValue = DataTypeUtils.toString(coercedValue, () -> DataTypeUtils.getDateFormat(RecordFieldType.TIMESTAMP.getDefaultFormat()));
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case DOUBLE:
            generator.writeNumber(DataTypeUtils.toDouble(coercedValue, fieldName));
            break;
        case FLOAT:
            generator.writeNumber(DataTypeUtils.toFloat(coercedValue, fieldName));
            break;
        case LONG:
            generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            break;
        case INT:
        case BYTE:
        case SHORT:
            generator.writeNumber(DataTypeUtils.toInteger(coercedValue, fieldName));
            break;
        case CHAR:
        case STRING:
            generator.writeString(coercedValue.toString());
            break;
        case BIGINT:
            if (coercedValue instanceof Long) {
                generator.writeNumber((Long) coercedValue);
            } else {
                generator.writeNumber((BigInteger) coercedValue);
            }
            break;
        case BOOLEAN:
            final String stringValue = coercedValue.toString();
            if ("true".equalsIgnoreCase(stringValue)) {
                generator.writeBoolean(true);
            } else if ("false".equalsIgnoreCase(stringValue)) {
                generator.writeBoolean(false);
            } else {
                generator.writeString(stringValue);
            }
            break;
        case RECORD: {
            final Record record = (Record) coercedValue;
            final RecordDataType recordDataType = (RecordDataType) chosenDataType;
            final RecordSchema childSchema = recordDataType.getChildSchema();
            writeRecord(record, childSchema, generator);
            break;
        }
        case MAP: {
            final MapDataType mapDataType = (MapDataType) chosenDataType;
            final DataType valueDataType = mapDataType.getValueType();
            final Map<String, ?> map = (Map<String, ?>) coercedValue;
            generator.writeStartObject();
            for (final Map.Entry<String, ?> entry : map.entrySet()) {
                final String mapKey = entry.getKey();
                final Object mapValue = entry.getValue();
                generator.writeFieldName(mapKey);
                writeValue(generator, mapValue, fieldName + "." + mapKey, valueDataType);
            }
            generator.writeEndObject();
            break;
        }
        case ARRAY:
        default:
            if (coercedValue instanceof Object[]) {
                final Object[] values = (Object[]) coercedValue;
                final ArrayDataType arrayDataType = (ArrayDataType) dataType;
                final DataType elementType = arrayDataType.getElementType();
                writeArray(values, fieldName, generator, elementType);
            } else {
                generator.writeString(coercedValue.toString());
            }
            break;
    }
}
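For context, here is a minimal, self-contained sketch (not part of the processor) of the kind of nested input that sends writeValue() into the RECORD branch. It assumes only the standard nifi-record API; the class name, field names, and values are made up for illustration.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.serialization.SimpleRecordSchema;
    import org.apache.nifi.serialization.record.DataType;
    import org.apache.nifi.serialization.record.MapRecord;
    import org.apache.nifi.serialization.record.Record;
    import org.apache.nifi.serialization.record.RecordField;
    import org.apache.nifi.serialization.record.RecordFieldType;
    import org.apache.nifi.serialization.record.RecordSchema;
    import org.apache.nifi.serialization.record.type.RecordDataType;

    public class NestedRecordExample {
        public static void main(String[] args) {
            // Child schema for a hypothetical "address" sub-record.
            final RecordSchema addressSchema = new SimpleRecordSchema(Arrays.asList(
                    new RecordField("street", RecordFieldType.STRING.getDataType()),
                    new RecordField("zip", RecordFieldType.INT.getDataType())));

            // The RECORD data type carries the child schema that writeValue() later
            // unwraps via ((RecordDataType) chosenDataType).getChildSchema().
            final DataType addressType = RecordFieldType.RECORD.getRecordDataType(addressSchema);

            final Map<String, Object> addressValues = new HashMap<>();
            addressValues.put("street", "1 Main St");
            addressValues.put("zip", 12345);
            final Record address = new MapRecord(addressSchema, addressValues);

            // A call like writeValue(generator, address, "address", addressType) would
            // take the RECORD branch and serialize the nested record with this schema.
            final RecordSchema childSchema = ((RecordDataType) addressType).getChildSchema();
            System.out.println(childSchema.getFieldNames()); // [street, zip]
        }
    }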
Use of org.apache.nifi.serialization.record.type.RecordDataType in project nifi by apache.
Class AbstractJsonRowRecordReader, method getRawNodeValue:
protected Object getRawNodeValue(final JsonNode fieldNode, final DataType dataType) throws IOException {
    if (fieldNode == null || fieldNode.isNull()) {
        return null;
    }
    if (fieldNode.isNumber()) {
        return fieldNode.getNumberValue();
    }
    if (fieldNode.isBinary()) {
        return fieldNode.getBinaryValue();
    }
    if (fieldNode.isBoolean()) {
        return fieldNode.getBooleanValue();
    }
    if (fieldNode.isTextual()) {
        return fieldNode.getTextValue();
    }
    if (fieldNode.isArray()) {
        final ArrayNode arrayNode = (ArrayNode) fieldNode;
        final int numElements = arrayNode.size();
        final Object[] arrayElements = new Object[numElements];
        int count = 0;
        final DataType elementDataType;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.ARRAY) {
            final ArrayDataType arrayDataType = (ArrayDataType) dataType;
            elementDataType = arrayDataType.getElementType();
        } else {
            elementDataType = null;
        }
        for (final JsonNode node : arrayNode) {
            final Object value = getRawNodeValue(node, elementDataType);
            arrayElements[count++] = value;
        }
        return arrayElements;
    }
    if (fieldNode.isObject()) {
        RecordSchema childSchema;
        if (dataType != null && RecordFieldType.RECORD == dataType.getFieldType()) {
            final RecordDataType recordDataType = (RecordDataType) dataType;
            childSchema = recordDataType.getChildSchema();
        } else {
            childSchema = null;
        }
        if (childSchema == null) {
            childSchema = new SimpleRecordSchema(Collections.emptyList());
        }
        final Iterator<String> fieldNames = fieldNode.getFieldNames();
        final Map<String, Object> childValues = new HashMap<>();
        while (fieldNames.hasNext()) {
            final String childFieldName = fieldNames.next();
            final Object childValue = getRawNodeValue(fieldNode.get(childFieldName), dataType);
            childValues.put(childFieldName, childValue);
        }
        final MapRecord record = new MapRecord(childSchema, childValues);
        return record;
    }
    return null;
}
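The reader only ever pulls two things out of the declared DataType: the element type for ARRAY fields and the child schema for RECORD fields; anything else falls back to null and, for objects, to an empty SimpleRecordSchema. A small sketch of that unwrapping, assuming the standard nifi-record API (the variable names are illustrative):

    import java.util.Collections;

    import org.apache.nifi.serialization.SimpleRecordSchema;
    import org.apache.nifi.serialization.record.DataType;
    import org.apache.nifi.serialization.record.RecordFieldType;
    import org.apache.nifi.serialization.record.RecordSchema;
    import org.apache.nifi.serialization.record.type.ArrayDataType;
    import org.apache.nifi.serialization.record.type.RecordDataType;

    public class DataTypeUnwrapExample {
        public static void main(String[] args) {
            final RecordSchema emptySchema = new SimpleRecordSchema(Collections.emptyList());

            // An ARRAY of RECORD: the reader first unwraps the element type ...
            final DataType recordType = RecordFieldType.RECORD.getRecordDataType(emptySchema);
            final DataType arrayType = RecordFieldType.ARRAY.getArrayDataType(recordType);
            final DataType elementType = ((ArrayDataType) arrayType).getElementType();

            // ... and then, for each JSON object element, unwraps the child schema.
            final RecordSchema childSchema = ((RecordDataType) elementType).getChildSchema();
            System.out.println(childSchema.getFieldNames()); // [] (the empty child schema)
        }
    }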
Use of org.apache.nifi.serialization.record.type.RecordDataType in project nifi by apache.
Class JsonPathRowRecordReader, method convert:
@SuppressWarnings("unchecked")
protected Object convert(final Object value, final DataType dataType) {
    if (value == null) {
        return null;
    }
    if (value instanceof List) {
        final List<?> list = (List<?>) value;
        final Object[] array = new Object[list.size()];
        final DataType elementDataType;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.ARRAY) {
            elementDataType = ((ArrayDataType) dataType).getElementType();
        } else {
            elementDataType = null;
        }
        int i = 0;
        for (final Object val : list) {
            array[i++] = convert(val, elementDataType);
        }
        return array;
    }
    if (value instanceof Map) {
        final Map<String, ?> map = (Map<String, ?>) value;
        boolean record = false;
        for (final Object obj : map.values()) {
            if (obj instanceof JsonNode) {
                record = true;
            }
        }
        if (!record) {
            return value;
        }
        RecordSchema childSchema = null;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.RECORD) {
            childSchema = ((RecordDataType) dataType).getChildSchema();
        }
        if (childSchema == null) {
            childSchema = new SimpleRecordSchema(Collections.emptyList());
        }
        final Map<String, Object> values = new HashMap<>();
        for (final Map.Entry<String, ?> entry : map.entrySet()) {
            final String key = entry.getKey();
            final Object childValue = entry.getValue();
            final RecordField recordField = childSchema.getField(key).orElse(null);
            final DataType childDataType = recordField == null ? null : recordField.getDataType();
            values.put(key, convert(childValue, childDataType));
        }
        return new MapRecord(childSchema, values);
    }
    return value;
}
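When convert() turns a Map into a MapRecord, the per-key DataType comes from looking each key up in the RecordDataType's child schema; keys the schema does not know about simply recurse with a null DataType. A sketch of that lookup, assuming the standard nifi-record API (schema and field names are made up):

    import java.util.Arrays;
    import java.util.Optional;

    import org.apache.nifi.serialization.SimpleRecordSchema;
    import org.apache.nifi.serialization.record.DataType;
    import org.apache.nifi.serialization.record.RecordField;
    import org.apache.nifi.serialization.record.RecordFieldType;
    import org.apache.nifi.serialization.record.RecordSchema;

    public class ChildFieldLookupExample {
        public static void main(String[] args) {
            final RecordSchema childSchema = new SimpleRecordSchema(Arrays.asList(
                    new RecordField("id", RecordFieldType.LONG.getDataType()),
                    new RecordField("name", RecordFieldType.STRING.getDataType())));

            // Known field: the DataType comes straight from the child schema.
            final Optional<RecordField> idField = childSchema.getField("id");
            final DataType idType = idField.map(RecordField::getDataType).orElse(null);
            System.out.println(idType.getFieldType()); // LONG

            // Unknown field: the lookup yields null and convert() recurses anyway.
            final DataType unknownType = childSchema.getField("extra")
                    .map(RecordField::getDataType).orElse(null);
            System.out.println(unknownType); // null
        }
    }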
Use of org.apache.nifi.serialization.record.type.RecordDataType in project nifi by apache.
Class WriteJsonResult, method writeValue:
@SuppressWarnings("unchecked")
private void writeValue(final JsonGenerator generator, final Object value, final String fieldName, final DataType dataType) throws JsonGenerationException, IOException {
    if (value == null) {
        generator.writeNull();
        return;
    }
    final DataType chosenDataType = dataType.getFieldType() == RecordFieldType.CHOICE ? DataTypeUtils.chooseDataType(value, (ChoiceDataType) dataType) : dataType;
    final Object coercedValue = DataTypeUtils.convertType(value, chosenDataType, LAZY_DATE_FORMAT, LAZY_TIME_FORMAT, LAZY_TIMESTAMP_FORMAT, fieldName);
    if (coercedValue == null) {
        generator.writeNull();
        return;
    }
    switch (chosenDataType.getFieldType()) {
        case DATE: {
            final String stringValue = DataTypeUtils.toString(coercedValue, LAZY_DATE_FORMAT);
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case TIME: {
            final String stringValue = DataTypeUtils.toString(coercedValue, LAZY_TIME_FORMAT);
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case TIMESTAMP: {
            final String stringValue = DataTypeUtils.toString(coercedValue, LAZY_TIMESTAMP_FORMAT);
            if (DataTypeUtils.isLongTypeCompatible(stringValue)) {
                generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            } else {
                generator.writeString(stringValue);
            }
            break;
        }
        case DOUBLE:
            generator.writeNumber(DataTypeUtils.toDouble(coercedValue, fieldName));
            break;
        case FLOAT:
            generator.writeNumber(DataTypeUtils.toFloat(coercedValue, fieldName));
            break;
        case LONG:
            generator.writeNumber(DataTypeUtils.toLong(coercedValue, fieldName));
            break;
        case INT:
        case BYTE:
        case SHORT:
            generator.writeNumber(DataTypeUtils.toInteger(coercedValue, fieldName));
            break;
        case CHAR:
        case STRING:
            generator.writeString(coercedValue.toString());
            break;
        case BIGINT:
            if (coercedValue instanceof Long) {
                generator.writeNumber(((Long) coercedValue).longValue());
            } else {
                generator.writeNumber((BigInteger) coercedValue);
            }
            break;
        case BOOLEAN:
            final String stringValue = coercedValue.toString();
            if ("true".equalsIgnoreCase(stringValue)) {
                generator.writeBoolean(true);
            } else if ("false".equalsIgnoreCase(stringValue)) {
                generator.writeBoolean(false);
            } else {
                generator.writeString(stringValue);
            }
            break;
        case RECORD: {
            final Record record = (Record) coercedValue;
            final RecordDataType recordDataType = (RecordDataType) chosenDataType;
            final RecordSchema childSchema = recordDataType.getChildSchema();
            writeRecord(record, childSchema, generator, gen -> gen.writeStartObject(), gen -> gen.writeEndObject(), true);
            break;
        }
        case MAP: {
            final MapDataType mapDataType = (MapDataType) chosenDataType;
            final DataType valueDataType = mapDataType.getValueType();
            final Map<String, ?> map = (Map<String, ?>) coercedValue;
            generator.writeStartObject();
            for (final Map.Entry<String, ?> entry : map.entrySet()) {
                final String mapKey = entry.getKey();
                final Object mapValue = entry.getValue();
                generator.writeFieldName(mapKey);
                writeValue(generator, mapValue, fieldName + "." + mapKey, valueDataType);
            }
            generator.writeEndObject();
            break;
        }
        case ARRAY:
        default:
            if (coercedValue instanceof Object[]) {
                final Object[] values = (Object[]) coercedValue;
                final ArrayDataType arrayDataType = (ArrayDataType) dataType;
                final DataType elementType = arrayDataType.getElementType();
                writeArray(values, fieldName, generator, elementType);
            } else {
                generator.writeString(coercedValue.toString());
            }
            break;
    }
}
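The first line of the switch preamble above resolves CHOICE types: before any coercion, DataTypeUtils.chooseDataType() picks the sub-type that the runtime value is actually compatible with. A small sketch of that resolution, assuming the standard nifi-record API (the values 42L and "hello" are made up):

    import java.util.Arrays;

    import org.apache.nifi.serialization.record.DataType;
    import org.apache.nifi.serialization.record.RecordFieldType;
    import org.apache.nifi.serialization.record.type.ChoiceDataType;
    import org.apache.nifi.serialization.record.util.DataTypeUtils;

    public class ChoiceResolutionExample {
        public static void main(String[] args) {
            // A CHOICE of LONG or STRING, as schema inference might produce.
            final DataType choice = RecordFieldType.CHOICE.getChoiceDataType(Arrays.asList(
                    RecordFieldType.LONG.getDataType(),
                    RecordFieldType.STRING.getDataType()));

            final DataType forNumber = DataTypeUtils.chooseDataType(42L, (ChoiceDataType) choice);
            final DataType forText = DataTypeUtils.chooseDataType("hello", (ChoiceDataType) choice);

            System.out.println(forNumber.getFieldType()); // LONG (first compatible sub-type)
            System.out.println(forText.getFieldType());   // STRING
        }
    }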
Use of org.apache.nifi.serialization.record.type.RecordDataType in project nifi by apache.
Class DataTypeUtils, method convertRecordFieldtoObject:
/**
 * Creates a native Java object from a given object of a specified type. Non-scalar (complex, nested, etc.) data types are processed
 * iteratively/recursively, such that all included objects are native Java objects, rather than Record API objects or implementation-specific objects.
 *
 * @param value The object to be converted
 * @param dataType The type of the provided object
 * @return An object representing a native Java conversion of the given input object
 */
public static Object convertRecordFieldtoObject(final Object value, final DataType dataType) {
    if (value == null) {
        return null;
    }
    if (value instanceof Record) {
        Record record = (Record) value;
        RecordSchema recordSchema = record.getSchema();
        if (recordSchema == null) {
            throw new IllegalTypeConversionException("Cannot convert value of type Record to Map because Record does not have an associated Schema");
        }
        final Map<String, Object> recordMap = new HashMap<>();
        for (RecordField field : recordSchema.getFields()) {
            final DataType fieldDataType = field.getDataType();
            final String fieldName = field.getFieldName();
            Object fieldValue = record.getValue(fieldName);
            if (fieldValue == null) {
                recordMap.put(fieldName, null);
            } else if (isScalarValue(fieldDataType, fieldValue)) {
                recordMap.put(fieldName, fieldValue);
            } else if (fieldDataType instanceof RecordDataType) {
                Record nestedRecord = (Record) fieldValue;
                recordMap.put(fieldName, convertRecordFieldtoObject(nestedRecord, fieldDataType));
            } else if (fieldDataType instanceof MapDataType) {
                recordMap.put(fieldName, convertRecordMapToJavaMap((Map) fieldValue, ((MapDataType) fieldDataType).getValueType()));
            } else if (fieldDataType instanceof ArrayDataType) {
                recordMap.put(fieldName, convertRecordArrayToJavaArray((Object[]) fieldValue, ((ArrayDataType) fieldDataType).getElementType()));
            } else {
                throw new IllegalTypeConversionException("Cannot convert value [" + fieldValue + "] of type " + fieldDataType.toString()
                        + " to Map for field " + fieldName + " because the type is not supported");
            }
        }
        return recordMap;
    } else if (value instanceof Map) {
        return convertRecordMapToJavaMap((Map) value, ((MapDataType) dataType).getValueType());
    } else if (dataType != null && isScalarValue(dataType, value)) {
        return value;
    }
    throw new IllegalTypeConversionException("Cannot convert value of class " + value.getClass().getName() + " because the type is not supported");
}
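A minimal usage sketch for convertRecordFieldtoObject(), assuming only the standard nifi-record API; the schema, field names, and values below are made up for illustration. A nested Record comes back as plain java.util.Map values, which is convenient right before handing data to a JSON library or a scripting engine.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.nifi.serialization.SimpleRecordSchema;
    import org.apache.nifi.serialization.record.DataType;
    import org.apache.nifi.serialization.record.MapRecord;
    import org.apache.nifi.serialization.record.Record;
    import org.apache.nifi.serialization.record.RecordField;
    import org.apache.nifi.serialization.record.RecordFieldType;
    import org.apache.nifi.serialization.record.RecordSchema;
    import org.apache.nifi.serialization.record.util.DataTypeUtils;

    public class ConvertRecordExample {
        public static void main(String[] args) {
            final RecordSchema innerSchema = new SimpleRecordSchema(Arrays.asList(
                    new RecordField("city", RecordFieldType.STRING.getDataType())));
            final RecordSchema outerSchema = new SimpleRecordSchema(Arrays.asList(
                    new RecordField("name", RecordFieldType.STRING.getDataType()),
                    new RecordField("address", RecordFieldType.RECORD.getRecordDataType(innerSchema))));

            final Map<String, Object> inner = new HashMap<>();
            inner.put("city", "Paris");
            final Map<String, Object> outer = new HashMap<>();
            outer.put("name", "Alice");
            outer.put("address", new MapRecord(innerSchema, inner));
            final Record record = new MapRecord(outerSchema, outer);

            final DataType recordType = RecordFieldType.RECORD.getRecordDataType(outerSchema);
            final Object plain = DataTypeUtils.convertRecordFieldtoObject(record, recordType);

            // The Record (and its nested Record) come back as ordinary Maps,
            // e.g. {name=Alice, address={city=Paris}} (map ordering not guaranteed).
            System.out.println(plain);
        }
    }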