Use of org.apache.nifi.serialization.record.type.MapDataType in the Apache NiFi project: class AvroTypeUtil, method buildAvroSchema.
/**
 * Translates a NiFi record {@link DataType} into the corresponding Avro {@link Schema}.
 *
 * @param dataType  the NiFi data type to translate
 * @param fieldName the name of the field being translated; used to derive the generated
 *                  Record type name and propagated into nested conversions
 * @param nullable  if {@code true}, the resulting schema is wrapped so that null is permitted
 * @return the equivalent Avro schema, or {@code null} if the field type is not recognized
 */
private static Schema buildAvroSchema(final DataType dataType, final String fieldName, final boolean nullable) {
    final Schema avroSchema;

    switch (dataType.getFieldType()) {
        case BOOLEAN:
            avroSchema = Schema.create(Type.BOOLEAN);
            break;
        case BYTE:
        case SHORT:
        case INT:
            // Avro has no byte/short primitives; both are widened to INT.
            avroSchema = Schema.create(Type.INT);
            break;
        case LONG:
            avroSchema = Schema.create(Type.LONG);
            break;
        case FLOAT:
            avroSchema = Schema.create(Type.FLOAT);
            break;
        case DOUBLE:
            avroSchema = Schema.create(Type.DOUBLE);
            break;
        case BIGINT:
        case CHAR:
        case STRING:
            // BIGINT and CHAR have no direct Avro counterpart and are written as strings.
            avroSchema = Schema.create(Type.STRING);
            break;
        case DATE:
            // Logical type 'date' on an INT carrier, per the Avro specification.
            avroSchema = Schema.create(Type.INT);
            LogicalTypes.date().addToSchema(avroSchema);
            break;
        case TIME:
            // Logical type 'time-millis' on an INT carrier.
            avroSchema = Schema.create(Type.INT);
            LogicalTypes.timeMillis().addToSchema(avroSchema);
            break;
        case TIMESTAMP:
            // Logical type 'timestamp-millis' on a LONG carrier.
            avroSchema = Schema.create(Type.LONG);
            LogicalTypes.timestampMillis().addToSchema(avroSchema);
            break;
        case ARRAY: {
            final ArrayDataType arrayType = (ArrayDataType) dataType;
            final DataType elementDataType = arrayType.getElementType();
            if (RecordFieldType.BYTE.equals(elementDataType.getFieldType())) {
                // An array of bytes maps naturally onto the Avro BYTES primitive.
                avroSchema = Schema.create(Type.BYTES);
            } else {
                avroSchema = Schema.createArray(buildAvroSchema(elementDataType, fieldName, false));
            }
            break;
        }
        case MAP:
            avroSchema = Schema.createMap(buildAvroSchema(((MapDataType) dataType).getValueType(), fieldName, false));
            break;
        case RECORD: {
            final RecordDataType recordType = (RecordDataType) dataType;
            final RecordSchema childSchema = recordType.getChildSchema();
            final List<Field> avroFields = new ArrayList<>(childSchema.getFieldCount());
            for (final RecordField child : childSchema.getFields()) {
                avroFields.add(buildAvroField(child));
            }
            avroSchema = Schema.createRecord(fieldName + "Type", null, "org.apache.nifi", false, avroFields);
            break;
        }
        case CHOICE: {
            final ChoiceDataType choiceType = (ChoiceDataType) dataType;
            final List<DataType> subTypes = choiceType.getPossibleSubTypes();

            // Avro rejects a UNION containing two members of the same concrete Type - even when
            // their logical types differ (e.g. a plain INT alongside a 'date', whose carrier is
            // also INT). Track which concrete Types are already present and keep only the first
            // occurrence of each so that creating the union cannot throw.
            final List<Schema> unionMembers = new ArrayList<>(subTypes.size());
            final Set<Type> seenTypes = new HashSet<>();
            for (final DataType subType : subTypes) {
                final Schema memberSchema = buildAvroSchema(subType, fieldName, false);
                if (seenTypes.add(memberSchema.getType())) {
                    unionMembers.add(memberSchema);
                }
            }

            avroSchema = Schema.createUnion(unionMembers);
            break;
        }
        default:
            // Unknown field type: return null immediately (intentionally NOT wrapped as nullable).
            return null;
    }

    return nullable ? nullable(avroSchema) : avroSchema;
}
Use of org.apache.nifi.serialization.record.type.MapDataType in the Apache NiFi project: class StandardSchemaValidator, method isTypeCorrect.
/**
 * Determines whether the given value's Java runtime type matches the expected record
 * {@link DataType}. Container types (array, map, choice) are validated recursively;
 * a field type not handled by the switch is reported as a mismatch.
 *
 * @param value    the value to check
 * @param dataType the expected data type
 * @return {@code true} if the value conforms to the data type, {@code false} otherwise
 */
private boolean isTypeCorrect(final Object value, final DataType dataType) {
    switch (dataType.getFieldType()) {
        case STRING:
            return value instanceof String;
        case BOOLEAN:
            return value instanceof Boolean;
        case BYTE:
            return value instanceof Byte;
        case SHORT:
            return value instanceof Short;
        case INT:
            return value instanceof Integer;
        case LONG:
            return value instanceof Long;
        case BIGINT:
            return value instanceof BigInteger;
        case FLOAT:
            return value instanceof Float;
        case DOUBLE:
            return value instanceof Double;
        case CHAR:
            return value instanceof Character;
        case DATE:
            return value instanceof java.sql.Date;
        case TIME:
            return value instanceof java.sql.Time;
        case TIMESTAMP:
            return value instanceof java.sql.Timestamp;
        case RECORD:
            return value instanceof Record;
        case ARRAY: {
            if (!(value instanceof Object[])) {
                return false;
            }
            // Every element must itself conform to the declared element type.
            final DataType elementType = ((ArrayDataType) dataType).getElementType();
            for (final Object element : (Object[]) value) {
                if (!isTypeCorrect(element, elementType)) {
                    return false;
                }
            }
            return true;
        }
        case MAP: {
            if (!(value instanceof Map)) {
                return false;
            }
            // Only the values are validated here; keys are not checked against a type.
            final DataType valueType = ((MapDataType) dataType).getValueType();
            for (final Object entryValue : ((Map<?, ?>) value).values()) {
                if (!isTypeCorrect(entryValue, valueType)) {
                    return false;
                }
            }
            return true;
        }
        case CHOICE: {
            // A choice matches if the value conforms to at least one of its sub-types.
            for (final DataType option : ((ChoiceDataType) dataType).getPossibleSubTypes()) {
                if (isTypeCorrect(value, option)) {
                    return true;
                }
            }
            return false;
        }
    }

    // Unhandled field type: treat as a mismatch rather than throwing.
    return false;
}
Use of org.apache.nifi.serialization.record.type.MapDataType in the Apache NiFi project: class MultiMapKeyPath, method evaluate.
/**
 * Evaluates this path segment: for each parent field that is a MAP, produces one
 * {@link MapEntryFieldValue} per requested key in {@code mapKeys}. Keys absent from
 * the map still yield an entry whose value is {@code null} ({@code map.get} semantics).
 *
 * @param context the evaluation context passed down from the parent path
 * @return a stream of map-entry field values, one per requested key per matching parent
 */
@Override
@SuppressWarnings("unchecked")
public Stream<FieldValue> evaluate(final RecordPathEvaluationContext context) {
    final Stream<FieldValue> parentResult = getParentPath().evaluate(context);

    return parentResult.filter(Filters.fieldTypeFilter(RecordFieldType.MAP)).flatMap(fieldValue -> {
        final Map<String, ?> map = (Map<String, ?>) fieldValue.getValue();

        // The map's value type and the element RecordField depend only on the parent field,
        // not on the individual key, so compute them once per field value instead of
        // re-deriving and re-allocating them inside the per-key lambda.
        final DataType valueType = ((MapDataType) fieldValue.getField().getDataType()).getValueType();
        final RecordField elementField = new RecordField(fieldValue.getField().getFieldName(), valueType);

        return mapKeys.stream().map(key -> new MapEntryFieldValue(map.get(key), elementField, fieldValue, key));
    });
}
Use of org.apache.nifi.serialization.record.type.MapDataType in the Apache NiFi project: class WildcardIndexPath, method evaluate.
/**
 * Evaluates this wildcard-index segment: for each parent field that is a MAP, yields one
 * {@link MapEntryFieldValue} per map entry; for each parent field that is an ARRAY, yields
 * one {@link ArrayIndexFieldValue} per element. Parents with a {@code null} value produce
 * no results.
 *
 * @param context the evaluation context passed down from the parent path
 * @return a stream of per-entry / per-element field values for every matching parent
 */
@Override
@SuppressWarnings("unchecked")
public Stream<FieldValue> evaluate(final RecordPathEvaluationContext context) {
    final Stream<FieldValue> parentResult = getParentPath().evaluate(context);

    return parentResult.filter(Filters.fieldTypeFilter(RecordFieldType.MAP, RecordFieldType.ARRAY)).flatMap(fieldValue -> {
        final RecordFieldType fieldType = fieldValue.getField().getDataType().getFieldType();
        final Object value = fieldValue.getValue();
        if (value == null) {
            return Stream.empty();
        }

        if (fieldType == RecordFieldType.MAP) {
            final Map<String, ?> map = (Map<String, ?>) value;

            // The value type and element RecordField are the same for every entry of this map,
            // so derive them once here rather than re-allocating them inside the per-entry lambda.
            final DataType valueType = ((MapDataType) fieldValue.getField().getDataType()).getValueType();
            final RecordField elementField = new RecordField(fieldValue.getField().getFieldName(), valueType);

            return map.entrySet().stream()
                .map(entry -> new MapEntryFieldValue(entry.getValue(), elementField, fieldValue, entry.getKey()));
        } else {
            final Object[] array = (Object[]) value;

            // Likewise, the element type and RecordField are invariant across array indices.
            final DataType elementDataType = ((ArrayDataType) fieldValue.getField().getDataType()).getElementType();
            final RecordField elementField = new RecordField(fieldValue.getField().getFieldName(), elementDataType);

            return IntStream.range(0, array.length)
                .mapToObj(index -> new ArrayIndexFieldValue(array[index], elementField, fieldValue, index));
        }
    });
}
Aggregations