Use of org.ehrbase.serialisation.util.SnakeCase in the openEHR_SDK project by ehrbase.
Class ClassGenerator, method addSimpleField:
/**
 * Adds a generated field for a terminal (leaf) web-template node.
 *
 * <p>Either a single field of the node's RM type is emitted, or — when an
 * {@link RmClassGeneratorConfig} marks the type as expandable — one field per
 * configured sub-field (e.g. {@code path|code}, {@code path|value}).
 *
 * @param context      generation context (carries the node stack, among other state)
 * @param classBuilder builder of the class currently being generated
 * @param path         flat path of the node within the template
 * @param endNode      the leaf web-template node to map to a field
 */
private void addSimpleField(ClassGeneratorContext context, TypeSpec.Builder classBuilder, String path, WebTemplateNode endNode) {
    Class<?> clazz = extractClass(endNode);
    if (clazz == null) {
        logger.warn("No class for path {} ", path);
        return;
    }
    ValueSet valueSet = buildValueSet(endNode);
    RmClassGeneratorConfig classGeneratorConfig = configMap.get(clazz);
    if (classGeneratorConfig == null && !clazz.getName().contains("java.lang")) {
        logger.debug("No ClassGenerator for {}", clazz);
    }
    boolean expand = classGeneratorConfig != null && classGeneratorConfig.isExpandField();
    // DV_CODED_TEXT is only expanded when at least one CODED_TEXT input carries a
    // non-empty value list, and never for this fixed set of infrastructure ids.
    if (endNode.getRmType().equals("DV_CODED_TEXT")
            && !List.of("transition", "language", "setting", "category", "territory", "math_function", "null_flavour")
                    .contains(endNode.getId(false))) {
        expand = expand
                && endNode.getInputs().stream()
                        .filter(i -> i.getType().equals("CODED_TEXT"))
                        .map(WebTemplateInput::getList)
                        // was: flatMap(List::stream).findAny().isPresent() — anyMatch is the idiomatic form
                        .anyMatch(l -> !l.isEmpty());
    }
    if (expand) {
        // One field per configured sub-field; fieldMap resolves each sub-field's Java type.
        Map<String, Field> fieldMap = Arrays.stream(FieldUtils.getAllFields(clazz))
                .filter(f -> !f.isSynthetic())
                .collect(Collectors.toMap(Field::getName, f -> f));
        for (String fieldName : classGeneratorConfig.getExpandFields()) {
            addField(context, classBuilder, path + "|" + new SnakeCase(fieldName).camelToSnake(), endNode,
                    ClassName.get(fieldMap.get(fieldName).getType()), valueSet, false);
        }
    } else {
        // clazz is guaranteed non-null here (early return above), so the former
        // Optional.ofNullable(...).orElse(Object.class) wrapper was dead code.
        TypeName className = ClassName.get(clazz);
        // Multi-valued nodes become List<T>, except directly under an ELEMENT.
        if (endNode.isMulti() && !context.nodeDeque.peek().getRmType().equals("ELEMENT")) {
            className = ParameterizedTypeName.get(ClassName.get(List.class), className);
        }
        addField(context, classBuilder, path, endNode, className, valueSet, false);
    }
}
Use of org.ehrbase.serialisation.util.SnakeCase in the openEHR_SDK project by ehrbase.
Class LinkedTreeMapAdapter, method writeNode:
// Serialises one node of the internal LinkedTreeMap composition representation
// into canonical openEHR JSON via the given writer. Dispatches on the runtime
// type of each entry value (ArrayList, LinkedTreeMap, String, numbers, Boolean,
// Double[]) and mutates the input map/value maps in place while doing so.
// NOTE(review): the exact statement order matters here (maps are rewritten
// before recursion) — do not reorder.
@SuppressWarnings("unchecked")
private void writeNode(LinkedTreeMap<String, Object> map, JsonWriter writer) throws IOException {
// some hacking for some specific entries...
reformatMapForCanonical(map);
for (Map.Entry<String, Object> entry : map.entrySet()) {
Object value = entry.getValue();
// null values are simply omitted from the output
if (value == null)
continue;
String key = entry.getKey();
// keys flagged by OptOut are excluded from serialisation
if (new OptOut(key).skipIt())
continue;
// the raw-JSON form of the key, and the archetype node id predicate (if any)
String jsonKey = new RawJsonKey(key).toRawJson();
final String archetypeNodeId = new NodeId(key).predicate();
// required to deal with DV_MULTIMEDIA embedded document in data
if (value instanceof ArrayList && key.equals("data") && map.get("_type").equals(ArchieRMInfoLookup.getInstance().getTypeInfo(DvMultimedia.class).getRmName())) {
// prepare a store for the value
Double[] dataStore = new Double[((ArrayList) value).size()];
// convert the list to a Double[] so the byte-array branch below handles it
value = ((ArrayList<Double>) value).toArray(dataStore);
}
if (value instanceof ArrayList) {
if (key.equals(CompositionSerializer.TAG_NAME)) {
// name values are wrapped in an ArrayList by the serializer; unwrap and write
new ValueArrayList(writer, value, key).write();
} else if (key.equals(TAG_CLASS)) {
// class tag: emit the _type attribute in UPPER_SNAKE form (first list element)
writer.name(AT_TYPE).value(new SnakeCase((String) ((ArrayList) value).get(0)).camelToUpperSnake());
} else if (key.equals(CompositionSerializer.TAG_ARCHETYPE_NODE_ID)) {
// same as name above, this is due to usage of MultiValueMap which is backed by ArrayList
new ValueArrayList(writer, value, key).write();
} else {
// make sure we service a non empty array list value
if (!new ArrayChildren((ArrayList) value).isNull()) {
writer.name(jsonKey);
writer.beginArray();
if (isNodePredicate(key)) {
// propagate the archetype node id into each map element of the array
((ArrayList<Object>) value).stream().filter(o -> Map.class.isAssignableFrom(o.getClass())).forEach(m -> ((Map<String, Object>) m).put(I_DvTypeAdapter.ARCHETYPE_NODE_ID, archetypeNodeId));
}
new ArrayListAdapter().write(writer, (ArrayList) value);
writer.endArray();
}
}
} else if (value instanceof LinkedTreeMap) {
// nested structure: rewrite the value map as needed, then recurse
LinkedTreeMap<String, Object> valueMap = (LinkedTreeMap<String, Object>) value;
String elementType = new ElementType(valueMap).type();
if (elementType.equals("History")) {
// promote events[...]
LinkedTreeMap<String, Object> eventMap = (LinkedTreeMap<String, Object>) valueMap.get(CompositionSerializer.TAG_EVENTS);
valueMap.remove(CompositionSerializer.TAG_EVENTS);
valueMap.putAll(eventMap);
valueMap.put(AT_TYPE, new SnakeCase(elementType).camelToUpperSnake());
} else if (archetypeNodeId.equals(CompositionSerializer.TAG_TIMING) && elementType.equals("DvParsable")) {
// promote value and formalism
LinkedTreeMap<String, Object> timingValueMap = (LinkedTreeMap<String, Object>) valueMap.get(CompositionSerializer.TAG_VALUE);
if (timingValueMap != null) {
valueMap.put(CompositionSerializer.TAG_VALUE, timingValueMap.get("value"));
valueMap.put("/formalism", timingValueMap.get("formalism"));
}
}
if (key.equals(CompositionSerializer.TAG_VALUE)) {
// get the class and add it to the value map
String type = (String) map.get(TAG_CLASS);
if (type != null && !type.isEmpty()) {
// pushed into the value map for the next recursion
valueMap.put(AT_TYPE, new SnakeCase(type).camelToUpperSnake());
// the value structure
if (new GenericRmType(type).isSpecialized()) {
// composite
valueMap = new GenericRmType(new SnakeCase(type).camelToUpperSnake()).inferSpecialization(valueMap);
}
}
}
// get the value point type and add it to the value map
if (valueMap.containsKey(TAG_CLASS)) {
valueMap.put(AT_TYPE, new SnakeCase(elementType).camelToUpperSnake());
valueMap.remove(TAG_CLASS);
// NOTE(review): name is dropped for time attributes — presumably it is
// redundant in the canonical form; confirm against the canonical spec
if (key.contains("/time")) {
valueMap.remove(CompositionSerializer.TAG_NAME);
}
}
if (// contains an archetype node predicate
isNodePredicate(key)) {
valueMap.put(ARCHETYPE_NODE_ID, archetypeNodeId);
} else if (key.equals(CompositionSerializer.TAG_ORIGIN) || key.equals(CompositionSerializer.TAG_TIME)) {
// compact time expression
valueMap = compactTimeMap(valueMap);
}
writer.name(jsonKey);
// recurse into the (possibly rewritten) nested map
new LinkedTreeMapAdapter().write(writer, valueMap);
} else if (value instanceof String) {
switch(key) {
case TAG_CLASS:
// only structural classes are emitted as a _type attribute here
if (Arrays.asList(structuralClasses).contains(value))
writer.name(AT_TYPE).value(new SnakeCase(((String) value)).camelToUpperSnake());
break;
case // this is an element
CompositionSerializer.TAG_PATH:
String archetypeNodeId2 = new FlatPath((String) value).getLast().getAtCode();
if (archetypeNodeId2 != null)
writer.name(AT_TYPE).value(ELEMENT);
// CHC 20191003: removed writer for archetype_node_id as it was not applicable here
break;
case CompositionSerializer.TAG_NAME:
new NameValue(writer, value.toString()).write();
break;
default:
writer.name(jsonKey).value((String) value);
break;
}
} else if (value instanceof Double) {
// scalar values: key converted to snake_case for the canonical output
writer.name(new SnakeCase(key).camelToSnake()).value((Double) value);
} else if (value instanceof Long) {
writer.name(new SnakeCase(key).camelToSnake()).value((Long) value);
} else if (value instanceof Number) {
writer.name(new SnakeCase(key).camelToSnake()).value((Number) value);
} else if (value instanceof Boolean) {
writer.name(new SnakeCase(key).camelToSnake()).value((Boolean) value);
} else if (value instanceof Double[]) {
// DV_MULTIMEDIA data (prepared above): written as an array of byte values
writer.name(new SnakeCase(key).camelToSnake());
writer.beginArray();
for (Double pix : (Double[]) value) {
writer.value(pix.byteValue());
}
writer.endArray();
} else
throw new IllegalArgumentException("Could not handle value type for key:" + key + ", value:" + value);
}
}
Use of org.ehrbase.serialisation.util.SnakeCase in the openEHR_SDK project by ehrbase.
Class DvTextAdapter, method write:
/**
 * Writes a {@link DvText} to the given JSON writer.
 *
 * <p>A {@code null} value is emitted as JSON null. Otherwise the encoding
 * depends on the adapter mode: RAW_JSON writes the type marker first, then the
 * value; PG_JSONB writes the value first, then the type marker and any term
 * mappings. Any other adapter mode writes nothing.
 *
 * @param writer target JSON writer
 * @param dvalue the DV_TEXT to serialise, may be null
 * @throws IOException if the underlying writer fails
 */
@Override
public void write(JsonWriter writer, DvText dvalue) throws IOException {
    if (dvalue == null) {
        writer.nullValue();
        return;
    }
    if (adapterType == I_DvTypeAdapter.AdapterType.RAW_JSON) {
        // canonical (raw) JSON: type marker first, then the text value
        writer.beginObject();
        writer.name(I_DvTypeAdapter.TAG_CLASS_RAW_JSON).value(new ObjectSnakeCase(dvalue).camelToUpperSnake());
        writer.name("value").value(dvalue.getValue());
        writer.endObject();
    } else if (adapterType == I_DvTypeAdapter.AdapterType.PG_JSONB) {
        // DB (jsonb) encoding: value first, then type marker, then term mappings
        writer.beginObject();
        writer.name("value").value(dvalue.getValue());
        writer.name(I_DvTypeAdapter.TAG_CLASS_RAW_JSON).value(new SnakeCase(DvText.class.getSimpleName()).camelToUpperSnake());
        new TermMappingAdapter().write(writer, dvalue.getMappings());
        writer.endObject();
    }
}
Use of org.ehrbase.serialisation.util.SnakeCase in the openEHR_SDK project by ehrbase.
Class AttributeCodesetMapping, method actualAttributeId:
/**
 * Resolves the localized attribute id for the given terminology and attribute.
 *
 * <p>Falls back to English ({@code "en"}) when the requested language has no
 * mapping for the attribute.
 *
 * @param terminology terminology id (normalized via {@code fixTerminlogy})
 * @param attribute   attribute name in camelCase; converted to snake_case for lookup
 * @param language    preferred language code
 * @return the localized attribute id, or null when {@code attribute} is null
 * @throws IllegalArgumentException if the terminology or the attribute is unknown
 */
public String actualAttributeId(String terminology, String attribute, String language) {
    if (attribute == null) {
        return null;
    }
    String snakeAttribute = new SnakeCase(attribute).camelToSnake();
    String fixTerminlogy = fixTerminlogy(terminology);
    // Guard against an unknown terminology up front. The sibling method
    // isLocalizedAttribute() performs this check; without it, the lookup
    // below would fail with an uninformative NullPointerException.
    if (!getMappers().containsKey(fixTerminlogy))
        throw new IllegalArgumentException("Invalid terminology id:" + terminology);
    if (!getMappers().get(fixTerminlogy).containsKey(snakeAttribute))
        throw new IllegalArgumentException("attribute:" + attribute + ", is not defined in terminology:" + terminology);
    if (!getMappers().get(fixTerminlogy).get(snakeAttribute).getIdMap().containsKey(language))
        // default to English
        language = "en";
    return getMappers().get(fixTerminlogy).get(snakeAttribute).getIdMap().get(language);
}
Use of org.ehrbase.serialisation.util.SnakeCase in the openEHR_SDK project by ehrbase.
Class AttributeCodesetMapping, method isLocalizedAttribute:
/**
 * Checks whether the given attribute has a localization for the given language.
 *
 * @param terminology terminology id (normalized via {@code fixTerminlogy})
 * @param attribute   attribute name in camelCase; converted to snake_case for lookup
 * @param language    language code to test for
 * @return true when a localization exists for {@code language}; false when
 *         {@code attribute} is null
 * @throws IllegalArgumentException if the terminology or the attribute is unknown
 */
public boolean isLocalizedAttribute(String terminology, String attribute, String language) {
    if (attribute == null) {
        return false;
    }
    String attributeKey = new SnakeCase(attribute).camelToSnake();
    String terminologyKey = fixTerminlogy(terminology);
    if (!getMappers().containsKey(terminologyKey)) {
        throw new IllegalArgumentException("Invalid terminology id:" + terminology);
    }
    if (!getMappers().get(terminologyKey).containsKey(attributeKey)) {
        throw new IllegalArgumentException("attribute:" + attribute + ", is not defined in terminology:" + terminology);
    }
    // default to English
    return getMappers().get(terminologyKey).get(attributeKey).getIdMap().containsKey(language);
}
Aggregations