Example usage of org.apache.flink.api.common.typeinfo.Types in the Apache Flink project.
The convertType method of the JsonRowSchemaConverter class.
/**
 * Converts a JSON schema node into Flink {@link TypeInformation}.
 *
 * <p>Types that only differ in metadata (e.g. 'multipleOf') are unified via a set. A simple
 * '$ref' resolver handles entities that are defined once and referenced throughout the
 * document; the resolved reference serves as a fallback for both 'type' and 'oneOf'.
 * Only a single type (optionally paired with a null type, e.g. "object or null") is
 * supported; union types and schema extension are rejected.
 *
 * @param location path within the schema document, used for error reporting
 * @param node the schema node to convert
 * @param root the schema document root, used for reference resolution
 * @return the converted type information
 * @throws IllegalArgumentException if no type can be derived, an unsupported type is
 *     encountered, or the node declares union/extension constructs
 */
private static TypeInformation<?> convertType(String location, JsonNode node, JsonNode root) {
    // we use a set here to unify types (e.g. types that just add metadata such as 'multipleOf')
    final Set<TypeInformation<?>> typeSet = new HashSet<>();

    // search for ref
    final Optional<JsonNode> ref;
    if (node.has(REF) && node.get(REF).isTextual()) {
        // try a simple ref resolver to solve those cases where entities are defined at
        // the beginning and then used throughout a document
        ref = Optional.of(resolveReference(node.get(REF).asText(), node, root));
    } else {
        ref = Optional.empty();
    }

    // use TYPE of this node
    if (node.has(TYPE)) {
        final JsonNode typeNode = node.get(TYPE);
        final List<String> types = new ArrayList<>();
        if (typeNode.isArray()) {
            // array of types, e.g. "type": ["string", "null"]
            final Iterator<JsonNode> elements = typeNode.elements();
            while (elements.hasNext()) {
                types.add(elements.next().asText());
            }
        } else if (typeNode.isTextual()) {
            // single type
            types.add(typeNode.asText());
        }
        // set field type(s)
        for (String type : types) {
            switch (type) {
                case TYPE_NULL:
                    typeSet.add(Types.VOID);
                    break;
                case TYPE_BOOLEAN:
                    typeSet.add(Types.BOOLEAN);
                    break;
                case TYPE_STRING:
                    // 'format' and 'contentEncoding' refine plain strings (e.g. date-time,
                    // base64); plain STRING is the default
                    if (node.has(FORMAT)) {
                        typeSet.add(convertStringFormat(location, node.get(FORMAT)));
                    } else if (node.has(CONTENT_ENCODING)) {
                        typeSet.add(convertStringEncoding(location, node.get(CONTENT_ENCODING)));
                    } else {
                        typeSet.add(Types.STRING);
                    }
                    break;
                case TYPE_NUMBER:
                    typeSet.add(Types.BIG_DEC);
                    break;
                case TYPE_INTEGER:
                    // use BigDecimal for easier interoperability
                    // without affecting the correctness of the result
                    typeSet.add(Types.BIG_DEC);
                    break;
                case TYPE_OBJECT:
                    typeSet.add(convertObject(location, node, root));
                    break;
                case TYPE_ARRAY:
                    typeSet.add(convertArray(location, node, root));
                    break;
                default:
                    // report the individual offending type; node.get(TYPE).asText() would
                    // return "" for an array-typed node and hide the actual value
                    throw new IllegalArgumentException(
                        "Unsupported type '" + type + "' in node: " + location);
            }
        }
    } else {
        // use TYPE of reference as fallback if present
        ref.filter(r -> r.has(TYPE))
            .ifPresent(r -> typeSet.add(convertType(node.get(REF).asText(), r, root)));
    }

    // simple interpretation of ONE_OF for supporting "object or null"
    if (node.has(ONE_OF) && node.get(ONE_OF).isArray()) {
        final TypeInformation<?>[] types =
            convertTypes(location + '/' + ONE_OF, node.get(ONE_OF), root);
        typeSet.addAll(Arrays.asList(types));
    } else if (ref.isPresent() && ref.get().has(ONE_OF) && ref.get().get(ONE_OF).isArray()) {
        // use ONE_OF of reference as fallback
        final TypeInformation<?>[] types =
            convertTypes(node.get(REF).asText() + '/' + ONE_OF, ref.get().get(ONE_OF), root);
        typeSet.addAll(Arrays.asList(types));
    }

    // validate no union types or extending
    if (node.has(ALL_OF) || node.has(ANY_OF) || node.has(NOT)
            || node.has(EXTENDS) || node.has(DISALLOW)) {
        throw new IllegalArgumentException(
            "Union types such as '" + ALL_OF + "', '" + ANY_OF + "' etc. "
                + "and extending are not supported yet.");
    }

    // only a type (with null) is supported yet
    final List<TypeInformation<?>> types = new ArrayList<>(typeSet);
    if (types.size() == 0) {
        throw new IllegalArgumentException("No type could be found in node: " + location);
    } else if (types.size() > 2 || (types.size() == 2 && !types.contains(Types.VOID))) {
        throw new IllegalArgumentException(
            "Union types with more than just a null type are not supported yet.");
    }

    // return the first non-void type or void
    if (types.size() == 2 && types.get(0) == Types.VOID) {
        return types.get(1);
    } else {
        return types.get(0);
    }
}
Aggregations