Use of io.confluent.kafka.schemaregistry.protobuf.diff.Context.TypeElementInfo in project schema-registry by confluentinc.
Class ProtobufSchemaUtils, method toString(Context, ProtoFileElement, boolean):
private static String toString(Context ctx, ProtoFileElement protoFile, boolean normalize) {
  StringBuilder sb = new StringBuilder();
  if (protoFile.getSyntax() != null) {
    sb.append("syntax = \"");
    sb.append(protoFile.getSyntax());
    sb.append("\";\n");
  }
  if (protoFile.getPackageName() != null) {
    sb.append("package ");
    sb.append(protoFile.getPackageName());
    sb.append(";\n");
  }
  if (!protoFile.getImports().isEmpty() || !protoFile.getPublicImports().isEmpty()) {
    sb.append('\n');
    List<String> imports = protoFile.getImports();
    if (normalize) {
      imports = imports.stream().sorted().distinct().collect(Collectors.toList());
    }
    for (String file : imports) {
      sb.append("import \"");
      sb.append(file);
      sb.append("\";\n");
    }
    List<String> publicImports = protoFile.getPublicImports();
    if (normalize) {
      publicImports = publicImports.stream().sorted().distinct().collect(Collectors.toList());
    }
    for (String file : publicImports) {
      sb.append("import public \"");
      sb.append(file);
      sb.append("\";\n");
    }
  }
  if (!protoFile.getOptions().isEmpty()) {
    sb.append('\n');
    List<OptionElement> options = protoFile.getOptions();
    if (normalize) {
      options = new ArrayList<>(options);
      options.sort(Comparator.comparing(OptionElement::getName));
    }
    for (OptionElement option : options) {
      sb.append(toOptionString(option, normalize));
    }
  }
  if (!protoFile.getTypes().isEmpty()) {
    sb.append('\n');
    // message type order is significant: clients use the non-normalized schema
    // to serialize message indexes
    for (TypeElement typeElement : protoFile.getTypes()) {
      if (typeElement instanceof MessageElement) {
        if (normalize) {
          TypeElementInfo typeInfo = ctx.getType(typeElement.getName(), true);
          if (typeInfo != null && typeInfo.isMap()) {
            // don't emit synthetic map message
            continue;
          }
        }
        try (Context.NamedScope nameScope = ctx.enterName(typeElement.getName())) {
          sb.append(toString(ctx, (MessageElement) typeElement, normalize));
        }
      }
    }
    for (TypeElement typeElement : protoFile.getTypes()) {
      if (typeElement instanceof EnumElement) {
        try (Context.NamedScope nameScope = ctx.enterName(typeElement.getName())) {
          sb.append(toString(ctx, (EnumElement) typeElement, normalize));
        }
      }
    }
  }
  if (!protoFile.getExtendDeclarations().isEmpty()) {
    sb.append('\n');
    for (ExtendElement extendDeclaration : protoFile.getExtendDeclarations()) {
      sb.append(extendDeclaration.toSchema());
    }
  }
  if (!protoFile.getServices().isEmpty()) {
    sb.append('\n');
    // don't sort services, for consistency with not sorting message/enum elements
    for (ServiceElement service : protoFile.getServices()) {
      sb.append(toString(ctx, service, normalize));
    }
  }
  return sb.toString();
}
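The private toString above is normally reached through the public ProtobufSchema API when a file is rendered with normalize = true (imports sorted and de-duplicated, file options sorted by name). A minimal sketch of exercising that path, assuming the ProtobufSchema(String) constructor plus the ParsedSchema canonicalString() and normalize() methods available in recent schema-registry releases; the class name is illustrative only.

import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

public class NormalizeFileExample {
  public static void main(String[] args) {
    // File-level options declared out of alphabetical order; normalization
    // is expected to sort them by option name.
    String schemaString =
        "syntax = \"proto3\";\n"
            + "package com.acme;\n"
            + "option java_package = \"com.acme.model\";\n"
            + "option java_outer_classname = \"EventProtos\";\n"
            + "message Event {\n"
            + "  string id = 1;\n"
            + "}\n";

    ProtobufSchema schema = new ProtobufSchema(schemaString);
    // Rendering without normalization keeps the original declaration order.
    System.out.println(schema.canonicalString());
    // normalize() re-renders the file with normalize = true.
    ParsedSchema normalized = schema.normalize();
    System.out.println(normalized.canonicalString());
  }
}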
Use of io.confluent.kafka.schemaregistry.protobuf.diff.Context.TypeElementInfo in project schema-registry by confluentinc.
Class FieldSchemaDiff, method compareLabelsAndTypes:
static void compareLabelsAndTypes(final Context ctx, Field.Label originalLabel,
    Field.Label updateLabel, ProtoType originalType, ProtoType updateType) {
  TypeElementInfo originalTypeInfo = ctx.getType(originalType.toString(), true);
  if (originalTypeInfo != null && originalTypeInfo.isMap()) {
    originalType = originalTypeInfo.getMapType();
  }
  TypeElementInfo updateTypeInfo = ctx.getType(updateType.toString(), false);
  if (updateTypeInfo != null && updateTypeInfo.isMap()) {
    updateType = updateTypeInfo.getMapType();
  }
  Kind originalKind = kind(ctx, originalType, true);
  Kind updateKind = kind(ctx, updateType, false);
  if (!Objects.equals(originalKind, updateKind)) {
    ctx.addDifference(FIELD_KIND_CHANGED);
  } else {
    switch (originalKind) {
      case SCALAR:
        compareScalarTypes(ctx, originalLabel, updateLabel, originalType, updateType);
        break;
      case MESSAGE:
        compareMessageTypes(ctx, originalType, updateType);
        break;
      case MAP:
        compareMapTypes(ctx, originalType, updateType);
        break;
      default:
        break;
    }
  }
}
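compareLabelsAndTypes reduces both field types to a Kind (SCALAR, MESSAGE, or MAP) and reports FIELD_KIND_CHANGED when the kinds differ. A hedged end-to-end sketch of triggering that path, assuming ParsedSchema#isBackwardCompatible(ParsedSchema) returns the incompatibility messages; the class name is illustrative only.

import java.util.List;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

public class FieldKindChangeExample {
  public static void main(String[] args) {
    String original =
        "syntax = \"proto3\";\n"
            + "message Order {\n"
            + "  string customer = 1;\n"
            + "}\n";
    // Same tag, but the field's kind changes from SCALAR to MESSAGE.
    String update =
        "syntax = \"proto3\";\n"
            + "message Customer {\n"
            + "  string name = 1;\n"
            + "}\n"
            + "message Order {\n"
            + "  Customer customer = 1;\n"
            + "}\n";

    ProtobufSchema originalSchema = new ProtobufSchema(original);
    ProtobufSchema updateSchema = new ProtobufSchema(update);
    // Expected to surface a FIELD_KIND_CHANGED-style incompatibility.
    List<String> errors = updateSchema.isBackwardCompatible(originalSchema);
    errors.forEach(System.out::println);
  }
}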
Use of io.confluent.kafka.schemaregistry.protobuf.diff.Context.TypeElementInfo in project schema-registry by confluentinc.
Class SchemaDiff, method compareTypeElements:
public static void compareTypeElements(
    final Context ctx, final List<TypeElement> original, final List<TypeElement> update) {
  Map<String, MessageElement> originalMessages = new HashMap<>();
  Map<String, MessageElement> updateMessages = new HashMap<>();
  Map<String, Integer> originalMessageIndexes = new HashMap<>();
  Map<String, Integer> updateMessageIndexes = new HashMap<>();
  Map<String, EnumElement> originalEnums = new HashMap<>();
  Map<String, EnumElement> updateEnums = new HashMap<>();
  compareMessageElements(original, originalMessages, originalMessageIndexes, originalEnums);
  compareMessageElements(update, updateMessages, updateMessageIndexes, updateEnums);
  Set<String> allMessageNames = new HashSet<>(originalMessages.keySet());
  allMessageNames.addAll(updateMessages.keySet());
  Set<String> allEnumNames = new HashSet<>(originalEnums.keySet());
  allEnumNames.addAll(updateEnums.keySet());
  for (String name : allMessageNames) {
    try (Context.NamedScope nameScope = ctx.enterName(name)) {
      MessageElement originalMessage = originalMessages.get(name);
      MessageElement updateMessage = updateMessages.get(name);
      if (updateMessage == null) {
        TypeElementInfo originalType = ctx.getType(name, true);
        if (originalType != null && !originalType.isMap()) {
          ctx.addDifference(MESSAGE_REMOVED);
        }
      } else if (originalMessage == null) {
        TypeElementInfo updateType = ctx.getType(name, false);
        if (updateType != null && !updateType.isMap()) {
          ctx.addDifference(MESSAGE_ADDED);
        }
      } else {
        MessageSchemaDiff.compare(ctx, originalMessage, updateMessage);
        Integer originalMessageIndex = originalMessageIndexes.get(name);
        Integer updateMessageIndex = updateMessageIndexes.get(name);
        if (originalMessageIndex == null || !originalMessageIndex.equals(updateMessageIndex)) {
          // Moving or reordering a message is compatible since serialized message indexes
          // are w.r.t. the schema of the corresponding ID
          ctx.addDifference(MESSAGE_MOVED);
        }
      }
    }
  }
  for (String name : allEnumNames) {
    try (Context.NamedScope nameScope = ctx.enterName(name)) {
      EnumElement originalEnum = originalEnums.get(name);
      EnumElement updateEnum = updateEnums.get(name);
      if (updateEnum == null) {
        ctx.addDifference(ENUM_REMOVED);
      } else if (originalEnum == null) {
        ctx.addDifference(ENUM_ADDED);
      } else {
        EnumSchemaDiff.compare(ctx, originalEnum, updateEnum);
      }
    }
  }
}
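compareTypeElements distinguishes a message that disappeared (MESSAGE_REMOVED) from one that merely changed position (MESSAGE_MOVED, which the inline comment notes is compatible). A sketch of both cases, again assuming ParsedSchema#isBackwardCompatible; the class name is illustrative, and the exact classification of each difference depends on the release.

import java.util.List;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

public class MessageRemovedVsMovedExample {
  public static void main(String[] args) {
    String original =
        "syntax = \"proto3\";\n"
            + "message A { string id = 1; }\n"
            + "message B { string id = 1; }\n";
    // B is dropped entirely; compareTypeElements records MESSAGE_REMOVED.
    String withoutB =
        "syntax = \"proto3\";\n"
            + "message A { string id = 1; }\n";
    // A and B swap positions; only MESSAGE_MOVED is recorded.
    String reordered =
        "syntax = \"proto3\";\n"
            + "message B { string id = 1; }\n"
            + "message A { string id = 1; }\n";

    ProtobufSchema base = new ProtobufSchema(original);
    List<String> afterRemoval = new ProtobufSchema(withoutB).isBackwardCompatible(base);
    List<String> afterReorder = new ProtobufSchema(reordered).isBackwardCompatible(base);
    System.out.println("removed B -> " + afterRemoval);  // removal is expected to be flagged
    System.out.println("reordered -> " + afterReorder);  // a pure move is expected to pass
  }
}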
Use of io.confluent.kafka.schemaregistry.protobuf.diff.Context.TypeElementInfo in project schema-registry by confluentinc.
Class ProtobufSchemaUtils, method toString(Context, FieldElement, boolean):
private static String toString(Context ctx, FieldElement field, boolean normalize) {
  StringBuilder sb = new StringBuilder();
  Label label = field.getLabel();
  String fieldType = field.getType();
  ProtoType fieldProtoType = ProtoType.get(fieldType);
  if (normalize) {
    if (!fieldProtoType.isScalar() && !fieldProtoType.isMap()) {
      // See if the fieldType resolves to a message representing a map
      fieldType = resolve(ctx, fieldType);
      TypeElementInfo typeInfo = ctx.getTypeForFullName(fieldType, true);
      if (typeInfo != null && typeInfo.isMap()) {
        fieldProtoType = typeInfo.getMapType();
      } else {
        fieldProtoType = ProtoType.get(fieldType);
      }
    }
    ProtoType mapValueType = fieldProtoType.getValueType();
    if (fieldProtoType.isMap() && mapValueType != null) {
      // Ensure the value of the map is fully resolved
      String valueType = ctx.resolve(mapValueType.toString(), true);
      if (valueType != null) {
        // Note we add a leading dot to valueType
        fieldProtoType = ProtoType.get(
            "map<" + fieldProtoType.getKeyType() + ", ." + valueType + ">");
      }
      // don't emit label for map
      label = null;
    }
    fieldType = fieldProtoType.toString();
  }
  if (label != null) {
    sb.append(label.name().toLowerCase(Locale.US));
    sb.append(" ");
  }
  sb.append(fieldType);
  sb.append(" ");
  sb.append(field.getName());
  sb.append(" = ");
  sb.append(field.getTag());
  List<OptionElement> optionsWithSpecialValues = new ArrayList<>(field.getOptions());
  String defaultValue = field.getDefaultValue();
  if (defaultValue != null) {
    optionsWithSpecialValues.add(
        OptionElement.Companion.create("default", toKind(fieldProtoType), defaultValue));
  }
  String jsonName = field.getJsonName();
  if (jsonName != null) {
    optionsWithSpecialValues.add(
        OptionElement.Companion.create("json_name", Kind.STRING, jsonName));
  }
  if (!optionsWithSpecialValues.isEmpty()) {
    sb.append(" ");
    if (normalize) {
      optionsWithSpecialValues.sort(Comparator.comparing(OptionElement::getName));
    }
    appendOptions(sb, optionsWithSpecialValues, normalize);
  }
  sb.append(";\n");
  return sb.toString();
}
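For map fields, the branch above swaps the synthetic *Entry message back to map<key, value> syntax, fully qualifies the value type with a leading dot, and drops the label. A minimal sketch of observing that through normalization, under the same public ProtobufSchema/normalize() assumptions as the earlier example; the class name is illustrative only.

import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

public class MapFieldNormalizeExample {
  public static void main(String[] args) {
    // The parser models map<string, Item> via a synthetic entry message; on the
    // field, normalization re-renders it as a map with a dot-prefixed, fully
    // qualified value type and no label.
    String schemaString =
        "syntax = \"proto3\";\n"
            + "package com.acme;\n"
            + "message Item {\n"
            + "  string sku = 1;\n"
            + "}\n"
            + "message Cart {\n"
            + "  map<string, Item> items = 1;\n"
            + "}\n";

    ProtobufSchema schema = new ProtobufSchema(schemaString);
    // Expected to render the field roughly as: map<string, .com.acme.Item> items = 1;
    System.out.println(schema.normalize().canonicalString());
  }
}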
Use of io.confluent.kafka.schemaregistry.protobuf.diff.Context.TypeElementInfo in project schema-registry by confluentinc.
Class ProtobufSchemaUtils, method toString(Context, MessageElement, boolean):
private static String toString(Context ctx, MessageElement type, boolean normalize) {
  StringBuilder sb = new StringBuilder();
  sb.append("message ");
  sb.append(type.getName());
  sb.append(" {");
  if (!type.getReserveds().isEmpty()) {
    sb.append('\n');
    List<ReservedElement> reserveds = type.getReserveds();
    if (normalize) {
      reserveds = reserveds.stream()
          .flatMap(r -> r.getValues().stream()
              .map(o -> new ReservedElement(
                  r.getLocation(), r.getDocumentation(), Collections.singletonList(o))))
          .collect(Collectors.toList());
      Comparator<Object> cmp = Comparator.comparing(r -> {
        Object o = ((ReservedElement) r).getValues().get(0);
        if (o instanceof IntRange) {
          return ((IntRange) o).getStart();
        } else if (o instanceof Integer) {
          return (Integer) o;
        } else {
          return Integer.MAX_VALUE;
        }
      }).thenComparing(r -> ((ReservedElement) r).getValues().get(0).toString());
      reserveds.sort(cmp);
    }
    for (ReservedElement reserved : reserveds) {
      appendIndented(sb, toString(ctx, reserved, normalize));
    }
  }
  if (!type.getOptions().isEmpty()) {
    sb.append('\n');
    List<OptionElement> options = type.getOptions();
    if (normalize) {
      options = new ArrayList<>(options);
      options.sort(Comparator.comparing(OptionElement::getName));
    }
    for (OptionElement option : options) {
      appendIndented(sb, toOptionString(option, normalize));
    }
  }
  if (!type.getFields().isEmpty()) {
    sb.append('\n');
    List<FieldElement> fields = type.getFields();
    if (normalize) {
      fields = new ArrayList<>(fields);
      fields.sort(Comparator.comparing(FieldElement::getTag));
    }
    for (FieldElement field : fields) {
      appendIndented(sb, toString(ctx, field, normalize));
    }
  }
  if (!type.getOneOfs().isEmpty()) {
    sb.append('\n');
    List<OneOfElement> oneOfs = type.getOneOfs();
    if (normalize) {
      oneOfs = oneOfs.stream()
          .filter(o -> !o.getFields().isEmpty())
          .map(o -> {
            List<FieldElement> fields = new ArrayList<>(o.getFields());
            fields.sort(Comparator.comparing(FieldElement::getTag));
            return new OneOfElement(
                o.getName(), o.getDocumentation(), fields, o.getGroups(), o.getOptions());
          })
          .collect(Collectors.toList());
      oneOfs.sort(Comparator.comparing(o -> o.getFields().get(0).getTag()));
    }
    for (OneOfElement oneOf : oneOfs) {
      appendIndented(sb, toString(ctx, oneOf, normalize));
    }
  }
  if (!type.getGroups().isEmpty()) {
    sb.append('\n');
    for (GroupElement group : type.getGroups()) {
      appendIndented(sb, group.toSchema());
    }
  }
  if (!type.getExtensions().isEmpty()) {
    sb.append('\n');
    for (ExtensionsElement extension : type.getExtensions()) {
      appendIndented(sb, extension.toSchema());
    }
  }
  if (!type.getNestedTypes().isEmpty()) {
    sb.append('\n');
    // message type order is significant: clients use the non-normalized schema
    // to serialize message indexes
    for (TypeElement typeElement : type.getNestedTypes()) {
      if (typeElement instanceof MessageElement) {
        if (normalize) {
          TypeElementInfo typeInfo = ctx.getType(typeElement.getName(), true);
          if (typeInfo != null && typeInfo.isMap()) {
            // don't emit synthetic map message
            continue;
          }
        }
        try (Context.NamedScope nameScope = ctx.enterName(typeElement.getName())) {
          appendIndented(sb, toString(ctx, (MessageElement) typeElement, normalize));
        }
      }
    }
    for (TypeElement typeElement : type.getNestedTypes()) {
      if (typeElement instanceof EnumElement) {
        try (Context.NamedScope nameScope = ctx.enterName(typeElement.getName())) {
          appendIndented(sb, toString(ctx, (EnumElement) typeElement, normalize));
        }
      }
    }
  }
  sb.append("}\n");
  return sb.toString();
}
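At the message level, normalization splits each reserved statement into single-value entries sorted by start tag, sorts fields (and oneof fields) by tag, and skips synthetic map entry messages. A sketch that feeds such a message through the public API, under the same ProtobufSchema/normalize() assumptions as the earlier examples; the class name is illustrative only.

import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

public class MessageNormalizeExample {
  public static void main(String[] args) {
    // Fields declared out of tag order plus multi-value reserved statements;
    // normalization sorts the fields by tag and splits/sorts the reserveds.
    String schemaString =
        "syntax = \"proto3\";\n"
            + "message Profile {\n"
            + "  reserved 9 to 11, 7;\n"
            + "  reserved \"legacy_name\";\n"
            + "  string email = 3;\n"
            + "  string id = 1;\n"
            + "  int32 age = 2;\n"
            + "}\n";

    ProtobufSchema schema = new ProtobufSchema(schemaString);
    System.out.println(schema.normalize().canonicalString());
  }
}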