Example 1 with CompatibilityChecker

Use of io.confluent.kafka.schemaregistry.CompatibilityChecker in project schema-registry by confluentinc.

From class AvroCompatibilityTest, method testBasicForwardsTransitiveCompatibility.

/*
   * Forward transitive compatibility: A new schema is forward compatible if all previous schemas can read data written
   * in this schema.
   */
@Test
public void testBasicForwardsTransitiveCompatibility() {
    CompatibilityChecker checker = CompatibilityChecker.FORWARD_TRANSITIVE_CHECKER;
    // All compatible
    assertTrue("iteratively removing fields with defaults is a compatible change", checker.isCompatible(schema1, Arrays.asList(schema8, schema2)).isEmpty());
    // 1 == 2 and 2 == 3 pass pairwise below, but 3 != 1: schema3 cannot read data written with schema1, so the transitive check fails
    assertTrue("adding default to a field is a compatible change", checker.isCompatible(schema2, Collections.singletonList(schema3)).isEmpty());
    assertTrue("removing a field with a default is a compatible change", checker.isCompatible(schema1, Arrays.asList(schema2)).isEmpty());
    assertFalse("removing a default is not a transitively compatible change", checker.isCompatible(schema1, Arrays.asList(schema2, schema3)).isEmpty());
}
Also used: CompatibilityChecker (io.confluent.kafka.schemaregistry.CompatibilityChecker), Test (org.junit.Test)
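
The fixtures schema1, schema2, schema3, and schema8 never appear in these excerpts. A minimal self-contained sketch, using hypothetical stand-in schemas chosen to match the assertion messages (the real fixtures live in AvroCompatibilityTest), shows how the same check runs outside the test class:

import io.confluent.kafka.schemaregistry.CompatibilityChecker;
import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;

import java.util.Arrays;
import java.util.List;

public class ForwardTransitiveDemo {

    // Hypothetical stand-ins: each version adds or removes a field with a
    // default, except SCHEMA_3, whose extra field f2 has no default.
    static final ParsedSchema SCHEMA_1 = new AvroSchema(
        "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":["
            + "{\"name\":\"f1\",\"type\":\"string\"}]}");
    static final ParsedSchema SCHEMA_2 = new AvroSchema(
        "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":["
            + "{\"name\":\"f1\",\"type\":\"string\"},"
            + "{\"name\":\"f2\",\"type\":\"string\",\"default\":\"foo\"}]}");
    static final ParsedSchema SCHEMA_3 = new AvroSchema(
        "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":["
            + "{\"name\":\"f1\",\"type\":\"string\"},"
            + "{\"name\":\"f2\",\"type\":\"string\"}]}");
    static final ParsedSchema SCHEMA_8 = new AvroSchema(
        "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":["
            + "{\"name\":\"f1\",\"type\":\"string\"},"
            + "{\"name\":\"f2\",\"type\":\"string\",\"default\":\"foo\"},"
            + "{\"name\":\"f3\",\"type\":\"string\",\"default\":\"bar\"}]}");

    public static void main(String[] args) {
        CompatibilityChecker checker = CompatibilityChecker.FORWARD_TRANSITIVE_CHECKER;
        // Previous schemas are passed oldest first; an empty error list means
        // the new schema is compatible with every schema in the history.
        List<String> errors =
            checker.isCompatible(SCHEMA_1, Arrays.asList(SCHEMA_8, SCHEMA_2));
        System.out.println(errors.isEmpty() ? "compatible" : errors);
    }
}

This prints "compatible": both SCHEMA_8 and SCHEMA_2 can read data written with SCHEMA_1, because every field SCHEMA_1 drops carries a default.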

Example 2 with CompatibilityChecker

Use of io.confluent.kafka.schemaregistry.CompatibilityChecker in project schema-registry by confluentinc.

From class AvroCompatibilityTest, method testBasicForwardsCompatibility.

/*
   * Forward compatibility: A new schema is forward compatible if the previous schema can read data written in this
   * schema.
   */
@Test
public void testBasicForwardsCompatibility() {
    CompatibilityChecker checker = CompatibilityChecker.FORWARD_CHECKER;
    assertTrue("adding a field is a forward compatible change", checker.isCompatible(schema2, Collections.singletonList(schema1)).isEmpty());
    assertTrue("adding a field is a forward compatible change", checker.isCompatible(schema3, Collections.singletonList(schema1)).isEmpty());
    assertTrue("adding a field is a forward compatible change", checker.isCompatible(schema3, Collections.singletonList(schema2)).isEmpty());
    assertTrue("adding a field is a forward compatible change", checker.isCompatible(schema2, Collections.singletonList(schema3)).isEmpty());
    // Only schema 2, the latest previous schema, is checked, so this passes despite the message
    assertTrue("removing a default is not a transitively compatible change", checker.isCompatible(schema1, Arrays.asList(schema3, schema2)).isEmpty());
}
Also used: CompatibilityChecker (io.confluent.kafka.schemaregistry.CompatibilityChecker), Test (org.junit.Test)
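
The last assertTrue passes even though its message describes an incompatible change: FORWARD_CHECKER, like the other non-transitive checkers, compares the new schema only against the last (latest) entry of the previous-schema list, as the mojo in Example 3 below also logs. Continuing with the hypothetical fixtures from the sketch above:

CompatibilityChecker forward = CompatibilityChecker.FORWARD_CHECKER;
// SCHEMA_3 (f2 without a default) is in the history, but only SCHEMA_2,
// the latest entry, is actually compared against the new SCHEMA_1.
List<String> errors =
    forward.isCompatible(SCHEMA_1, Arrays.asList(SCHEMA_3, SCHEMA_2));
System.out.println(errors.isEmpty()); // true: the SCHEMA_3 step is never examined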

Example 3 with CompatibilityChecker

Use of io.confluent.kafka.schemaregistry.CompatibilityChecker in project schema-registry by confluentinc.

From class TestLocalCompatibilityMojo, method execute.

public void execute() throws MojoExecutionException {
    List<SchemaProvider> providers = MojoUtils.defaultSchemaProviders();
    Map<String, SchemaProvider> schemaProviders = providers.stream().collect(Collectors.toMap(SchemaProvider::schemaType, p -> p));
    getLog().debug(String.format("Loading Schema at %s", schemaPath));
    ParsedSchema schema = loadSchema(schemaPath, schemaProviders);
    getLog().debug("Loading Previous Schemas");
    ArrayList<ParsedSchema> previousSchemas = new ArrayList<>();
    for (File previousSchemaPath : previousSchemaPaths) {
        previousSchemas.add(loadSchema(previousSchemaPath, schemaProviders));
    }
    CompatibilityChecker checker = CompatibilityChecker.checker(compatibilityLevel);
    List<String> errorMessages = checker.isCompatible(schema, previousSchemas);
    if (previousSchemas.size() > 1 && (compatibilityLevel == CompatibilityLevel.BACKWARD || compatibilityLevel == CompatibilityLevel.FORWARD || compatibilityLevel == CompatibilityLevel.FULL)) {
        getLog().info(String.format("Checking only with latest Schema at %s", previousSchemaPaths.get(previousSchemaPaths.size() - 1)));
    }
    success = errorMessages.isEmpty();
    if (success) {
        getLog().info(String.format("Schema is %s compatible with previous schemas", compatibilityLevel.name.toLowerCase()));
    } else {
        String errorLog = String.format("Schema is not %s compatible with previous schemas %n", compatibilityLevel.name.toLowerCase()) + errorMessages.get(0);
        getLog().error(errorLog);
    }
}
Also used: AbstractMojo (org.apache.maven.plugin.AbstractMojo), ArrayList (java.util.ArrayList), AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema), Collectors (java.util.stream.Collectors), CompatibilityChecker (io.confluent.kafka.schemaregistry.CompatibilityChecker), CompatibilityLevel (io.confluent.kafka.schemaregistry.CompatibilityLevel), File (java.io.File), IOException (java.io.IOException), List (java.util.List), Map (java.util.Map), Mojo (org.apache.maven.plugins.annotations.Mojo), MojoExecutionException (org.apache.maven.plugin.MojoExecutionException), Optional (java.util.Optional), Parameter (org.apache.maven.plugins.annotations.Parameter), ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema), SchemaProvider (io.confluent.kafka.schemaregistry.SchemaProvider), SchemaReference (io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference), StandardCharsets (java.nio.charset.StandardCharsets), Test (org.junit.Test)
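
Outside Maven, the same check can be run directly against schema files. A standalone sketch under the mojo's conventions (new schema first, previous schemas oldest first; class name and file handling are illustrative, and the mojo's loadSchema additionally resolves schema references, which this sketch skips):

import io.confluent.kafka.schemaregistry.CompatibilityChecker;
import io.confluent.kafka.schemaregistry.CompatibilityLevel;
import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

public class LocalCompatibilityCheck {

    public static void main(String[] args) throws IOException {
        // args[0] is the new schema; the rest are previous versions, oldest first.
        ParsedSchema schema = new AvroSchema(Files.readString(Path.of(args[0])));
        List<ParsedSchema> previousSchemas = new ArrayList<>();
        for (int i = 1; i < args.length; i++) {
            previousSchemas.add(new AvroSchema(Files.readString(Path.of(args[i]))));
        }
        // checker(...) selects the checker for any CompatibilityLevel,
        // exactly as the mojo does above.
        CompatibilityChecker checker =
            CompatibilityChecker.checker(CompatibilityLevel.BACKWARD);
        List<String> errorMessages = checker.isCompatible(schema, previousSchemas);
        if (errorMessages.isEmpty()) {
            System.out.println("Schema is backward compatible with previous schemas");
        } else {
            System.err.println("Schema is not backward compatible: " + errorMessages.get(0));
        }
    }
}

Invoke it with the new schema path first, e.g. java LocalCompatibilityCheck new.avsc v1.avsc v2.avsc (file names illustrative).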

Example 4 with CompatibilityChecker

Use of io.confluent.kafka.schemaregistry.CompatibilityChecker in project schema-registry by confluentinc.

From class AvroCompatibilityTest, method testBasicFullTransitiveCompatibility.

/*
   * Full transitive compatibility: A new schema is fully compatible if it’s both transitively backward
   * and transitively forward compatible with the entire schema history.
   */
@Test
public void testBasicFullTransitiveCompatibility() {
    CompatibilityChecker checker = CompatibilityChecker.FULL_TRANSITIVE_CHECKER;
    // Simple check
    assertTrue("iteratively adding fields with defaults is a compatible change", checker.isCompatible(schema8, Arrays.asList(schema1, schema2)).isEmpty());
    assertTrue("iteratively removing fields with defaults is a compatible change", checker.isCompatible(schema1, Arrays.asList(schema8, schema2)).isEmpty());
    assertTrue("adding default to a field is a compatible change", checker.isCompatible(schema2, Collections.singletonList(schema3)).isEmpty());
    assertTrue("removing a field with a default is a compatible change", checker.isCompatible(schema1, Arrays.asList(schema2)).isEmpty());
    assertTrue("adding a field with default is a compatible change", checker.isCompatible(schema2, Collections.singletonList(schema1)).isEmpty());
    assertTrue("removing a default from a field compatible change", checker.isCompatible(schema3, Arrays.asList(schema2)).isEmpty());
    assertFalse("transitively adding a field without a default is not a compatible change", checker.isCompatible(schema3, Arrays.asList(schema2, schema1)).isEmpty());
    assertFalse("transitively removing a field without a default is not a compatible change", checker.isCompatible(schema1, Arrays.asList(schema2, schema3)).isEmpty());
}
Also used : CompatibilityChecker(io.confluent.kafka.schemaregistry.CompatibilityChecker) Test(org.junit.Test)

Example 5 with CompatibilityChecker

Use of io.confluent.kafka.schemaregistry.CompatibilityChecker in project schema-registry by confluentinc.

From class AvroCompatibilityTest, method testBasicFullCompatibility.

/*
   * Full compatibility: A new schema is fully compatible if it’s both backward and forward compatible.
   */
@Test
public void testBasicFullCompatibility() {
    CompatibilityChecker checker = CompatibilityChecker.FULL_CHECKER;
    assertTrue("adding a field with default is a backward and a forward compatible change", checker.isCompatible(schema2, Collections.singletonList(schema1)).isEmpty());
    // Only schema 2 (the latest previous schema) is checked, so this passes despite the message!
    assertTrue("transitively adding a field without a default is not a compatible change", checker.isCompatible(schema3, Arrays.asList(schema1, schema2)).isEmpty());
    // Again, only schema 2 is checked; the incompatible schema3 step is skipped!
    assertTrue("transitively removing a field without a default is not a compatible change", checker.isCompatible(schema1, Arrays.asList(schema3, schema2)).isEmpty());
}
Also used: CompatibilityChecker (io.confluent.kafka.schemaregistry.CompatibilityChecker), Test (org.junit.Test)
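
Both assertTrue calls pass despite their "not a compatible change" messages for the same reason as in Example 2: FULL_CHECKER compares only against the latest previous schema. Contrasting it with FULL_TRANSITIVE_CHECKER on the same history, again continuing with the hypothetical fixtures from the first sketch:

List<ParsedSchema> history = Arrays.asList(SCHEMA_1, SCHEMA_2); // oldest first

// FULL_CHECKER sees only SCHEMA_2; toggling the default on f2 is both
// backward and forward compatible, so no errors come back.
System.out.println(
    CompatibilityChecker.FULL_CHECKER.isCompatible(SCHEMA_3, history).isEmpty()); // true

// FULL_TRANSITIVE_CHECKER also checks SCHEMA_1, where f2 does not exist,
// so adding f2 without a default is rejected.
System.out.println(
    CompatibilityChecker.FULL_TRANSITIVE_CHECKER.isCompatible(SCHEMA_3, history).isEmpty()); // false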

Aggregations

CompatibilityChecker (io.confluent.kafka.schemaregistry.CompatibilityChecker): 7
Test (org.junit.Test): 6
CompatibilityLevel (io.confluent.kafka.schemaregistry.CompatibilityLevel): 1
ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema): 1
SchemaProvider (io.confluent.kafka.schemaregistry.SchemaProvider): 1
AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema): 1
SchemaReference (io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference): 1
File (java.io.File): 1
IOException (java.io.IOException): 1
StandardCharsets (java.nio.charset.StandardCharsets): 1
ArrayList (java.util.ArrayList): 1
List (java.util.List): 1
Map (java.util.Map): 1
Optional (java.util.Optional): 1
Collectors (java.util.stream.Collectors): 1
AbstractMojo (org.apache.maven.plugin.AbstractMojo): 1
MojoExecutionException (org.apache.maven.plugin.MojoExecutionException): 1
Mojo (org.apache.maven.plugins.annotations.Mojo): 1
Parameter (org.apache.maven.plugins.annotations.Parameter): 1