Example 1 with QualifiedSubject

Use of io.confluent.kafka.schemaregistry.utils.QualifiedSubject in project schema-registry by confluentinc.

In class KafkaSchemaRegistry, method lookUpSchemaUnderSubjectUsingContexts:

public Schema lookUpSchemaUnderSubjectUsingContexts(String subject, Schema schema, boolean normalize, boolean lookupDeletedSchema) throws SchemaRegistryException {
    Schema matchingSchema = lookUpSchemaUnderSubject(subject, schema, normalize, lookupDeletedSchema);
    if (matchingSchema != null) {
        return matchingSchema;
    }
    QualifiedSubject qs = QualifiedSubject.create(tenant(), subject);
    boolean isQualifiedSubject = qs != null && !DEFAULT_CONTEXT.equals(qs.getContext());
    if (isQualifiedSubject) {
        return null;
    }
    // Try qualifying the subject with each known context
    try (CloseableIterator<SchemaRegistryValue> iter = allContexts()) {
        while (iter.hasNext()) {
            ContextValue v = (ContextValue) iter.next();
            QualifiedSubject qualSub = new QualifiedSubject(v.getTenant(), v.getContext(), qs.getSubject());
            Schema qualSchema = new Schema(qualSub.toQualifiedSubject(), schema.getVersion(), schema.getId(),
                    schema.getSchemaType(), schema.getReferences(), schema.getSchema());
            matchingSchema = lookUpSchemaUnderSubject(qualSub.toQualifiedSubject(), qualSchema, normalize, lookupDeletedSchema);
            if (matchingSchema != null) {
                return matchingSchema;
            }
        }
    }
    return null;
}
Also used: QualifiedSubject (io.confluent.kafka.schemaregistry.utils.QualifiedSubject), ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema), Schema (io.confluent.kafka.schemaregistry.client.rest.entities.Schema), AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema)
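
The loop above rebuilds the caller's subject under each known context before retrying the lookup. The following standalone sketch mirrors that re-qualification step using only the QualifiedSubject API visible in this example; the tenant and context values are made-up placeholders, not values from the project.

import io.confluent.kafka.schemaregistry.utils.QualifiedSubject;

public class RequalifySketch {
    public static void main(String[] args) {
        // Parse a bare subject name for a hypothetical tenant; create() may return null.
        QualifiedSubject qs = QualifiedSubject.create("example-tenant", "orders-value");
        if (qs != null) {
            // Rebuild the same subject under a hypothetical context, exactly as the
            // loop above does for each ContextValue it finds in the store.
            QualifiedSubject candidate =
                new QualifiedSubject(qs.getTenant(), "other-context", qs.getSubject());
            // toQualifiedSubject() yields the fully qualified name used for the retry lookup.
            System.out.println(candidate.toQualifiedSubject());
        }
    }
}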

Example 2 with QualifiedSubject

Use of io.confluent.kafka.schemaregistry.utils.QualifiedSubject in project schema-registry by confluentinc.

In class KafkaSchemaRegistry, method register:

@Override
public int register(String subject, Schema schema, boolean normalize) throws SchemaRegistryException {
    try {
        checkRegisterMode(subject, schema);
        // Ensure cache is up-to-date before any potential writes
        kafkaStore.waitUntilKafkaReaderReachesLastOffset(subject, kafkaStoreTimeoutMs);
        int schemaId = schema.getId();
        ParsedSchema parsedSchema = canonicalizeSchema(schema, schemaId < 0, normalize);
        // see if the schema to be registered already exists
        SchemaIdAndSubjects schemaIdAndSubjects = this.lookupCache.schemaIdAndSubjects(schema);
        if (schemaIdAndSubjects != null) {
            if (schemaId >= 0 && schemaId != schemaIdAndSubjects.getSchemaId()) {
                throw new IdDoesNotMatchException(schemaIdAndSubjects.getSchemaId(), schema.getId());
            }
            if (schemaIdAndSubjects.hasSubject(subject)
                    && !isSubjectVersionDeleted(subject, schemaIdAndSubjects.getVersion(subject))) {
                // return only if the schema was previously registered under the input subject
                return schemaIdAndSubjects.getSchemaId();
            } else {
                // need to register schema under the input subject
                schemaId = schemaIdAndSubjects.getSchemaId();
            }
        }
        // determine the latest version of the schema in the subject
        List<SchemaValue> allVersions = getAllSchemaValues(subject);
        Collections.reverse(allVersions);
        List<SchemaValue> deletedVersions = new ArrayList<>();
        List<ParsedSchema> undeletedVersions = new ArrayList<>();
        int newVersion = MIN_VERSION;
        for (SchemaValue schemaValue : allVersions) {
            newVersion = Math.max(newVersion, schemaValue.getVersion() + 1);
            if (schemaValue.isDeleted()) {
                deletedVersions.add(schemaValue);
            } else {
                ParsedSchema undeletedSchema = parseSchema(getSchemaEntityFromSchemaValue(schemaValue));
                if (parsedSchema.references().isEmpty()
                        && !undeletedSchema.references().isEmpty()
                        && parsedSchema.deepEquals(undeletedSchema)) {
                    // This handles the case where a schema is sent with all references resolved
                    return schemaValue.getId();
                }
                undeletedVersions.add(undeletedSchema);
            }
        }
        Collections.reverse(undeletedVersions);
        final List<String> compatibilityErrorLogs = isCompatibleWithPrevious(subject, parsedSchema, undeletedVersions);
        final boolean isCompatible = compatibilityErrorLogs.isEmpty();
        if (normalize) {
            parsedSchema = parsedSchema.normalize();
        }
        // Allow schema providers to modify the schema during compatibility checks
        schema.setSchema(parsedSchema.canonicalString());
        schema.setReferences(parsedSchema.references());
        if (isCompatible) {
            // save the context key
            QualifiedSubject qs = QualifiedSubject.create(tenant(), subject);
            if (qs != null && !DEFAULT_CONTEXT.equals(qs.getContext())) {
                ContextKey contextKey = new ContextKey(qs.getTenant(), qs.getContext());
                if (kafkaStore.get(contextKey) == null) {
                    ContextValue contextValue = new ContextValue(qs.getTenant(), qs.getContext());
                    kafkaStore.put(contextKey, contextValue);
                }
            }
            // assign a guid and put the schema in the kafka store
            if (schema.getVersion() <= 0) {
                schema.setVersion(newVersion);
            }
            SchemaKey schemaKey = new SchemaKey(subject, schema.getVersion());
            if (schemaId >= 0) {
                checkIfSchemaWithIdExist(schemaId, schema);
                schema.setId(schemaId);
                kafkaStore.put(schemaKey, new SchemaValue(schema));
            } else {
                int retries = 0;
                while (retries++ < kafkaStoreMaxRetries) {
                    int newId = idGenerator.id(new SchemaValue(schema));
                    // Verify id is not already in use
                    if (lookupCache.schemaKeyById(newId, subject) == null) {
                        schema.setId(newId);
                        if (retries > 1) {
                            log.warn(String.format("Retrying to register the schema with ID %s", newId));
                        }
                        kafkaStore.put(schemaKey, new SchemaValue(schema));
                        break;
                    }
                }
                if (retries >= kafkaStoreMaxRetries) {
                    throw new SchemaRegistryStoreException("Error while registering the schema due " + "to generating an ID that is already in use.");
                }
            }
            for (SchemaValue deleted : deletedVersions) {
                if (deleted.getId().equals(schema.getId()) && deleted.getVersion().compareTo(schema.getVersion()) < 0) {
                    // Tombstone previous version with the same ID
                    SchemaKey key = new SchemaKey(deleted.getSubject(), deleted.getVersion());
                    kafkaStore.put(key, null);
                }
            }
            return schema.getId();
        } else {
            throw new IncompatibleSchemaException(compatibilityErrorLogs.toString());
        }
    } catch (StoreTimeoutException te) {
        throw new SchemaRegistryTimeoutException("Write to the Kafka store timed out while", te);
    } catch (StoreException e) {
        throw new SchemaRegistryStoreException("Error while registering the schema in the" + " backend Kafka store", e);
    }
}
Also used: QualifiedSubject (io.confluent.kafka.schemaregistry.utils.QualifiedSubject), ArrayList (java.util.ArrayList), SchemaRegistryStoreException (io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException), SchemaString (io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString), StoreException (io.confluent.kafka.schemaregistry.storage.exceptions.StoreException), IncompatibleSchemaException (io.confluent.kafka.schemaregistry.exceptions.IncompatibleSchemaException), IdDoesNotMatchException (io.confluent.kafka.schemaregistry.exceptions.IdDoesNotMatchException), StoreTimeoutException (io.confluent.kafka.schemaregistry.storage.exceptions.StoreTimeoutException), ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema), SchemaRegistryTimeoutException (io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryTimeoutException)
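
For orientation, here is a rough sketch of how a caller might drive register() so that the branches above assign both the version and the id. The subject name, Avro definition, and the import path for KafkaSchemaRegistry are assumptions for illustration; only the register() signature and the Schema constructor already shown in these examples are relied on.

import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference;
import io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryException;
import io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry;

import java.util.Collections;
import java.util.List;

public class RegisterSketch {
    // Registers a hypothetical Avro schema, letting the registry assign both version and id.
    static int registerOrderSchema(KafkaSchemaRegistry registry) throws SchemaRegistryException {
        String avro = "{\"type\":\"record\",\"name\":\"Order\","
                + "\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}";
        List<SchemaReference> refs = Collections.emptyList();
        // version 0 and id -1 mean "unassigned": register() then takes the
        // schema.getVersion() <= 0 and schemaId < 0 branches shown above.
        Schema request = new Schema("orders-value", 0, -1, "AVRO", refs, avro);
        return registry.register("orders-value", request, false);
    }
}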

Example 3 with QualifiedSubject

Use of io.confluent.kafka.schemaregistry.utils.QualifiedSubject in project schema-registry by confluentinc.

In class SubjectKeyComparator, method compare:

@Override
@SuppressWarnings("unchecked")
public int compare(K o1, K o2) {
    if (o1 instanceof SubjectKey && o2 instanceof SubjectKey) {
        SubjectKey s1 = (SubjectKey) o1;
        SubjectKey s2 = (SubjectKey) o2;
        int cmp = s1.keyType.compareTo(s2.keyType);
        if (cmp != 0) {
            return cmp;
        }
        // If the lookup cache is null, the tenant will be derived from the subject
        String tenant = lookupCache != null ? lookupCache.tenant() : null;
        QualifiedSubject qs1 = QualifiedSubject.create(tenant, s1.getSubject());
        QualifiedSubject qs2 = QualifiedSubject.create(tenant, s2.getSubject());
        if (qs1 == null && qs2 == null) {
            return 0;
        } else if (qs1 == null) {
            return -1;
        } else if (qs2 == null) {
            return 1;
        } else {
            cmp = qs1.compareTo(qs2);
            if (cmp != 0) {
                return cmp;
            }
            if (s1 instanceof SchemaKey && s2 instanceof SchemaKey) {
                SchemaKey sk1 = (SchemaKey) o1;
                SchemaKey sk2 = (SchemaKey) o2;
                return sk1.getVersion() - sk2.getVersion();
            } else {
                return 0;
            }
        }
    } else {
        return ((Comparable) o1).compareTo(o2);
    }
}
Also used: QualifiedSubject (io.confluent.kafka.schemaregistry.utils.QualifiedSubject)
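
Once the key types match and both subjects parse, the comparator simply delegates to QualifiedSubject.compareTo. A minimal sketch of that ordering, with a made-up tenant and subject names:

import io.confluent.kafka.schemaregistry.utils.QualifiedSubject;

public class SubjectOrderingSketch {
    public static void main(String[] args) {
        // Hypothetical tenant; create() parses a (possibly context-qualified) subject name.
        QualifiedSubject a = QualifiedSubject.create("example-tenant", "orders-value");
        QualifiedSubject b = QualifiedSubject.create("example-tenant", "payments-value");
        if (a != null && b != null) {
            // Same comparison the compare() method above falls through to.
            System.out.println(a.compareTo(b));
        }
    }
}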

Example 4 with QualifiedSubject

Use of io.confluent.kafka.schemaregistry.utils.QualifiedSubject in project schema-registry by confluentinc.

In class KafkaSchemaRegistry, method getUsingContexts:

public Schema getUsingContexts(String subject, int version, boolean returnDeletedSchema) throws SchemaRegistryException {
    Schema schema = get(subject, version, returnDeletedSchema);
    if (schema != null) {
        return schema;
    }
    QualifiedSubject qs = QualifiedSubject.create(tenant(), subject);
    boolean isQualifiedSubject = qs != null && !DEFAULT_CONTEXT.equals(qs.getContext());
    if (isQualifiedSubject) {
        return null;
    }
    // Try qualifying the subject with each known context
    try (CloseableIterator<SchemaRegistryValue> iter = allContexts()) {
        while (iter.hasNext()) {
            ContextValue v = (ContextValue) iter.next();
            QualifiedSubject qualSub = new QualifiedSubject(v.getTenant(), v.getContext(), qs.getSubject());
            schema = get(qualSub.toQualifiedSubject(), version, returnDeletedSchema);
            if (schema != null) {
                return schema;
            }
        }
    }
    return null;
}
Also used: QualifiedSubject (io.confluent.kafka.schemaregistry.utils.QualifiedSubject), ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema), Schema (io.confluent.kafka.schemaregistry.client.rest.entities.Schema), AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema)
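
A caller-side sketch, assuming access to a KafkaSchemaRegistry instance (the import path and subject name are assumptions): fetch a specific version and let the method fall back across contexts when the plain lookup misses.

import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
import io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryException;
import io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry;

public class GetUsingContextsSketch {
    // Returns the canonical schema string for version 1, or null if no context has it.
    static String fetchVersionOne(KafkaSchemaRegistry registry) throws SchemaRegistryException {
        Schema schema = registry.getUsingContexts("orders-value", 1, false);
        return schema == null ? null : schema.getSchema();
    }
}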

Example 5 with QualifiedSubject

Use of io.confluent.kafka.schemaregistry.utils.QualifiedSubject in project schema-registry by confluentinc.

In class KafkaSchemaRegistry, method getSchemaKeyUsingContexts:

private SchemaKey getSchemaKeyUsingContexts(int id, String subject) throws StoreException, SchemaRegistryException {
    SchemaKey subjectVersionKey = lookupCache.schemaKeyById(id, subject);
    if (subjectVersionKey != null) {
        return subjectVersionKey;
    }
    if (subject == null) {
        return null;
    }
    QualifiedSubject qs = QualifiedSubject.create(tenant(), subject);
    boolean isQualifiedSubject = qs != null && !DEFAULT_CONTEXT.equals(qs.getContext());
    if (isQualifiedSubject) {
        return null;
    }
    // Try qualifying the subject with each known context
    try (CloseableIterator<SchemaRegistryValue> iter = allContexts()) {
        while (iter.hasNext()) {
            ContextValue v = (ContextValue) iter.next();
            QualifiedSubject qualSub = new QualifiedSubject(v.getTenant(), v.getContext(), qs.getSubject());
            SchemaKey key = lookupCache.schemaKeyById(id, qualSub.toQualifiedSubject());
            if (key != null) {
                return key;
            }
        }
    }
    return null;
}
Also used: QualifiedSubject (io.confluent.kafka.schemaregistry.utils.QualifiedSubject)
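
All three *UsingContexts methods share the same guard: only an unqualified (default-context) subject triggers the fallback across contexts. A sketch of that guard in isolation, assuming DEFAULT_CONTEXT is the constant on QualifiedSubject that KafkaSchemaRegistry statically imports:

import io.confluent.kafka.schemaregistry.utils.QualifiedSubject;

import static io.confluent.kafka.schemaregistry.utils.QualifiedSubject.DEFAULT_CONTEXT;

public class ContextGuardSketch {
    // True when the subject is unqualified, i.e. other contexts are worth trying.
    static boolean shouldTryOtherContexts(String tenant, String subject) {
        QualifiedSubject qs = QualifiedSubject.create(tenant, subject);
        return qs != null && DEFAULT_CONTEXT.equals(qs.getContext());
    }
}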

Aggregations

QualifiedSubject (io.confluent.kafka.schemaregistry.utils.QualifiedSubject): 5 uses
ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema): 3 uses
AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema): 2 uses
Schema (io.confluent.kafka.schemaregistry.client.rest.entities.Schema): 2 uses
SchemaString (io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString): 1 use
IdDoesNotMatchException (io.confluent.kafka.schemaregistry.exceptions.IdDoesNotMatchException): 1 use
IncompatibleSchemaException (io.confluent.kafka.schemaregistry.exceptions.IncompatibleSchemaException): 1 use
SchemaRegistryStoreException (io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException): 1 use
SchemaRegistryTimeoutException (io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryTimeoutException): 1 use
StoreException (io.confluent.kafka.schemaregistry.storage.exceptions.StoreException): 1 use
StoreTimeoutException (io.confluent.kafka.schemaregistry.storage.exceptions.StoreTimeoutException): 1 use
ArrayList (java.util.ArrayList): 1 use