Search in sources :

Example 11 with SchemaProvider

use of io.confluent.kafka.schemaregistry.SchemaProvider in project kafka-rest by confluentinc.

The following example shows the getSchemaFromRawSchema method of the SchemaManagerImpl class.

/**
 * Parses {@code rawSchema} for the given format, resolves the subject name, and returns the
 * schema as registered in Schema Registry, registering it first if it does not exist yet.
 *
 * @param subject explicit subject name; when absent, derived via the subject-name strategy
 * @throws BadRequestException if the format does not support schemas or the schema is invalid
 */
private RegisteredSchema getSchemaFromRawSchema(String topicName, EmbeddedFormat format, Optional<String> subject, Optional<SubjectNameStrategy> subjectNameStrategy, String rawSchema, boolean isKey) {
    try {
        checkArgument(format.requiresSchema(), "%s does not support schemas.", format);
    } catch (IllegalArgumentException e) {
        // Surface the precondition failure as a 400 to the REST caller.
        throw new BadRequestException(e.getMessage(), e);
    }
    SchemaProvider schemaProvider;
    try {
        schemaProvider = format.getSchemaProvider();
    } catch (UnsupportedOperationException e) {
        throw new BadRequestException(String.format("Raw schema not supported with format = %s", format), e);
    }
    ParsedSchema schema;
    try {
        schema = schemaProvider.parseSchema(rawSchema, /* references= */
        emptyList(), /* isNew= */
        true).orElseThrow(() -> Errors.invalidSchemaException(String.format("Error when parsing raw schema. format = %s, schema = %s", format, rawSchema)));
    } catch (SchemaParseException e) {
        throw new BadRequestException(String.format("Error parsing schema with format = %s", format), e);
    }
    // FIX: orElseGet instead of orElse so the subject-name strategy is only evaluated when no
    // explicit subject was supplied (orElse computes its argument eagerly even when present).
    String actualSubject = subject.orElseGet(() -> subjectNameStrategy.orElse(defaultSubjectNameStrategy).subjectName(topicName, isKey, schema));
    int schemaId;
    try {
        try {
            // Check if the schema already exists first.
            schemaId = schemaRegistryClient.getId(actualSubject, schema);
        } catch (IOException | RestClientException e) {
            // Could not find the schema. We try to register the schema in that case.
            schemaId = schemaRegistryClient.register(actualSubject, schema);
        }
    } catch (IOException | RestClientException e) {
        throw Errors.messageSerializationException(String.format("Error when registering schema. format = %s, subject = %s, schema = %s", format, actualSubject, schema.canonicalString()), e);
    }
    int schemaVersion = getSchemaVersion(actualSubject, schema);
    return RegisteredSchema.create(actualSubject, schemaId, schemaVersion, schema);
}
Also used : SchemaParseException(org.apache.avro.SchemaParseException) RestClientException(io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException) BadRequestException(io.confluent.kafkarest.exceptions.BadRequestException) SchemaProvider(io.confluent.kafka.schemaregistry.SchemaProvider) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) IOException(java.io.IOException)

Example 12 with SchemaProvider

use of io.confluent.kafka.schemaregistry.SchemaProvider in project kafka-rest by confluentinc.

The following example shows the getSchemaFromSchemaVersion method of the SchemaManagerImpl class.

/**
 * Fetches an already-registered schema by subject and version and re-parses it into a
 * {@link ParsedSchema} for use by the serializer.
 *
 * @param subject explicit subject name; when absent, derived via {@code getSchemaSubjectUnsafe}
 * @throws BadRequestException if the version does not exist or its type is unsupported
 */
private RegisteredSchema getSchemaFromSchemaVersion(String topicName, Optional<String> subject, Optional<SubjectNameStrategy> subjectNameStrategy, int schemaVersion, boolean isKey) {
    // FIX: orElseGet instead of orElse. orElse evaluates its argument eagerly, so the
    // "Unsafe" subject derivation ran (and could throw) even when an explicit subject was
    // provided; orElseGet defers it to the absent case only.
    String actualSubject = subject.orElseGet(() -> getSchemaSubjectUnsafe(topicName, isKey, subjectNameStrategy));
    Schema schema;
    try {
        schema = schemaRegistryClient.getByVersion(actualSubject, schemaVersion, /* lookupDeletedSchema= */
        false);
    } catch (RuntimeException e) {
        throw new BadRequestException(String.format("Schema does not exist for subject: %s, version: %s", actualSubject, schemaVersion), e);
    }
    SchemaProvider schemaProvider;
    try {
        schemaProvider = EmbeddedFormat.forSchemaType(schema.getSchemaType()).getSchemaProvider();
    } catch (UnsupportedOperationException e) {
        throw new BadRequestException(String.format("Schema version not supported for %s", schema.getSchemaType()), e);
    }
    ParsedSchema parsedSchema;
    try {
        parsedSchema = schemaProvider.parseSchema(schema.getSchema(), schema.getReferences(), /* isNew= */
        false).orElseThrow(() -> Errors.invalidSchemaException(String.format("Error when fetching schema by version. subject = %s, version = %d", actualSubject, schemaVersion)));
    } catch (SchemaParseException e) {
        throw new BadRequestException(String.format("Error parsing schema for %s", schema.getSchemaType()), e);
    }
    return RegisteredSchema.create(schema.getSubject(), schema.getId(), schemaVersion, parsedSchema);
}
Also used : SchemaParseException(org.apache.avro.SchemaParseException) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) Schema(io.confluent.kafka.schemaregistry.client.rest.entities.Schema) RegisteredSchema(io.confluent.kafkarest.entities.RegisteredSchema) BadRequestException(io.confluent.kafkarest.exceptions.BadRequestException) SchemaProvider(io.confluent.kafka.schemaregistry.SchemaProvider) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema)

Example 13 with SchemaProvider

use of io.confluent.kafka.schemaregistry.SchemaProvider in project schema-registry by confluentinc.

The following example shows the initProviders method of the KafkaSchemaRegistry class.

/**
 * Builds the schema-type -> provider map: the three built-in providers (Avro, JSON,
 * Protobuf) plus any custom providers from configuration, which may override the defaults.
 */
private Map<String, SchemaProvider> initProviders(SchemaRegistryConfig config) {
    // Shared configuration handed to every provider; includes this registry instance
    // as the schema-version fetcher.
    Map<String, Object> providerConfigs = config.originalsWithPrefix(SchemaRegistryConfig.SCHEMA_PROVIDERS_CONFIG + ".");
    providerConfigs.put(SchemaProvider.SCHEMA_VERSION_FETCHER_CONFIG, this);
    List<SchemaProvider> builtInProviders = Arrays.asList(new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider());
    builtInProviders.forEach(provider -> provider.configure(providerConfigs));
    Map<String, SchemaProvider> providersByType = new HashMap<>();
    registerProviders(providersByType, builtInProviders);
    List<SchemaProvider> customProviders = config.getConfiguredInstances(SchemaRegistryConfig.SCHEMA_PROVIDERS_CONFIG, SchemaProvider.class, providerConfigs);
    // Allow custom providers to override default providers
    registerProviders(providersByType, customProviders);
    metricsContainer.getCustomSchemaProviderCount().set(customProviders.size());
    return providersByType;
}
Also used : ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) JsonSchemaProvider(io.confluent.kafka.schemaregistry.json.JsonSchemaProvider) ProtobufSchemaProvider(io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider) SchemaProvider(io.confluent.kafka.schemaregistry.SchemaProvider) JsonSchemaProvider(io.confluent.kafka.schemaregistry.json.JsonSchemaProvider) AvroSchemaProvider(io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider) SchemaString(io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString) ProtobufSchemaProvider(io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider) AvroSchemaProvider(io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider)

Example 14 with SchemaProvider

use of io.confluent.kafka.schemaregistry.SchemaProvider in project schema-registry by confluentinc.

The following example shows the validateUpdate method of the KafkaStoreMessageHandler class.

/**
 * Invoked before every new K,V pair written to the store
 *
 * <p>Caches the record's offset/timestamp on the value, canonicalizes schema text where
 * configured, and rejects writes that would reuse an existing schema ID for a different
 * schema. On failure the returned status depends on leadership: the leader rolls back,
 * followers ignore the record.
 *
 * @param key   Key associated with the data
 * @param value Data written to the store
 */
@Override
public ValidationStatus validateUpdate(SchemaRegistryKey key, SchemaRegistryValue value, TopicPartition tp, long offset, long timestamp) {
    if (value == null) {
        // Tombstone (delete marker): nothing to validate.
        return ValidationStatus.SUCCESS;
    }
    // Store the offset and timestamp in the cached value
    value.setOffset(offset);
    value.setTimestamp(timestamp);
    if (key.getKeyType() == SchemaRegistryKeyType.SCHEMA) {
        SchemaValue schemaObj = (SchemaValue) value;
        String schemaType = schemaObj.getSchemaType();
        // Normalize the schema text for types configured for canonicalization, when a
        // provider for that type is actually registered.
        if (canonicalizeSchemaTypes.contains(schemaType)) {
            SchemaProvider schemaProvider = schemaRegistry.schemaProvider(schemaType);
            if (schemaProvider != null) {
                canonicalize(schemaProvider, schemaObj);
            }
        }
        try {
            // Duplicate-ID guard: if this ID already maps to a different schema under the
            // same subject, refuse the write.
            SchemaKey oldKey = lookupCache.schemaKeyById(schemaObj.getId(), schemaObj.getSubject());
            if (oldKey != null) {
                SchemaValue oldSchema;
                oldSchema = (SchemaValue) lookupCache.get(oldKey);
                if (oldSchema != null && !oldSchema.getSchema().equals(schemaObj.getSchema())) {
                    log.error("Found a schema with duplicate ID {}.  This schema will not be " + "registered since a schema already exists with this ID.", schemaObj.getId());
                    return schemaRegistry.isLeader() ? ValidationStatus.ROLLBACK_FAILURE : ValidationStatus.IGNORE_FAILURE;
                }
            }
        } catch (StoreException e) {
            // Cache lookup failed; treat as a validation failure (leader rolls back,
            // followers skip the record).
            log.error("Error while retrieving schema", e);
            return schemaRegistry.isLeader() ? ValidationStatus.ROLLBACK_FAILURE : ValidationStatus.IGNORE_FAILURE;
        }
    } else if (key.getKeyType() == SchemaRegistryKeyType.CONFIG || key.getKeyType() == SchemaRegistryKeyType.MODE) {
        SubjectValue subjectValue = (SubjectValue) value;
        if (subjectValue.getSubject() == null) {
            // handle legacy values that don't have subject in the value
            subjectValue.setSubject(((SubjectKey) key).getSubject());
        }
    }
    return ValidationStatus.SUCCESS;
}
Also used : SchemaProvider(io.confluent.kafka.schemaregistry.SchemaProvider) StoreException(io.confluent.kafka.schemaregistry.storage.exceptions.StoreException)

Example 15 with SchemaProvider

use of io.confluent.kafka.schemaregistry.SchemaProvider in project schema-registry by confluentinc.

The following example shows the testParseSchemaSuppressException method of the JsonSchemaTest class.

@Test
public void testParseSchemaSuppressException() {
    // With isNew=false, parseSchema must suppress parse errors and report failure as an
    // empty Optional instead of throwing.
    SchemaProvider provider = new JsonSchemaProvider();
    Optional<ParsedSchema> result = provider.parseSchema(invalidSchemaString, new ArrayList<>(), false);
    assertFalse(result.isPresent());
}
Also used : SchemaProvider(io.confluent.kafka.schemaregistry.SchemaProvider) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) Test(org.junit.Test)

Aggregations

SchemaProvider (io.confluent.kafka.schemaregistry.SchemaProvider)22 ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema)9 AvroSchemaProvider (io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider)7 JsonSchemaProvider (io.confluent.kafka.schemaregistry.json.JsonSchemaProvider)7 ProtobufSchemaProvider (io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider)7 BadRequestException (io.confluent.kafkarest.exceptions.BadRequestException)5 IOException (java.io.IOException)5 Test (org.junit.Test)5 CachedSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient)4 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)4 RestClientException (io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException)3 EmbeddedFormat (io.confluent.kafkarest.entities.EmbeddedFormat)3 Test (org.junit.jupiter.api.Test)3 MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient)2 SchemaString (io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString)2 StoreException (io.confluent.kafka.schemaregistry.storage.exceptions.StoreException)2 TopicNameStrategy (io.confluent.kafka.serializers.subject.TopicNameStrategy)2 HashMap (java.util.HashMap)2 SchemaParseException (org.apache.avro.SchemaParseException)2 CompatibilityChecker (io.confluent.kafka.schemaregistry.CompatibilityChecker)1