Example use of io.confluent.kafka.schemaregistry.SchemaProvider in project kafka-rest by confluentinc: class SchemaManagerImpl, method getSchemaFromRawSchema.
/**
 * Parses {@code rawSchema} using the provider for {@code format}, resolves the subject, and
 * returns the schema's registered coordinates, registering the schema if it is not already
 * known to the Schema Registry.
 *
 * @param topicName topic used by the subject-name strategy to derive the subject
 * @param format embedded format; must be one that requires a schema
 * @param subject explicit subject, if the caller provided one
 * @param subjectNameStrategy strategy used to derive the subject when {@code subject} is absent;
 *     falls back to {@code defaultSubjectNameStrategy}
 * @param rawSchema the raw schema string to parse
 * @param isKey whether this schema is for the record key (affects the derived subject)
 * @throws BadRequestException if the format does not support schemas, has no schema provider,
 *     or the raw schema fails to parse
 */
private RegisteredSchema getSchemaFromRawSchema(
    String topicName,
    EmbeddedFormat format,
    Optional<String> subject,
    Optional<SubjectNameStrategy> subjectNameStrategy,
    String rawSchema,
    boolean isKey) {
  try {
    checkArgument(format.requiresSchema(), "%s does not support schemas.", format);
  } catch (IllegalArgumentException e) {
    // Surface the precondition failure as a client error rather than a server error.
    throw new BadRequestException(e.getMessage(), e);
  }

  SchemaProvider schemaProvider;
  try {
    schemaProvider = format.getSchemaProvider();
  } catch (UnsupportedOperationException e) {
    throw new BadRequestException(
        String.format("Raw schema not supported with format = %s", format), e);
  }

  ParsedSchema schema;
  try {
    schema =
        schemaProvider
            .parseSchema(rawSchema, /* references= */ emptyList(), /* isNew= */ true)
            .orElseThrow(
                () ->
                    Errors.invalidSchemaException(
                        String.format(
                            "Error when parsing raw schema. format = %s, schema = %s",
                            format, rawSchema)));
  } catch (SchemaParseException e) {
    throw new BadRequestException(
        String.format("Error parsing schema with format = %s", format), e);
  }

  // orElseGet (not orElse) so the subject-name strategy is only evaluated when the caller did
  // not supply an explicit subject; Optional.orElse evaluates its argument unconditionally.
  String actualSubject =
      subject.orElseGet(
          () ->
              subjectNameStrategy
                  .orElse(defaultSubjectNameStrategy)
                  .subjectName(topicName, isKey, schema));

  int schemaId;
  try {
    try {
      // Check if the schema already exists first.
      schemaId = schemaRegistryClient.getId(actualSubject, schema);
    } catch (IOException | RestClientException e) {
      // Could not find the schema. We try to register the schema in that case.
      schemaId = schemaRegistryClient.register(actualSubject, schema);
    }
  } catch (IOException | RestClientException e) {
    throw Errors.messageSerializationException(
        String.format(
            "Error when registering schema. format = %s, subject = %s, schema = %s",
            format, actualSubject, schema.canonicalString()),
        e);
  }

  int schemaVersion = getSchemaVersion(actualSubject, schema);
  return RegisteredSchema.create(actualSubject, schemaId, schemaVersion, schema);
}
Example use of io.confluent.kafka.schemaregistry.SchemaProvider in project kafka-rest by confluentinc: class SchemaManagerImpl, method getSchemaFromSchemaVersion.
/**
 * Fetches a schema by subject and version from the Schema Registry and returns its registered
 * coordinates with the parsed schema.
 *
 * @param topicName topic used to derive the subject when {@code subject} is absent
 * @param subject explicit subject, if the caller provided one
 * @param subjectNameStrategy strategy used by {@link #getSchemaSubjectUnsafe} when deriving
 *     the subject
 * @param schemaVersion version of the schema to fetch under the resolved subject
 * @param isKey whether this schema is for the record key (affects the derived subject)
 * @throws BadRequestException if no schema exists for the subject/version, or the stored
 *     schema's type has no provider or fails to parse
 */
private RegisteredSchema getSchemaFromSchemaVersion(
    String topicName,
    Optional<String> subject,
    Optional<SubjectNameStrategy> subjectNameStrategy,
    int schemaVersion,
    boolean isKey) {
  // orElseGet (not orElse) so getSchemaSubjectUnsafe — which may fail for strategies that
  // cannot derive a subject without a schema — only runs when no explicit subject was given.
  String actualSubject =
      subject.orElseGet(() -> getSchemaSubjectUnsafe(topicName, isKey, subjectNameStrategy));

  Schema schema;
  try {
    schema =
        schemaRegistryClient.getByVersion(
            actualSubject, schemaVersion, /* lookupDeletedSchema= */ false);
  } catch (RuntimeException e) {
    throw new BadRequestException(
        String.format(
            "Schema does not exist for subject: %s, version: %s", actualSubject, schemaVersion),
        e);
  }

  SchemaProvider schemaProvider;
  try {
    schemaProvider = EmbeddedFormat.forSchemaType(schema.getSchemaType()).getSchemaProvider();
  } catch (UnsupportedOperationException e) {
    throw new BadRequestException(
        String.format("Schema version not supported for %s", schema.getSchemaType()), e);
  }

  ParsedSchema parsedSchema;
  try {
    parsedSchema =
        schemaProvider
            .parseSchema(schema.getSchema(), schema.getReferences(), /* isNew= */ false)
            .orElseThrow(
                () ->
                    Errors.invalidSchemaException(
                        String.format(
                            "Error when fetching schema by version. subject = %s, version = %d",
                            actualSubject, schemaVersion)));
  } catch (SchemaParseException e) {
    throw new BadRequestException(
        String.format("Error parsing schema for %s", schema.getSchemaType()), e);
  }

  return RegisteredSchema.create(schema.getSubject(), schema.getId(), schemaVersion, parsedSchema);
}
Example use of io.confluent.kafka.schemaregistry.SchemaProvider in project schema-registry by confluentinc: class KafkaSchemaRegistry, method initProviders.
/**
 * Builds the schema-type-to-provider map: configures and registers the built-in Avro, JSON
 * Schema, and Protobuf providers, then registers any custom providers from the config so they
 * can override the built-in ones. Also records the custom-provider count in the metrics
 * container.
 */
private Map<String, SchemaProvider> initProviders(SchemaRegistryConfig config) {
  Map<String, Object> providerConfigs =
      config.originalsWithPrefix(SchemaRegistryConfig.SCHEMA_PROVIDERS_CONFIG + ".");
  // Expose this registry as the schema-version fetcher to every provider.
  providerConfigs.put(SchemaProvider.SCHEMA_VERSION_FETCHER_CONFIG, this);

  List<SchemaProvider> builtInProviders =
      Arrays.asList(
          new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider());
  builtInProviders.forEach(provider -> provider.configure(providerConfigs));

  Map<String, SchemaProvider> providersByType = new HashMap<>();
  registerProviders(providersByType, builtInProviders);

  List<SchemaProvider> customProviders =
      config.getConfiguredInstances(
          SchemaRegistryConfig.SCHEMA_PROVIDERS_CONFIG, SchemaProvider.class, providerConfigs);
  // Registered after the defaults so custom providers take precedence on type collisions.
  registerProviders(providersByType, customProviders);

  metricsContainer.getCustomSchemaProviderCount().set(customProviders.size());
  return providersByType;
}
Example use of io.confluent.kafka.schemaregistry.SchemaProvider in project schema-registry by confluentinc: class KafkaStoreMessageHandler, method validateUpdate.
/**
 * Invoked before every new K,V pair written to the store.
 *
 * <p>For SCHEMA records, canonicalizes the schema text (for configured schema types) and
 * rejects writes that would reuse an existing schema ID for a different schema. For
 * CONFIG/MODE records, backfills the subject on legacy values. All other records pass
 * through unchanged.
 *
 * @param key Key associated with the data
 * @param value Data written to the store
 */
@Override
public ValidationStatus validateUpdate(SchemaRegistryKey key, SchemaRegistryValue value, TopicPartition tp, long offset, long timestamp) {
if (value == null) {
// Tombstone (deletion) — nothing to validate.
return ValidationStatus.SUCCESS;
}
// Store the offset and timestamp in the cached value
value.setOffset(offset);
value.setTimestamp(timestamp);
if (key.getKeyType() == SchemaRegistryKeyType.SCHEMA) {
SchemaValue schemaObj = (SchemaValue) value;
String schemaType = schemaObj.getSchemaType();
// Only canonicalize schema types explicitly configured for it, and only when a
// provider for that type is actually registered.
if (canonicalizeSchemaTypes.contains(schemaType)) {
SchemaProvider schemaProvider = schemaRegistry.schemaProvider(schemaType);
if (schemaProvider != null) {
canonicalize(schemaProvider, schemaObj);
}
}
try {
// Duplicate-ID guard: if this ID is already bound to a different schema text, reject.
SchemaKey oldKey = lookupCache.schemaKeyById(schemaObj.getId(), schemaObj.getSubject());
if (oldKey != null) {
SchemaValue oldSchema;
oldSchema = (SchemaValue) lookupCache.get(oldKey);
if (oldSchema != null && !oldSchema.getSchema().equals(schemaObj.getSchema())) {
log.error("Found a schema with duplicate ID {}. This schema will not be " + "registered since a schema already exists with this ID.", schemaObj.getId());
// Leaders roll back the write; followers ignore it (the leader is authoritative).
return schemaRegistry.isLeader() ? ValidationStatus.ROLLBACK_FAILURE : ValidationStatus.IGNORE_FAILURE;
}
}
} catch (StoreException e) {
// Lookup failure is treated the same as a duplicate: fail the write rather than
// risk registering under a conflicting ID.
log.error("Error while retrieving schema", e);
return schemaRegistry.isLeader() ? ValidationStatus.ROLLBACK_FAILURE : ValidationStatus.IGNORE_FAILURE;
}
} else if (key.getKeyType() == SchemaRegistryKeyType.CONFIG || key.getKeyType() == SchemaRegistryKeyType.MODE) {
SubjectValue subjectValue = (SubjectValue) value;
if (subjectValue.getSubject() == null) {
// handle legacy values that don't have subject in the value
subjectValue.setSubject(((SubjectKey) key).getSubject());
}
}
return ValidationStatus.SUCCESS;
}
Example use of io.confluent.kafka.schemaregistry.SchemaProvider in project schema-registry by confluentinc: class JsonSchemaTest, method testParseSchemaSuppressException.
/**
 * Verifies that parsing an invalid JSON schema with isNew=false suppresses the parse
 * exception and yields an empty Optional instead of throwing.
 */
@Test
public void testParseSchemaSuppressException() {
  SchemaProvider provider = new JsonSchemaProvider();
  Optional<ParsedSchema> result =
      provider.parseSchema(invalidSchemaString, new ArrayList<>(), false);
  assertFalse(result.isPresent());
}
Aggregations