Usage of io.confluent.kafka.schemaregistry.SchemaProvider in the schema-registry project by confluentinc: the loadSchema method of the KafkaSchemaRegistry class.
/**
 * Parses the given schema string into a {@link ParsedSchema} using the provider
 * registered for {@code schemaType}, defaulting to Avro when the type is null.
 *
 * @param schemaType the schema type identifier, or null to default to Avro
 * @param schema     the raw schema string to parse
 * @param references references to other registered schemas
 * @param isNew      whether the schema is being newly registered
 * @return the parsed schema
 * @throws InvalidSchemaException if the type has no registered provider or the schema fails to parse
 */
private ParsedSchema loadSchema(String schemaType, String schema, List<io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference> references, boolean isNew) throws InvalidSchemaException {
if (schemaType == null) {
schemaType = AvroSchema.TYPE;
}
SchemaProvider provider = schemaProvider(schemaType);
if (provider == null) {
String errMsg = "Invalid schema type " + schemaType;
log.error(errMsg);
throw new InvalidSchemaException(errMsg);
}
// Effectively-final copy so the type name can be referenced below.
final String type = schemaType;
try {
return provider.parseSchemaOrElseThrow(schema, references, isNew);
} catch (Exception e) {
// Preserve the original exception as the cause instead of flattening it to
// just its message, so the full parse-failure stack trace is not lost.
throw new InvalidSchemaException("Invalid schema " + schema + " with refs " + references + " of type " + type + ", details: " + e.getMessage(), e);
}
}
Usage of io.confluent.kafka.schemaregistry.SchemaProvider in the schema-registry project by confluentinc: the testParseSchemaThrowException method of the JsonSchemaTest class.
// Parsing an invalid JSON schema string must surface an IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testParseSchemaThrowException() {
SchemaProvider provider = new JsonSchemaProvider();
provider.parseSchemaOrElseThrow(invalidSchemaString, new ArrayList<>(), false);
}
Usage of io.confluent.kafka.schemaregistry.SchemaProvider in the schema-registry project by confluentinc: the execute method of the TestLocalCompatibilityMojo class.
/**
 * Checks the schema at {@code schemaPath} for compatibility against all previously
 * registered schemas at {@code previousSchemaPaths}, logging the outcome and
 * recording it in {@code success}.
 *
 * @throws MojoExecutionException if a schema file cannot be loaded
 */
public void execute() throws MojoExecutionException {
List<SchemaProvider> providers = MojoUtils.defaultSchemaProviders();
// Index the default providers by their schema type (AVRO, JSON, PROTOBUF).
Map<String, SchemaProvider> schemaProviders = providers.stream().collect(Collectors.toMap(SchemaProvider::schemaType, p -> p));
getLog().debug(String.format("Loading Schema at %s", schemaPath));
ParsedSchema schema = loadSchema(schemaPath, schemaProviders);
getLog().debug("Loading Previous Schemas");
ArrayList<ParsedSchema> previousSchemas = new ArrayList<>();
for (File previousSchemaPath : previousSchemaPaths) {
previousSchemas.add(loadSchema(previousSchemaPath, schemaProviders));
}
CompatibilityChecker checker = CompatibilityChecker.checker(compatibilityLevel);
List<String> errorMessages = checker.isCompatible(schema, previousSchemas);
// Non-transitive levels only compare against the latest previous schema; tell the user.
if (previousSchemas.size() > 1 && (compatibilityLevel == CompatibilityLevel.BACKWARD || compatibilityLevel == CompatibilityLevel.FORWARD || compatibilityLevel == CompatibilityLevel.FULL)) {
getLog().info(String.format("Checking only with latest Schema at %s", previousSchemaPaths.get(previousSchemaPaths.size() - 1)));
}
success = errorMessages.isEmpty();
if (success) {
getLog().info(String.format("Schema is %s compatible with previous schemas", compatibilityLevel.name.toLowerCase()));
} else {
// Report every incompatibility, not just the first one, so users can fix
// all violations in a single pass.
String errorLog = String.format("Schema is not %s compatible with previous schemas %n", compatibilityLevel.name.toLowerCase()) + String.join(System.lineSeparator(), errorMessages);
getLog().error(errorLog);
}
}
Usage of io.confluent.kafka.schemaregistry.SchemaProvider in the schema-registry project by confluentinc: the parseSchema method of the SchemaMessageReader class.
/**
 * Parses a schema string into a {@link ParsedSchema}, configuring the provider
 * with the given registry client so schema references can be resolved.
 *
 * @param schemaRegistry client the provider uses to fetch referenced schema versions
 * @param schema         the raw schema string
 * @param references     references to other registered schemas
 * @return the parsed schema
 * @throws IllegalArgumentException if the schema cannot be parsed
 */
protected ParsedSchema parseSchema(SchemaRegistryClient schemaRegistry, String schema, List<SchemaReference> references) {
SchemaProvider provider = getProvider();
provider.configure(Collections.singletonMap(SchemaProvider.SCHEMA_VERSION_FETCHER_CONFIG, schemaRegistry));
// Fail with a descriptive message instead of an unchecked Optional.get(),
// which would throw a bare NoSuchElementException on parse failure.
return provider.parseSchema(schema, references).orElseThrow(() -> new IllegalArgumentException("Invalid schema " + schema + " with refs " + references));
}
Usage of io.confluent.kafka.schemaregistry.SchemaProvider in the akhq project by tchiotludo: the getRegistryClient method of the KafkaModule class.
/**
 * Returns the cached {@link SchemaRegistryClient} for the given cluster,
 * creating and caching one on first use with Avro, JSON, and Protobuf
 * schema providers.
 *
 * @param clusterId the cluster identifier used as the cache key
 * @return the schema registry client for the cluster
 */
public SchemaRegistryClient getRegistryClient(String clusterId) {
// computeIfAbsent replaces the non-atomic containsKey/put pair: the lookup
// and creation happen as one map operation, avoiding duplicate client
// construction under concurrent access (assuming registryClient is a Map —
// it is a Map<String, SchemaRegistryClient> in AKHQ).
return this.registryClient.computeIfAbsent(clusterId, id -> {
Connection connection = this.getConnection(id);
List<SchemaProvider> providers = new ArrayList<>();
providers.add(new AvroSchemaProvider());
providers.add(new JsonSchemaProvider());
providers.add(new ProtobufSchemaProvider());
// 1000 = identity-map cache capacity for the CachedSchemaRegistryClient.
return new CachedSchemaRegistryClient(this.getRegistryRestClient(id), 1000, providers, connection.getSchemaRegistry() != null ? connection.getSchemaRegistry().getProperties() : null, null);
});
}
Aggregations