use of io.confluent.kafka.schemaregistry.json.JsonSchemaProvider in project ksql by confluentinc.
From the class KsqlSchemaRegistryClientFactory, the method get():
/**
 * Builds the {@link SchemaRegistryClient} used by ksqlDB.
 *
 * <p>Returns an in-memory {@link DefaultSchemaRegistryClient} when no Schema
 * Registry URL is configured; otherwise creates a cached client over a
 * {@link RestService}, registering providers for Avro, Protobuf and JSON
 * schemas, and applies the custom SSL socket factory if one was supplied.
 *
 * @return a ready-to-use schema registry client
 */
public SchemaRegistryClient get() {
  // No external Schema Registry configured: fall back to the in-memory client.
  if (schemaRegistryUrl.isEmpty()) {
    return new DefaultSchemaRegistryClient();
  }
  final RestService restService = serviceSupplier.get();
  // Creating the client sets a default sslSocketFactory on the RestService.
  final SchemaRegistryClient client = schemaRegistryClientFactory.create(
      restService,
      1000,
      ImmutableList.of(
          new AvroSchemaProvider(),
          new ProtobufSchemaProvider(),
          new JsonSchemaProvider()),
      schemaRegistryClientConfigs,
      httpHeaders);
  // Override the default sslSocketFactory set by the create(...) call above
  // with the one derived from the configured SSL context, if any.
  if (sslContext != null) {
    restService.setSslSocketFactory(sslContext.getSocketFactory());
  }
  return client;
}
use of io.confluent.kafka.schemaregistry.json.JsonSchemaProvider in project schema-registry by confluentinc.
From the class RestApiTest, the method testSchemaReferences():
@Test
public void testSchemaReferences() throws Exception {
// End-to-end check of JSON Schema references: register a referenced schema,
// register a referring schema, resolve it via the client, and verify the
// reference-tracking and deletion-ordering rules.
Map<String, String> schemas = getJsonSchemaWithReferences();
String subject = "reference";
// Register the referenced schema first under subject "reference" (version 1).
registerAndVerifySchema(restApp.restClient, schemas.get("ref.json"), 1, subject);
RegisterSchemaRequest request = new RegisterSchemaRequest();
request.setSchema(schemas.get("main.json"));
request.setSchemaType(JsonSchema.TYPE);
// The main schema refers to "ref.json" -> subject "reference", version 1.
SchemaReference ref = new SchemaReference("ref.json", "reference", 1);
request.setReferences(Collections.singletonList(ref));
int registeredId = restApp.restClient.registerSchema(request, "referrer", false);
// Ids are sequential here: the referenced schema got id 1, so this gets id 2.
assertEquals("Registering a new schema should succeed", 2, registeredId);
SchemaString schemaString = restApp.restClient.getId(2);
// the newly registered schema should be immediately readable on the leader
assertEquals("Registered schema should be found", MAPPER.readTree(schemas.get("main.json")), MAPPER.readTree(schemaString.getSchemaString()));
assertEquals("Schema references should be found", Collections.singletonList(ref), schemaString.getReferences());
// The referenced schema should report id 2 as a schema that refers to it.
List<Integer> refs = restApp.restClient.getReferencedBy("reference", 1);
assertEquals(2, refs.get(0).intValue());
// Resolve the same schema through the client-side API and look it up by
// canonical string to confirm the round trip yields the registered id.
CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(restApp.restClient, 10, Collections.singletonList(new JsonSchemaProvider()), new HashMap<>(), null);
SchemaHolder holder = new SchemaHolder();
JsonSchema schema = JsonSchemaUtils.getSchema(holder, schemaRegistryClient);
Schema registeredSchema = restApp.restClient.lookUpSubjectVersion(schema.canonicalString(), JsonSchema.TYPE, schema.references(), "referrer", false);
assertEquals("Registered schema should be found", 2, registeredSchema.getId().intValue());
// Deleting a schema that is still referenced must be rejected with
// REFERENCE_EXISTS_ERROR_CODE.
try {
restApp.restClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, "reference", String.valueOf(1));
fail("Deleting reference should fail with " + Errors.REFERENCE_EXISTS_ERROR_CODE);
} catch (RestClientException rce) {
assertEquals("Reference found", Errors.REFERENCE_EXISTS_ERROR_CODE, rce.getErrorCode());
}
// Delete the referrer first; afterwards the referenced schema has no
// remaining referrers and can itself be deleted.
assertEquals((Integer) 1, restApp.restClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, "referrer", "1"));
refs = restApp.restClient.getReferencedBy("reference", 1);
assertTrue(refs.isEmpty());
assertEquals((Integer) 1, restApp.restClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, "reference", "1"));
}
use of io.confluent.kafka.schemaregistry.json.JsonSchemaProvider in project schema-registry by confluentinc.
From the class JsonSchemaConverter, the method configure():
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
  this.isKey = isKey;
  final JsonSchemaConverterConfig converterConfig = new JsonSchemaConverterConfig(configs);
  // Only build a client when one has not already been injected (e.g. by tests).
  if (schemaRegistry == null) {
    schemaRegistry = new CachedSchemaRegistryClient(
        converterConfig.getSchemaRegistryUrls(),
        converterConfig.getMaxSchemasPerSubject(),
        Collections.singletonList(new JsonSchemaProvider()),
        configs,
        converterConfig.requestHeaders());
  }
  // Serializer and deserializer share the same client instance.
  serializer = new Serializer(configs, schemaRegistry);
  deserializer = new Deserializer(configs, schemaRegistry);
  jsonSchemaData = new JsonSchemaData(new JsonSchemaDataConfig(configs));
}
use of io.confluent.kafka.schemaregistry.json.JsonSchemaProvider in project schema-registry by confluentinc.
From the class AbstractKafkaJsonSchemaDeserializer, the method configure():
/**
 * Configures this deserializer from the given config without replacing the
 * schema registry client itself, so tests can inject a mock client beforehand.
 */
@SuppressWarnings("unchecked")
protected void configure(KafkaJsonSchemaDeserializerConfig config, Class<T> type) {
  configureClientProperties(config, new JsonSchemaProvider());
  this.type = type;
  this.validate = config.getBoolean(KafkaJsonSchemaDeserializerConfig.FAIL_INVALID_SCHEMA);
  this.typeProperty = config.getString(KafkaJsonSchemaDeserializerConfig.TYPE_PROPERTY);
  // Propagate the unknown-properties policy to the shared ObjectMapper.
  final boolean strictUnknowns =
      config.getBoolean(KafkaJsonSchemaDeserializerConfig.FAIL_UNKNOWN_PROPERTIES);
  this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, strictUnknowns);
}
use of io.confluent.kafka.schemaregistry.json.JsonSchemaProvider in project schema-registry by confluentinc.
From the class AbstractKafkaJsonSchemaSerializer, the method configure():
// Configures this serializer: wires the schema registry client properties for
// JSON Schema, then copies every serializer-related flag from the config onto
// fields and applies the JSON output options to the shared ObjectMapper.
protected void configure(KafkaJsonSchemaSerializerConfig config) {
configureClientProperties(config, new JsonSchemaProvider());
// Schema-registration behavior flags.
this.normalizeSchema = config.normalizeSchema();
this.autoRegisterSchema = config.autoRegisterSchema();
this.useSchemaId = config.useSchemaId();
this.idCompatStrict = config.getIdCompatibilityStrict();
this.useLatestVersion = config.useLatestVersion();
this.latestCompatStrict = config.getLatestCompatibilityStrict();
// JSON output formatting options applied to the shared ObjectMapper.
boolean prettyPrint = config.getBoolean(KafkaJsonSchemaSerializerConfig.JSON_INDENT_OUTPUT);
this.objectMapper.configure(SerializationFeature.INDENT_OUTPUT, prettyPrint);
boolean writeDatesAsIso8601 = config.getBoolean(KafkaJsonSchemaSerializerConfig.WRITE_DATES_AS_ISO8601);
// ISO-8601 dates and timestamp dates are mutually exclusive, hence the negation.
this.objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, !writeDatesAsIso8601);
this.specVersion = SpecificationVersion.get(config.getString(KafkaJsonSchemaSerializerConfig.SCHEMA_SPEC_VERSION));
this.oneofForNullables = config.getBoolean(KafkaJsonSchemaSerializerConfig.ONEOF_FOR_NULLABLES);
// NOTE(review): this reads a constant from the *Deserializer* config class —
// presumably the key is shared between both configs; confirm it is defined
// (or inherited) on the serializer config as well.
this.failUnknownProperties = config.getBoolean(KafkaJsonSchemaDeserializerConfig.FAIL_UNKNOWN_PROPERTIES);
this.validate = config.getBoolean(KafkaJsonSchemaSerializerConfig.FAIL_INVALID_SCHEMA);
}
Aggregations