Use of io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider in project ksql by confluentinc.
In the class KsqlSchemaRegistryClientFactory, the method get:
public SchemaRegistryClient get() {
  if (schemaRegistryUrl.equals("")) {
    return new DefaultSchemaRegistryClient();
  }
  final RestService restService = serviceSupplier.get();
  // This call sets a default sslSocketFactory.
  final SchemaRegistryClient client = schemaRegistryClientFactory.create(
      restService,
      1000,
      ImmutableList.of(
          new AvroSchemaProvider(), new ProtobufSchemaProvider(), new JsonSchemaProvider()),
      schemaRegistryClientConfigs,
      httpHeaders);
  // Replace the default sslSocketFactory set by the create() call above
  // when a custom SSL context was supplied.
  if (sslContext != null) {
    restService.setSslSocketFactory(sslContext.getSocketFactory());
  }
  return client;
}
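For comparison, here is a minimal, self-contained sketch of building a multi-format client directly with CachedSchemaRegistryClient rather than through the ksql factory. The URL, cache capacity, and empty config/header maps are placeholder assumptions, not values taken from the ksql code.

import com.google.common.collect.ImmutableList;
import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
import java.util.Collections;

public class MultiFormatClientSketch {
  public static void main(String[] args) {
    // Placeholder registry URL and cache capacity; adjust for your environment.
    SchemaRegistryClient client = new CachedSchemaRegistryClient(
        Collections.singletonList("http://localhost:8081"),
        1000,
        ImmutableList.of(
            new AvroSchemaProvider(), new ProtobufSchemaProvider(), new JsonSchemaProvider()),
        Collections.emptyMap(),   // client configs
        Collections.emptyMap());  // HTTP headers

    // Parsing is delegated to whichever registered provider matches the schema type.
    client.parseSchema("AVRO", "\"string\"", Collections.emptyList())
        .ifPresent(parsed -> System.out.println("Parsed schema type: " + parsed.schemaType()));
  }
}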
Use of io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider in project schema-registry by confluentinc.
In the class AvroConverter, the method configure:
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
  this.isKey = isKey;
  AvroConverterConfig avroConverterConfig = new AvroConverterConfig(configs);
  if (schemaRegistry == null) {
    schemaRegistry = new CachedSchemaRegistryClient(
        avroConverterConfig.getSchemaRegistryUrls(),
        avroConverterConfig.getMaxSchemasPerSubject(),
        Collections.singletonList(new AvroSchemaProvider()),
        configs,
        avroConverterConfig.requestHeaders());
  }
  serializer = new Serializer(configs, schemaRegistry);
  deserializer = new Deserializer(configs, schemaRegistry);
  avroData = new AvroData(new AvroDataConfig(configs));
}
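A short usage sketch, assuming the MockSchemaRegistryClient test double from the same client library and a placeholder registry URL: it runs the configure call above and then round-trips a value through the Kafka Connect Converter API.

import io.confluent.connect.avro.AvroConverter;
import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;

public class AvroConverterSketch {
  public static void main(String[] args) {
    // The mock client avoids a live Schema Registry; the URL only satisfies config validation.
    AvroConverter converter = new AvroConverter(new MockSchemaRegistryClient());
    converter.configure(
        Collections.singletonMap("schema.registry.url", "http://localhost:8081"), false);

    // Serialize a Connect value to Avro bytes and convert it back.
    byte[] bytes = converter.fromConnectData("demo-topic", Schema.STRING_SCHEMA, "hello");
    SchemaAndValue roundTrip = converter.toConnectData("demo-topic", bytes);
    System.out.println(roundTrip.value()); // hello
  }
}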
Use of io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider in project schema-registry by confluentinc.
In the class AvroSchemaTest, the test method testInvalidDefault:
@Test
public void testInvalidDefault() throws Exception {
  AvroSchemaProvider provider = new AvroSchemaProvider();
  Map<String, String> configs =
      Collections.singletonMap(AvroSchemaProvider.AVRO_VALIDATE_DEFAULTS, "false");
  provider.configure(configs);
  Optional<ParsedSchema> schema =
      provider.parseSchema(recordInvalidDefaultSchema, Collections.emptyList(), true);
  assertTrue(schema.isPresent());
  configs = Collections.singletonMap(AvroSchemaProvider.AVRO_VALIDATE_DEFAULTS, "true");
  provider.configure(configs);
  schema = provider.parseSchema(recordInvalidDefaultSchema, Collections.emptyList(), true);
  assertFalse(schema.isPresent());
}
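The recordInvalidDefaultSchema fixture is not shown on this page. The sketch below substitutes a hypothetical record whose int field declares a string default, to show the same AVRO_VALIDATE_DEFAULTS behavior outside the test class:

import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
import java.util.Collections;
import java.util.Optional;

public class ValidateDefaultsSketch {
  public static void main(String[] args) {
    // Hypothetical schema: "age" is an int but declares a string default,
    // which Avro rejects when default validation is enabled.
    String invalidDefault =
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
            + "[{\"name\":\"age\",\"type\":\"int\",\"default\":\"unknown\"}]}";

    AvroSchemaProvider provider = new AvroSchemaProvider();
    provider.configure(
        Collections.singletonMap(AvroSchemaProvider.AVRO_VALIDATE_DEFAULTS, "true"));

    Optional<ParsedSchema> parsed =
        provider.parseSchema(invalidDefault, Collections.emptyList(), true);
    System.out.println("Parsed with validation on: " + parsed.isPresent()); // false
  }
}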
Use of io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider in project schema-registry by confluentinc.
In the class AbstractKafkaAvroDeserializer, the method configure:
/**
 * Sets properties for this deserializer without overriding the schema registry client itself.
 * Useful for testing, where a mock client is injected.
 */
protected void configure(KafkaAvroDeserializerConfig config) {
  configureClientProperties(config, new AvroSchemaProvider());
  useSpecificAvroReader =
      config.getBoolean(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG);
  avroReflectionAllowNull =
      config.getBoolean(KafkaAvroDeserializerConfig.AVRO_REFLECTION_ALLOW_NULL_CONFIG);
  avroUseLogicalTypeConverters =
      config.getBoolean(KafkaAvroSerializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG);
}
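A minimal configuration sketch, assuming a placeholder registry URL and a value-side deserializer, showing how these settings reach the protected configure method through the public Deserializer API:

import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
import java.util.HashMap;
import java.util.Map;

public class DeserializerConfigSketch {
  public static void main(String[] args) {
    Map<String, Object> props = new HashMap<>();
    props.put("schema.registry.url", "http://localhost:8081"); // placeholder URL
    props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);

    // The public configure(Map, boolean) builds a KafkaAvroDeserializerConfig
    // and delegates to the protected configure shown above.
    try (KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer()) {
      deserializer.configure(props, false); // false = value deserializer
    }
  }
}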
Use of io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider in project schema-registry by confluentinc.
In the class AbstractKafkaAvroSerializer, the method configure:
protected void configure(KafkaAvroSerializerConfig config) {
  configureClientProperties(config, new AvroSchemaProvider());
  normalizeSchema = config.normalizeSchema();
  autoRegisterSchema = config.autoRegisterSchema();
  removeJavaProperties =
      config.getBoolean(KafkaAvroSerializerConfig.AVRO_REMOVE_JAVA_PROPS_CONFIG);
  useSchemaId = config.useSchemaId();
  idCompatStrict = config.getIdCompatibilityStrict();
  useLatestVersion = config.useLatestVersion();
  latestCompatStrict = config.getLatestCompatibilityStrict();
  avroReflectionAllowNull =
      config.getBoolean(KafkaAvroSerializerConfig.AVRO_REFLECTION_ALLOW_NULL_CONFIG);
  avroUseLogicalTypeConverters =
      config.getBoolean(KafkaAvroSerializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG);
}
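Likewise, a hedged sketch of passing serializer options through the public Serializer API; the registry URL is a placeholder and the string keys are the standard "auto.register.schemas" and "use.latest.version" settings rather than values from this page.

import io.confluent.kafka.serializers.KafkaAvroSerializer;
import io.confluent.kafka.serializers.KafkaAvroSerializerConfig;
import java.util.HashMap;
import java.util.Map;

public class SerializerConfigSketch {
  public static void main(String[] args) {
    Map<String, Object> props = new HashMap<>();
    props.put("schema.registry.url", "http://localhost:8081");              // placeholder URL
    props.put("auto.register.schemas", false);                              // rely on pre-registered schemas
    props.put("use.latest.version", true);                                  // resolve to the latest subject version
    props.put(KafkaAvroSerializerConfig.AVRO_REMOVE_JAVA_PROPS_CONFIG, true);

    // The public configure(Map, boolean) builds a KafkaAvroSerializerConfig
    // and delegates to the protected configure shown above.
    try (KafkaAvroSerializer serializer = new KafkaAvroSerializer()) {
      serializer.configure(props, false); // false = value serializer
    }
  }
}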