Use of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in project schema-registry by confluentinc.
The class ConfigResource, method getSubjectLevelConfig.
@Path("/{subject}")
@GET
@Operation(summary = "Get compatibility level for a subject.", responses = {
    @ApiResponse(responseCode = "404", description = "Subject not found"),
    @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend "
        + "data store") })
public Config getSubjectLevelConfig(
    @Parameter(description = "Name of the subject", required = true)
    @PathParam("subject") String subject,
    @Parameter(description = "Whether to return the global compatibility level "
        + " if subject compatibility level not found")
    @QueryParam("defaultToGlobal") boolean defaultToGlobal) {
  subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject);
  Config config;
  try {
    CompatibilityLevel compatibilityLevel = defaultToGlobal
        ? schemaRegistry.getCompatibilityLevelInScope(subject)
        : schemaRegistry.getCompatibilityLevel(subject);
    if (compatibilityLevel == null) {
      throw Errors.subjectLevelCompatibilityNotConfiguredException(subject);
    }
    config = new Config(compatibilityLevel.name);
  } catch (SchemaRegistryStoreException e) {
    throw Errors.storeException("Failed to get the configs for subject " + subject, e);
  }
  return config;
}
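ConfigResource is mounted under the registry's config path, so this method is typically reached as GET /config/{subject} with defaultToGlobal passed as a query parameter; a SchemaRegistryStoreException surfaces to the caller only as a 500 response carrying error code 50001. Below is a minimal client-side sketch, assuming a registry reachable at http://localhost:8081 and a placeholder subject name (both are illustrative values, not part of the snippet above).

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SubjectConfigExample {
  public static void main(String[] args) throws Exception {
    String baseUrl = "http://localhost:8081";   // placeholder registry URL
    String subject = "my-topic-value";          // placeholder subject name

    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create(baseUrl + "/config/" + subject + "?defaultToGlobal=true"))
        .header("Accept", "application/vnd.schemaregistry.v1+json")
        .GET()
        .build();

    HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    // Expected: 200 with {"compatibilityLevel":"..."}; 404 if no subject-level
    // compatibility is configured and defaultToGlobal is false; 500 with error
    // code 50001 if the backend store lookup fails on the server.
    System.out.println(response.statusCode() + " " + response.body());
  }
}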
Use of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in project schema-registry by confluentinc.
The class ConfigResource, method getTopLevelConfig.
@GET
@Operation(summary = "Get global compatibility level.", responses = {
    @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend "
        + "data store") })
public Config getTopLevelConfig() {
  Config config;
  try {
    CompatibilityLevel compatibilityLevel = schemaRegistry.getCompatibilityLevel(null);
    config = new Config(compatibilityLevel == null ? null : compatibilityLevel.name);
  } catch (SchemaRegistryStoreException e) {
    throw Errors.storeException("Failed to get compatibility level", e);
  }
  return config;
}
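The same lookup is also reachable through the Confluent Java client rather than raw HTTP. A short sketch follows, assuming io.confluent:kafka-schema-registry-client is on the classpath and that passing null to getCompatibility requests the top-level setting in the client version you use; both of those are assumptions about the surrounding setup, not something shown in the snippet above.

import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;

public class GlobalCompatibilityExample {
  public static void main(String[] args) throws Exception {
    // Placeholder URL; the second argument is the cached-schema capacity.
    SchemaRegistryClient client = new CachedSchemaRegistryClient("http://localhost:8081", 100);
    // A null subject is assumed here to mean "global config", which is what
    // getTopLevelConfig() serves on the REST side.
    String compatibility = client.getCompatibility(null);
    System.out.println("Global compatibility level: " + compatibility);
  }
}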
Use of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in project schema-registry by confluentinc.
The class KafkaGroupLeaderElector, method init.
@Override
public void init() throws SchemaRegistryTimeoutException, SchemaRegistryStoreException {
  log.debug("Initializing schema registry group member");
  executor = Executors.newSingleThreadExecutor();
  executor.submit(new Runnable() {
    @Override
    public void run() {
      try {
        while (!stopped.get()) {
          coordinator.poll(Integer.MAX_VALUE);
        }
      } catch (Throwable t) {
        log.error("Unexpected exception in schema registry group processing thread", t);
      }
    }
  });
  try {
    if (!joinedLatch.await(initTimeout, TimeUnit.MILLISECONDS)) {
      throw new SchemaRegistryTimeoutException("Timed out waiting for join group to complete");
    }
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new SchemaRegistryStoreException("Interrupted while waiting for join group to complete", e);
  }
  log.debug("Schema registry group member initialized and joined group");
}
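Structurally, init() is a background poll loop on a single-thread executor plus a bounded wait on a join latch, with the interrupt flag restored before the checked exception is rethrown. The sketch below reproduces just that shape with plain java.util.concurrent types; JoinLoopSketch, doPoll, and the generic Exception are placeholders and not the actual KafkaGroupLeaderElector or coordinator API.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class JoinLoopSketch {
  private final AtomicBoolean stopped = new AtomicBoolean(false);
  private final CountDownLatch joinedLatch = new CountDownLatch(1);
  private final ExecutorService executor = Executors.newSingleThreadExecutor();

  public void init(long initTimeoutMs) throws Exception {
    executor.submit(() -> {
      try {
        while (!stopped.get()) {
          doPoll();            // stand-in for coordinator.poll(Integer.MAX_VALUE)
        }
      } catch (Throwable t) {
        t.printStackTrace();   // the real code logs and keeps the failure on this thread
      }
    });
    try {
      // Bound the time spent waiting for the group join, like joinedLatch.await above.
      if (!joinedLatch.await(initTimeoutMs, TimeUnit.MILLISECONDS)) {
        throw new Exception("Timed out waiting for join group to complete");
      }
    } catch (InterruptedException e) {
      // Restore the interrupt flag before surfacing a checked exception, as init() does.
      Thread.currentThread().interrupt();
      throw new Exception("Interrupted while waiting for join group to complete", e);
    }
  }

  private void doPoll() throws InterruptedException {
    // A real implementation would drive the group coordinator here and count
    // down joinedLatch once the member has joined the group.
    joinedLatch.countDown();
    Thread.sleep(100);
  }
}

In the real elector it is the poll loop that eventually releases joinedLatch once the group join completes, which is what lets init() bound the whole startup with a single await call.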
Use of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in project schema-registry by confluentinc.
The class KafkaSchemaRegistry, method getModeInScope.
public Mode getModeInScope(String subject) throws SchemaRegistryStoreException {
  try {
    Mode globalMode = lookupCache.mode(null, true, defaultMode);
    Mode subjectMode = lookupCache.mode(subject, true, defaultMode);
    return globalMode == Mode.READONLY_OVERRIDE ? globalMode : subjectMode;
  } catch (StoreException e) {
    throw new SchemaRegistryStoreException("Failed to write new config value to the store", e);
  }
}
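The point of the ternary is precedence: a global READONLY_OVERRIDE wins over whatever mode the subject has, otherwise the subject-level mode applies. A minimal illustration of that rule follows, with a local Mode enum standing in for the registry's own class.

public class ModeScopeSketch {
  enum Mode { READWRITE, READONLY, READONLY_OVERRIDE, IMPORT }

  // Mirrors getModeInScope: a global READONLY_OVERRIDE trumps the subject setting.
  static Mode resolve(Mode globalMode, Mode subjectMode) {
    return globalMode == Mode.READONLY_OVERRIDE ? globalMode : subjectMode;
  }

  public static void main(String[] args) {
    System.out.println(resolve(Mode.READWRITE, Mode.IMPORT));          // IMPORT
    System.out.println(resolve(Mode.READONLY_OVERRIDE, Mode.IMPORT));  // READONLY_OVERRIDE
  }
}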
Use of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in project schema-registry by confluentinc.
The class KafkaSchemaRegistry, method getReferencedBy.
public List<Integer> getReferencedBy(String subject, VersionId versionId)
    throws SchemaRegistryException {
  try {
    int version = versionId.getVersionId();
    if (versionId.isLatest()) {
      version = getLatestVersion(subject).getVersion();
    }
    SchemaKey key = new SchemaKey(subject, version);
    List<Integer> ids = new ArrayList<>(lookupCache.referencesSchema(key));
    Collections.sort(ids);
    return ids;
  } catch (StoreException e) {
    throw new SchemaRegistryStoreException("Error from the backend Kafka store", e);
  }
}
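Over REST this lookup is typically exposed as GET /subjects/{subject}/versions/{version}/referencedby, and passing latest as the version exercises the versionId.isLatest() branch above. A short sketch follows, again assuming a registry at http://localhost:8081 and a placeholder subject name (illustrative values only).

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ReferencedByExample {
  public static void main(String[] args) throws Exception {
    String baseUrl = "http://localhost:8081";   // placeholder registry URL
    String subject = "my-topic-value";          // placeholder subject name

    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create(baseUrl + "/subjects/" + subject + "/versions/latest/referencedby"))
        .GET()
        .build();

    HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    // Expected: a JSON array of schema IDs, sorted ascending as in getReferencedBy();
    // a backend store failure is reported as a 500 with error code 50001.
    System.out.println(response.body());
  }
}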