Usage of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in the project schema-registry by confluentinc: class KafkaSchemaRegistry, method lookUpSchemaUnderSubject.
/**
* Checks if given schema was ever registered under a subject. If found, it returns the version of
* the schema under the subject. If not, returns -1
*/
/**
 * Checks whether the given schema was ever registered under the given subject.
 *
 * <p>Returns a {@link Schema} carrying the subject, version, and schema id of the match,
 * or {@code null} if no match is found. Soft-deleted versions are only considered when
 * {@code lookupDeletedSchema} is {@code true}.
 *
 * @param subject             the subject to search under
 * @param schema              the schema to look up
 * @param normalize           whether to normalize the schema before comparison
 * @param lookupDeletedSchema whether soft-deleted versions count as matches
 * @throws SchemaRegistryException if the backing store cannot be read
 */
public Schema lookUpSchemaUnderSubject(String subject, Schema schema, boolean normalize, boolean lookupDeletedSchema) throws SchemaRegistryException {
  try {
    // Parse/canonicalize once up front; used by the reference-resolution comparison below.
    ParsedSchema candidate = canonicalizeSchema(schema, false, normalize);

    // Fast path: the lookup cache may already map this exact schema to (id, subject, version).
    SchemaIdAndSubjects cached = this.lookupCache.schemaIdAndSubjects(schema);
    if (cached != null && cached.hasSubject(subject)) {
      boolean visible = lookupDeletedSchema
          || !isSubjectVersionDeleted(subject, cached.getVersion(subject));
      if (visible) {
        return new Schema(subject, cached.getVersion(subject), cached.getSchemaId(),
            schema.getSchemaType(), schema.getReferences(), schema.getSchema());
      }
    }

    // Slow path: walk the subject's versions newest-first and compare parsed forms.
    List<SchemaValue> versions = getAllSchemaValues(subject);
    Collections.reverse(versions);
    for (SchemaValue stored : versions) {
      if (!lookupDeletedSchema && stored.isDeleted()) {
        continue;
      }
      // Only relevant when the incoming schema has no references but the stored one does,
      // i.e. the caller sent the schema with all references inlined/resolved.
      if (!candidate.references().isEmpty() || stored.getReferences().isEmpty()) {
        continue;
      }
      Schema storedEntity = getSchemaEntityFromSchemaValue(stored);
      if (candidate.deepEquals(parseSchema(storedEntity))) {
        // This handles the case where a schema is sent with all references resolved
        return storedEntity;
      }
    }
    return null;
  } catch (StoreException e) {
    throw new SchemaRegistryStoreException("Error from the backend Kafka store", e);
  }
}
Usage of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in the project schema-registry by confluentinc: class KafkaSchemaRegistry, method getMode.
/**
 * Returns the effective mode for a subject: the global mode wins when it is
 * {@code READONLY_OVERRIDE}; otherwise the subject-level mode applies.
 *
 * @param subject the subject whose mode to resolve
 * @throws SchemaRegistryStoreException if the backing store cannot be read
 */
public Mode getMode(String subject) throws SchemaRegistryStoreException {
  try {
    Mode globalMode = lookupCache.mode(null, false, defaultMode);
    Mode subjectMode = lookupCache.mode(subject, false, defaultMode);
    return globalMode == Mode.READONLY_OVERRIDE ? globalMode : subjectMode;
  } catch (StoreException e) {
    // Fixed: previous message ("Failed to write new config value") was copy-pasted
    // from a write path and misleading for this read-only operation.
    throw new SchemaRegistryStoreException("Failed to read mode from the backend store", e);
  }
}
Usage of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in the project schema-registry by confluentinc: class KafkaSchemaRegistry, method updateCompatibilityLevel.
/**
 * Persists a new compatibility level for the given subject to the Kafka-backed store.
 *
 * @param subject               the subject to update
 * @param newCompatibilityLevel the compatibility level to store
 * @throws SchemaRegistryStoreException  if the write to the backing store fails
 * @throws OperationNotPermittedException if the subject is in read-only mode
 * @throws UnknownLeaderException        if the current leader cannot be determined
 */
public void updateCompatibilityLevel(String subject, CompatibilityLevel newCompatibilityLevel) throws SchemaRegistryStoreException, OperationNotPermittedException, UnknownLeaderException {
  if (isReadOnlyMode(subject)) {
    throw new OperationNotPermittedException("Subject " + subject + " is in read-only mode");
  }
  ConfigKey configKey = new ConfigKey(subject);
  try {
    // Ensure the local reader has caught up to the end of the topic before writing,
    // so we do not act on stale state.
    kafkaStore.waitUntilKafkaReaderReachesLastOffset(subject, kafkaStoreTimeoutMs);
    kafkaStore.put(configKey, new ConfigValue(subject, newCompatibilityLevel));
    // Parameterized logging: avoids string concatenation when debug is disabled.
    log.debug("Wrote new compatibility level: {} to the Kafka data store with key {}",
        newCompatibilityLevel.name, configKey);
  } catch (StoreException e) {
    throw new SchemaRegistryStoreException("Failed to write new config value to the store", e);
  }
}
Usage of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in the project schema-registry by confluentinc: class KafkaSchemaRegistry, method allVersions.
/**
 * Returns an iterator over all schema versions whose subject matches the given
 * subject, prefix, or context-wildcard pattern, by issuing a single range scan
 * against the Kafka-backed store.
 *
 * @param subjectOrPrefix exact subject, subject prefix, or string containing
 *                        {@code CONTEXT_WILDCARD} for cross-context matching
 * @param isPrefix        whether to treat {@code subjectOrPrefix} as a prefix
 * @throws SchemaRegistryException if the backing store cannot be read
 */
private CloseableIterator<SchemaRegistryValue> allVersions(String subjectOrPrefix, boolean isPrefix) throws SchemaRegistryException {
  try {
    final String rangeStart;
    final String rangeEnd;
    int wildcardIdx = subjectOrPrefix.indexOf(CONTEXT_WILDCARD);
    if (wildcardIdx < 0) {
      // Plain subject or prefix match.
      rangeStart = subjectOrPrefix;
      rangeEnd = isPrefix ? subjectOrPrefix + Character.MAX_VALUE : subjectOrPrefix;
    } else {
      // Context wildcard: scan every context under the portion before the wildcard.
      String contextBase = subjectOrPrefix.substring(0, wildcardIdx) + CONTEXT_PREFIX;
      rangeStart = contextBase + CONTEXT_DELIMITER;
      rangeEnd = contextBase + Character.MAX_VALUE + CONTEXT_DELIMITER;
    }
    SchemaKey lowerBound = new SchemaKey(rangeStart, MIN_VERSION);
    SchemaKey upperBound = new SchemaKey(rangeEnd, MAX_VERSION);
    return kafkaStore.getAll(lowerBound, upperBound);
  } catch (StoreException e) {
    throw new SchemaRegistryStoreException("Error from the backend Kafka store", e);
  }
}
Usage of io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryStoreException in the project schema-registry by confluentinc: class SchemasResource, method getSubjects.
/**
 * REST handler: returns all subjects associated with the given schema id.
 *
 * <p>Responds 404 (error code 40403) when the id is unknown, and 500
 * (error code 50001) when the backing store fails.
 */
@GET
@Path("/ids/{id}/subjects")
@Operation(summary = "Get all the subjects associated with the input ID.", responses = { @ApiResponse(responseCode = "404", description = "Error code 40403 -- Schema not found\n"), @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend " + "data store\n") })
public Set<String> getSubjects(@Parameter(description = "Globally unique identifier of the schema", required = true) @PathParam("id") Integer id, @Parameter(description = "Filters results by the respective subject") @QueryParam("subject") String subject, @Parameter(description = "Whether to include subjects where the schema was deleted") @QueryParam("deleted") boolean lookupDeletedSchema) {
  String errorMessage = "Error while retrieving all subjects associated with schema id " + id + " from the schema registry";
  final Set<String> result;
  try {
    result = schemaRegistry.listSubjectsForId(id, subject, lookupDeletedSchema);
  } catch (SchemaRegistryStoreException e) {
    // Store failures map to a 500 with error code 50001.
    log.debug(errorMessage, e);
    throw Errors.storeException(errorMessage, e);
  } catch (SchemaRegistryException e) {
    throw Errors.schemaRegistryException(errorMessage, e);
  }
  // A null result means the id was never registered -> 404 / 40403.
  if (result == null) {
    throw Errors.schemaNotFoundException();
  }
  return result;
}
Aggregations