Use of io.confluent.kafka.schemaregistry.exceptions.SchemaVersionNotSoftDeletedException in project schema-registry by confluentinc.
The class SubjectVersionsResource, method deleteSchemaVersion.
@DELETE
@Path("/{version}")
@PerformanceMetric("subjects.versions.deleteSchemaVersion-schema")
@Operation(summary = "Deletes a specific version of the schema registered under this subject. "
    + "This only deletes the version; the schema ID remains intact, making it still possible "
    + "to decode data using the schema ID. This API is recommended for use only in "
    + "development environments or under extreme circumstances where it is required to delete "
    + "a previously registered schema for compatibility purposes or to re-register a previously "
    + "registered schema.",
    responses = {
        @ApiResponse(content = @Content(schema =
            @io.swagger.v3.oas.annotations.media.Schema(implementation = int.class))),
        @ApiResponse(responseCode = "404", description = "Error code 40401 -- Subject not found\n"
            + "Error code 40402 -- Version not found"),
        @ApiResponse(responseCode = "422", description = "Error code 42202 -- Invalid version"),
        @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend "
            + "data store")
    })
public void deleteSchemaVersion(
    @Suspended final AsyncResponse asyncResponse,
    @Context HttpHeaders headers,
    @Parameter(description = "Name of the subject", required = true)
    @PathParam("subject") String subject,
    @Parameter(description = VERSION_PARAM_DESC, required = true)
    @PathParam("version") String version,
    @Parameter(description = "Whether to perform a permanent delete")
    @QueryParam("permanent") boolean permanentDelete) {
log.info("Deleting schema version {} from subject {}", version, subject);
subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject);
VersionId versionId;
try {
versionId = new VersionId(version);
} catch (InvalidVersionException e) {
throw Errors.invalidVersionException(e.getMessage());
}
Schema schema;
String errorMessage = "Error while retrieving schema for subject " + subject + " with version " + version + " from the schema registry";
try {
if (schemaRegistry.schemaVersionExists(subject, versionId, true)) {
if (!permanentDelete && !schemaRegistry.schemaVersionExists(subject, versionId, false)) {
throw Errors.schemaVersionSoftDeletedException(subject, version);
}
}
schema = schemaRegistry.get(subject, versionId.getVersionId(), true);
if (schema == null) {
if (!schemaRegistry.hasSubjects(subject, true)) {
throw Errors.subjectNotFoundException(subject);
} else {
throw Errors.versionNotFoundException(versionId.getVersionId());
}
}
} catch (SchemaRegistryStoreException e) {
log.debug(errorMessage, e);
throw Errors.storeException(errorMessage, e);
} catch (InvalidVersionException e) {
throw Errors.invalidVersionException(e.getMessage());
} catch (SchemaRegistryException e) {
throw Errors.schemaRegistryException(errorMessage, e);
}
try {
Map<String, String> headerProperties = requestHeaderBuilder.buildRequestHeaders(headers, schemaRegistry.config().whitelistHeaders());
schemaRegistry.deleteSchemaVersionOrForward(headerProperties, subject, schema, permanentDelete);
} catch (SchemaVersionNotSoftDeletedException e) {
throw Errors.schemaVersionNotSoftDeletedException(e.getSubject(), e.getVersion());
} catch (SchemaRegistryTimeoutException e) {
throw Errors.operationTimeoutException("Delete Schema Version operation timed out", e);
} catch (SchemaRegistryStoreException e) {
throw Errors.storeException("Delete Schema Version operation failed while writing" + " to the Kafka store", e);
} catch (SchemaRegistryRequestForwardingException e) {
throw Errors.requestForwardingFailedException("Error while forwarding delete schema version request" + " to the leader", e);
} catch (ReferenceExistsException e) {
throw Errors.referenceExistsException(e.getMessage());
} catch (UnknownLeaderException e) {
throw Errors.unknownLeaderException("Leader not known.", e);
} catch (SchemaRegistryException e) {
throw Errors.schemaRegistryException("Error while deleting Schema Version", e);
}
asyncResponse.resume(schema.getVersion());
}
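
The handler above backs the REST endpoint DELETE /subjects/{subject}/versions/{version}, with the optional ?permanent=true query parameter bound to permanentDelete. Below is a minimal client-side sketch of the two-step flow (soft delete, then permanent delete), assuming a registry at http://localhost:8081 and a hypothetical subject orders-value with version 1; it uses plain java.net.http rather than any project client class.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DeleteSchemaVersionExample {

  public static void main(String[] args) throws Exception {
    // Assumed values for illustration only.
    String baseUrl = "http://localhost:8081";
    String subject = "orders-value";
    String version = "1";

    HttpClient client = HttpClient.newHttpClient();

    // Step 1: soft delete. The version is hidden from normal lookups, but the schema ID
    // remains usable for decoding, as described in the @Operation summary above.
    HttpRequest softDelete = HttpRequest.newBuilder()
        .uri(URI.create(baseUrl + "/subjects/" + subject + "/versions/" + version))
        .DELETE()
        .build();
    HttpResponse<String> softResponse =
        client.send(softDelete, HttpResponse.BodyHandlers.ofString());
    System.out.println("Soft delete: " + softResponse.statusCode() + " " + softResponse.body());

    // Step 2: permanent delete. Only allowed after the soft delete above; otherwise the
    // registry rejects the request via SchemaVersionNotSoftDeletedException.
    HttpRequest hardDelete = HttpRequest.newBuilder()
        .uri(URI.create(baseUrl + "/subjects/" + subject + "/versions/" + version
            + "?permanent=true"))
        .DELETE()
        .build();
    HttpResponse<String> hardResponse =
        client.send(hardDelete, HttpResponse.BodyHandlers.ofString());
    System.out.println("Permanent delete: " + hardResponse.statusCode() + " " + hardResponse.body());
  }
}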
Use of io.confluent.kafka.schemaregistry.exceptions.SchemaVersionNotSoftDeletedException in project schema-registry by confluentinc.
The class KafkaSchemaRegistry, method deleteSchemaVersion.
@Override
public void deleteSchemaVersion(String subject, Schema schema, boolean permanentDelete)
    throws SchemaRegistryException {
  try {
    if (isReadOnlyMode(subject)) {
      throw new OperationNotPermittedException("Subject " + subject + " is in read-only mode");
    }
    SchemaKey key = new SchemaKey(subject, schema.getVersion());
    if (!lookupCache.referencesSchema(key).isEmpty()) {
      throw new ReferenceExistsException(key.toString());
    }
    SchemaValue schemaValue = (SchemaValue) lookupCache.get(key);
    // A permanent delete is only allowed after the version has first been soft-deleted.
    if (permanentDelete && schemaValue != null && !schemaValue.isDeleted()) {
      throw new SchemaVersionNotSoftDeletedException(subject, schema.getVersion().toString());
    }
    // Ensure cache is up-to-date before any potential writes
    kafkaStore.waitUntilKafkaReaderReachesLastOffset(subject, kafkaStoreTimeoutMs);
    if (!permanentDelete) {
      // Soft delete: rewrite the schema value with the deleted flag set.
      schemaValue = new SchemaValue(schema);
      schemaValue.setDeleted(true);
      kafkaStore.put(key, schemaValue);
      // If no live versions remain, clean up per-subject mode and compatibility settings.
      if (!getAllVersions(subject, false).hasNext()) {
        if (getMode(subject) != null) {
          deleteMode(subject);
        }
        if (getCompatibilityLevel(subject) != null) {
          deleteCompatibility(subject);
        }
      }
    } else {
      // Permanent delete: write a tombstone (null value) for the schema key.
      kafkaStore.put(key, null);
    }
  } catch (StoreTimeoutException te) {
    throw new SchemaRegistryTimeoutException(
        "Write to the Kafka store timed out while deleting the schema version", te);
  } catch (StoreException e) {
    throw new SchemaRegistryStoreException(
        "Error while deleting the schema for subject '" + subject
            + "' in the backend Kafka store", e);
  }
}
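
For reference, the precondition ordering that leads to SchemaVersionNotSoftDeletedException can be restated as a small pure function. This is an illustrative sketch, not project code; the names DeleteGuardSketch, Outcome, and classify are made up for the example.

// Illustrative sketch of the guard order deleteSchemaVersion applies before writing
// to the Kafka store, expressed over the relevant state flags.
public class DeleteGuardSketch {

  enum Outcome { REFERENCE_EXISTS, NOT_SOFT_DELETED, SOFT_DELETE, PERMANENT_DELETE }

  static Outcome classify(boolean hasReferences, boolean alreadySoftDeleted,
                          boolean permanentDelete) {
    if (hasReferences) {
      // Corresponds to ReferenceExistsException: a version still referenced by other
      // schemas cannot be deleted at all.
      return Outcome.REFERENCE_EXISTS;
    }
    if (permanentDelete && !alreadySoftDeleted) {
      // Corresponds to SchemaVersionNotSoftDeletedException: a permanent delete is only
      // permitted once the version has been soft-deleted.
      return Outcome.NOT_SOFT_DELETED;
    }
    // Soft delete rewrites the SchemaValue with deleted=true; permanent delete writes a
    // null value (tombstone) for the schema key.
    return permanentDelete ? Outcome.PERMANENT_DELETE : Outcome.SOFT_DELETE;
  }
}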