Use of io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException in project schema-registry by confluentinc.
Class SubjectVersionsResource, method deleteSchemaVersion.
@DELETE
@Path("/{version}")
@PerformanceMetric("subjects.versions.deleteSchemaVersion-schema")
@Operation(summary = "Deletes a specific version of the schema registered under this subject. " + "This only deletes the version and the schema ID remains intact making it still possible " + "to decode data using the schema ID. This API is recommended to be used only in " + "development environments or under extreme circumstances where-in, its required to delete " + "a previously registered schema for compatibility purposes or re-register previously " + "registered schema.", responses = { @ApiResponse(content = @Content(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = int.class))), @ApiResponse(responseCode = "404", description = "Error code 40401 -- Subject not found\n" + "Error code 40402 -- Version not found"), @ApiResponse(responseCode = "422", description = "Error code 42202 -- Invalid version"), @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend " + "data store") })
public void deleteSchemaVersion(@Suspended final AsyncResponse asyncResponse, @Context HttpHeaders headers, @Parameter(description = "Name of the subject", required = true) @PathParam("subject") String subject, @Parameter(description = VERSION_PARAM_DESC, required = true) @PathParam("version") String version, @Parameter(description = "Whether to perform a permanent delete") @QueryParam("permanent") boolean permanentDelete) {
log.info("Deleting schema version {} from subject {}", version, subject);
subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject);
VersionId versionId;
try {
versionId = new VersionId(version);
} catch (InvalidVersionException e) {
throw Errors.invalidVersionException(e.getMessage());
}
Schema schema;
String errorMessage = "Error while retrieving schema for subject " + subject + " with version " + version + " from the schema registry";
try {
if (schemaRegistry.schemaVersionExists(subject, versionId, true)) {
if (!permanentDelete && !schemaRegistry.schemaVersionExists(subject, versionId, false)) {
throw Errors.schemaVersionSoftDeletedException(subject, version);
}
}
schema = schemaRegistry.get(subject, versionId.getVersionId(), true);
if (schema == null) {
if (!schemaRegistry.hasSubjects(subject, true)) {
throw Errors.subjectNotFoundException(subject);
} else {
throw Errors.versionNotFoundException(versionId.getVersionId());
}
}
} catch (SchemaRegistryStoreException e) {
log.debug(errorMessage, e);
throw Errors.storeException(errorMessage, e);
} catch (InvalidVersionException e) {
throw Errors.invalidVersionException(e.getMessage());
} catch (SchemaRegistryException e) {
throw Errors.schemaRegistryException(errorMessage, e);
}
try {
Map<String, String> headerProperties = requestHeaderBuilder.buildRequestHeaders(headers, schemaRegistry.config().whitelistHeaders());
schemaRegistry.deleteSchemaVersionOrForward(headerProperties, subject, schema, permanentDelete);
} catch (SchemaVersionNotSoftDeletedException e) {
throw Errors.schemaVersionNotSoftDeletedException(e.getSubject(), e.getVersion());
} catch (SchemaRegistryTimeoutException e) {
throw Errors.operationTimeoutException("Delete Schema Version operation timed out", e);
} catch (SchemaRegistryStoreException e) {
throw Errors.storeException("Delete Schema Version operation failed while writing" + " to the Kafka store", e);
} catch (SchemaRegistryRequestForwardingException e) {
throw Errors.requestForwardingFailedException("Error while forwarding delete schema version request" + " to the leader", e);
} catch (ReferenceExistsException e) {
throw Errors.referenceExistsException(e.getMessage());
} catch (UnknownLeaderException e) {
throw Errors.unknownLeaderException("Leader not known.", e);
} catch (SchemaRegistryException e) {
throw Errors.schemaRegistryException("Error while deleting Schema Version", e);
}
asyncResponse.resume(schema.getVersion());
}
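For context, here is a minimal client-side sketch of exercising this endpoint (not part of the resource class). It assumes a registry running at http://localhost:8081 and uses the Java client's deleteSchemaVersion(subject, version) overload for a soft delete; a permanent delete would instead set the permanent=true query parameter handled above. Subject and version values are placeholders.
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;

public class DeleteVersionExample {
  public static void main(String[] args) throws Exception {
    // Placeholder URL and cache size; adjust for a real deployment.
    SchemaRegistryClient client = new CachedSchemaRegistryClient("http://localhost:8081", 100);
    try {
      // Soft delete of version 1 under the subject; the deleted version number is returned.
      Integer deleted = client.deleteSchemaVersion("my-subject", "1");
      System.out.println("Soft-deleted version " + deleted);
    } catch (RestClientException e) {
      // Error codes documented in the annotation above: 40401 subject not found,
      // 40402 version not found, 42202 invalid version.
      System.err.println("Delete failed with error code " + e.getErrorCode());
    }
  }
}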
Use of io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException in project schema-registry by confluentinc.
Class SubjectVersionsResource, method register.
@POST
@PerformanceMetric("subjects.versions.register")
@Operation(summary = "Register a new schema under the specified subject. If successfully " + "registered, this returns the unique identifier of this schema in the registry. The " + "returned identifier should be used to retrieve this schema from the schemas resource and " + "is different from the schema's version which is associated with the subject. If the same " + "schema is registered under a different subject, the same identifier will be returned. " + "However, the version of the schema may be different under different subjects.\n" + "A schema should be compatible with the previously registered schema or schemas (if there " + "are any) as per the configured compatibility level. The configured compatibility level " + "can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns " + "null, then GET http:get:: /config\n" + "When there are multiple instances of Schema Registry running in the same cluster, the " + "schema registration request will be forwarded to one of the instances designated as " + "the primary. If the primary is not available, the client will get an error code " + "indicating that the forwarding has failed.", responses = { @ApiResponse(content = @Content(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = RegisterSchemaResponse.class))), @ApiResponse(responseCode = "409", description = "Incompatible schema"), @ApiResponse(responseCode = "422", description = "Error code 42201 -- Invalid schema or " + "schema type"), @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store\n" + "Error code 50002 -- Operation timed out\n" + "Error code 50003 -- Error while forwarding the request to the primary") })
public void register(@Suspended final AsyncResponse asyncResponse, @Context HttpHeaders headers, @Parameter(description = "Name of the subject", required = true) @PathParam("subject") String subjectName, @Parameter(description = "Whether to register the normalized schema") @QueryParam("normalize") boolean normalize, @Parameter(description = "Schema", required = true) @NotNull RegisterSchemaRequest request) {
log.info("Registering new schema: subject {}, version {}, id {}, type {}, schema size {}", subjectName, request.getVersion(), request.getId(), request.getSchemaType(), request.getSchema() == null ? 0 : request.getSchema().length());
if (subjectName != null && (CharMatcher.javaIsoControl().matchesAnyOf(subjectName) || QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject().equals(GLOBAL_RESOURCE_NAME))) {
throw Errors.invalidSubjectException(subjectName);
}
subjectName = QualifiedSubject.normalize(schemaRegistry.tenant(), subjectName);
Map<String, String> headerProperties = requestHeaderBuilder.buildRequestHeaders(headers, schemaRegistry.config().whitelistHeaders());
Schema schema = new Schema(subjectName, request.getVersion() != null ? request.getVersion() : 0, request.getId() != null ? request.getId() : -1, request.getSchemaType() != null ? request.getSchemaType() : AvroSchema.TYPE, request.getReferences(), request.getSchema());
int id;
try {
id = schemaRegistry.registerOrForward(subjectName, schema, normalize, headerProperties);
} catch (IdDoesNotMatchException e) {
throw Errors.idDoesNotMatchException(e);
} catch (InvalidSchemaException e) {
throw Errors.invalidSchemaException(e);
} catch (OperationNotPermittedException e) {
throw Errors.operationNotPermittedException(e.getMessage());
} catch (SchemaRegistryTimeoutException e) {
throw Errors.operationTimeoutException("Register operation timed out", e);
} catch (SchemaRegistryStoreException e) {
throw Errors.storeException("Register schema operation failed while writing" + " to the Kafka store", e);
} catch (SchemaRegistryRequestForwardingException e) {
throw Errors.requestForwardingFailedException("Error while forwarding register schema request" + " to the leader", e);
} catch (IncompatibleSchemaException e) {
throw Errors.incompatibleSchemaException("Schema being registered is incompatible with" + " an earlier schema for subject \"" + subjectName + "\", details: " + e.getMessage(), e);
} catch (UnknownLeaderException e) {
throw Errors.unknownLeaderException("Leader not known.", e);
} catch (SchemaRegistryException e) {
throw Errors.schemaRegistryException("Error while registering schema", e);
}
RegisterSchemaResponse registerSchemaResponse = new RegisterSchemaResponse();
registerSchemaResponse.setId(id);
asyncResponse.resume(registerSchemaResponse);
}
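As a companion to the resource method, below is a hedged client-side sketch of registering an Avro schema through the Java client. The registry URL, subject name, and schema string are placeholders, and the error codes referenced in the comment come from the @Operation annotation above.
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;

public class RegisterExample {
  public static void main(String[] args) throws Exception {
    SchemaRegistryClient client = new CachedSchemaRegistryClient("http://localhost:8081", 100);
    AvroSchema schema = new AvroSchema(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");
    try {
      // The returned id identifies the schema globally and is distinct from the subject version.
      int id = client.register("users-value", schema);
      System.out.println("Registered schema id " + id);
    } catch (RestClientException e) {
      // e.g. 42201 invalid schema, 50002 operation timed out, 50003 forwarding to the primary failed.
      System.err.println("Registration failed: " + e.getErrorCode() + " " + e.getMessage());
    }
  }
}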
Use of io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException in project schema-registry by confluentinc.
Class ConfigResource, method deleteTopLevelConfig.
@DELETE
@Operation(summary = "Deletes the Global-level compatibility level config and " + "revert to the global default.", responses = { @ApiResponse(content = @Content(schema = @Schema(implementation = CompatibilityLevel.class))), @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend " + "datastore") })
public void deleteTopLevelConfig(@Suspended final AsyncResponse asyncResponse, @Context HttpHeaders headers) {
log.info("Deleting Global compatibility setting and reverting back to default");
Config deletedConfig;
try {
CompatibilityLevel currentCompatibility = schemaRegistry.getCompatibilityLevel(null);
Map<String, String> headerProperties = requestHeaderBuilder.buildRequestHeaders(headers, schemaRegistry.config().whitelistHeaders());
schemaRegistry.deleteCompatibilityConfigOrForward(null, headerProperties);
deletedConfig = new Config(currentCompatibility.name);
} catch (OperationNotPermittedException e) {
throw Errors.operationNotPermittedException(e.getMessage());
} catch (SchemaRegistryStoreException e) {
throw Errors.storeException("Failed to delete compatibility level", e);
} catch (UnknownLeaderException e) {
throw Errors.unknownLeaderException("Failed to delete compatibility level", e);
} catch (SchemaRegistryRequestForwardingException e) {
throw Errors.requestForwardingFailedException("Error while forwarding delete config request" + " to the leader", e);
}
asyncResponse.resume(deletedConfig);
}
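Since this method only covers the server side, a short sketch of issuing the corresponding DELETE against the resource's /config path with the JDK HTTP client may help; the base URL is a placeholder and the response handling is illustrative only.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DeleteGlobalConfigExample {
  public static void main(String[] args) throws Exception {
    HttpClient http = HttpClient.newHttpClient();
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8081/config"))  // assumed registry address
        .DELETE()
        .build();
    HttpResponse<String> response = http.send(request, HttpResponse.BodyHandlers.ofString());
    // On success the previously configured global compatibility level is returned;
    // a 500 with error code 50001 signals a backend datastore failure (per the annotation above).
    System.out.println(response.statusCode() + " " + response.body());
  }
}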
Use of io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException in project schema-registry by confluentinc.
Class KafkaSchemaRegistry, method registerOrForward.
public int registerOrForward(String subject, Schema schema, boolean normalize, Map<String, String> headerProperties) throws SchemaRegistryException {
  Schema existingSchema = lookUpSchemaUnderSubject(subject, schema, normalize, false);
  if (existingSchema != null) {
    if (schema.getId() != null && schema.getId() >= 0 && !schema.getId().equals(existingSchema.getId())) {
      throw new IdDoesNotMatchException(existingSchema.getId(), schema.getId());
    }
    return existingSchema.getId();
  }
  kafkaStore.lockFor(subject).lock();
  try {
    if (isLeader()) {
      return register(subject, schema, normalize);
    } else {
      // forward registering request to the leader
      if (leaderIdentity != null) {
        return forwardRegisterRequestToLeader(subject, schema, normalize, headerProperties);
      } else {
        throw new UnknownLeaderException("Register schema request failed since leader is " + "unknown");
      }
    }
  } finally {
    kafkaStore.lockFor(subject).unlock();
  }
}
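Because registerOrForward throws UnknownLeaderException when no leader identity is known (for example, during a leader election), a caller may choose to retry briefly before surfacing the failure. The helper below is a hypothetical sketch of such a policy, not part of schema-registry: it only reuses the registerOrForward signature shown above, and the attempt count and backoff are illustrative.
import java.util.Map;
import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
import io.confluent.kafka.schemaregistry.exceptions.SchemaRegistryException;
import io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException;
import io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry;

public class RegisterWithRetry {
  // Hypothetical helper: retries registration while the leader is unknown.
  static int registerWithRetry(KafkaSchemaRegistry registry, String subject, Schema schema,
                               boolean normalize, Map<String, String> headerProperties)
      throws SchemaRegistryException, InterruptedException {
    int maxAttempts = 3;
    for (int attempt = 1; ; attempt++) {
      try {
        return registry.registerOrForward(subject, schema, normalize, headerProperties);
      } catch (UnknownLeaderException e) {
        if (attempt == maxAttempts) {
          throw e;  // give up; callers map this to an error response, as in register() above
        }
        Thread.sleep(500L * attempt);  // simple linear backoff while a new leader is elected
      }
    }
  }
}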
Use of io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException in project schema-registry by confluentinc.
Class ConfigResource, method deleteSubjectConfig.
@DELETE
@Path("/{subject}")
@Operation(summary = "Deletes the specified subject-level compatibility level config and " + "revert to the global default.", responses = { @ApiResponse(content = @Content(schema = @Schema(implementation = CompatibilityLevel.class))), @ApiResponse(responseCode = "404", description = "Error code 40401 -- Subject not found"), @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend " + "datastore") })
public void deleteSubjectConfig(@Suspended final AsyncResponse asyncResponse, @Context HttpHeaders headers, @Parameter(description = "Name of the subject", required = true) @PathParam("subject") String subject) {
log.info("Deleting compatibility setting for subject {}", subject);
subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject);
Config deletedConfig;
try {
CompatibilityLevel currentCompatibility = schemaRegistry.getCompatibilityLevel(subject);
if (currentCompatibility == null) {
throw Errors.subjectNotFoundException(subject);
}
Map<String, String> headerProperties = requestHeaderBuilder.buildRequestHeaders(headers, schemaRegistry.config().whitelistHeaders());
schemaRegistry.deleteCompatibilityConfigOrForward(subject, headerProperties);
deletedConfig = new Config(currentCompatibility.name);
} catch (OperationNotPermittedException e) {
throw Errors.operationNotPermittedException(e.getMessage());
} catch (SchemaRegistryStoreException e) {
throw Errors.storeException("Failed to delete compatibility level", e);
} catch (UnknownLeaderException e) {
throw Errors.unknownLeaderException("Failed to delete compatibility level", e);
} catch (SchemaRegistryRequestForwardingException e) {
throw Errors.requestForwardingFailedException("Error while forwarding delete config request" + " to the leader", e);
}
asyncResponse.resume(deletedConfig);
}