Example use of io.confluent.kafka.serializers.subject.TopicNameStrategy in the kafka-rest project by confluentinc.
From class SchemaManagerImplTest, method errorFetchingLatestSchemaBySchemaVersionBadRequest.
@Test
public void errorFetchingLatestSchemaBySchemaVersionBadRequest()
    throws RestClientException, IOException {
  // Registry returns metadata whose schema type cannot be resolved.
  SchemaRegistryClient mockClient = mock(SchemaRegistryClient.class);
  SchemaMetadata mockMetadata = mock(SchemaMetadata.class);
  expect(mockClient.getLatestSchemaMetadata("subject1")).andReturn(mockMetadata);
  // First getSchemaType() call fakes an UnsupportedOperationException; there is no
  // other obvious way to simulate this failure through the mock. A second call
  // returns the type name used in the error message.
  expect(mockMetadata.getSchemaType())
      .andThrow(new UnsupportedOperationException("testing exception"));
  expect(mockMetadata.getSchemaType()).andReturn("schemaType");
  replay(mockClient, mockMetadata);

  SchemaManager mySchemaManager = new SchemaManagerImpl(mockClient, new TopicNameStrategy());

  // Resolving the latest schema for the subject should surface a 400 Bad Request.
  BadRequestException exception =
      assertThrows(
          BadRequestException.class,
          () ->
              mySchemaManager.getSchema(
                  TOPIC_NAME,
                  /* format= */ Optional.empty(),
                  /* subject= */ Optional.of("subject1"),
                  /* subjectNameStrategy= */ Optional.empty(),
                  /* schemaId= */ Optional.empty(),
                  /* schemaVersion= */ Optional.empty(),
                  /* rawSchema= */ Optional.empty(),
                  /* isKey= */ true));

  assertEquals(
      "Schema subject not supported for schema type = schemaType", exception.getMessage());
  assertEquals(400, exception.getCode());
}
Example use of io.confluent.kafka.serializers.subject.TopicNameStrategy in the kafka-rest project by confluentinc.
From class SchemaManagerImplTest, method getSchemaFromSchemaVersionThrowsInvalidBadRequestException.
@Test
public void getSchemaFromSchemaVersionThrowsInvalidBadRequestException() {
  // Registry returns a schema whose type lookup initially fails.
  SchemaRegistryClient mockClient = mock(SchemaRegistryClient.class);
  Schema mockSchema = mock(Schema.class);
  expect(mockClient.getByVersion("subject1", 0, false)).andReturn(mockSchema);
  // First getSchemaType() call throws; the second returns the type used in the message.
  expect(mockSchema.getSchemaType())
      .andThrow(new UnsupportedOperationException("exception message"));
  expect(mockSchema.getSchemaType()).andReturn("JSON");
  replay(mockClient, mockSchema);

  SchemaManager mySchemaManager = new SchemaManagerImpl(mockClient, new TopicNameStrategy());

  // Fetching schema version 0 for the subject should surface a 400 Bad Request.
  BadRequestException exception =
      assertThrows(
          BadRequestException.class,
          () ->
              mySchemaManager.getSchema(
                  TOPIC_NAME,
                  /* format= */ Optional.empty(),
                  /* subject= */ Optional.of("subject1"),
                  /* subjectNameStrategy= */ Optional.empty(),
                  /* schemaId= */ Optional.empty(),
                  /* schemaVersion= */ Optional.of(0),
                  /* rawSchema= */ Optional.empty(),
                  /* isKey= */ true));

  assertEquals("Schema version not supported for JSON", exception.getMessage());
  assertEquals(400, exception.getCode());
}
Example use of io.confluent.kafka.serializers.subject.TopicNameStrategy in the kafka-rest project by confluentinc.
From class SchemaManagerImplTest, method errorFetchingLatestSchemaBySchemaVersionInvalidSchema.
@Test
public void errorFetchingLatestSchemaBySchemaVersionInvalidSchema()
    throws RestClientException, IOException {
  // Registry reports an AVRO schema whose raw text is not parseable as a valid schema.
  SchemaRegistryClient mockClient = mock(SchemaRegistryClient.class);
  SchemaMetadata mockMetadata = mock(SchemaMetadata.class);
  expect(mockClient.getLatestSchemaMetadata("subject1")).andReturn(mockMetadata);
  expect(mockMetadata.getSchemaType()).andReturn(EmbeddedFormat.AVRO.name());
  expect(mockMetadata.getSchema()).andReturn(TextNode.valueOf("schema").toString());
  expect(mockMetadata.getReferences()).andReturn(emptyList());
  replay(mockClient, mockMetadata);

  SchemaManager mySchemaManager = new SchemaManagerImpl(mockClient, new TopicNameStrategy());

  // Parsing the invalid latest schema should raise a 42205 constraint violation.
  RestConstraintViolationException exception =
      assertThrows(
          RestConstraintViolationException.class,
          () ->
              mySchemaManager.getSchema(
                  TOPIC_NAME,
                  /* format= */ Optional.empty(),
                  /* subject= */ Optional.of("subject1"),
                  /* subjectNameStrategy= */ Optional.empty(),
                  /* schemaId= */ Optional.empty(),
                  /* schemaVersion= */ Optional.empty(),
                  /* rawSchema= */ Optional.empty(),
                  /* isKey= */ true));

  assertEquals(
      "Invalid schema: Error when fetching latest schema version. subject = subject1",
      exception.getMessage());
  assertEquals(42205, exception.getErrorCode());
}
Example use of io.confluent.kafka.serializers.subject.TopicNameStrategy in the kafka-rest project by confluentinc.
From class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaVersionAndSubjectStrategy.
@Test
public void produceJsonschemaWithSchemaVersionAndSubjectStrategy() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();

  // Register key and value JSON schemas under topic-name-strategy subjects.
  JsonSchema keySchema =
      new JsonSchema(
          "{\"type\": \"object\", \"title\": \"MyKey\", \"properties\": {\"foo\": "
              + "{\"type\": \"string\"}}}");
  String keySubject = new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
  SchemaKey keySchemaKey = testEnv.schemaRegistry().createSchema(keySubject, keySchema);

  JsonSchema valueSchema =
      new JsonSchema(
          "{\"type\": \"object\", \"title\": \"MyValue\", \"properties\": {\"bar\": "
              + "{\"type\": \"string\"}}}");
  String valueSubject =
      new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
  SchemaKey valueSchemaKey = testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);

  // Payloads that conform to the registered schemas.
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");

  // Reference the schemas by explicit version + subject-name strategy.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
                  .setSchemaVersion(keySchemaKey.getSchemaVersion())
                  .setData(key)
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
                  .setSchemaVersion(valueSchemaKey.getSchemaVersion())
                  .setData(value)
                  .build())
          .setOriginalSize(0L)
          .build();

  Response response =
      testEnv
          .kafkaRest()
          .target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());

  // Verify the record landed on the topic and round-trips through the deserializer.
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv
          .kafkaCluster()
          .getRecord(
              TOPIC_NAME,
              actual.getPartitionId(),
              actual.getOffset(),
              testEnv.schemaRegistry().createJsonSchemaDeserializer(),
              testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
Example use of io.confluent.kafka.serializers.subject.TopicNameStrategy in the kafka-rest project by confluentinc.
From class ProduceActionIntegrationTest, method produceJsonschemaWithLatestSchemaAndSubjectStrategy.
@Test
public void produceJsonschemaWithLatestSchemaAndSubjectStrategy() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();

  // Register key and value JSON schemas under topic-name-strategy subjects.
  JsonSchema keySchema =
      new JsonSchema(
          "{\"type\": \"object\", \"title\": \"MyKey\", \"properties\": {\"foo\": "
              + "{\"type\": \"string\"}}}");
  String keySubject = new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
  testEnv.schemaRegistry().createSchema(keySubject, keySchema);

  JsonSchema valueSchema =
      new JsonSchema(
          "{\"type\": \"object\", \"title\": \"MyValue\", \"properties\": {\"bar\": "
              + "{\"type\": \"string\"}}}");
  String valueSubject =
      new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
  testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);

  // Payloads that conform to the registered schemas.
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");

  // No explicit schema version: the latest schema for each subject should be used.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
                  .setData(key)
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
                  .setData(value)
                  .build())
          .setOriginalSize(0L)
          .build();

  Response response =
      testEnv
          .kafkaRest()
          .target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());

  // Verify the record landed on the topic and round-trips through the deserializer.
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv
          .kafkaCluster()
          .getRecord(
              TOPIC_NAME,
              actual.getPartitionId(),
              actual.getOffset(),
              testEnv.schemaRegistry().createJsonSchemaDeserializer(),
              testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
Aggregations