Use of io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey in project kafka-rest by confluentinc: class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaId. The test registers JSON Schemas for the key and value, produces a record that references them by schema ID, and verifies the record round-trips through the JSON Schema deserializer.
@Test
public void produceJsonschemaWithSchemaId() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  // Register a JSON Schema for both the key and the value subjects.
  SchemaKey keySchema =
      testEnv.schemaRegistry().createSchema(DEFAULT_KEY_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  SchemaKey valueSchema =
      testEnv.schemaRegistry().createSchema(DEFAULT_VALUE_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  TextNode key = TextNode.valueOf("foo");
  TextNode value = TextNode.valueOf("bar");
  // Reference the registered schemas by ID instead of embedding raw schemas in the request.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(ProduceRequestData.builder()
              .setSchemaId(keySchema.getSchemaId())
              .setData(key)
              .build())
          .setValue(ProduceRequestData.builder()
              .setSchemaId(valueSchema.getSchemaId())
              .setData(value)
              .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  // Consume the record back and verify the key and value round-trip.
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
          testEnv.schemaRegistry().createJsonSchemaDeserializer(),
          testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
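For reference, the ProduceRequest above serializes to roughly the following v3 produce payload. This is a sketch assuming the standard v3 field names (schema_id, data); the schema IDs shown are illustrative, since the real values are assigned by Schema Registry when createSchema registers each schema.

  {
    "key":   {"schema_id": 1, "data": "foo"},
    "value": {"schema_id": 2, "data": "bar"}
  }

Passing schema_id alone is enough for the REST Proxy to resolve the schema (and hence the serialization format) from Schema Registry before producing.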
Use of io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey in project kafka-rest by confluentinc: class ProduceActionIntegrationTest, method produceAvroWithSchemaVersionAndSubject. The test registers Avro schemas under explicit subjects and references them by subject and schema version rather than by schema ID.
@Test
public void produceAvroWithSchemaVersionAndSubject() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  // Register Avro schemas under explicit key and value subjects.
  SchemaKey keySchema =
      testEnv.schemaRegistry().createSchema("my-key-subject", new AvroSchema("{\"type\": \"string\"}"));
  SchemaKey valueSchema =
      testEnv.schemaRegistry().createSchema("my-value-subject", new AvroSchema("{\"type\": \"string\"}"));
  String key = "foo";
  String value = "bar";
  // Look up the schemas by subject and version instead of by schema ID.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(ProduceRequestData.builder()
              .setSubject("my-key-subject")
              .setSchemaVersion(keySchema.getSchemaVersion())
              .setData(TextNode.valueOf(key))
              .build())
          .setValue(ProduceRequestData.builder()
              .setSubject("my-value-subject")
              .setSchemaVersion(valueSchema.getSchemaVersion())
              .setData(TextNode.valueOf(value))
              .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  // Consume the record back with Avro deserializers and verify the round-trip.
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
          testEnv.schemaRegistry().createAvroDeserializer(),
          testEnv.schemaRegistry().createAvroDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
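By analogy with the schema-ID variant, this request serializes to roughly the following payload, assuming the v3 field names subject and schema_version; the version numbers are illustrative, since actual versions are assigned by Schema Registry at registration time.

  {
    "key":   {"subject": "my-key-subject", "schema_version": 1, "data": "foo"},
    "value": {"subject": "my-value-subject", "schema_version": 1, "data": "bar"}
  }

Identifying schemas by subject and version is useful when clients pin to a known version of a subject rather than tracking globally unique schema IDs.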