Use of io.confluent.kafka.schemaregistry.json.JsonSchema in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaIdAndSubjectStrategy.
@Test
public void produceJsonschemaWithSchemaIdAndSubjectStrategy() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  JsonSchema keySchema = new JsonSchema(
      "{\"type\": \"object\", \"title\": \"MyKey\", \"properties\": {\"foo\": {\"type\": \"string\"}}}");
  String keySubject = new RecordNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
  SchemaKey keySchemaKey = testEnv.schemaRegistry().createSchema(keySubject, keySchema);
  JsonSchema valueSchema = new JsonSchema(
      "{\"type\": \"object\", \"title\": \"MyValue\", \"properties\": {\"bar\": {\"type\": \"string\"}}}");
  String valueSubject = new RecordNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
  SchemaKey valueSchemaKey = testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder()
          .setSubjectNameStrategy(EnumSubjectNameStrategy.RECORD_NAME)
          .setSchemaId(keySchemaKey.getSchemaId())
          .setData(key)
          .build())
      .setValue(ProduceRequestData.builder()
          .setSubjectNameStrategy(EnumSubjectNameStrategy.RECORD_NAME)
          .setSchemaId(valueSchemaKey.getSchemaId())
          .setData(value)
          .build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request().accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
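For context, RecordNameStrategy derives the subject from the schema itself rather than from the topic; for a JSON Schema this is its "title" property. A minimal sketch of what the two subjectName calls above should resolve to (the expected values are inferred from the schema titles, not asserted in the original test):

// Sketch: RecordNameStrategy ignores the topic name and uses the schema's
// name, which for JsonSchema is the "title" property.
RecordNameStrategy strategy = new RecordNameStrategy();
assertEquals("MyKey", strategy.subjectName(TOPIC_NAME, /* isKey= */ true, keySchema));
assertEquals("MyValue", strategy.subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema));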
Use of io.confluent.kafka.schemaregistry.json.JsonSchema in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaVersionAndSubject.
@Test
public void produceJsonschemaWithSchemaVersionAndSubject() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  SchemaKey keySchema =
      testEnv.schemaRegistry().createSchema("my-key-subject", new JsonSchema("{\"type\": \"string\"}"));
  SchemaKey valueSchema =
      testEnv.schemaRegistry().createSchema("my-value-subject", new JsonSchema("{\"type\": \"string\"}"));
  TextNode key = TextNode.valueOf("foo");
  TextNode value = TextNode.valueOf("bar");
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder()
          .setSubject("my-key-subject").setSchemaVersion(keySchema.getSchemaVersion()).setData(key).build())
      .setValue(ProduceRequestData.builder()
          .setSubject("my-value-subject").setSchemaVersion(valueSchema.getSchemaVersion()).setData(value).build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request().accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
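On the wire, the ProduceRequest above serializes to JSON along roughly these lines. This is a hedged sketch: the snake_case field names are assumed from the v3 produce API's JSON bindings, and schema_version is 1 only if these are the first schemas registered under each subject.

// Approximate request body POSTed to /v3/clusters/{clusterId}/topics/{topic}/records.
String approximateBody =
    "{"
        + "\"key\": {\"subject\": \"my-key-subject\", \"schema_version\": 1, \"data\": \"foo\"},"
        + "\"value\": {\"subject\": \"my-value-subject\", \"schema_version\": 1, \"data\": \"bar\"}"
        + "}";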
Use of io.confluent.kafka.schemaregistry.json.JsonSchema in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceJsonschemaWithLatestSchema.
@Test
public void produceJsonschemaWithLatestSchema() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  testEnv.schemaRegistry().createSchema(DEFAULT_KEY_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  testEnv.schemaRegistry().createSchema(DEFAULT_VALUE_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  TextNode key = TextNode.valueOf("foo");
  TextNode value = TextNode.valueOf("bar");
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder().setData(key).build())
      .setValue(ProduceRequestData.builder().setData(value).build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request().accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
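This variant sends no schema information at all, so the server falls back to the latest schema registered under the default subjects. A sketch of the equivalent client-side lookup, assuming the default subjects follow TopicNameStrategy ("<topic>-key" / "<topic>-value", which is presumably how DEFAULT_KEY_SUBJECT and DEFAULT_VALUE_SUBJECT are defined) and that a SchemaRegistryClient named schemaRegistryClient is available:

// Sketch: resolve the latest registered schema the way the server is
// expected to when a request carries neither schema_id nor schema_version.
SchemaMetadata latestKey = schemaRegistryClient.getLatestSchemaMetadata(TOPIC_NAME + "-key");
SchemaMetadata latestValue = schemaRegistryClient.getLatestSchemaMetadata(TOPIC_NAME + "-value");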
Use of io.confluent.kafka.schemaregistry.json.JsonSchema in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaVersion.
@Test
public void produceJsonschemaWithSchemaVersion() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  SchemaKey keySchema =
      testEnv.schemaRegistry().createSchema(DEFAULT_KEY_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  SchemaKey valueSchema =
      testEnv.schemaRegistry().createSchema(DEFAULT_VALUE_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  TextNode key = TextNode.valueOf("foo");
  TextNode value = TextNode.valueOf("bar");
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder()
          .setSchemaVersion(keySchema.getSchemaVersion()).setData(key).build())
      .setValue(ProduceRequestData.builder()
          .setSchemaVersion(valueSchema.getSchemaVersion()).setData(value).build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request().accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer(),
      testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
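Here only the schema version is supplied, so the subject defaults to the topic-based one and the schema is resolved by (subject, version). A sketch of the equivalent direct lookup, again assuming a SchemaRegistryClient named schemaRegistryClient (not part of the original test):

// Sketch: fetch a schema by subject and version, mirroring the lookup the
// server performs for a request carrying schema_version but no schema_id.
SchemaMetadata keyMeta =
    schemaRegistryClient.getSchemaMetadata(DEFAULT_KEY_SUBJECT, keySchema.getSchemaVersion());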
Use of io.confluent.kafka.schemaregistry.json.JsonSchema in project kafka-rest by confluentinc.
From the class RecordSerializerFacadeTest, method serializeIntJsonschemaKey_returnsSerialized.
@Test
public void serializeIntJsonschemaKey_returnsSerialized() throws Exception {
  JsonSchema schema = new JsonSchema("{\"type\": \"integer\"}");
  String subject = SUBJECT_NAME_STRATEGY.subjectName(TOPIC_NAME, /* isKey= */ true, schema);
  int schemaId = schemaRegistryClient.register(subject, schema);
  ByteString serialized =
      recordSerializer
          .serialize(
              EmbeddedFormat.JSONSCHEMA,
              TOPIC_NAME,
              Optional.of(RegisteredSchema.create(subject, schemaId, SCHEMA_VERSION, schema)),
              IntNode.valueOf(123),
              /* isKey= */ true)
          .get();
  KafkaJsonSchemaDeserializer<Integer> deserializer = new KafkaJsonSchemaDeserializer<>();
  deserializer.configure(SCHEMA_SERIALIZER_CONFIGS, /* isKey= */ true);
  Object deserialized = deserializer.deserialize(TOPIC_NAME, serialized.toByteArray());
  assertEquals(123, deserialized);
}
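The serialized bytes follow the standard Confluent wire format, so a check like the following should also hold. This is a sketch, not part of the original test:

// Sketch: Confluent wire format = magic byte 0x0, then the schema id as a
// 4-byte big-endian int, then the payload (here, the JSON-encoded value).
byte[] bytes = serialized.toByteArray();
assertEquals(0x0, bytes[0]);
assertEquals(schemaId, java.nio.ByteBuffer.wrap(bytes, 1, 4).getInt());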