Search in sources:

Example 1 with JsonSchema

Use of io.confluent.kafka.schemaregistry.json.JsonSchema in the kafka-rest project by confluentinc.

From the class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaIdAndSubjectStrategy.

@Test
public void produceJsonschemaWithSchemaIdAndSubjectStrategy() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();

    // Register key and value object schemas; the subject name for each is derived
    // from the schema's record name (RecordNameStrategy), not from the topic.
    JsonSchema keySchema =
        new JsonSchema(
            "{\"type\": \"object\", \"title\": \"MyKey\", \"properties\": {\"foo\": {\"type\": \"string\"}}}");
    String keySubject =
        new RecordNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
    SchemaKey keySchemaKey = testEnv.schemaRegistry().createSchema(keySubject, keySchema);

    JsonSchema valueSchema =
        new JsonSchema(
            "{\"type\": \"object\", \"title\": \"MyValue\", \"properties\": {\"bar\": {\"type\": \"string\"}}}");
    String valueSubject =
        new RecordNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
    SchemaKey valueSchemaKey = testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);

    // Payloads conforming to the schemas above.
    ObjectNode keyNode = new ObjectNode(JsonNodeFactory.instance);
    keyNode.put("foo", "foz");
    ObjectNode valueNode = new ObjectNode(JsonNodeFactory.instance);
    valueNode.put("bar", "baz");

    // The request references the registered schemas by id together with the
    // RECORD_NAME subject-name strategy.
    ProduceRequestData keyData =
        ProduceRequestData.builder()
            .setSubjectNameStrategy(EnumSubjectNameStrategy.RECORD_NAME)
            .setSchemaId(keySchemaKey.getSchemaId())
            .setData(keyNode)
            .build();
    ProduceRequestData valueData =
        ProduceRequestData.builder()
            .setSubjectNameStrategy(EnumSubjectNameStrategy.RECORD_NAME)
            .setSchemaId(valueSchemaKey.getSchemaId())
            .setData(valueNode)
            .build();
    ProduceRequest request =
        ProduceRequest.builder()
            .setKey(keyData)
            .setValue(valueData)
            .setOriginalSize(0L)
            .build();

    Response httpResponse =
        testEnv.kafkaRest()
            .target()
            .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
            .request()
            .accept(MediaType.APPLICATION_JSON)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), httpResponse.getStatus());

    // Round-trip: consume the produced record and verify it deserializes back to
    // the original key/value payloads.
    ProduceResponse produceResponse = readProduceResponse(httpResponse);
    ConsumerRecord<Object, Object> consumed =
        testEnv.kafkaCluster()
            .getRecord(
                TOPIC_NAME,
                produceResponse.getPartitionId(),
                produceResponse.getOffset(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer());
    assertEquals(keyNode, consumed.key());
    assertEquals(valueNode, consumed.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) JsonSchema(io.confluent.kafka.schemaregistry.json.JsonSchema) ByteString(com.google.protobuf.ByteString) RecordNameStrategy(io.confluent.kafka.serializers.subject.RecordNameStrategy) SchemaKey(io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey) Test(org.junit.jupiter.api.Test)

Example 2 with JsonSchema

Use of io.confluent.kafka.schemaregistry.json.JsonSchema in the kafka-rest project by confluentinc.

From the class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaVersionAndSubject.

@Test
public void produceJsonschemaWithSchemaVersionAndSubject() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();

    // Register a string schema under explicit key and value subjects.
    SchemaKey keySchema =
        testEnv.schemaRegistry()
            .createSchema("my-key-subject", new JsonSchema("{\"type\": \"string\"}"));
    SchemaKey valueSchema =
        testEnv.schemaRegistry()
            .createSchema("my-value-subject", new JsonSchema("{\"type\": \"string\"}"));

    TextNode keyNode = TextNode.valueOf("foo");
    TextNode valueNode = TextNode.valueOf("bar");

    // Reference the registered schemas by subject + version rather than by schema id.
    ProduceRequestData keyData =
        ProduceRequestData.builder()
            .setSubject("my-key-subject")
            .setSchemaVersion(keySchema.getSchemaVersion())
            .setData(keyNode)
            .build();
    ProduceRequestData valueData =
        ProduceRequestData.builder()
            .setSubject("my-value-subject")
            .setSchemaVersion(valueSchema.getSchemaVersion())
            .setData(valueNode)
            .build();
    ProduceRequest request =
        ProduceRequest.builder()
            .setKey(keyData)
            .setValue(valueData)
            .setOriginalSize(0L)
            .build();

    Response httpResponse =
        testEnv.kafkaRest()
            .target()
            .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
            .request()
            .accept(MediaType.APPLICATION_JSON)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), httpResponse.getStatus());

    // Round-trip: the consumed record must deserialize back to the original values.
    ProduceResponse produceResponse = readProduceResponse(httpResponse);
    ConsumerRecord<Object, Object> consumed =
        testEnv.kafkaCluster()
            .getRecord(
                TOPIC_NAME,
                produceResponse.getPartitionId(),
                produceResponse.getOffset(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer());
    assertEquals(keyNode, consumed.key());
    assertEquals(valueNode, consumed.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) JsonSchema(io.confluent.kafka.schemaregistry.json.JsonSchema) TextNode(com.fasterxml.jackson.databind.node.TextNode) ByteString(com.google.protobuf.ByteString) SchemaKey(io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey) Test(org.junit.jupiter.api.Test)

Example 3 with JsonSchema

Use of io.confluent.kafka.schemaregistry.json.JsonSchema in the kafka-rest project by confluentinc.

From the class ProduceActionIntegrationTest, method produceJsonschemaWithLatestSchema.

@Test
public void produceJsonschemaWithLatestSchema() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();

    // Register string schemas under the default subjects; the request below names
    // neither schema id nor version, so the latest registered schema is used.
    testEnv.schemaRegistry()
        .createSchema(DEFAULT_KEY_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
    testEnv.schemaRegistry()
        .createSchema(DEFAULT_VALUE_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));

    TextNode keyNode = TextNode.valueOf("foo");
    TextNode valueNode = TextNode.valueOf("bar");

    ProduceRequest request =
        ProduceRequest.builder()
            .setKey(ProduceRequestData.builder().setData(keyNode).build())
            .setValue(ProduceRequestData.builder().setData(valueNode).build())
            .setOriginalSize(0L)
            .build();

    Response httpResponse =
        testEnv.kafkaRest()
            .target()
            .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
            .request()
            .accept(MediaType.APPLICATION_JSON)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), httpResponse.getStatus());

    // Round-trip: the consumed record must deserialize back to the original values.
    ProduceResponse produceResponse = readProduceResponse(httpResponse);
    ConsumerRecord<Object, Object> consumed =
        testEnv.kafkaCluster()
            .getRecord(
                TOPIC_NAME,
                produceResponse.getPartitionId(),
                produceResponse.getOffset(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer());
    assertEquals(keyNode, consumed.key());
    assertEquals(valueNode, consumed.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) JsonSchema(io.confluent.kafka.schemaregistry.json.JsonSchema) TextNode(com.fasterxml.jackson.databind.node.TextNode) ByteString(com.google.protobuf.ByteString) Test(org.junit.jupiter.api.Test)

Example 4 with JsonSchema

Use of io.confluent.kafka.schemaregistry.json.JsonSchema in the kafka-rest project by confluentinc.

From the class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaVersion.

@Test
public void produceJsonschemaWithSchemaVersion() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();

    // Register string schemas under the default subjects and keep their keys so
    // the request can pin the exact schema versions.
    SchemaKey keySchema =
        testEnv.schemaRegistry()
            .createSchema(DEFAULT_KEY_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
    SchemaKey valueSchema =
        testEnv.schemaRegistry()
            .createSchema(DEFAULT_VALUE_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));

    TextNode keyNode = TextNode.valueOf("foo");
    TextNode valueNode = TextNode.valueOf("bar");

    // Reference the schemas by version only; the subject falls back to the default.
    ProduceRequestData keyData =
        ProduceRequestData.builder()
            .setSchemaVersion(keySchema.getSchemaVersion())
            .setData(keyNode)
            .build();
    ProduceRequestData valueData =
        ProduceRequestData.builder()
            .setSchemaVersion(valueSchema.getSchemaVersion())
            .setData(valueNode)
            .build();
    ProduceRequest request =
        ProduceRequest.builder()
            .setKey(keyData)
            .setValue(valueData)
            .setOriginalSize(0L)
            .build();

    Response httpResponse =
        testEnv.kafkaRest()
            .target()
            .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
            .request()
            .accept(MediaType.APPLICATION_JSON)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), httpResponse.getStatus());

    // Round-trip: the consumed record must deserialize back to the original values.
    ProduceResponse produceResponse = readProduceResponse(httpResponse);
    ConsumerRecord<Object, Object> consumed =
        testEnv.kafkaCluster()
            .getRecord(
                TOPIC_NAME,
                produceResponse.getPartitionId(),
                produceResponse.getOffset(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer(),
                testEnv.schemaRegistry().createJsonSchemaDeserializer());
    assertEquals(keyNode, consumed.key());
    assertEquals(valueNode, consumed.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) JsonSchema(io.confluent.kafka.schemaregistry.json.JsonSchema) TextNode(com.fasterxml.jackson.databind.node.TextNode) ByteString(com.google.protobuf.ByteString) SchemaKey(io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey) Test(org.junit.jupiter.api.Test)

Example 5 with JsonSchema

Use of io.confluent.kafka.schemaregistry.json.JsonSchema in the kafka-rest project by confluentinc.

From the class RecordSerializerFacadeTest, method serializeIntJsonschemaKey_returnsSerialized.

@Test
public void serializeIntJsonschemaKey_returnsSerialized() throws Exception {
    // Register an integer schema under the subject derived from the key strategy.
    JsonSchema schema = new JsonSchema("{\"type\": \"integer\"}");
    String subject = SUBJECT_NAME_STRATEGY.subjectName(TOPIC_NAME, /* isKey= */ true, schema);
    int schemaId = schemaRegistryClient.register(subject, schema);

    // Serialize the integer 123 as a JSON Schema-encoded key.
    ByteString serialized =
        recordSerializer
            .serialize(
                EmbeddedFormat.JSONSCHEMA,
                TOPIC_NAME,
                Optional.of(RegisteredSchema.create(subject, schemaId, SCHEMA_VERSION, schema)),
                IntNode.valueOf(123),
                /* isKey= */ true)
            .get();

    // Deserializing with the standard Confluent deserializer must yield the input.
    KafkaJsonSchemaDeserializer<Integer> deserializer = new KafkaJsonSchemaDeserializer<>();
    deserializer.configure(SCHEMA_SERIALIZER_CONFIGS, /* isKey= */ true);
    Object roundTripped = deserializer.deserialize(TOPIC_NAME, serialized.toByteArray());
    assertEquals(123, roundTripped);
}
Also used : KafkaJsonSchemaDeserializer(io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer) ByteString(com.google.protobuf.ByteString) JsonSchema(io.confluent.kafka.schemaregistry.json.JsonSchema) ByteString(com.google.protobuf.ByteString) Test(org.junit.jupiter.api.Test)

Aggregations

JsonSchema (io.confluent.kafka.schemaregistry.json.JsonSchema)61 Test (org.junit.jupiter.api.Test)27 ByteString (com.google.protobuf.ByteString)25 Schema (org.apache.kafka.connect.data.Schema)18 ConnectSchema (org.apache.kafka.connect.data.ConnectSchema)16 ArraySchema (org.everit.json.schema.ArraySchema)16 BooleanSchema (org.everit.json.schema.BooleanSchema)16 CombinedSchema (org.everit.json.schema.CombinedSchema)16 EnumSchema (org.everit.json.schema.EnumSchema)16 NullSchema (org.everit.json.schema.NullSchema)16 NumberSchema (org.everit.json.schema.NumberSchema)16 ObjectSchema (org.everit.json.schema.ObjectSchema)16 StringSchema (org.everit.json.schema.StringSchema)16 Test (org.junit.Test)16 KafkaJsonSchemaDeserializer (io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer)12 JsonNode (com.fasterxml.jackson.databind.JsonNode)9 ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode)9 ProduceRequest (io.confluent.kafkarest.entities.v3.ProduceRequest)9 ProduceResponse (io.confluent.kafkarest.entities.v3.ProduceResponse)9 ErrorResponse (io.confluent.kafkarest.exceptions.v3.ErrorResponse)9