Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Class ProduceActionIntegrationTest, method produceJsonschemaWithRawSchemaAndNullData.
@Test
public void produceJsonschemaWithRawSchemaAndNullData() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.JSONSCHEMA)
                  .setRawSchema("{\"type\": \"string\"}")
                  .setData(NullNode.getInstance())
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.JSONSCHEMA)
                  .setRawSchema("{\"type\": \"string\"}")
                  .setData(NullNode.getInstance())
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest()
          .target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv.kafkaCluster()
          .getRecord(
              TOPIC_NAME,
              actual.getPartitionId(),
              actual.getOffset(),
              testEnv.schemaRegistry().createJsonSchemaDeserializer(),
              testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertNull(produced.key());
  assertNull(produced.value());
}
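The test relies on a readProduceResponse helper that is not part of this excerpt. A minimal sketch of such a helper, assuming the endpoint returns a single ProduceResponse JSON object (the name and return type are taken from the call sites above, not from the helper's actual source):

// Hypothetical helper, not shown in this excerpt: reads the v3 ProduceResponse
// from the HTTP response body, assuming one JSON object per response.
private static ProduceResponse readProduceResponse(Response response) {
  response.bufferEntity();
  return response.readEntity(ProduceResponse.class);
}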
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Class ProduceActionIntegrationTest, method produceAvroWithSchemaVersion.
@Test
public void produceAvroWithSchemaVersion() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  SchemaKey keySchema =
      testEnv.schemaRegistry()
          .createSchema(DEFAULT_KEY_SUBJECT, new AvroSchema("{\"type\": \"string\"}"));
  SchemaKey valueSchema =
      testEnv.schemaRegistry()
          .createSchema(DEFAULT_VALUE_SUBJECT, new AvroSchema("{\"type\": \"string\"}"));
  String key = "foo";
  String value = "bar";
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setSchemaVersion(keySchema.getSchemaVersion())
                  .setData(TextNode.valueOf(key))
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setSchemaVersion(valueSchema.getSchemaVersion())
                  .setData(TextNode.valueOf(value))
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest()
          .target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv.kafkaCluster()
          .getRecord(
              TOPIC_NAME,
              actual.getPartitionId(),
              actual.getOffset(),
              testEnv.schemaRegistry().createAvroDeserializer(),
              testEnv.schemaRegistry().createAvroDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
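DEFAULT_KEY_SUBJECT and DEFAULT_VALUE_SUBJECT are defined elsewhere in ProduceActionIntegrationTest. A plausible sketch of those constants, assuming Schema Registry's usual TopicNameStrategy naming (an assumption; the real definitions are not shown in this excerpt):

// Assumption: subjects follow the "<topic>-key" / "<topic>-value" convention
// of Schema Registry's TopicNameStrategy.
private static final String DEFAULT_KEY_SUBJECT = TOPIC_NAME + "-key";
private static final String DEFAULT_VALUE_SUBJECT = TOPIC_NAME + "-value";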
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Class ProduceActionIntegrationTest, method produceProtobufWithRawSchemaAndNullData.
@Test
public void produceProtobufWithRawSchemaAndNullData() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ProtobufSchema keySchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
  ProtobufSchema valueSchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyValue { string bar = 1; }");
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.PROTOBUF)
                  .setRawSchema(keySchema.canonicalString())
                  .setData(NullNode.getInstance())
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.PROTOBUF)
                  .setRawSchema(valueSchema.canonicalString())
                  .setData(NullNode.getInstance())
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest()
          .target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Message, Message> produced =
      testEnv.kafkaCluster()
          .getRecord(
              TOPIC_NAME,
              actual.getPartitionId(),
              actual.getOffset(),
              testEnv.schemaRegistry().createProtobufDeserializer(),
              testEnv.schemaRegistry().createProtobufDeserializer());
  assertNull(produced.key());
  assertNull(produced.value());
}
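For contrast with the null payload above, a hypothetical non-null variant would pass an ObjectNode whose JSON fields match the Protobuf message fields. A sketch for the key side, under the same MyKey schema (the field value "hello" is illustrative only):

// Hypothetical non-null key payload for the MyKey schema above;
// the JSON field name must match the Protobuf field ("foo").
ObjectNode keyData = new ObjectNode(JsonNodeFactory.instance);
keyData.put("foo", "hello");
ProduceRequestData key =
    ProduceRequestData.builder()
        .setFormat(EmbeddedFormat.PROTOBUF)
        .setRawSchema(keySchema.canonicalString())
        .setData(keyData)
        .build();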
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Class ProduceActionIntegrationTest, method produceAvroWithLatestSchemaAndSubject.
@Test
public void produceAvroWithLatestSchemaAndSubject() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  testEnv.schemaRegistry()
      .createSchema("my-key-subject", new AvroSchema("{\"type\": \"string\"}"));
  testEnv.schemaRegistry()
      .createSchema("my-value-subject", new AvroSchema("{\"type\": \"string\"}"));
  String key = "foo";
  String value = "bar";
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setSubject("my-key-subject")
                  .setData(TextNode.valueOf(key))
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setSubject("my-value-subject")
                  .setData(TextNode.valueOf(value))
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest()
          .target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv.kafkaCluster()
          .getRecord(
              TOPIC_NAME,
              actual.getPartitionId(),
              actual.getOffset(),
              testEnv.schemaRegistry().createAvroDeserializer(),
              testEnv.schemaRegistry().createAvroDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
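Because the request carries only a subject and no schema version, the proxy resolves the latest schema registered under that subject. For reference, a sketch of the raw JSON body this request would roughly serialize to (the field names "subject" and "data" are an assumption about the v3 produce-record wire format, which this excerpt does not show):

// Assumed equivalent raw JSON body; field names are an assumption.
String rawBody =
    "{\"key\":{\"subject\":\"my-key-subject\",\"data\":\"foo\"},"
        + "\"value\":{\"subject\":\"my-value-subject\",\"data\":\"bar\"}}";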
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Class ProduceActionIntegrationTest, method produceBinaryWithPartitionId.
@Test
public void produceBinaryWithPartitionId() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  int partitionId = 1;
  ByteString key = ByteString.copyFromUtf8("foo");
  ByteString value = ByteString.copyFromUtf8("bar");
  ProduceRequest request =
      ProduceRequest.builder()
          .setPartitionId(partitionId)
          .setKey(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.BINARY)
                  .setData(BinaryNode.valueOf(key.toByteArray()))
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.BINARY)
                  .setData(BinaryNode.valueOf(value.toByteArray()))
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest()
          .target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<byte[], byte[]> produced =
      testEnv.kafkaCluster()
          .getRecord(
              TOPIC_NAME,
              partitionId,
              actual.getOffset(),
              new ByteArrayDeserializer(),
              new ByteArrayDeserializer());
  assertEquals(key, ByteString.copyFrom(produced.key()));
  assertEquals(value, ByteString.copyFrom(produced.value()));
}
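Since the request pins partition 1, a natural extra check is that the response echoes the same partition. A small addition, assuming getPartitionId() reports the partition actually written (the other tests in this class rely on the same method to locate records):

// Optional extra assertion: the broker honored the requested partition.
assertEquals(partitionId, actual.getPartitionId());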