Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From class ProduceActionIntegrationTest, method produceProtobufWithLatestSchema.
@Test
public void produceProtobufWithLatestSchema() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  // Register key and value schemas under the topic's default subjects.
  ProtobufSchema keySchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
  testEnv.schemaRegistry().createSchema(DEFAULT_KEY_SUBJECT, keySchema);
  ProtobufSchema valueSchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyValue { string bar = 1; }");
  testEnv.schemaRegistry().createSchema(DEFAULT_VALUE_SUBJECT, valueSchema);
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");
  // No format, schema id, or version is set: the proxy uses the latest schema.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(ProduceRequestData.builder().setData(key).build())
          .setValue(ProduceRequestData.builder().setData(value).build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request().accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  // Consume the record back and compare it against the expected DynamicMessages.
  ConsumerRecord<Message, Message> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
          testEnv.schemaRegistry().createProtobufDeserializer(),
          testEnv.schemaRegistry().createProtobufDeserializer());
  DynamicMessage.Builder expectedKey = DynamicMessage.newBuilder(keySchema.toDescriptor());
  expectedKey.setField(keySchema.toDescriptor().findFieldByName("foo"), "foz");
  DynamicMessage.Builder expectedValue = DynamicMessage.newBuilder(valueSchema.toDescriptor());
  expectedValue.setField(valueSchema.toDescriptor().findFieldByName("bar"), "baz");
  assertEquals(expectedKey.build().toByteString(), produced.key().toByteString());
  assertEquals(expectedValue.build().toByteString(), produced.value().toByteString());
}
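Because neither a format nor a schema id or version is set above, the proxy resolves the latest schema registered under the topic's default subjects. Outside the test harness the same call can be made with any HTTP client. Below is a minimal standalone sketch using java.net.http; the proxy URL, cluster id, and topic name are placeholders, and the body field names ("data" under "key"/"value") are my reading of the v3 produce API's wire format rather than something shown in this test.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ProduceLatestSchemaSketch {
  public static void main(String[] args) throws Exception {
    // No "schema_id"/"schema_version" in the body: the proxy resolves the
    // latest schema registered for the key and value subjects.
    String body = "{\"key\":{\"data\":{\"foo\":\"foz\"}},"
        + "\"value\":{\"data\":{\"bar\":\"baz\"}}}";
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create(
            "http://localhost:8082/v3/clusters/my-cluster/topics/my-topic/records"))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(body))
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    // Expect 200 with a JSON body containing the partition id and offset.
    System.out.println(response.statusCode() + " " + response.body());
  }
}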
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From class ProduceActionIntegrationTest, method produceJsonschemaWithSchemaVersion.
@Test
public void produceJsonschemaWithSchemaVersion() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  SchemaKey keySchema = testEnv.schemaRegistry()
      .createSchema(DEFAULT_KEY_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  SchemaKey valueSchema = testEnv.schemaRegistry()
      .createSchema(DEFAULT_VALUE_SUBJECT, new JsonSchema("{\"type\": \"string\"}"));
  TextNode key = TextNode.valueOf("foo");
  TextNode value = TextNode.valueOf("bar");
  // Pin key and value to an explicit registered schema version instead of the latest.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(ProduceRequestData.builder()
              .setSchemaVersion(keySchema.getSchemaVersion()).setData(key).build())
          .setValue(ProduceRequestData.builder()
              .setSchemaVersion(valueSchema.getSchemaVersion()).setData(value).build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request().accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
          testEnv.schemaRegistry().createJsonSchemaDeserializer(),
          testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
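On the wire, pinning a version amounts to a "schema_version" field next to "data" in each half of the request body. The sketch below builds that body with Jackson; the field names and the version number 1 are assumptions based on the v3 API, not values taken from this test.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class SchemaVersionBodySketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode body = mapper.createObjectNode();
    // "schema_version" pins the registered version; "data" carries the payload.
    body.putObject("key").put("schema_version", 1).put("data", "foo");
    body.putObject("value").put("schema_version", 1).put("data", "bar");
    // Prints:
    // {"key":{"schema_version":1,"data":"foo"},"value":{"schema_version":1,"data":"bar"}}
    System.out.println(mapper.writeValueAsString(body));
  }
}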
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From class ProduceActionIntegrationTest, method produceJsonBatch.
@Test
public void produceJsonBatch() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  // Build 1000 produce requests, each with an explicit JSON embedded format.
  ArrayList<ProduceRequest> requests = new ArrayList<>();
  for (int i = 0; i < 1000; i++) {
    requests.add(
        ProduceRequest.builder()
            .setKey(ProduceRequestData.builder()
                .setFormat(EmbeddedFormat.JSON)
                .setData(TextNode.valueOf("key-" + i)).build())
            .setValue(ProduceRequestData.builder()
                .setFormat(EmbeddedFormat.JSON)
                .setData(TextNode.valueOf("value-" + i)).build())
            .setOriginalSize(0L)
            .build());
  }
  // The batch body is the concatenation of the individual JSON request objects.
  StringBuilder batch = new StringBuilder();
  ObjectMapper objectMapper = testEnv.kafkaRest().getObjectMapper();
  for (ProduceRequest produceRequest : requests) {
    batch.append(objectMapper.writeValueAsString(produceRequest));
  }
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request().accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(batch.toString(), MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  // One ProduceResponse comes back per request, in request order.
  List<ProduceResponse> actual = readProduceResponses(response);
  KafkaJsonDeserializer<Object> deserializer = new KafkaJsonDeserializer<>();
  deserializer.configure(emptyMap(), /* isKey= */ false);
  for (int i = 0; i < 1000; i++) {
    ConsumerRecord<Object, Object> produced =
        testEnv.kafkaCluster().getRecord(
            TOPIC_NAME, actual.get(i).getPartitionId(), actual.get(i).getOffset(),
            deserializer, deserializer);
    assertEquals(
        requests.get(i).getKey().map(ProduceRequestData::getData)
            .map(JsonNode::asText).orElse(null),
        produced.key());
    assertEquals(
        requests.get(i).getValue().map(ProduceRequestData::getData)
            .map(JsonNode::asText).orElse(null),
        produced.value());
  }
}
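readProduceResponses is a helper of the test class and is not shown here. Since the batch endpoint writes one ProduceResponse JSON object per produced record, concatenated in request order, such a helper can be sketched with Jackson's MappingIterator. This is a hypothetical reconstruction, not the project's actual implementation.

import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.confluent.kafkarest.entities.v3.ProduceResponse;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

final class ProduceResponseReader {
  // Reads a stream of concatenated JSON objects into a list, in arrival order.
  static List<ProduceResponse> readAll(ObjectMapper mapper, InputStream body)
      throws Exception {
    List<ProduceResponse> responses = new ArrayList<>();
    try (MappingIterator<ProduceResponse> it =
        mapper.readerFor(ProduceResponse.class).readValues(body)) {
      while (it.hasNextValue()) {
        responses.add(it.nextValue());
      }
    }
    return responses;
  }
}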
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From class ProduceActionIntegrationTest, method produceProtobufWithSchemaIdAndSubject.
@Test
public void produceProtobufWithSchemaIdAndSubject() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  // Register key and value schemas under explicit, non-default subjects.
  ProtobufSchema keySchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
  String keySubject = "my-key-schema";
  SchemaKey keySchemaKey = testEnv.schemaRegistry().createSchema(keySubject, keySchema);
  ProtobufSchema valueSchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyValue { string bar = 1; }");
  String valueSubject = "my-value-schema";
  SchemaKey valueSchemaKey = testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");
  // Each half of the request names its subject and schema id explicitly.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(ProduceRequestData.builder()
              .setSubject(keySubject).setSchemaId(keySchemaKey.getSchemaId())
              .setData(key).build())
          .setValue(ProduceRequestData.builder()
              .setSubject(valueSubject).setSchemaId(valueSchemaKey.getSchemaId())
              .setData(value).build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request().accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Message, Message> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
          testEnv.schemaRegistry().createProtobufDeserializer(),
          testEnv.schemaRegistry().createProtobufDeserializer());
  DynamicMessage.Builder expectedKey = DynamicMessage.newBuilder(keySchema.toDescriptor());
  expectedKey.setField(keySchema.toDescriptor().findFieldByName("foo"), "foz");
  DynamicMessage.Builder expectedValue = DynamicMessage.newBuilder(valueSchema.toDescriptor());
  expectedValue.setField(valueSchema.toDescriptor().findFieldByName("bar"), "baz");
  assertEquals(expectedKey.build().toByteString(), produced.key().toByteString());
  assertEquals(expectedValue.build().toByteString(), produced.value().toByteString());
}
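Unlike the earlier tests, the schemas here live under explicit subjects (my-key-schema, my-value-schema) rather than the topic's default key and value subjects, so each half of the request must carry both the subject and the schema id. The harness's createSchema call hides the registration step; a rough standalone equivalent using the Schema Registry Java client could look like the sketch below, where the registry URL is a placeholder.

import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

public class RegisterExplicitSubjectSketch {
  public static void main(String[] args) throws Exception {
    // The registry URL is a placeholder; 100 is the client's local cache size.
    SchemaRegistryClient client =
        new CachedSchemaRegistryClient("http://localhost:8081", 100);
    ProtobufSchema keySchema =
        new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
    // Register under an explicit subject; the returned id is the schema id
    // that the produce request then refers to.
    int schemaId = client.register("my-key-schema", keySchema);
    System.out.println("registered schema id: " + schemaId);
  }
}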
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From class ProduceActionIntegrationTest, method produceNothing.
@Test
public void produceNothing() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ProduceRequest request = ProduceRequest.builder().setOriginalSize(0L).build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request().accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<byte[], byte[]> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
          new ByteArrayDeserializer(), new ByteArrayDeserializer());
  assertNull(produced.key());
  assertNull(produced.value());
}
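A request with no key and no value still produces a record, and the consumed key and value are both null. Outside the harness, the equivalent call is a POST of an (assumed) empty JSON object, shown here with the same javax.ws.rs client API the test itself uses; the URL, cluster id, and topic name are placeholders.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class ProduceNothingSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    try {
      Response response = client
          .target("http://localhost:8082/v3/clusters/my-cluster/topics/my-topic/records")
          .request(MediaType.APPLICATION_JSON)
          .post(Entity.entity("{}", MediaType.APPLICATION_JSON));
      // Expect 200; the produced record has a null key and a null value.
      System.out.println("status: " + response.getStatus());
    } finally {
      client.close();
    }
  }
}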