Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceProtobufWithLatestSchemaAndSubject.
@Test
public void produceProtobufWithLatestSchemaAndSubject() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ProtobufSchema keySchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
  String keySubject = "my-key-subject";
  testEnv.schemaRegistry().createSchema(keySubject, keySchema);
  ProtobufSchema valueSchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyValue { string bar = 1; }");
  String valueSubject = "my-value-subject";
  testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");
  // With only a subject set (no schema id or version), the REST Proxy looks up
  // the latest schema registered under that subject.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(ProduceRequestData.builder().setSubject(keySubject).setData(key).build())
          .setValue(ProduceRequestData.builder().setSubject(valueSubject).setData(value).build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Message, Message> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME,
          actual.getPartitionId(),
          actual.getOffset(),
          testEnv.schemaRegistry().createProtobufDeserializer(),
          testEnv.schemaRegistry().createProtobufDeserializer());
  DynamicMessage.Builder expectedKey = DynamicMessage.newBuilder(keySchema.toDescriptor());
  expectedKey.setField(keySchema.toDescriptor().findFieldByName("foo"), "foz");
  DynamicMessage.Builder expectedValue = DynamicMessage.newBuilder(valueSchema.toDescriptor());
  expectedValue.setField(valueSchema.toDescriptor().findFieldByName("bar"), "baz");
  assertEquals(expectedKey.build().toByteString(), produced.key().toByteString());
  assertEquals(expectedValue.build().toByteString(), produced.value().toByteString());
}
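For comparison, here is a hedged sketch of the same produce call issued as a raw HTTP request using only the JDK's java.net.http client. The JSON field names ("key", "value", "subject", "data") mirror the builder calls above, but the host, port, cluster id, and topic are placeholders, not values from the test:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ProduceBySubjectSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder coordinates; substitute your REST Proxy address, cluster id, and topic.
    String url = "http://localhost:8082/v3/clusters/my-cluster/topics/my-topic/records";
    // With only a subject given, the proxy resolves the latest registered schema for it.
    String body =
        "{\"key\": {\"subject\": \"my-key-subject\", \"data\": {\"foo\": \"foz\"}},"
            + " \"value\": {\"subject\": \"my-value-subject\", \"data\": {\"bar\": \"baz\"}}}";
    HttpRequest request =
        HttpRequest.newBuilder(URI.create(url))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
    HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode() + " " + response.body());
  }
}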
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceBinaryWithHeaders.
@Test
public void produceBinaryWithHeaders() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ByteString key = ByteString.copyFromUtf8("foo");
  ByteString value = ByteString.copyFromUtf8("bar");
  // Two of the three headers deliberately share the name "header-1":
  // Kafka allows duplicate header names and must preserve both values.
  ProduceRequest request =
      ProduceRequest.builder()
          .setHeaders(
              Arrays.asList(
                  ProduceRequestHeader.create("header-1", ByteString.copyFromUtf8("value-1")),
                  ProduceRequestHeader.create("header-1", ByteString.copyFromUtf8("value-2")),
                  ProduceRequestHeader.create("header-2", ByteString.copyFromUtf8("value-3"))))
          .setKey(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.BINARY)
                  .setData(BinaryNode.valueOf(key.toByteArray()))
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.BINARY)
                  .setData(BinaryNode.valueOf(value.toByteArray()))
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<byte[], byte[]> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME,
          actual.getPartitionId(),
          actual.getOffset(),
          new ByteArrayDeserializer(),
          new ByteArrayDeserializer());
  assertEquals(key, ByteString.copyFrom(produced.key()));
  assertEquals(value, ByteString.copyFrom(produced.value()));
  assertEquals(
      Arrays.asList(
          new RecordHeader("header-1", ByteString.copyFromUtf8("value-1").toByteArray()),
          new RecordHeader("header-1", ByteString.copyFromUtf8("value-2").toByteArray())),
      ImmutableList.copyOf(produced.headers().headers("header-1")));
  assertEquals(
      singletonList(
          new RecordHeader("header-2", ByteString.copyFromUtf8("value-3").toByteArray())),
      ImmutableList.copyOf(produced.headers().headers("header-2")));
}
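One detail hidden by ProduceRequestHeader.create is that header values are binary, so in the JSON body they travel base64-encoded. A small self-contained sketch of encoding the same three headers; the "name"/"value" field names are an assumption about the wire format, inferred from the builder above:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class HeaderEncodingSketch {
  public static void main(String[] args) {
    // Kafka permits duplicate header names, which is why the test expects
    // two surviving entries for "header-1".
    String[][] headers = {
      {"header-1", "value-1"}, {"header-1", "value-2"}, {"header-2", "value-3"}
    };
    StringBuilder json = new StringBuilder("[");
    for (int i = 0; i < headers.length; i++) {
      String encoded =
          Base64.getEncoder().encodeToString(headers[i][1].getBytes(StandardCharsets.UTF_8));
      json.append(i == 0 ? "" : ", ")
          .append("{\"name\": \"").append(headers[i][0])
          .append("\", \"value\": \"").append(encoded).append("\"}");
    }
    System.out.println(json.append("]")); // e.g. "value-1" encodes to "dmFsdWUtMQ=="
  }
}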
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceBinaryAndAvro.
@Test
public void produceBinaryAndAvro() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ByteString key = ByteString.copyFromUtf8("foo");
  String value = "bar";
  // Key and value formats are independent: an opaque binary key next to an
  // Avro value whose schema is supplied inline via setRawSchema.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.BINARY)
                  .setData(BinaryNode.valueOf(key.toByteArray()))
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setFormat(EmbeddedFormat.AVRO)
                  .setRawSchema("{\"type\": \"string\"}")
                  .setData(TextNode.valueOf(value))
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<byte[], Object> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME,
          actual.getPartitionId(),
          actual.getOffset(),
          new ByteArrayDeserializer(),
          testEnv.schemaRegistry().createAvroDeserializer());
  assertEquals(key, ByteString.copyFrom(produced.key()));
  assertEquals(value, produced.value());
}
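The key and value halves of one record are configured independently, which is what this test exercises. A hedged sketch of the JSON body that combination corresponds to; the field names "type" and "schema" are assumptions inferred from the builder calls, and BINARY data is base64 ("Zm9v" being "foo"):

public class MixedFormatBodySketch {
  public static void main(String[] args) {
    // BINARY payloads are base64-encoded; AVRO payloads are plain JSON that
    // must validate against the inline raw schema.
    String body =
        "{\"key\": {\"type\": \"BINARY\", \"data\": \"Zm9v\"},"
            + " \"value\": {\"type\": \"AVRO\","
            + " \"schema\": \"{\\\"type\\\": \\\"string\\\"}\","
            + " \"data\": \"bar\"}}";
    System.out.println(body);
  }
}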
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceJsonschemaWithLatestSchemaAndSubjectStrategy.
@Test
public void produceJsonschemaWithLatestSchemaAndSubjectStrategy() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  JsonSchema keySchema =
      new JsonSchema(
          "{\"type\": \"object\", \"title\": \"MyKey\", \"properties\": {\"foo\": "
              + "{\"type\": \"string\"}}}");
  String keySubject =
      new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
  testEnv.schemaRegistry().createSchema(keySubject, keySchema);
  JsonSchema valueSchema =
      new JsonSchema(
          "{\"type\": \"object\", \"title\": \"MyValue\", \"properties\": {\"bar\": "
              + "{\"type\": \"string\"}}}");
  String valueSubject =
      new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
  testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");
  // No explicit subject: the TOPIC_NAME strategy derives it from the topic,
  // and the latest schema registered under that subject is used.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
                  .setData(key)
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
                  .setData(value)
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME,
          actual.getPartitionId(),
          actual.getOffset(),
          testEnv.schemaRegistry().createJsonSchemaDeserializer(),
          testEnv.schemaRegistry().createJsonSchemaDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
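TopicNameStrategy ignores the schema entirely and derives the subject from the topic name plus a -key or -value suffix, which is why registering under exactly those subjects makes the latest-schema lookup succeed. A quick sketch (TopicNameStrategy is the real class from the schema-registry client; the topic name here is a placeholder):

import io.confluent.kafka.serializers.subject.TopicNameStrategy;

public class TopicNameStrategySketch {
  public static void main(String[] args) {
    TopicNameStrategy strategy = new TopicNameStrategy();
    // The schema argument is ignored by this strategy, so null should be acceptable here.
    System.out.println(strategy.subjectName("topic-1", /* isKey= */ true, null)); // topic-1-key
    System.out.println(strategy.subjectName("topic-1", /* isKey= */ false, null)); // topic-1-value
  }
}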
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
From the class ProduceActionIntegrationTest, method produceProtobufWithSchemaIdAndSubjectStrategy.
@Test
public void produceProtobufWithSchemaIdAndSubjectStrategy() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ProtobufSchema keySchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
  String keySubject =
      new RecordNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
  SchemaKey keySchemaKey = testEnv.schemaRegistry().createSchema(keySubject, keySchema);
  ProtobufSchema valueSchema =
      new ProtobufSchema("syntax = \"proto3\"; message MyValue { string bar = 1; }");
  String valueSubject =
      new RecordNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
  SchemaKey valueSchemaKey = testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");
  // Subjects are derived with the RECORD_NAME strategy, and the exact schema
  // version is pinned by id rather than resolved as the latest.
  ProduceRequest request =
      ProduceRequest.builder()
          .setKey(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.RECORD_NAME)
                  .setSchemaId(keySchemaKey.getSchemaId())
                  .setData(key)
                  .build())
          .setValue(
              ProduceRequestData.builder()
                  .setSubjectNameStrategy(EnumSubjectNameStrategy.RECORD_NAME)
                  .setSchemaId(valueSchemaKey.getSchemaId())
                  .setData(value)
                  .build())
          .setOriginalSize(0L)
          .build();
  Response response =
      testEnv.kafkaRest().target()
          .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
          .request()
          .accept(MediaType.APPLICATION_JSON)
          .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Message, Message> produced =
      testEnv.kafkaCluster().getRecord(
          TOPIC_NAME,
          actual.getPartitionId(),
          actual.getOffset(),
          testEnv.schemaRegistry().createProtobufDeserializer(),
          testEnv.schemaRegistry().createProtobufDeserializer());
  DynamicMessage.Builder expectedKey = DynamicMessage.newBuilder(keySchema.toDescriptor());
  expectedKey.setField(keySchema.toDescriptor().findFieldByName("foo"), "foz");
  DynamicMessage.Builder expectedValue = DynamicMessage.newBuilder(valueSchema.toDescriptor());
  expectedValue.setField(valueSchema.toDescriptor().findFieldByName("bar"), "baz");
  assertEquals(expectedKey.build().toByteString(), produced.key().toByteString());
  assertEquals(expectedValue.build().toByteString(), produced.value().toByteString());
}
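Unlike the latest-schema tests above, this one pins the exact schema with setSchemaId, and RecordNameStrategy derives subjects from the fully qualified message names ("MyKey" and "MyValue" here, since the .proto snippets declare no package). A hedged sketch of the equivalent JSON body; the field names "subject_name_strategy" and "schema_id" are assumptions mirroring the builder calls, and the ids are placeholders for whatever createSchema returned:

public class SchemaIdBodySketch {
  public static void main(String[] args) {
    int keySchemaId = 1; // placeholder for keySchemaKey.getSchemaId()
    int valueSchemaId = 2; // placeholder for valueSchemaKey.getSchemaId()
    String body =
        "{\"key\": {\"subject_name_strategy\": \"RECORD_NAME\","
            + " \"schema_id\": " + keySchemaId + ", \"data\": {\"foo\": \"foz\"}},"
            + " \"value\": {\"subject_name_strategy\": \"RECORD_NAME\","
            + " \"schema_id\": " + valueSchemaId + ", \"data\": {\"bar\": \"baz\"}}}";
    System.out.println(body);
  }
}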