Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Example from the class ProduceActionIntegrationTest, method produceAvroWithSchemaId.
@Test
public void produceAvroWithSchemaId() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  // Register plain string schemas so the request can reference them by id.
  SchemaKey keySchema = testEnv.schemaRegistry()
      .createSchema(DEFAULT_KEY_SUBJECT, new AvroSchema("{\"type\": \"string\"}"));
  SchemaKey valueSchema = testEnv.schemaRegistry()
      .createSchema(DEFAULT_VALUE_SUBJECT, new AvroSchema("{\"type\": \"string\"}"));
  String key = "foo";
  String value = "bar";
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder()
          .setSchemaId(keySchema.getSchemaId())
          .setData(TextNode.valueOf(key))
          .build())
      .setValue(ProduceRequestData.builder()
          .setSchemaId(valueSchema.getSchemaId())
          .setData(TextNode.valueOf(value))
          .build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request()
      .accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  // Consume the record back from the cluster and verify the Avro round trip.
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME,
      actual.getPartitionId(),
      actual.getOffset(),
      testEnv.schemaRegistry().createAvroDeserializer(),
      testEnv.schemaRegistry().createAvroDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
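For reference, the JSON body this builder serializes to looks roughly like the sketch below. The schema ids 1 and 2 are placeholders for whatever ids the registry assigned in createSchema above, and the field names follow the v3 Produce API; treat this as an illustration rather than a verbatim capture:

{
  "key":   { "schema_id": 1, "data": "foo" },
  "value": { "schema_id": 2, "data": "bar" }
}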
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Example from the class ProduceActionIntegrationTest, method produceAvroWithLatestSchemaAndSubjectStrategy.
@Test
public void produceAvroWithLatestSchemaAndSubjectStrategy() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  // Register record schemas under the subjects that TopicNameStrategy derives
  // from the topic name, so the "latest schema" lookup can find them.
  AvroSchema keySchema = new AvroSchema(
      "{\"type\": \"record\", \"name\": \"MyKey\", \"fields\": [{\"name\": \"foo\", "
          + "\"type\": \"string\"}]}");
  String keySubject = new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
  testEnv.schemaRegistry().createSchema(keySubject, keySchema);
  AvroSchema valueSchema = new AvroSchema(
      "{\"type\": \"record\", \"name\": \"MyValue\", \"fields\": [{\"name\": \"bar\", "
          + "\"type\": \"string\"}]}");
  String valueSubject = new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
  testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);
  ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
  key.put("foo", "foz");
  ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
  value.put("bar", "baz");
  // No schema id or raw schema is set, only the subject name strategy, so the
  // proxy resolves the latest registered schema for each derived subject.
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder()
          .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
          .setData(key)
          .build())
      .setValue(ProduceRequestData.builder()
          .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME)
          .setData(value)
          .build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request()
      .accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME,
      actual.getPartitionId(),
      actual.getOffset(),
      testEnv.schemaRegistry().createAvroDeserializer(),
      testEnv.schemaRegistry().createAvroDeserializer());
  GenericRecord expectedKey = new GenericData.Record(keySchema.rawSchema());
  expectedKey.put("foo", "foz");
  GenericRecord expectedValue = new GenericData.Record(valueSchema.rawSchema());
  expectedValue.put("bar", "baz");
  assertEquals(expectedKey, produced.key());
  assertEquals(expectedValue, produced.value());
}
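TopicNameStrategy derives the subjects <topic>-key and <topic>-value from the topic name, which is why the test registers its schemas through subjectName(...) before producing. Since the request carries neither a schema id nor an inline schema, the proxy looks up the latest schema under each derived subject. The request body is roughly the following sketch, assuming the v3 Produce API field names:

{
  "key":   { "subject_name_strategy": "TOPIC_NAME", "data": { "foo": "foz" } },
  "value": { "subject_name_strategy": "TOPIC_NAME", "data": { "bar": "baz" } }
}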
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Example from the class ProduceActionIntegrationTest, method produceAvroWithRawSchema.
@Test
public void produceAvroWithRawSchema() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  String key = "foo";
  String value = "bar";
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder()
          .setFormat(EmbeddedFormat.AVRO)
          .setRawSchema("{\"type\": \"string\"}")
          .setData(TextNode.valueOf(key))
          .build())
      .setValue(ProduceRequestData.builder()
          .setFormat(EmbeddedFormat.AVRO)
          .setRawSchema("{\"type\": \"string\"}")
          .setData(TextNode.valueOf(value))
          .build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request()
      .accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME,
      actual.getPartitionId(),
      actual.getOffset(),
      testEnv.schemaRegistry().createAvroDeserializer(),
      testEnv.schemaRegistry().createAvroDeserializer());
  assertEquals(key, produced.key());
  assertEquals(value, produced.value());
}
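Here the schema travels inline with the request instead of being pre-registered by the test; the proxy registers or resolves it against Schema Registry on the caller's behalf. A rough sketch of the body, again assuming the v3 Produce API field names (the Avro schema is a JSON string, hence the escaped quotes):

{
  "key":   { "type": "AVRO", "schema": "{\"type\": \"string\"}", "data": "foo" },
  "value": { "type": "AVRO", "schema": "{\"type\": \"string\"}", "data": "bar" }
}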
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Example from the class TopicsResourceBinaryProduceTest, method testProduceSecurityException. Note that the getOffsets(), getKeySchemaId(), and getValueSchemaId() accessors used below belong to the v2 ProduceResponse entity, not the v3 entity used in the other examples.
private void testProduceSecurityException(
    List<CompletableFuture<RecordMetadata>> produceResults,
    List<PartitionOffset> produceExceptionResults,
    Response.Status expectedStatus) {
  Response rawResponse = produceToTopic(PRODUCE_EXCEPTION_DATA, produceResults);
  assertEquals(expectedStatus.getStatusCode(), rawResponse.getStatus());
  ProduceResponse response = TestUtils.tryReadEntityOrLog(rawResponse, ProduceResponse.class);
  assertEquals(produceExceptionResults, response.getOffsets());
  assertNull(response.getKeySchemaId());
  assertNull(response.getValueSchemaId());
}
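Unlike the v3 tests above, this helper asserts on the v2-style response shape, which reports one offset entry per produced record plus top-level schema ids. A failed produce might deserialize to something like the sketch below; the error_code and error values are placeholders for illustration, not codes taken from the source:

{
  "offsets": [
    { "partition": null, "offset": null, "error_code": 40301, "error": "Not authorized." }
  ],
  "key_schema_id": null,
  "value_schema_id": null
}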
Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.
Example from the class ProduceActionIntegrationTest, method produceBinaryKeyOnly.
@Test
public void produceBinaryKeyOnly() throws Exception {
  String clusterId = testEnv.kafkaCluster().getClusterId();
  ByteString key = ByteString.copyFromUtf8("foo");
  // Only the key is set; the value is omitted from the request entirely.
  ProduceRequest request = ProduceRequest.builder()
      .setKey(ProduceRequestData.builder()
          .setFormat(EmbeddedFormat.BINARY)
          .setData(BinaryNode.valueOf(key.toByteArray()))
          .build())
      .setOriginalSize(0L)
      .build();
  Response response = testEnv.kafkaRest().target()
      .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
      .request()
      .accept(MediaType.APPLICATION_JSON)
      .post(Entity.entity(request, MediaType.APPLICATION_JSON));
  assertEquals(Status.OK.getStatusCode(), response.getStatus());
  ProduceResponse actual = readProduceResponse(response);
  ConsumerRecord<byte[], byte[]> produced = testEnv.kafkaCluster().getRecord(
      TOPIC_NAME,
      actual.getPartitionId(),
      actual.getOffset(),
      new ByteArrayDeserializer(),
      new ByteArrayDeserializer());
  assertEquals(key, ByteString.copyFrom(produced.key()));
  // No value was sent, so the consumed record's value is null.
  assertNull(produced.value());
}
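BinaryNode is serialized as base64 in JSON, so the body is roughly the sketch below ("Zm9v" is the base64 encoding of "foo"). The value field is absent, which is why the consumed record's value is null:

{
  "key": { "type": "BINARY", "data": "Zm9v" }
}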