
Example 11 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

In class ProduceActionIntegrationTest, method produceAvroWithSchemaId.

@Test
public void produceAvroWithSchemaId() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();
    SchemaKey keySchema = testEnv.schemaRegistry().createSchema(DEFAULT_KEY_SUBJECT, new AvroSchema("{\"type\": \"string\"}"));
    SchemaKey valueSchema = testEnv.schemaRegistry().createSchema(DEFAULT_VALUE_SUBJECT, new AvroSchema("{\"type\": \"string\"}"));
    String key = "foo";
    String value = "bar";
    ProduceRequest request = ProduceRequest.builder()
        .setKey(ProduceRequestData.builder()
            .setSchemaId(keySchema.getSchemaId()).setData(TextNode.valueOf(key)).build())
        .setValue(ProduceRequestData.builder()
            .setSchemaId(valueSchema.getSchemaId()).setData(TextNode.valueOf(value)).build())
        .setOriginalSize(0L)
        .build();
    Response response = testEnv.kafkaRest().target()
        .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
        .request().accept(MediaType.APPLICATION_JSON)
        .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), response.getStatus());
    ProduceResponse actual = readProduceResponse(response);
    ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
        TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
        testEnv.schemaRegistry().createAvroDeserializer(),
        testEnv.schemaRegistry().createAvroDeserializer());
    assertEquals(key, produced.key());
    assertEquals(value, produced.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) AvroSchema(io.confluent.kafka.schemaregistry.avro.AvroSchema) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ByteString(com.google.protobuf.ByteString) SchemaKey(io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey) Test(org.junit.jupiter.api.Test)
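
The readProduceResponse helper called in each of these tests is not shown on this page. A minimal sketch of what it could look like, assuming the response body is a single JSON-encoded v3 ProduceResponse (the real helper in kafka-rest may differ, for example when handling streaming responses):

private static ProduceResponse readProduceResponse(Response response) {
    // Buffer the entity so it can be read (and, if needed, logged) safely.
    response.bufferEntity();
    // Assumption: deserialize the body directly into the v3 ProduceResponse entity.
    return response.readEntity(ProduceResponse.class);
}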

Example 12 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

In class ProduceActionIntegrationTest, method produceAvroWithLatestSchemaAndSubjectStrategy.

@Test
public void produceAvroWithLatestSchemaAndSubjectStrategy() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();
    AvroSchema keySchema = new AvroSchema(
        "{\"type\": \"record\", \"name\": \"MyKey\", \"fields\": [{\"name\": \"foo\", \"type\": \"string\"}]}");
    String keySubject = new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ true, keySchema);
    testEnv.schemaRegistry().createSchema(keySubject, keySchema);
    AvroSchema valueSchema = new AvroSchema(
        "{\"type\": \"record\", \"name\": \"MyValue\", \"fields\": [{\"name\": \"bar\", \"type\": \"string\"}]}");
    String valueSubject = new TopicNameStrategy().subjectName(TOPIC_NAME, /* isKey= */ false, valueSchema);
    testEnv.schemaRegistry().createSchema(valueSubject, valueSchema);
    ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
    key.put("foo", "foz");
    ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
    value.put("bar", "baz");
    ProduceRequest request = ProduceRequest.builder()
        .setKey(ProduceRequestData.builder()
            .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME).setData(key).build())
        .setValue(ProduceRequestData.builder()
            .setSubjectNameStrategy(EnumSubjectNameStrategy.TOPIC_NAME).setData(value).build())
        .setOriginalSize(0L)
        .build();
    Response response = testEnv.kafkaRest().target()
        .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
        .request().accept(MediaType.APPLICATION_JSON)
        .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), response.getStatus());
    ProduceResponse actual = readProduceResponse(response);
    ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
        TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
        testEnv.schemaRegistry().createAvroDeserializer(),
        testEnv.schemaRegistry().createAvroDeserializer());
    GenericRecord expectedKey = new GenericData.Record(keySchema.rawSchema());
    expectedKey.put("foo", "foz");
    GenericRecord expectedValue = new GenericData.Record(valueSchema.rawSchema());
    expectedValue.put("bar", "baz");
    assertEquals(expectedKey, produced.key());
    assertEquals(expectedValue, produced.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) AvroSchema(io.confluent.kafka.schemaregistry.avro.AvroSchema) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) TopicNameStrategy(io.confluent.kafka.serializers.subject.TopicNameStrategy) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) GenericRecord(org.apache.avro.generic.GenericRecord) ByteString(com.google.protobuf.ByteString) Test(org.junit.jupiter.api.Test)
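
For context, TopicNameStrategy derives subject names from the topic name alone, which is why the test registers the key and value schemas under topic-specific subjects before producing. A small illustration (the topic name "topic-1" is only an example):

    // TopicNameStrategy maps a topic to the "<topic>-key" and "<topic>-value" subjects.
    TopicNameStrategy strategy = new TopicNameStrategy();
    String keySubject = strategy.subjectName("topic-1", /* isKey= */ true, keySchema);     // "topic-1-key"
    String valueSubject = strategy.subjectName("topic-1", /* isKey= */ false, valueSchema); // "topic-1-value"

With EnumSubjectNameStrategy.TOPIC_NAME set in the request, the proxy is expected to look up the latest schema registered under those subjects.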

Example 13 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

In class ProduceActionIntegrationTest, method produceAvroWithRawSchema.

@Test
public void produceAvroWithRawSchema() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();
    String key = "foo";
    String value = "bar";
    ProduceRequest request = ProduceRequest.builder()
        .setKey(ProduceRequestData.builder()
            .setFormat(EmbeddedFormat.AVRO).setRawSchema("{\"type\": \"string\"}")
            .setData(TextNode.valueOf(key)).build())
        .setValue(ProduceRequestData.builder()
            .setFormat(EmbeddedFormat.AVRO).setRawSchema("{\"type\": \"string\"}")
            .setData(TextNode.valueOf(value)).build())
        .setOriginalSize(0L)
        .build();
    Response response = testEnv.kafkaRest().target()
        .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
        .request().accept(MediaType.APPLICATION_JSON)
        .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), response.getStatus());
    ProduceResponse actual = readProduceResponse(response);
    ConsumerRecord<Object, Object> produced = testEnv.kafkaCluster().getRecord(
        TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
        testEnv.schemaRegistry().createAvroDeserializer(),
        testEnv.schemaRegistry().createAvroDeserializer());
    assertEquals(key, produced.key());
    assertEquals(value, produced.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ByteString(com.google.protobuf.ByteString) Test(org.junit.jupiter.api.Test)
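
Unlike Example 11, no schema is registered up front here: the request embeds the Avro schema inline via setRawSchema, and the proxy is expected to resolve it against Schema Registry itself. The raw string is an ordinary Avro schema literal, as a quick local check shows (only an illustration, not part of the test):

    // Parsing the same literal locally yields the primitive Avro string type that
    // both key and value are serialized with.
    AvroSchema rawSchema = new AvroSchema("{\"type\": \"string\"}");
    org.apache.avro.Schema.Type type = rawSchema.rawSchema().getType(); // Schema.Type.STRING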

Example 14 with ProduceResponse

Use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc.

In class TopicsResourceBinaryProduceTest, method testProduceSecurityException.

private void testProduceSecurityException(
    List<CompletableFuture<RecordMetadata>> produceResults,
    List<PartitionOffset> produceExceptionResults,
    Response.Status expectedStatus) {
    Response rawResponse = produceToTopic(PRODUCE_EXCEPTION_DATA, produceResults);
    assertEquals(expectedStatus.getStatusCode(), rawResponse.getStatus());
    ProduceResponse response = TestUtils.tryReadEntityOrLog(rawResponse, ProduceResponse.class);
    assertEquals(produceExceptionResults, response.getOffsets());
    assertNull(response.getKeySchemaId());
    assertNull(response.getValueSchemaId());
}
Also used : TestUtils.assertOKResponse(io.confluent.kafkarest.TestUtils.assertOKResponse) ProduceResponse(io.confluent.kafkarest.entities.v2.ProduceResponse) Response(javax.ws.rs.core.Response) TestUtils.assertErrorResponse(io.confluent.kafkarest.TestUtils.assertErrorResponse)
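
The produceResults passed to this helper are presumably futures that complete exceptionally, so the resource maps the failure to the expected HTTP status. A hypothetical way to build such an input (the exception type and message are only illustrative):

    // Illustrative only: a single produce result failing with a Kafka authorization error.
    CompletableFuture<RecordMetadata> failed = new CompletableFuture<>();
    failed.completeExceptionally(
        new org.apache.kafka.common.errors.TopicAuthorizationException("not authorized to write to the topic"));
    List<CompletableFuture<RecordMetadata>> produceResults = Collections.singletonList(failed);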

Example 15 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

In class ProduceActionIntegrationTest, method produceBinaryKeyOnly.

@Test
public void produceBinaryKeyOnly() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();
    ByteString key = ByteString.copyFromUtf8("foo");
    ProduceRequest request = ProduceRequest.builder()
        .setKey(ProduceRequestData.builder()
            .setFormat(EmbeddedFormat.BINARY)
            .setData(BinaryNode.valueOf(key.toByteArray())).build())
        .setOriginalSize(0L)
        .build();
    Response response = testEnv.kafkaRest().target()
        .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
        .request().accept(MediaType.APPLICATION_JSON)
        .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), response.getStatus());
    ProduceResponse actual = readProduceResponse(response);
    ConsumerRecord<byte[], byte[]> produced = testEnv.kafkaCluster().getRecord(
        TOPIC_NAME, actual.getPartitionId(), actual.getOffset(),
        new ByteArrayDeserializer(), new ByteArrayDeserializer());
    assertEquals(key, ByteString.copyFrom(produced.key()));
    assertNull(produced.value());
}
Also used : Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ByteString(com.google.protobuf.ByteString) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) Test(org.junit.jupiter.api.Test)
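
A note on the BINARY format used above: the record bytes travel as a Jackson BinaryNode, which renders as a base64 string once the request is serialized to JSON (standard Jackson behaviour, not specific to this test):

    ByteString key = ByteString.copyFromUtf8("foo");
    BinaryNode node = BinaryNode.valueOf(key.toByteArray());
    // BinaryNode exposes its payload as base64 text, so "foo" becomes "Zm9v".
    assertEquals("Zm9v", node.asText());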

Aggregations

Response (javax.ws.rs.core.Response): 61
Test (org.junit.jupiter.api.Test): 58
ProduceRequest (io.confluent.kafkarest.entities.v3.ProduceRequest): 57
ProduceResponse (io.confluent.kafkarest.entities.v3.ProduceResponse): 57
ErrorResponse (io.confluent.kafkarest.exceptions.v3.ErrorResponse): 54
ByteString (com.google.protobuf.ByteString): 50
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 18
SchemaKey (io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey): 17
DynamicMessage (com.google.protobuf.DynamicMessage): 11
Message (com.google.protobuf.Message): 11
ProtobufSchema (io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema): 11
ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse): 11
TextNode (com.fasterxml.jackson.databind.node.TextNode): 10
AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema): 10
TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse): 10
JsonSchema (io.confluent.kafka.schemaregistry.json.JsonSchema): 9
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer): 9
ResultOrError (io.confluent.kafkarest.response.StreamingResponse.ResultOrError): 6
RequestRateLimiter (io.confluent.kafkarest.ratelimit.RequestRateLimiter): 4
ChunkedOutputFactory (io.confluent.kafkarest.response.ChunkedOutputFactory): 4