
Example 36 with ProduceResponse

Use of io.confluent.kafkarest.entities.v2.ProduceResponse in the confluentinc/kafka-rest project.

Class AbstractProducerTest, method testProduceToAuthorizationError:

protected void testProduceToAuthorizationError(String topicName, TopicRequestT request, String contentType, Map<String, String> queryParams) {
    Response response = request("/topics/" + topicName, queryParams).post(Entity.entity(request, contentType));
    assertEquals(Response.Status.FORBIDDEN.getStatusCode(), response.getStatus());
    final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
    for (PartitionOffset pOffset : produceResponse.getOffsets()) {
        assertEquals(Errors.KAFKA_AUTHORIZATION_ERROR_CODE, (int) pOffset.getErrorCode());
    }
}
Also used: ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse), TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse), Response (javax.ws.rs.core.Response), PartitionOffset (io.confluent.kafkarest.entities.v2.PartitionOffset)
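
For context, a concrete subclass might drive this helper roughly as follows. This is a sketch rather than code from the project: the request type BinaryTopicProduceRequest, the buildSingleRecordRequest() helper, and the topic name are assumed names introduced for illustration.

@Test
public void produceWithoutWriteAcl_returnsAuthorizationError() {
    // Hypothetical helper building a single-record binary request (not part of this page).
    BinaryTopicProduceRequest request = buildSingleRecordRequest();
    testProduceToAuthorizationError(
        // Assumed topic name: one the test principal has no Write ACL for.
        "acl-restricted-topic",
        request,
        // Same binary v2 content type used by the other produce tests above.
        Versions.KAFKA_V2_JSON_BINARY,
        // No extra query parameters.
        java.util.Collections.emptyMap());
}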

Example 37 with ProduceResponse

Use of io.confluent.kafkarest.entities.v2.ProduceResponse in the confluentinc/kafka-rest project.

Class AbstractProducerTest, method testProduceToTopicFails:

protected void testProduceToTopicFails(String topicName, TopicRequestT request, Map<String, String> queryParams) {
    Response response = request("/topics/" + topicName, queryParams).post(Entity.entity(request, Versions.KAFKA_V2_JSON_BINARY));
    assertOKResponse(response, Versions.KAFKA_V2_JSON);
    final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
    for (PartitionOffset pOffset : produceResponse.getOffsets()) {
        assertNotNull(pOffset.getError());
    }
}
Also used: ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse), TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse), Response (javax.ws.rs.core.Response), PartitionOffset (io.confluent.kafkarest.entities.v2.PartitionOffset)
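
As this test shows, the v2 produce endpoint can return HTTP 200 while individual records fail, so callers have to inspect every PartitionOffset rather than the status code alone. A minimal sketch of that check, using only the accessors shown above; the class name ProduceResponseChecks is made up for illustration.

import io.confluent.kafkarest.entities.v2.PartitionOffset;
import io.confluent.kafkarest.entities.v2.ProduceResponse;

final class ProduceResponseChecks {
    // Returns true only if no record in the batch carried a per-partition error.
    static boolean allRecordsAccepted(ProduceResponse response) {
        for (PartitionOffset offset : response.getOffsets()) {
            if (offset.getError() != null) {
                // A non-null error string marks a rejected record, as asserted in the test above.
                return false;
            }
        }
        return true;
    }
}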

Example 38 with ProduceResponse

Use of io.confluent.kafkarest.entities.v2.ProduceResponse in the confluentinc/kafka-rest project.

Class AbstractProducerTest, method testProduceToPartition:

protected <K, V> void testProduceToPartition(String topicName, int partition, PartitionRequestT request, String keySerializerClassName, String valueSerializerClassName, List<PartitionOffset> offsetResponse, Map<String, String> queryParams, List<ProduceRecord<K, V>> expected) {
    Response response = request("/topics/" + topicName + "/partitions/0", queryParams).post(Entity.entity(request, getEmbeddedContentType()));
    assertOKResponse(response, Versions.KAFKA_V2_JSON);
    final ProduceResponse poffsetResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
    assertEquals(offsetResponse, poffsetResponse.getOffsets());
    TestUtils.assertTopicContains(plaintextBrokerList, topicName, expected, partition, keySerializerClassName, valueSerializerClassName, true);
}
Also used: ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse), TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse), Response (javax.ws.rs.core.Response)
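
Outside the test harness, the same per-partition produce request can be issued with a plain JAX-RS client. The following is a standalone sketch, assuming a REST Proxy listening on localhost:8082, a topic named test, and a Jackson JSON provider registered on the client so that ProduceResponse can be deserialized.

import io.confluent.kafkarest.entities.v2.ProduceResponse;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class ProduceToPartitionSketch {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        Response response = client
            .target("http://localhost:8082")                    // assumed proxy address
            .path("/topics/test/partitions/0")                  // produce to a single partition
            .request("application/vnd.kafka.v2+json")           // accept the v2 JSON response
            .post(Entity.entity(
                "{\"records\":[{\"value\":\"S2Fma2E=\"}]}",     // binary embedded format, base64 value
                "application/vnd.kafka.binary.v2+json"));
        ProduceResponse produced = response.readEntity(ProduceResponse.class);
        // One PartitionOffset per produced record.
        produced.getOffsets().forEach(System.out::println);
        client.close();
    }
}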

Example 39 with ProduceResponse

Use of io.confluent.kafkarest.entities.v2.ProduceResponse in the confluentinc/kafka-rest project.

Class AvroProducerTest, method testProduceToTopic:

protected <K, V> void testProduceToTopic(List<SchemaTopicProduceRecord> records, List<PartitionOffset> offsetResponses, Map<String, String> queryParams) {
    SchemaTopicProduceRequest payload =
        SchemaTopicProduceRequest.create(
            records, keySchemaStr, /* keySchemaId= */ null, valueSchemaStr, /* valueSchemaId= */ null);
    Response response = request("/topics/" + topicName, queryParams).post(Entity.entity(payload, Versions.KAFKA_V2_JSON_AVRO));
    assertOKResponse(response, Versions.KAFKA_V2_JSON);
    final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
    TestUtils.assertPartitionOffsetsEqual(offsetResponses, produceResponse.getOffsets());
    TestUtils.assertTopicContains(plaintextBrokerList, topicName, payload.toProduceRequest().getRecords(), null, KafkaAvroDeserializer.class.getName(), KafkaAvroDeserializer.class.getName(), deserializerProps, false);
    // Expected value first, per the JUnit assertEquals convention.
    assertEquals((Integer) 1, produceResponse.getKeySchemaId());
    assertEquals((Integer) 2, produceResponse.getValueSchemaId());
}
Also used: ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse), TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse), Response (javax.ws.rs.core.Response), KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer), SchemaTopicProduceRequest (io.confluent.kafkarest.entities.v2.SchemaTopicProduceRequest)
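
A related pattern is reusing the schema IDs returned by a first produce instead of resending the schema strings. The fragment below continues the method above and is a sketch only, under the assumption that SchemaTopicProduceRequest.create accepts null schema strings when the IDs are supplied (the v2 API allows key_schema_id/value_schema_id); the followUp and second names are introduced here.

    // Reuse the IDs the first produce registered (1 and 2 on a fresh schema registry, as asserted above).
    Integer keySchemaId = produceResponse.getKeySchemaId();
    Integer valueSchemaId = produceResponse.getValueSchemaId();
    // Assumption: create() accepts null schema strings when the IDs are supplied instead.
    SchemaTopicProduceRequest followUp =
        SchemaTopicProduceRequest.create(
            records, /* keySchema= */ null, keySchemaId, /* valueSchema= */ null, valueSchemaId);
    Response second =
        request("/topics/" + topicName, queryParams)
            .post(Entity.entity(followUp, Versions.KAFKA_V2_JSON_AVRO));
    assertOKResponse(second, Versions.KAFKA_V2_JSON);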

Example 40 with ProduceResponse

Use of io.confluent.kafkarest.entities.v2.ProduceResponse in the confluentinc/kafka-rest project.

Class SchemaProduceConsumeTest, method produceThenConsume_returnsExactlyProduced:

@Test
public void produceThenConsume_returnsExactlyProduced() throws Exception {
    testEnv.kafkaCluster().createTopic(TOPIC, /* numPartitions= */ 1, /* replicationFactor= */ (short) 3);
    Response createConsumerInstanceResponse =
        testEnv.kafkaRest()
            .target()
            .path(String.format("/consumers/%s", CONSUMER_GROUP))
            .request()
            .post(
                Entity.entity(
                    new CreateConsumerInstanceRequest(
                        /* id= */ null,
                        /* name= */ null,
                        getFormat() != null ? getFormat().name() : null,
                        /* autoOffsetReset= */ null,
                        /* autoCommitEnable= */ null,
                        /* responseMinBytes= */ null,
                        /* requestWaitMs= */ null),
                    Versions.KAFKA_V2_JSON));
    assertEquals(Status.OK.getStatusCode(), createConsumerInstanceResponse.getStatus());
    CreateConsumerInstanceResponse createConsumerInstance = createConsumerInstanceResponse.readEntity(CreateConsumerInstanceResponse.class);
    Response subscribeResponse = testEnv.kafkaRest().target().path(String.format("/consumers/%s/instances/%s/subscription", CONSUMER_GROUP, createConsumerInstance.getInstanceId())).request().post(Entity.entity(new ConsumerSubscriptionRecord(singletonList(TOPIC), null), Versions.KAFKA_V2_JSON));
    assertEquals(Status.NO_CONTENT.getStatusCode(), subscribeResponse.getStatus());
    // Needs to consume empty once before producing.
    testEnv.kafkaRest().target().path(String.format("/consumers/%s/instances/%s/records", CONSUMER_GROUP, createConsumerInstance.getInstanceId())).request().accept(getContentType()).get();
    SchemaTopicProduceRequest produceRequest = new SchemaTopicProduceRequest(getProduceRecords(), getKeySchema().canonicalString(), null, getValueSchema().canonicalString(), null);
    ProduceResponse produceResponse = testEnv.kafkaRest().target().path(String.format("/topics/%s", TOPIC)).request().post(Entity.entity(produceRequest, getContentType())).readEntity(ProduceResponse.class);
    assertEquals(Status.OK, produceResponse.getRequestStatus());
    Response readRecordsResponse = testEnv.kafkaRest().target().path(String.format("/consumers/%s/instances/%s/records", CONSUMER_GROUP, createConsumerInstance.getInstanceId())).request().accept(getContentType()).get();
    assertEquals(Status.OK.getStatusCode(), readRecordsResponse.getStatus());
    List<SchemaConsumerRecord> readRecords = readRecordsResponse.readEntity(new GenericType<List<SchemaConsumerRecord>>() {
    });
    assertMapEquals(producedToMap(getProduceRecords()), consumedToMap(readRecords));
}
Also used: ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse), Response (javax.ws.rs.core.Response), CreateConsumerInstanceResponse (io.confluent.kafkarest.entities.v2.CreateConsumerInstanceResponse), ConsumerSubscriptionRecord (io.confluent.kafkarest.entities.v2.ConsumerSubscriptionRecord), SchemaConsumerRecord (io.confluent.kafkarest.entities.v2.SchemaConsumerRecord), CreateConsumerInstanceRequest (io.confluent.kafkarest.entities.v2.CreateConsumerInstanceRequest), Collections.singletonList (java.util.Collections.singletonList), List (java.util.List), SchemaTopicProduceRequest (io.confluent.kafkarest.entities.v2.SchemaTopicProduceRequest), Test (org.junit.jupiter.api.Test)
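
The test reads the consumer records endpoint once before producing because a freshly subscribed REST consumer may legitimately return an empty list until its assignment settles. Below is a small polling helper distilled from that idea; it is a sketch only, with the class name, retry budget, and sleep interval chosen here rather than taken from the project.

import io.confluent.kafkarest.entities.v2.SchemaConsumerRecord;
import java.util.Collections;
import java.util.List;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.GenericType;

final class ConsumeHelper {
    // Polls the /consumers/{group}/instances/{instance}/records target until data arrives
    // or the attempt budget is exhausted.
    static List<SchemaConsumerRecord> pollUntilNonEmpty(
            WebTarget recordsTarget, String contentType, int maxAttempts) throws InterruptedException {
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            List<SchemaConsumerRecord> records = recordsTarget
                .request()
                .accept(contentType)
                .get(new GenericType<List<SchemaConsumerRecord>>() {});
            if (!records.isEmpty()) {
                return records;
            }
            Thread.sleep(500);  // brief back-off between fetches
        }
        return Collections.emptyList();
    }
}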

Aggregations

Response (javax.ws.rs.core.Response): 61
Test (org.junit.jupiter.api.Test): 58
ProduceRequest (io.confluent.kafkarest.entities.v3.ProduceRequest): 57
ProduceResponse (io.confluent.kafkarest.entities.v3.ProduceResponse): 57
ErrorResponse (io.confluent.kafkarest.exceptions.v3.ErrorResponse): 54
ByteString (com.google.protobuf.ByteString): 50
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 18
SchemaKey (io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey): 17
DynamicMessage (com.google.protobuf.DynamicMessage): 11
Message (com.google.protobuf.Message): 11
ProtobufSchema (io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema): 11
ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse): 11
TextNode (com.fasterxml.jackson.databind.node.TextNode): 10
AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema): 10
TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse): 10
JsonSchema (io.confluent.kafka.schemaregistry.json.JsonSchema): 9
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer): 9
ResultOrError (io.confluent.kafkarest.response.StreamingResponse.ResultOrError): 6
RequestRateLimiter (io.confluent.kafkarest.ratelimit.RequestRateLimiter): 4
ChunkedOutputFactory (io.confluent.kafkarest.response.ChunkedOutputFactory): 4