
Example 26 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

From class ProduceActionIntegrationTest, method produceBinaryWithNullData.

@Test
public void produceBinaryWithNullData() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();
    ProduceRequest request =
        ProduceRequest.builder()
            .setKey(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.BINARY)
                    .setData(NullNode.getInstance())
                    .build())
            .setValue(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.BINARY)
                    .setData(NullNode.getInstance())
                    .build())
            .setOriginalSize(0L)
            .build();
    Response response =
        testEnv.kafkaRest().target()
            .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
            .request()
            .accept(MediaType.APPLICATION_JSON)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), response.getStatus());
    ProduceResponse actual = readProduceResponse(response);
    ConsumerRecord<byte[], byte[]> produced =
        testEnv.kafkaCluster().getRecord(
            TOPIC_NAME,
            actual.getPartitionId(),
            actual.getOffset(),
            new ByteArrayDeserializer(),
            new ByteArrayDeserializer());
    assertNull(produced.key());
    assertNull(produced.value());
}
Also used: Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ByteString(com.google.protobuf.ByteString) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) Test(org.junit.jupiter.api.Test)
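
For orientation, the request built above serializes to a small JSON document on the wire. Below is a minimal Jackson sketch of that payload; the type/data field names follow the v3 records API, but treat the exact shape as an assumption, not something this test asserts.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class NullBinaryPayloadSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode record = mapper.createObjectNode();
        // BINARY key and value with explicit JSON nulls, mirroring
        // NullNode.getInstance() in the test above.
        ObjectNode key = record.putObject("key");
        key.put("type", "BINARY");
        key.putNull("data");
        ObjectNode value = record.putObject("value");
        value.put("type", "BINARY");
        value.putNull("data");
        // Prints: {"key":{"type":"BINARY","data":null},"value":{"type":"BINARY","data":null}}
        System.out.println(mapper.writeValueAsString(record));
    }
}

POSTing this body to /v3/clusters/{clusterId}/topics/{topicName}/records is what the Entity.entity(request, MediaType.APPLICATION_JSON) call above amounts to.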

Example 27 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

From class ProduceActionIntegrationTest, method produceJsonschemaWithRawSchema.

@Test
public void produceJsonschemaWithRawSchema() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();
    TextNode key = TextNode.valueOf("foo");
    TextNode value = TextNode.valueOf("bar");
    ProduceRequest request =
        ProduceRequest.builder()
            .setKey(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.JSONSCHEMA)
                    .setRawSchema("{\"type\": \"string\"}")
                    .setData(key)
                    .build())
            .setValue(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.JSONSCHEMA)
                    .setRawSchema("{\"type\": \"string\"}")
                    .setData(value)
                    .build())
            .setOriginalSize(0L)
            .build();
    Response response =
        testEnv.kafkaRest().target()
            .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
            .request()
            .accept(MediaType.APPLICATION_JSON)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), response.getStatus());
    ProduceResponse actual = readProduceResponse(response);
    ConsumerRecord<Object, Object> produced =
        testEnv.kafkaCluster().getRecord(
            TOPIC_NAME,
            actual.getPartitionId(),
            actual.getOffset(),
            testEnv.schemaRegistry().createJsonSchemaDeserializer(),
            testEnv.schemaRegistry().createJsonSchemaDeserializer());
    assertEquals(key, produced.key());
    assertEquals(value, produced.value());
}
Also used: Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) TextNode(com.fasterxml.jackson.databind.node.TextNode) ByteString(com.google.protobuf.ByteString) Test(org.junit.jupiter.api.Test)
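
Because the request carries the raw schema {"type": "string"} inline, the REST proxy resolves it against Schema Registry before producing, so the reading side needs a schema-aware deserializer. Below is a minimal sketch of what a helper like createJsonSchemaDeserializer() might configure, assuming a placeholder registry URL (the test fixture points at its own embedded registry instead).

import java.util.Map;

import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
import org.apache.kafka.common.serialization.Deserializer;

public class JsonSchemaDeserializerSketch {
    public static Deserializer<Object> createValueDeserializer() {
        KafkaJsonSchemaDeserializer<Object> deserializer = new KafkaJsonSchemaDeserializer<>();
        // The URL is a placeholder assumption; isKey=false makes this a value deserializer.
        deserializer.configure(Map.of("schema.registry.url", "http://localhost:8081"), false);
        return deserializer;
    }
}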

Example 28 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

From class ProduceActionIntegrationTest, method produceProtobufWithSchemaId.

@Test
public void produceProtobufWithSchemaId() throws Exception {
    String clusterId = testEnv.kafkaCluster().getClusterId();
    ProtobufSchema keySchema = new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
    SchemaKey keySchemaKey = testEnv.schemaRegistry().createSchema(DEFAULT_KEY_SUBJECT, keySchema);
    ProtobufSchema valueSchema = new ProtobufSchema("syntax = \"proto3\"; message MyValue { string bar = 1; }");
    SchemaKey valueSchemaKey = testEnv.schemaRegistry().createSchema(DEFAULT_VALUE_SUBJECT, valueSchema);
    ObjectNode key = new ObjectNode(JsonNodeFactory.instance);
    key.put("foo", "foz");
    ObjectNode value = new ObjectNode(JsonNodeFactory.instance);
    value.put("bar", "baz");
    ProduceRequest request =
        ProduceRequest.builder()
            .setKey(
                ProduceRequestData.builder()
                    .setSchemaId(keySchemaKey.getSchemaId())
                    .setData(key)
                    .build())
            .setValue(
                ProduceRequestData.builder()
                    .setSchemaId(valueSchemaKey.getSchemaId())
                    .setData(value)
                    .build())
            .setOriginalSize(0L)
            .build();
    Response response =
        testEnv.kafkaRest().target()
            .path("/v3/clusters/" + clusterId + "/topics/" + TOPIC_NAME + "/records")
            .request()
            .accept(MediaType.APPLICATION_JSON)
            .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    assertEquals(Status.OK.getStatusCode(), response.getStatus());
    ProduceResponse actual = readProduceResponse(response);
    ConsumerRecord<Message, Message> produced =
        testEnv.kafkaCluster().getRecord(
            TOPIC_NAME,
            actual.getPartitionId(),
            actual.getOffset(),
            testEnv.schemaRegistry().createProtobufDeserializer(),
            testEnv.schemaRegistry().createProtobufDeserializer());
    DynamicMessage.Builder expectedKey = DynamicMessage.newBuilder(keySchema.toDescriptor());
    expectedKey.setField(keySchema.toDescriptor().findFieldByName("foo"), "foz");
    DynamicMessage.Builder expectedValue = DynamicMessage.newBuilder(valueSchema.toDescriptor());
    expectedValue.setField(valueSchema.toDescriptor().findFieldByName("bar"), "baz");
    assertEquals(expectedKey.build().toByteString(), produced.key().toByteString());
    assertEquals(expectedValue.build().toByteString(), produced.value().toByteString());
}
Also used: Response(javax.ws.rs.core.Response) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) DynamicMessage(com.google.protobuf.DynamicMessage) Message(com.google.protobuf.Message) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProtobufSchema(io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema) ByteString(com.google.protobuf.ByteString) SchemaKey(io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey) Test(org.junit.jupiter.api.Test)
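
Unlike the raw-schema example, this test pre-registers both Protobuf schemas and sends only their ids. Outside the fixture, registration and id retrieval might look like the sketch below; the URL and subject name are placeholder assumptions standing in for what createSchema(DEFAULT_KEY_SUBJECT, keySchema) does internally.

import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

public class SchemaIdSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder URL and cache size; the test fixture wires these up itself.
        SchemaRegistryClient client =
            new CachedSchemaRegistryClient("http://localhost:8081", 10);
        ProtobufSchema keySchema =
            new ProtobufSchema("syntax = \"proto3\"; message MyKey { string foo = 1; }");
        // register(...) returns the id that setSchemaId(...) then references.
        int schemaId = client.register("topic-1-key", keySchema);
        System.out.println("registered schema id: " + schemaId);
    }
}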

Example 29 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

From class ProduceActionTest, method produceNoLimit.

@Test
public void produceNoLimit() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    // first record is 25 bytes long
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(30));
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "false");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 2, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS);
    // expected results
    ProduceResponse produceResponse = getProduceResponse(0);
    ResultOrError resultOrErrorOK1 = ResultOrError.result(produceResponse);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful first produce
    mockedChunkedOutput.write(resultOrErrorOK1);
    mockedChunkedOutput.close();
    ProduceResponse produceResponse2 = getProduceResponse(1);
    ResultOrError resultOrErrorOK2 = ResultOrError.result(produceResponse2);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful second produce
    mockedChunkedOutput.write(resultOrErrorOK2);
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests);
    FakeAsyncResponse fakeAsyncResponse2 = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse2, "clusterId", "topicName", requests);
    // check results
    verify(requests, mockedChunkedOutput, countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used: ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) Properties(java.util.Properties) Test(org.junit.jupiter.api.Test)
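
Since PRODUCE_RATE_LIMIT_ENABLED is false, the limiter mocks are replayed with no expectations, so verify(...) passes only if rateLimit(...) is never invoked. For intuition about what the two limiters guard when enabled, here is an illustrative dual-budget sketch built on Guava's RateLimiter; it is not kafka-rest's RequestRateLimiter implementation.

import com.google.common.util.concurrent.RateLimiter;

public class DualRateLimitSketch {
    private final RateLimiter countLimiter = RateLimiter.create(100); // requests per second
    private final RateLimiter bytesLimiter = RateLimiter.create(30);  // bytes per second

    // Both budgets must have capacity for a record to pass. Note that the
    // count permit is consumed even when only the bytes check fails; a real
    // implementation would need to compensate for that.
    public boolean tryProduce(int recordSizeBytes) {
        return countLimiter.tryAcquire(1) && bytesLimiter.tryAcquire(recordSizeBytes);
    }
}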

Example 30 with ProduceResponse

Use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

From class ProduceActionTest, method produceWithCountLimit.

@Test
public void produceWithCountLimit() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    // first record is 25 bytes long
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(30));
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    EasyMock.expectLastCall().andThrow(new RateLimitExceededException());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 1, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS);
    // expected results
    ProduceResponse produceResponse = getProduceResponse(0);
    ResultOrError resultOrErrorOK = ResultOrError.result(produceResponse);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful first produce
    mockedChunkedOutput.write(resultOrErrorOK);
    mockedChunkedOutput.close();
    ErrorResponse err = ErrorResponse.create(
        429, "Request rate limit exceeded: The rate limit of requests per second has been exceeded.");
    ResultOrError resultOrErrorFail = ResultOrError.error(err);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // failing second produce
    mockedChunkedOutput.write(resultOrErrorFail);
    // error close
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests);
    FakeAsyncResponse fakeAsyncResponse2 = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse2, "clusterId", "topicName", requests);
    // check results
    verify(requests, mockedChunkedOutput, countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used: ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) Properties(java.util.Properties) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) Test(org.junit.jupiter.api.Test)
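
Note how the limit breach surfaces: the HTTP stream itself stays open, and the second record's slot in the chunked output carries a 429 ErrorResponse instead of a result. Below is a minimal sketch of that mapping, reusing only calls already shown in the test; the helper method itself is hypothetical, not kafka-rest API.

import io.confluent.kafkarest.exceptions.v3.ErrorResponse;
import io.confluent.kafkarest.ratelimit.RateLimitExceededException;
import io.confluent.kafkarest.response.StreamingResponse.ResultOrError;

public class RateLimitErrorSketch {
    // Translates a rate-limit failure into the streamed error entry the test
    // expects; the status code and message mirror the assertions above.
    public static ResultOrError toStreamedError(RateLimitExceededException e) {
        ErrorResponse err = ErrorResponse.create(
            429,
            "Request rate limit exceeded: The rate limit of requests per second has been exceeded.");
        return ResultOrError.error(err);
    }
}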

Aggregations

Response (javax.ws.rs.core.Response): 61
Test (org.junit.jupiter.api.Test): 58
ProduceRequest (io.confluent.kafkarest.entities.v3.ProduceRequest): 57
ProduceResponse (io.confluent.kafkarest.entities.v3.ProduceResponse): 57
ErrorResponse (io.confluent.kafkarest.exceptions.v3.ErrorResponse): 54
ByteString (com.google.protobuf.ByteString): 50
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 18
SchemaKey (io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey): 17
DynamicMessage (com.google.protobuf.DynamicMessage): 11
Message (com.google.protobuf.Message): 11
ProtobufSchema (io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema): 11
ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse): 11
TextNode (com.fasterxml.jackson.databind.node.TextNode): 10
AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema): 10
TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse): 10
JsonSchema (io.confluent.kafka.schemaregistry.json.JsonSchema): 9
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer): 9
ResultOrError (io.confluent.kafkarest.response.StreamingResponse.ResultOrError): 6
RequestRateLimiter (io.confluent.kafkarest.ratelimit.RequestRateLimiter): 4
ChunkedOutputFactory (io.confluent.kafkarest.response.ChunkedOutputFactory): 4