
Example 1 with ChunkedOutputFactory

Use of io.confluent.kafkarest.response.ChunkedOutputFactory in project kafka-rest by confluentinc.

From the class ProduceActionTest, method produceNoLimit.

@Test
public void produceNoLimit() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    // first record is 25 bytes long
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(30));
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "false");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 2, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS);
    // expected results
    ProduceResponse produceResponse = getProduceResponse(0);
    ResultOrError resultOrErrorOK1 = ResultOrError.result(produceResponse);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful first produce
    mockedChunkedOutput.write(resultOrErrorOK1);
    mockedChunkedOutput.close();
    ProduceResponse produceResponse2 = getProduceResponse(1);
    ResultOrError resultOrErrorOK2 = ResultOrError.result(produceResponse2);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful second produce
    mockedChunkedOutput.write(resultOrErrorOK2);
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests);
    FakeAsyncResponse fakeAsyncResponse2 = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse2, "clusterId", "topicName", requests);
    // check results
    verify(requests, mockedChunkedOutput, countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used : ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) Properties(java.util.Properties) Test(org.junit.jupiter.api.Test)
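The getChunkedOutput helper called above is not shown on this page. The sketch below is one plausible shape for it, assuming ChunkedOutputFactory exposes a no-arg getChunkedOutput() method (as the mocked factory implies); it is illustrative, not the test's actual helper.

import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.mock;

import io.confluent.kafkarest.response.ChunkedOutputFactory;
import io.confluent.kafkarest.response.StreamingResponse.ResultOrError;
import org.glassfish.jersey.server.ChunkedOutput;

// Sketch only (assumed helper): stub the factory so every produce call in the test
// receives the same mocked ChunkedOutput, once per expected call.
private static ChunkedOutput<ResultOrError> getChunkedOutput(
    ChunkedOutputFactory chunkedOutputFactory, int totalNumberOfProduceCalls) {
    @SuppressWarnings("unchecked")
    ChunkedOutput<ResultOrError> mockedChunkedOutput = mock(ChunkedOutput.class);
    expect(chunkedOutputFactory.getChunkedOutput())
        .andReturn(mockedChunkedOutput)
        .times(totalNumberOfProduceCalls);
    return mockedChunkedOutput;
}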

Example 2 with ChunkedOutputFactory

Use of io.confluent.kafkarest.response.ChunkedOutputFactory in project kafka-rest by confluentinc.

From the class ProduceActionTest, method produceWithCountLimit.

@Test
public void produceWithCountLimit() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    // first record is 25 bytes long
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(30));
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    EasyMock.expectLastCall().andThrow(new RateLimitExceededException());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 1, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS);
    // expected results
    ProduceResponse produceResponse = getProduceResponse(0);
    ResultOrError resultOrErrorOK = ResultOrError.result(produceResponse);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful first produce
    mockedChunkedOutput.write(resultOrErrorOK);
    mockedChunkedOutput.close();
    ErrorResponse err = ErrorResponse.create(429, "Request rate limit exceeded: The rate limit of requests per second has been exceeded.");
    ResultOrError resultOrErrorFail = ResultOrError.error(err);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // failing second produce
    mockedChunkedOutput.write(resultOrErrorFail);
    // error close
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests);
    FakeAsyncResponse fakeAsyncResponse2 = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse2, "clusterId", "topicName", requests);
    // check results
    verify(requests, mockedChunkedOutput, countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used : ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) Properties(java.util.Properties) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) Test(org.junit.jupiter.api.Test)
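The rateLimit(anyInt()) expectations above model a limiter that throws RateLimitExceededException once its per-second budget is exhausted. The sketch below shows that contract using Guava's RateLimiter as a stand-in; it is an illustration of the behavior the mock encodes, not kafka-rest's actual RequestRateLimiter implementation.

import com.google.common.util.concurrent.RateLimiter;
import io.confluent.kafkarest.ratelimit.RateLimitExceededException;

// Sketch only: consume `cost` permits per call and fail fast when the budget is gone,
// which is what the second produce call above runs into.
final class CountingRateLimiterSketch {
    private final RateLimiter delegate;

    CountingRateLimiterSketch(double permitsPerSecond) {
        this.delegate = RateLimiter.create(permitsPerSecond);
    }

    void rateLimit(int cost) {
        if (!delegate.tryAcquire(cost)) {
            throw new RateLimitExceededException();
        }
    }
}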

Example 3 with ChunkedOutputFactory

Use of io.confluent.kafkarest.response.ChunkedOutputFactory in project kafka-rest by confluentinc.

From the class ProduceActionTest, method streamingRequests.

@Test
public void streamingRequests() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS_PROD1 = 1;
    final int TOTAL_NUMBER_OF_STREAMING_CALLS = 4;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, Integer.toString(10000));
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(999999999));
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, Integer.toString(3600000));
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS_PROD1);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction1 = getProduceAction(properties, chunkedOutputFactory, TOTAL_NUMBER_OF_STREAMING_CALLS, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = getStreamingProduceRequestsMappingIterator(TOTAL_NUMBER_OF_STREAMING_CALLS);
    // expected results
    ProduceResponse produceResponse1 = getProduceResponse(0);
    ResultOrError resultOrErrorOKProd1 = ResultOrError.result(produceResponse1);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    mockedChunkedOutput.write(resultOrErrorOKProd1);
    ProduceResponse produceResponse2 = getProduceResponse(1);
    ResultOrError resultOrErrorOKProd2 = ResultOrError.result(produceResponse2);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    mockedChunkedOutput.write(resultOrErrorOKProd2);
    ProduceResponse produceResponse3 = getProduceResponse(2);
    ResultOrError resultOrErrorOKProd3 = ResultOrError.result(produceResponse3);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    mockedChunkedOutput.write(resultOrErrorOKProd3);
    ProduceResponse produceResponse4 = getProduceResponse(3);
    ResultOrError resultOrErrorOKProd4 = ResultOrError.result(produceResponse4);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    mockedChunkedOutput.write(resultOrErrorOKProd4);
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse1 = new FakeAsyncResponse();
    produceAction1.produce(fakeAsyncResponse1, "clusterId", "topicName", requests);
    // check results
    EasyMock.verify(requests);
    EasyMock.verify(mockedChunkedOutput);
}
Also used : ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) Properties(java.util.Properties) Test(org.junit.jupiter.api.Test)
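The expectations above pin down the streaming contract: four requests arrive over a single connection, each result is written to the same ChunkedOutput, and close() is called exactly once after the stream is drained. The loop below is a simplified sketch of that contract with a hypothetical produce callback; it is not the StreamingResponse implementation itself.

import com.fasterxml.jackson.databind.MappingIterator;
import io.confluent.kafkarest.entities.v3.ProduceRequest;
import io.confluent.kafkarest.response.StreamingResponse.ResultOrError;
import java.io.IOException;
import java.util.function.Function;
import org.glassfish.jersey.server.ChunkedOutput;

// Sketch only: one result per streamed request, one ChunkedOutput, one close() at the end.
static void writeAll(
    MappingIterator<ProduceRequest> requests,
    ChunkedOutput<ResultOrError> out,
    Function<ProduceRequest, ResultOrError> produce) throws IOException {
    try {
        while (requests.hasNext()) {
            ResultOrError result = produce.apply(requests.next());
            if (!out.isClosed()) {
                out.write(result);
            }
        }
    } finally {
        out.close();
    }
}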

Example 4 with ChunkedOutputFactory

Use of io.confluent.kafkarest.response.ChunkedOutputFactory in project kafka-rest by confluentinc.

From the class ProduceActionTest, method produceNoSchemaRegistryDefined.

@Test
public void produceNoSchemaRegistryDefined() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 1;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(999999999));
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(SCHEMA_REGISTRY_URL_CONFIG, "");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 1, countLimitProvider, bytesLimitProvider, true);
    MappingIterator<ProduceRequest> requests = getProduceRequestsMappingIteratorWithSchemaNeeded();
    // expected results
    ErrorResponse err = ErrorResponse.create(422, "Error: 42206 : Payload error. Schema Registry must be configured when using schemas.");
    ResultOrError resultOrErrorFail = ResultOrError.error(err);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    mockedChunkedOutput.write(resultOrErrorFail);
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests);
    // check results
    EasyMock.verify(requests);
    EasyMock.verify(mockedChunkedOutput);
}
Also used : ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) Properties(java.util.Properties) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) Test(org.junit.jupiter.api.Test)
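The 422 / 42206 error asserted above comes from a configuration guard: a request that needs schema-based serialization is rejected when no Schema Registry URL is configured. The sketch below illustrates such a check; the method name and shape are assumptions for illustration, and only ErrorResponse.create and ResultOrError.error are taken from the example itself.

import io.confluent.kafkarest.exceptions.v3.ErrorResponse;
import io.confluent.kafkarest.response.StreamingResponse.ResultOrError;
import java.util.Optional;

// Sketch only (assumed guard): reject schema-based requests when no Schema Registry is
// configured, producing the same error payload the test expects on the ChunkedOutput.
static Optional<ResultOrError> checkSchemaRegistryConfigured(
    String schemaRegistryUrl, boolean requestNeedsSchema) {
    if (requestNeedsSchema && (schemaRegistryUrl == null || schemaRegistryUrl.isEmpty())) {
        return Optional.of(
            ResultOrError.error(
                ErrorResponse.create(
                    422,
                    "Error: 42206 : Payload error. Schema Registry must be configured"
                        + " when using schemas.")));
    }
    return Optional.empty();
}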

Example 5 with ChunkedOutputFactory

Use of io.confluent.kafkarest.response.ChunkedOutputFactory in project kafka-rest by confluentinc.

From the class ProduceActionTest, method testHasNextOnNullData.

@Test
public void testHasNextOnNullData() throws Exception {
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    // first record is 25 bytes long
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(30));
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 1, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = null;
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    RestConstraintViolationException e = assertThrows(RestConstraintViolationException.class, () -> produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests));
    assertEquals("Payload error. Null input provided. Data is required.", e.getMessage());
    assertEquals(42206, e.getErrorCode());
}
Also used : ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) RestConstraintViolationException(io.confluent.rest.exceptions.RestConstraintViolationException) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) Properties(java.util.Properties) Test(org.junit.jupiter.api.Test)
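The assertions above document the null-payload guard: a null MappingIterator is rejected up front with error code 42206 before any mock is exercised. The sketch below shows a minimal version of that guard; it assumes RestConstraintViolationException offers a (message, errorCode) constructor, matching the getMessage() and getErrorCode() assertions, and the method itself is illustrative.

import com.fasterxml.jackson.databind.MappingIterator;
import io.confluent.kafkarest.entities.v3.ProduceRequest;
import io.confluent.rest.exceptions.RestConstraintViolationException;

// Sketch only (assumed guard): fail fast on a null request stream with the message and
// error code the test asserts.
static void requireNonNullPayload(MappingIterator<ProduceRequest> requests) {
    if (requests == null) {
        throw new RestConstraintViolationException(
            "Payload error. Null input provided. Data is required.", 42206);
    }
}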

Aggregations

ProduceRequest (io.confluent.kafkarest.entities.v3.ProduceRequest): 6
RequestRateLimiter (io.confluent.kafkarest.ratelimit.RequestRateLimiter): 6
ChunkedOutputFactory (io.confluent.kafkarest.response.ChunkedOutputFactory): 6
FakeAsyncResponse (io.confluent.kafkarest.response.FakeAsyncResponse): 6
Properties (java.util.Properties): 6
Test (org.junit.jupiter.api.Test): 6
ResultOrError (io.confluent.kafkarest.response.StreamingResponse.ResultOrError): 5
ProduceResponse (io.confluent.kafkarest.entities.v3.ProduceResponse): 4
ErrorResponse (io.confluent.kafkarest.exceptions.v3.ErrorResponse): 3
RateLimitExceededException (io.confluent.kafkarest.ratelimit.RateLimitExceededException): 2
RestConstraintViolationException (io.confluent.rest.exceptions.RestConstraintViolationException): 1