Search in sources:

Example 1 with RateLimitExceededException

Use of io.confluent.kafkarest.ratelimit.RateLimitExceededException in project kafka-rest by confluentinc.

From class ProduceRateLimitersTest, method rateLimitedOnCountExceptionThrown.

@Test
@Inject
public void rateLimitedOnCountExceptionThrown() {
    Properties properties = new Properties();
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, Integer.toString(3600000));
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    // Record phase: the first produce call passes both limiters...
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    // ...and the second call on the count limiter throws.
    rateLimiterForCount.rateLimit(anyInt());
    EasyMock.expectLastCall().andThrow(new RateLimitExceededException());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceRateLimiters produceRateLimiters =
        new ProduceRateLimiters(
            countLimitProvider,
            bytesLimitProvider,
            Boolean.parseBoolean(properties.getProperty(PRODUCE_RATE_LIMIT_ENABLED)),
            Duration.ofMillis(
                Integer.parseInt(properties.getProperty(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS))));
    produceRateLimiters.rateLimit("clusterId", 10L);
    RateLimitExceededException e =
        assertThrows(
            RateLimitExceededException.class,
            () -> produceRateLimiters.rateLimit("clusterId", 10L));
    assertEquals("The rate limit of requests per second has been exceeded.", e.getMessage());
    verify(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used : RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) ProduceRateLimiters(io.confluent.kafkarest.resources.v3.ProduceRateLimiters) Properties(java.util.Properties) Inject(javax.inject.Inject) Test(org.junit.jupiter.api.Test)
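The test above leans on EasyMock's record/replay/verify cycle for void methods: calling rateLimit(anyInt()) during the record phase registers an expectation, and expectLastCall().andThrow(...) attaches the failure to the most recently recorded call. Below is a minimal, self-contained sketch of that pattern, using a hypothetical Notifier interface rather than any kafka-rest type; the kafka-rest test follows the same shape, with the RequestRateLimiter mocks standing in for Notifier.

import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;

public class EasyMockVoidMethodSketchTest {

    // Hypothetical interface, used only to illustrate the mocking pattern.
    public interface Notifier {
        void notifyListeners();
    }

    @Test
    public void secondRecordedCallThrows() {
        Notifier notifier = mock(Notifier.class);
        // Record phase: the first call is expected to succeed...
        notifier.notifyListeners();
        // ...and the second recorded call is told to throw.
        notifier.notifyListeners();
        expectLastCall().andThrow(new IllegalStateException("limit hit"));
        replay(notifier);

        notifier.notifyListeners();
        assertThrows(IllegalStateException.class, notifier::notifyListeners);
        verify(notifier);
    }
}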

Example 2 with RateLimitExceededException

Use of io.confluent.kafkarest.ratelimit.RateLimitExceededException in project kafka-rest by confluentinc.

From class ProduceActionTest, method produceWithCountLimit.

@Test
public void produceWithCountLimit() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    // first record is 25 bytes long
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(30));
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    // Record phase: the first produce call passes both limiters; the second call
    // on the count limiter throws.
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    EasyMock.expectLastCall().andThrow(new RateLimitExceededException());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 1, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS);
    // expected results
    ProduceResponse produceResponse = getProduceResponse(0);
    ResultOrError resultOrErrorOK = ResultOrError.result(produceResponse);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful first produce
    mockedChunkedOutput.write(resultOrErrorOK);
    mockedChunkedOutput.close();
    ErrorResponse err = ErrorResponse.create(429, "Request rate limit exceeded: The rate limit of requests per second has been exceeded.");
    ResultOrError resultOrErrorFail = ResultOrError.error(err);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // failing second produce
    mockedChunkedOutput.write(resultOrErrorFail);
    // error close
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests);
    FakeAsyncResponse fakeAsyncResponse2 = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse2, "clusterId", "topicName", requests);
    // check results
    verify(requests, mockedChunkedOutput, countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used : ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) Properties(java.util.Properties) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) Test(org.junit.jupiter.api.Test)
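Here the rate-limit failure is not asserted with assertThrows; instead the second produce call is expected to reach the chunked output as ResultOrError.error(...) built from ErrorResponse.create(429, ...). A rough sketch of that exception-to-error mapping, written as a hypothetical helper; the real conversion happens inside kafka-rest's streaming response handling and may look different.

import io.confluent.kafkarest.exceptions.v3.ErrorResponse;
import io.confluent.kafkarest.ratelimit.RateLimitExceededException;
import io.confluent.kafkarest.response.StreamingResponse.ResultOrError;

// Hypothetical helper for illustration only; not the kafka-rest implementation.
final class RateLimitErrorMappingSketch {

    private RateLimitErrorMappingSketch() {
    }

    // Produces the streamed error entry the test expects:
    // "Request rate limit exceeded: The rate limit of requests per second has been exceeded."
    static ResultOrError toResultOrError(RateLimitExceededException error) {
        return ResultOrError.error(
            ErrorResponse.create(429, "Request rate limit exceeded: " + error.getMessage()));
    }
}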

Example 3 with RateLimitExceededException

Use of io.confluent.kafkarest.ratelimit.RateLimitExceededException in project kafka-rest by confluentinc.

From class ProduceAction, method produce.

private CompletableFuture<ProduceResponse> produce(
    String clusterId, String topicName, ProduceRequest request, ProduceController controller) {
    try {
        produceRateLimiters.rateLimit(clusterId, request.getOriginalSize());
    } catch (RateLimitExceededException e) {
        // KREST-4356 Use our own CompletionException that will avoid the costly stack trace fill.
        throw new StacklessCompletionException(e);
    }
    Instant requestInstant = Instant.now();
    Optional<RegisteredSchema> keySchema =
        request.getKey().flatMap(key -> getSchema(topicName, /* isKey= */ true, key));
    Optional<EmbeddedFormat> keyFormat =
        keySchema
            .map(schema -> Optional.of(schema.getFormat()))
            .orElse(request.getKey().flatMap(ProduceRequestData::getFormat));
    Optional<ByteString> serializedKey =
        serialize(topicName, keyFormat, keySchema, request.getKey(), /* isKey= */ true);
    Optional<RegisteredSchema> valueSchema =
        request.getValue().flatMap(value -> getSchema(topicName, /* isKey= */ false, value));
    Optional<EmbeddedFormat> valueFormat =
        valueSchema
            .map(schema -> Optional.of(schema.getFormat()))
            .orElse(request.getValue().flatMap(ProduceRequestData::getFormat));
    Optional<ByteString> serializedValue =
        serialize(topicName, valueFormat, valueSchema, request.getValue(), /* isKey= */ false);
    recordRequestMetrics(request.getOriginalSize());
    CompletableFuture<ProduceResult> produceResult =
        controller.produce(
            clusterId,
            topicName,
            request.getPartitionId(),
            request.getHeaders().stream().collect(PRODUCE_REQUEST_HEADER_COLLECTOR),
            serializedKey,
            serializedValue,
            request.getTimestamp().orElse(Instant.now()));
    return produceResult.handleAsync((result, error) -> {
        if (error != null) {
            long latency = Duration.between(requestInstant, Instant.now()).toMillis();
            recordErrorMetrics(latency);
            throw new StacklessCompletionException(error);
        }
        return result;
    }, executorService).thenApplyAsync(result -> {
        ProduceResponse response = toProduceResponse(clusterId, topicName, keyFormat, keySchema, valueFormat, valueSchema, result);
        long latency = Duration.between(requestInstant, result.getCompletionTimestamp()).toMillis();
        recordResponseMetrics(latency);
        return response;
    }, executorService);
}
Also used : PathParam(javax.ws.rs.PathParam) Provider(javax.inject.Provider) Produces(javax.ws.rs.Produces) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) SerializationException(org.apache.kafka.common.errors.SerializationException) MappingIterator(com.fasterxml.jackson.databind.MappingIterator) StacklessCompletionException(io.confluent.kafkarest.exceptions.StacklessCompletionException) Path(javax.ws.rs.Path) LoggerFactory(org.slf4j.LoggerFactory) CompletableFuture(java.util.concurrent.CompletableFuture) PerformanceMetric(io.confluent.rest.annotations.PerformanceMetric) ProduceRequestHeader(io.confluent.kafkarest.entities.v3.ProduceRequest.ProduceRequestHeader) Function(java.util.function.Function) Inject(javax.inject.Inject) NullNode(com.fasterxml.jackson.databind.node.NullNode) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) MediaType(javax.ws.rs.core.MediaType) Consumes(javax.ws.rs.Consumes) ProduceRequestData(io.confluent.kafkarest.entities.v3.ProduceRequest.ProduceRequestData) Objects.requireNonNull(java.util.Objects.requireNonNull) Duration(java.time.Duration) ProduceResponseData(io.confluent.kafkarest.entities.v3.ProduceResponse.ProduceResponseData) Collector(java.util.stream.Collector) ImmutableMultimap(com.google.common.collect.ImmutableMultimap) RegisteredSchema(io.confluent.kafkarest.entities.RegisteredSchema) BadRequestException(io.confluent.kafkarest.exceptions.BadRequestException) ProducerMetricsRegistry(io.confluent.kafkarest.ProducerMetricsRegistry) ExecutorService(java.util.concurrent.ExecutorService) EmbeddedFormat(io.confluent.kafkarest.entities.EmbeddedFormat) SchemaManager(io.confluent.kafkarest.controllers.SchemaManager) POST(javax.ws.rs.POST) Logger(org.slf4j.Logger) ProduceController(io.confluent.kafkarest.controllers.ProduceController) ProduceResult(io.confluent.kafkarest.entities.ProduceResult) AsyncResponse(javax.ws.rs.container.AsyncResponse) DoNotRateLimit(io.confluent.kafkarest.ratelimit.DoNotRateLimit) ResourceName(io.confluent.kafkarest.extension.ResourceAccesslistFeature.ResourceName) Instant(java.time.Instant) Suspended(javax.ws.rs.container.Suspended) ByteString(com.google.protobuf.ByteString) ProducerMetrics(io.confluent.kafkarest.ProducerMetrics) StreamingResponseFactory(io.confluent.kafkarest.response.StreamingResponseFactory) Errors(io.confluent.kafkarest.Errors) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) Optional(java.util.Optional) RecordSerializer(io.confluent.kafkarest.controllers.RecordSerializer) ProduceResponseThreadPool(io.confluent.kafkarest.resources.v3.V3ResourcesModule.ProduceResponseThreadPool) Collections(java.util.Collections)
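The catch block above wraps the failure in StacklessCompletionException; the KREST-4356 comment explains that this avoids the cost of filling in a stack trace. A common way to build such an exception is to override fillInStackTrace(). The sketch below assumes that approach and is not necessarily how the kafka-rest class is implemented.

import java.util.concurrent.CompletionException;

// Sketch only: suppresses stack-trace capture by overriding fillInStackTrace().
// The real io.confluent.kafkarest.exceptions.StacklessCompletionException may differ.
public class StacklessCompletionExceptionSketch extends CompletionException {

    public StacklessCompletionExceptionSketch(Throwable cause) {
        super(cause);
    }

    @Override
    public synchronized Throwable fillInStackTrace() {
        // Skip the expensive stack walk; the wrapped cause still carries its own trace.
        return this;
    }
}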

Example 4 with RateLimitExceededException

Use of io.confluent.kafkarest.ratelimit.RateLimitExceededException in project kafka-rest by confluentinc.

From class ProduceRateLimitersTest, method rateLimitedOnBytesExceptionThrown.

@Test
@Inject
public void rateLimitedOnBytesExceptionThrown() {
    Properties properties = new Properties();
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, Integer.toString(3600000));
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    // Record phase: the first produce call passes both limiters; the second call
    // passes the count limiter but the bytes limiter throws.
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    EasyMock.expectLastCall().andThrow(new RateLimitExceededException());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceRateLimiters produceRateLimiters =
        new ProduceRateLimiters(
            countLimitProvider,
            bytesLimitProvider,
            Boolean.parseBoolean(properties.getProperty(PRODUCE_RATE_LIMIT_ENABLED)),
            Duration.ofMillis(
                Integer.parseInt(properties.getProperty(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS))));
    produceRateLimiters.rateLimit("clusterId", 10L);
    RateLimitExceededException e =
        assertThrows(
            RateLimitExceededException.class,
            () -> produceRateLimiters.rateLimit("clusterId", 10L));
    assertEquals("The rate limit of requests per second has been exceeded.", e.getMessage());
    verify(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used : RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) ProduceRateLimiters(io.confluent.kafkarest.resources.v3.ProduceRateLimiters) Properties(java.util.Properties) Inject(javax.inject.Inject) Test(org.junit.jupiter.api.Test)
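Both ProduceRateLimitersTest methods construct ProduceRateLimiters from a count-limiter provider, a bytes-limiter provider, the enabled flag, and a cache expiry, and then call rateLimit(clusterId, size). The sketch below is only an illustration of how such a wrapper could consult both limiters per cluster; it uses a plain ConcurrentHashMap where the real class configures an expiring cache, and it omits the enabled flag.

import io.confluent.kafkarest.ratelimit.RequestRateLimiter;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.inject.Provider;

// Illustrative sketch only; not the io.confluent.kafkarest.resources.v3.ProduceRateLimiters class.
final class ProduceRateLimitersSketch {

    private final Provider<RequestRateLimiter> countLimiterProvider;
    private final Provider<RequestRateLimiter> bytesLimiterProvider;
    private final Map<String, RequestRateLimiter[]> limitersByCluster = new ConcurrentHashMap<>();

    ProduceRateLimitersSketch(
        Provider<RequestRateLimiter> countLimiterProvider,
        Provider<RequestRateLimiter> bytesLimiterProvider) {
        this.countLimiterProvider = countLimiterProvider;
        this.bytesLimiterProvider = bytesLimiterProvider;
    }

    // Propagates RateLimitExceededException from whichever limiter rejects the request.
    void rateLimit(String clusterId, long requestSizeBytes) {
        RequestRateLimiter[] limiters = limitersByCluster.computeIfAbsent(
            clusterId,
            id -> new RequestRateLimiter[] {countLimiterProvider.get(), bytesLimiterProvider.get()});
        limiters[0].rateLimit(1);                      // one produce request against the count limit
        limiters[1].rateLimit((int) requestSizeBytes); // request size against the bytes limit
    }
}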

Example 5 with RateLimitExceededException

Use of io.confluent.kafkarest.ratelimit.RateLimitExceededException in project kafka-rest by confluentinc.

From class ProduceActionTest, method produceWithByteLimit.

@Test
public void produceWithByteLimit() throws Exception {
    // config
    final int TOTAL_NUMBER_OF_PRODUCE_CALLS = 2;
    Properties properties = new Properties();
    properties.put(PRODUCE_MAX_REQUESTS_PER_SECOND, "100");
    // first record is 25 bytes long
    properties.put(PRODUCE_MAX_BYTES_PER_SECOND, Integer.toString(30));
    properties.put(PRODUCE_RATE_LIMIT_CACHE_EXPIRY_MS, "3600000");
    properties.put(PRODUCE_RATE_LIMIT_ENABLED, "true");
    // setup
    ChunkedOutputFactory chunkedOutputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> mockedChunkedOutput = getChunkedOutput(chunkedOutputFactory, TOTAL_NUMBER_OF_PRODUCE_CALLS);
    Provider<RequestRateLimiter> countLimitProvider = mock(Provider.class);
    Provider<RequestRateLimiter> bytesLimitProvider = mock(Provider.class);
    RequestRateLimiter rateLimiterForCount = mock(RequestRateLimiter.class);
    RequestRateLimiter rateLimiterForBytes = mock(RequestRateLimiter.class);
    expect(countLimitProvider.get()).andReturn(rateLimiterForCount);
    expect(bytesLimitProvider.get()).andReturn(rateLimiterForBytes);
    // Record phase: the first produce call passes both limiters; the second call
    // passes the count limiter but the bytes limiter throws.
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    rateLimiterForCount.rateLimit(anyInt());
    rateLimiterForBytes.rateLimit(anyInt());
    EasyMock.expectLastCall().andThrow(new RateLimitExceededException());
    replay(countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
    ProduceAction produceAction = getProduceAction(properties, chunkedOutputFactory, 1, countLimitProvider, bytesLimitProvider);
    MappingIterator<ProduceRequest> requests = getProduceRequestsMappingIterator(TOTAL_NUMBER_OF_PRODUCE_CALLS);
    // expected results
    ProduceResponse produceResponse = getProduceResponse(0);
    ResultOrError resultOrErrorOK = ResultOrError.result(produceResponse);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // successful first produce
    mockedChunkedOutput.write(resultOrErrorOK);
    mockedChunkedOutput.close();
    ErrorResponse err = ErrorResponse.create(429, "Request rate limit exceeded: The rate limit of requests per second has been exceeded.");
    ResultOrError resultOrErrorFail = ResultOrError.error(err);
    expect(mockedChunkedOutput.isClosed()).andReturn(false);
    // failing second produce
    mockedChunkedOutput.write(resultOrErrorFail);
    // error close
    mockedChunkedOutput.close();
    replay(mockedChunkedOutput, chunkedOutputFactory);
    // run test
    FakeAsyncResponse fakeAsyncResponse = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse, "clusterId", "topicName", requests);
    FakeAsyncResponse fakeAsyncResponse2 = new FakeAsyncResponse();
    produceAction.produce(fakeAsyncResponse2, "clusterId", "topicName", requests);
    // check results
    verify(requests, mockedChunkedOutput, countLimitProvider, bytesLimitProvider, rateLimiterForCount, rateLimiterForBytes);
}
Also used : ChunkedOutputFactory(io.confluent.kafkarest.response.ChunkedOutputFactory) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) FakeAsyncResponse(io.confluent.kafkarest.response.FakeAsyncResponse) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) Properties(java.util.Properties) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) RequestRateLimiter(io.confluent.kafkarest.ratelimit.RequestRateLimiter) Test(org.junit.jupiter.api.Test)
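In all of these examples RequestRateLimiter is a mock, so no concrete limiter is shown. Conceptually, a byte-based limit behaves like a token bucket where each call asks for as many permits as the request has bytes, which is presumably why the byte-limit test pairs a 30 bytes-per-second budget with 25-byte records. The sketch below illustrates that idea using Guava's RateLimiter purely for illustration; it is not assumed to match kafka-rest's actual RequestRateLimiter implementations.

import com.google.common.util.concurrent.RateLimiter;
import io.confluent.kafkarest.ratelimit.RateLimitExceededException;

// Conceptual sketch only; kafka-rest's RequestRateLimiter implementations are not shown
// in these examples and are not assumed to be built on Guava's RateLimiter.
final class TokenBucketLimiterSketch {

    private final RateLimiter bucket;

    TokenBucketLimiterSketch(double permitsPerSecond) {
        this.bucket = RateLimiter.create(permitsPerSecond);
    }

    // A count limiter would ask for 1 permit per request; a bytes limiter would ask for
    // one permit per byte. An exhausted bucket surfaces as the 429 error seen above.
    void rateLimit(int permits) {
        if (!bucket.tryAcquire(permits)) {
            throw new RateLimitExceededException();
        }
    }
}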

Aggregations

RateLimitExceededException (io.confluent.kafkarest.ratelimit.RateLimitExceededException) 5
RequestRateLimiter (io.confluent.kafkarest.ratelimit.RequestRateLimiter) 4
Properties (java.util.Properties) 4
Test (org.junit.jupiter.api.Test) 4
ProduceRequest (io.confluent.kafkarest.entities.v3.ProduceRequest) 3
ProduceResponse (io.confluent.kafkarest.entities.v3.ProduceResponse) 3
Inject (javax.inject.Inject) 3
ErrorResponse (io.confluent.kafkarest.exceptions.v3.ErrorResponse) 2
ProduceRateLimiters (io.confluent.kafkarest.resources.v3.ProduceRateLimiters) 2
ChunkedOutputFactory (io.confluent.kafkarest.response.ChunkedOutputFactory) 2
FakeAsyncResponse (io.confluent.kafkarest.response.FakeAsyncResponse) 2
ResultOrError (io.confluent.kafkarest.response.StreamingResponse.ResultOrError) 2
MappingIterator (com.fasterxml.jackson.databind.MappingIterator) 1
NullNode (com.fasterxml.jackson.databind.node.NullNode) 1
ImmutableMultimap (com.google.common.collect.ImmutableMultimap) 1
ByteString (com.google.protobuf.ByteString) 1
Errors (io.confluent.kafkarest.Errors) 1
ProducerMetrics (io.confluent.kafkarest.ProducerMetrics) 1
ProducerMetricsRegistry (io.confluent.kafkarest.ProducerMetricsRegistry) 1
ProduceController (io.confluent.kafkarest.controllers.ProduceController) 1