Search in sources :

Example 31 with ProduceResponse

use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

the class StreamingResponseTest method testWriteToChunkedOutput.

@Test
public void testWriteToChunkedOutput() throws IOException {
    // Single produce request with AVRO string key "foo" and value "bar".
    ProduceRequest produceRequest =
        ProduceRequest.builder()
            .setKey(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.AVRO)
                    .setRawSchema("{\"type\": \"string\"}")
                    .setData(TextNode.valueOf("foo"))
                    .build())
            .setValue(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.AVRO)
                    .setRawSchema("{\"type\": \"string\"}")
                    .setData(TextNode.valueOf("bar"))
                    .build())
            .setOriginalSize(0L)
            .build();

    // Iterator yields exactly one request, then reports exhaustion and is closed.
    MappingIterator<ProduceRequest> requestIterator = mock(MappingIterator.class);
    expect(requestIterator.hasNext()).andReturn(true);
    expect(requestIterator.nextValue()).andReturn(produceRequest);
    expect(requestIterator.hasNext()).andReturn(false);
    requestIterator.close();
    replay(requestIterator);

    // The chunked output must receive the successful result once, then be closed.
    ChunkedOutputFactory outputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> chunkedOutput = mock(ChunkedOutput.class);
    ProduceResponse expectedResponse =
        ProduceResponse.builder()
            .setClusterId("clusterId")
            .setTopicName("topicName")
            .setPartitionId(1)
            .setOffset(1L)
            .build();
    ResultOrError expectedResult = ResultOrError.result(expectedResponse);
    expect(outputFactory.getChunkedOutput()).andReturn(chunkedOutput);
    chunkedOutput.write(expectedResult);
    expect(chunkedOutput.isClosed()).andReturn(false);
    chunkedOutput.close();
    replay(chunkedOutput, outputFactory);

    StreamingResponseFactory streamingResponseFactory =
        new StreamingResponseFactory(outputFactory);
    StreamingResponse<ProduceRequest> streamingResponse =
        streamingResponseFactory.from(requestIterator);
    CompletableFuture<ProduceResponse> produceResponseFuture =
        CompletableFuture.completedFuture(expectedResponse);
    FakeAsyncResponse response = new FakeAsyncResponse();
    streamingResponse.compose(result -> produceResponseFuture).resume(response);

    EasyMock.verify(chunkedOutput, outputFactory, requestIterator);
}
Also used : ChunkedOutput(org.glassfish.jersey.server.ChunkedOutput) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) MappingIterator(com.fasterxml.jackson.databind.MappingIterator) EasyMock.mock(org.easymock.EasyMock.mock) IOException(java.io.IOException) CompletableFuture(java.util.concurrent.CompletableFuture) EasyMock.expect(org.easymock.EasyMock.expect) EasyMock(org.easymock.EasyMock) TextNode(com.fasterxml.jackson.databind.node.TextNode) Test(org.junit.jupiter.api.Test) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) ProduceRequestData(io.confluent.kafkarest.entities.v3.ProduceRequest.ProduceRequestData) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) EasyMock.replay(org.easymock.EasyMock.replay) RuntimeJsonMappingException(com.fasterxml.jackson.databind.RuntimeJsonMappingException) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) EmbeddedFormat(io.confluent.kafkarest.entities.EmbeddedFormat) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) CompletableFuture(java.util.concurrent.CompletableFuture) Test(org.junit.jupiter.api.Test)

Example 32 with ProduceResponse

use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

the class StreamingResponseTest method testGracePeriodExceededExceptionThrown.

@Test
public void testGracePeriodExceededExceptionThrown() throws IOException {
    // Single produce request with AVRO string key "foo" and value "bar".
    ProduceRequest produceRequest =
        ProduceRequest.builder()
            .setKey(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.AVRO)
                    .setRawSchema("{\"type\": \"string\"}")
                    .setData(TextNode.valueOf("foo"))
                    .build())
            .setValue(
                ProduceRequestData.builder()
                    .setFormat(EmbeddedFormat.AVRO)
                    .setRawSchema("{\"type\": \"string\"}")
                    .setData(TextNode.valueOf("bar"))
                    .build())
            .setOriginalSize(0L)
            .build();

    // Iterator yields exactly one request, then reports exhaustion and is closed.
    MappingIterator<ProduceRequest> requestIterator = mock(MappingIterator.class);
    expect(requestIterator.hasNext()).andReturn(true);
    expect(requestIterator.nextValue()).andReturn(produceRequest);
    expect(requestIterator.hasNext()).andReturn(false);
    requestIterator.close();
    replay(requestIterator);

    // The chunked output must receive the result once, then be closed.
    ChunkedOutputFactory outputFactory = mock(ChunkedOutputFactory.class);
    ChunkedOutput<ResultOrError> chunkedOutput = mock(ChunkedOutput.class);
    ProduceResponse expectedResponse =
        ProduceResponse.builder()
            .setClusterId("clusterId")
            .setTopicName("topicName")
            .setPartitionId(1)
            .setOffset(1L)
            .build();
    ResultOrError expectedResult = ResultOrError.result(expectedResponse);
    expect(outputFactory.getChunkedOutput()).andReturn(chunkedOutput);
    chunkedOutput.write(expectedResult);
    expect(chunkedOutput.isClosed()).andReturn(false);
    chunkedOutput.close();
    replay(outputFactory, chunkedOutput);

    StreamingResponseFactory streamingResponseFactory =
        new StreamingResponseFactory(outputFactory);
    StreamingResponse<ProduceRequest> streamingResponse =
        streamingResponseFactory.from(requestIterator);
    CompletableFuture<ProduceResponse> produceResponseFuture =
        CompletableFuture.completedFuture(expectedResponse);
    FakeAsyncResponse response = new FakeAsyncResponse();
    streamingResponse.compose(result -> produceResponseFuture).resume(response);

    EasyMock.verify(chunkedOutput, outputFactory, requestIterator);
}
Also used : ChunkedOutput(org.glassfish.jersey.server.ChunkedOutput) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) MappingIterator(com.fasterxml.jackson.databind.MappingIterator) EasyMock.mock(org.easymock.EasyMock.mock) IOException(java.io.IOException) CompletableFuture(java.util.concurrent.CompletableFuture) EasyMock.expect(org.easymock.EasyMock.expect) EasyMock(org.easymock.EasyMock) TextNode(com.fasterxml.jackson.databind.node.TextNode) Test(org.junit.jupiter.api.Test) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) ProduceRequestData(io.confluent.kafkarest.entities.v3.ProduceRequest.ProduceRequestData) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) EasyMock.replay(org.easymock.EasyMock.replay) RuntimeJsonMappingException(com.fasterxml.jackson.databind.RuntimeJsonMappingException) ErrorResponse(io.confluent.kafkarest.exceptions.v3.ErrorResponse) EmbeddedFormat(io.confluent.kafkarest.entities.EmbeddedFormat) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) ResultOrError(io.confluent.kafkarest.response.StreamingResponse.ResultOrError) CompletableFuture(java.util.concurrent.CompletableFuture) Test(org.junit.jupiter.api.Test)

Example 33 with ProduceResponse

use of io.confluent.kafkarest.entities.v3.ProduceResponse in project kafka-rest by confluentinc.

the class ProduceAction method produce.

/**
 * Rate-limits the request, resolves and serializes the key and value (looking up registered
 * schemas where applicable), and hands the record off to the {@link ProduceController}.
 *
 * <p>Request, response, and error metrics are recorded; response-side work runs on the injected
 * {@code executorService} rather than the common pool.
 *
 * @return a future completing with the {@link ProduceResponse} for the written record
 * @throws StacklessCompletionException if the per-cluster rate limit is exceeded (KREST-4356:
 *     avoids the costly stack trace fill of a plain {@code CompletionException})
 */
private CompletableFuture<ProduceResponse> produce(String clusterId, String topicName, ProduceRequest request, ProduceController controller) {
    try {
        produceRateLimiters.rateLimit(clusterId, request.getOriginalSize());
    } catch (RateLimitExceededException e) {
        // KREST-4356 Use our own CompletionException that will avoid the costly stack trace fill.
        throw new StacklessCompletionException(e);
    }
    Instant requestInstant = Instant.now();

    // Resolve the key's schema (if any); the format comes from the schema when one is
    // registered, otherwise from the request itself.
    Optional<RegisteredSchema> keySchema =
        request.getKey().flatMap(key -> getSchema(topicName, /* isKey= */ true, key));
    Optional<EmbeddedFormat> keyFormat =
        keySchema
            .map(schema -> Optional.of(schema.getFormat()))
            .orElse(request.getKey().flatMap(ProduceRequestData::getFormat));
    Optional<ByteString> serializedKey =
        serialize(topicName, keyFormat, keySchema, request.getKey(), /* isKey= */ true);

    // Same resolution and serialization for the value.
    Optional<RegisteredSchema> valueSchema =
        request.getValue().flatMap(value -> getSchema(topicName, /* isKey= */ false, value));
    Optional<EmbeddedFormat> valueFormat =
        valueSchema
            .map(schema -> Optional.of(schema.getFormat()))
            .orElse(request.getValue().flatMap(ProduceRequestData::getFormat));
    Optional<ByteString> serializedValue =
        serialize(topicName, valueFormat, valueSchema, request.getValue(), /* isKey= */ false);

    recordRequestMetrics(request.getOriginalSize());

    CompletableFuture<ProduceResult> produceResult =
        controller.produce(
            clusterId,
            topicName,
            request.getPartitionId(),
            request.getHeaders().stream().collect(PRODUCE_REQUEST_HEADER_COLLECTOR),
            serializedKey,
            serializedValue,
            // orElseGet: only read the clock when the caller did not supply a timestamp.
            request.getTimestamp().orElseGet(Instant::now));

    return produceResult
        .handleAsync(
            (result, error) -> {
                if (error != null) {
                    long latency = Duration.between(requestInstant, Instant.now()).toMillis();
                    recordErrorMetrics(latency);
                    throw new StacklessCompletionException(error);
                }
                return result;
            },
            executorService)
        .thenApplyAsync(
            result -> {
                ProduceResponse response =
                    toProduceResponse(
                        clusterId, topicName, keyFormat, keySchema, valueFormat, valueSchema, result);
                // Latency is measured to the broker-reported completion time, not "now".
                long latency =
                    Duration.between(requestInstant, result.getCompletionTimestamp()).toMillis();
                recordResponseMetrics(latency);
                return response;
            },
            executorService);
}
Also used : PathParam(javax.ws.rs.PathParam) Provider(javax.inject.Provider) Produces(javax.ws.rs.Produces) ProduceRequest(io.confluent.kafkarest.entities.v3.ProduceRequest) SerializationException(org.apache.kafka.common.errors.SerializationException) MappingIterator(com.fasterxml.jackson.databind.MappingIterator) StacklessCompletionException(io.confluent.kafkarest.exceptions.StacklessCompletionException) Path(javax.ws.rs.Path) LoggerFactory(org.slf4j.LoggerFactory) CompletableFuture(java.util.concurrent.CompletableFuture) PerformanceMetric(io.confluent.rest.annotations.PerformanceMetric) ProduceRequestHeader(io.confluent.kafkarest.entities.v3.ProduceRequest.ProduceRequestHeader) Function(java.util.function.Function) Inject(javax.inject.Inject) NullNode(com.fasterxml.jackson.databind.node.NullNode) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) MediaType(javax.ws.rs.core.MediaType) Consumes(javax.ws.rs.Consumes) ProduceRequestData(io.confluent.kafkarest.entities.v3.ProduceRequest.ProduceRequestData) Objects.requireNonNull(java.util.Objects.requireNonNull) Duration(java.time.Duration) ProduceResponseData(io.confluent.kafkarest.entities.v3.ProduceResponse.ProduceResponseData) Collector(java.util.stream.Collector) ImmutableMultimap(com.google.common.collect.ImmutableMultimap) RegisteredSchema(io.confluent.kafkarest.entities.RegisteredSchema) BadRequestException(io.confluent.kafkarest.exceptions.BadRequestException) ProducerMetricsRegistry(io.confluent.kafkarest.ProducerMetricsRegistry) ExecutorService(java.util.concurrent.ExecutorService) EmbeddedFormat(io.confluent.kafkarest.entities.EmbeddedFormat) SchemaManager(io.confluent.kafkarest.controllers.SchemaManager) POST(javax.ws.rs.POST) Logger(org.slf4j.Logger) ProduceController(io.confluent.kafkarest.controllers.ProduceController) ProduceResult(io.confluent.kafkarest.entities.ProduceResult) AsyncResponse(javax.ws.rs.container.AsyncResponse) 
DoNotRateLimit(io.confluent.kafkarest.ratelimit.DoNotRateLimit) ResourceName(io.confluent.kafkarest.extension.ResourceAccesslistFeature.ResourceName) Instant(java.time.Instant) Suspended(javax.ws.rs.container.Suspended) ByteString(com.google.protobuf.ByteString) ProducerMetrics(io.confluent.kafkarest.ProducerMetrics) StreamingResponseFactory(io.confluent.kafkarest.response.StreamingResponseFactory) Errors(io.confluent.kafkarest.Errors) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) Optional(java.util.Optional) RecordSerializer(io.confluent.kafkarest.controllers.RecordSerializer) ProduceResponseThreadPool(io.confluent.kafkarest.resources.v3.V3ResourcesModule.ProduceResponseThreadPool) Collections(java.util.Collections) EmbeddedFormat(io.confluent.kafkarest.entities.EmbeddedFormat) ByteString(com.google.protobuf.ByteString) ProduceResponse(io.confluent.kafkarest.entities.v3.ProduceResponse) Instant(java.time.Instant) RateLimitExceededException(io.confluent.kafkarest.ratelimit.RateLimitExceededException) RegisteredSchema(io.confluent.kafkarest.entities.RegisteredSchema) ProduceResult(io.confluent.kafkarest.entities.ProduceResult) StacklessCompletionException(io.confluent.kafkarest.exceptions.StacklessCompletionException)

Example 34 with ProduceResponse

use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc.

the class AvroProducerTest method testProduceToPartition.

protected <K, V> void testProduceToPartition(List<SchemaPartitionProduceRecord> records, List<PartitionOffset> offsetResponse, Map<String, String> queryParams) {
    // Only the value schema is supplied; key schema and both schema ids are omitted.
    SchemaPartitionProduceRequest payload =
        SchemaPartitionProduceRequest.create(
            records,
            /* keySchema= */ null,
            /* keySchemaId= */ null,
            /* valueSchema= */ valueSchemaStr,
            /* valueSchemaId= */ null);
    Response response =
        request("/topics/" + topicName + "/partitions/0", queryParams)
            .post(Entity.entity(payload, Versions.KAFKA_V2_JSON_AVRO));
    assertOKResponse(response, Versions.KAFKA_V2_JSON);

    ProduceResponse produced = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
    assertEquals(offsetResponse, produced.getOffsets());
    // Verify the records actually landed in partition 0, decoded with the Avro deserializer.
    TestUtils.assertTopicContains(
        plaintextBrokerList,
        topicName,
        payload.toProduceRequest().getRecords(),
        0,
        KafkaAvroDeserializer.class.getName(),
        KafkaAvroDeserializer.class.getName(),
        deserializerProps,
        false);
    // First registered value schema gets id 1.
    assertEquals((Integer) 1, produced.getValueSchemaId());
}
Also used : ProduceResponse(io.confluent.kafkarest.entities.v2.ProduceResponse) TestUtils.assertOKResponse(io.confluent.kafkarest.TestUtils.assertOKResponse) Response(javax.ws.rs.core.Response) SchemaPartitionProduceRequest(io.confluent.kafkarest.entities.v2.SchemaPartitionProduceRequest) ProduceResponse(io.confluent.kafkarest.entities.v2.ProduceResponse) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer)

Example 35 with ProduceResponse

use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc.

the class AbstractProducerTest method testProduceToTopic.

protected <K, V> void testProduceToTopic(String topicName, TopicRequestT request, String keyDeserializerClassName, String valueDeserializerClassName, List<PartitionOffset> offsetResponses, boolean matchPartitions, Map<String, String> queryParams, List<ProduceRecord<K, V>> expected) {
    // POST the request and confirm a 200 with the expected content type.
    Response httpResponse =
        request("/topics/" + topicName, queryParams)
            .post(Entity.entity(request, getEmbeddedContentType()));
    assertOKResponse(httpResponse, Versions.KAFKA_V2_JSON);

    ProduceResponse produced = TestUtils.tryReadEntityOrLog(httpResponse, ProduceResponse.class);
    if (matchPartitions) {
        // Callers that care about partition assignment get the stricter partition check first.
        TestUtils.assertPartitionsEqual(offsetResponses, produced.getOffsets());
    }
    TestUtils.assertPartitionOffsetsEqual(offsetResponses, produced.getOffsets());
    // Finally, confirm the expected records are readable from the topic itself.
    TestUtils.assertTopicContains(
        plaintextBrokerList,
        topicName,
        expected,
        null,
        keyDeserializerClassName,
        valueDeserializerClassName,
        true);
}
Also used : ProduceResponse(io.confluent.kafkarest.entities.v2.ProduceResponse) TestUtils.assertOKResponse(io.confluent.kafkarest.TestUtils.assertOKResponse) Response(javax.ws.rs.core.Response) ProduceResponse(io.confluent.kafkarest.entities.v2.ProduceResponse)

Aggregations

Response (javax.ws.rs.core.Response)61 Test (org.junit.jupiter.api.Test)58 ProduceRequest (io.confluent.kafkarest.entities.v3.ProduceRequest)57 ProduceResponse (io.confluent.kafkarest.entities.v3.ProduceResponse)57 ErrorResponse (io.confluent.kafkarest.exceptions.v3.ErrorResponse)54 ByteString (com.google.protobuf.ByteString)50 ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode)18 SchemaKey (io.confluent.kafkarest.testing.SchemaRegistryFixture.SchemaKey)17 DynamicMessage (com.google.protobuf.DynamicMessage)11 Message (com.google.protobuf.Message)11 ProtobufSchema (io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema)11 ProduceResponse (io.confluent.kafkarest.entities.v2.ProduceResponse)11 TextNode (com.fasterxml.jackson.databind.node.TextNode)10 AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema)10 TestUtils.assertOKResponse (io.confluent.kafkarest.TestUtils.assertOKResponse)10 JsonSchema (io.confluent.kafka.schemaregistry.json.JsonSchema)9 ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer)9 ResultOrError (io.confluent.kafkarest.response.StreamingResponse.ResultOrError)6 RequestRateLimiter (io.confluent.kafkarest.ratelimit.RequestRateLimiter)4 ChunkedOutputFactory (io.confluent.kafkarest.response.ChunkedOutputFactory)4