Use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc: class AbstractProducerTest, method testProduceToAuthorizationError.

protected void testProduceToAuthorizationError(
    String topicName, TopicRequestT request, String contentType, Map<String, String> queryParams) {
  Response response =
      request("/topics/" + topicName, queryParams).post(Entity.entity(request, contentType));
  // An unauthorized produce is rejected at the HTTP layer with 403 Forbidden.
  assertEquals(Response.Status.FORBIDDEN.getStatusCode(), response.getStatus());
  final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
  // Each per-partition offset in the body should also carry the authorization error code.
  for (PartitionOffset pOffset : produceResponse.getOffsets()) {
    assertEquals(Errors.KAFKA_AUTHORIZATION_ERROR_CODE, (int) pOffset.getErrorCode());
  }
}
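As a usage sketch, the same check can be factored into a small standalone helper. It relies only on what the test above shows (getOffsets() returning PartitionOffset entries whose getErrorCode() is a nullable Integer); the helper name is hypothetical and the exact import paths for PartitionOffset and Errors are assumptions.

// Hypothetical helper, not kafka-rest API: true if every partition in the
// response reports the Kafka authorization error code.
static boolean allPartitionsDeniedByAuthorizer(ProduceResponse response) {
  for (PartitionOffset offset : response.getOffsets()) {
    Integer errorCode = offset.getErrorCode(); // assumed null when the record succeeded
    if (errorCode == null || errorCode != Errors.KAFKA_AUTHORIZATION_ERROR_CODE) {
      return false;
    }
  }
  return true;
}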
Use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc: class AbstractProducerTest, method testProduceToTopicFails.

protected void testProduceToTopicFails(
    String topicName, TopicRequestT request, Map<String, String> queryParams) {
  Response response = request("/topics/" + topicName, queryParams)
      .post(Entity.entity(request, Versions.KAFKA_V2_JSON_BINARY));
  // The v2 produce endpoint still answers 200 OK here: per-record failures are
  // reported inside the response body, not through the HTTP status code.
  assertOKResponse(response, Versions.KAFKA_V2_JSON);
  final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
  for (PartitionOffset pOffset : produceResponse.getOffsets()) {
    assertNotNull(pOffset.getError());
  }
}
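Outside of test assertions, the same null-check on getError() can split a response into failed and succeeded partitions. A minimal sketch using java.util and java.util.stream; the grouping itself is illustrative, not kafka-rest API, and it assumes a null getError() means the record was written:

// Illustrative: partition the offsets by whether they carry an error message.
Map<Boolean, List<PartitionOffset>> byFailure = produceResponse.getOffsets().stream()
    .collect(Collectors.partitioningBy(offset -> offset.getError() != null));
List<PartitionOffset> failed = byFailure.get(true);
List<PartitionOffset> succeeded = byFailure.get(false);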
Use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc: class AbstractProducerTest, method testProduceToPartition.

protected <K, V> void testProduceToPartition(String topicName, int partition, PartitionRequestT request,
    String keySerializerClassName, String valueSerializerClassName, List<PartitionOffset> offsetResponse,
    Map<String, String> queryParams, List<ProduceRecord<K, V>> expected) {
  // Post to the requested partition instead of hard-coding partition 0, so the
  // partition argument and the URL stay consistent.
  Response response = request("/topics/" + topicName + "/partitions/" + partition, queryParams)
      .post(Entity.entity(request, getEmbeddedContentType()));
  assertOKResponse(response, Versions.KAFKA_V2_JSON);
  final ProduceResponse poffsetResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
  assertEquals(offsetResponse, poffsetResponse.getOffsets());
  // Verify the records actually landed in the target partition.
  TestUtils.assertTopicContains(plaintextBrokerList, topicName, expected, partition,
      keySerializerClassName, valueSerializerClassName, true);
}
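For reference, the offsetResponse list passed to this helper is typically built per expected record. A hedged sketch, assuming PartitionOffset has a (partition, offset, errorCode, error) constructor, which is an assumption not confirmed by the snippet:

// Illustrative: two records expected in the target partition, no errors.
List<PartitionOffset> expectedOffsets = Arrays.asList(
    new PartitionOffset(partition, 0L, /* errorCode= */ null, /* error= */ null),
    new PartitionOffset(partition, 1L, /* errorCode= */ null, /* error= */ null));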
Use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc: class AvroProducerTest, method testProduceToTopic.

protected <K, V> void testProduceToTopic(List<SchemaTopicProduceRecord> records,
    List<PartitionOffset> offsetResponses, Map<String, String> queryParams) {
  SchemaTopicProduceRequest payload = SchemaTopicProduceRequest.create(
      records, keySchemaStr, /* keySchemaId= */ null, valueSchemaStr, /* valueSchemaId= */ null);
  Response response = request("/topics/" + topicName, queryParams)
      .post(Entity.entity(payload, Versions.KAFKA_V2_JSON_AVRO));
  assertOKResponse(response, Versions.KAFKA_V2_JSON);
  final ProduceResponse produceResponse = TestUtils.tryReadEntityOrLog(response, ProduceResponse.class);
  TestUtils.assertPartitionOffsetsEqual(offsetResponses, produceResponse.getOffsets());
  TestUtils.assertTopicContains(plaintextBrokerList, topicName, payload.toProduceRequest().getRecords(), null,
      KafkaAvroDeserializer.class.getName(), KafkaAvroDeserializer.class.getName(), deserializerProps, false);
  // JUnit takes the expected value first; the key and value schemas are the first two
  // registered with the Schema Registry, hence IDs 1 and 2.
  assertEquals((Integer) 1, produceResponse.getKeySchemaId());
  assertEquals((Integer) 2, produceResponse.getValueSchemaId());
}
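The asserted IDs 1 and 2 hold against a fresh Schema Registry, where the key and value schemas from this produce are the first two registrations. More generally the IDs come back as nullable Integers; a sketch, assuming null means no schema-based format was used:

Integer keySchemaId = produceResponse.getKeySchemaId();   // null for non-schema formats (assumption)
Integer valueSchemaId = produceResponse.getValueSchemaId();
if (keySchemaId != null && valueSchemaId != null) {
  System.out.printf("key schema id=%d, value schema id=%d%n", keySchemaId, valueSchemaId);
}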
Use of io.confluent.kafkarest.entities.v2.ProduceResponse in project kafka-rest by confluentinc: class SchemaProduceConsumeTest, method produceThenConsume_returnsExactlyProduced.

@Test
public void produceThenConsume_returnsExactlyProduced() throws Exception {
  testEnv.kafkaCluster().createTopic(TOPIC, /* numPartitions= */ 1, /* replicationFactor= */ (short) 3);
  Response createConsumerInstanceResponse = testEnv.kafkaRest().target()
      .path(String.format("/consumers/%s", CONSUMER_GROUP))
      .request()
      .post(Entity.entity(
          new CreateConsumerInstanceRequest(
              /* id= */ null,
              /* name= */ null,
              getFormat() != null ? getFormat().name() : null,
              /* autoOffsetReset= */ null,
              /* autoCommitEnable= */ null,
              /* responseMinBytes= */ null,
              /* requestWaitMs= */ null),
          Versions.KAFKA_V2_JSON));
  assertEquals(Status.OK.getStatusCode(), createConsumerInstanceResponse.getStatus());
  CreateConsumerInstanceResponse createConsumerInstance =
      createConsumerInstanceResponse.readEntity(CreateConsumerInstanceResponse.class);
  Response subscribeResponse = testEnv.kafkaRest().target()
      .path(String.format(
          "/consumers/%s/instances/%s/subscription", CONSUMER_GROUP, createConsumerInstance.getInstanceId()))
      .request()
      .post(Entity.entity(new ConsumerSubscriptionRecord(singletonList(TOPIC), null), Versions.KAFKA_V2_JSON));
  assertEquals(Status.NO_CONTENT.getStatusCode(), subscribeResponse.getStatus());
  // Poll once (returning no records) before producing, so the consumer finishes
  // partition assignment and the later read returns the produced data.
  testEnv.kafkaRest().target()
      .path(String.format(
          "/consumers/%s/instances/%s/records", CONSUMER_GROUP, createConsumerInstance.getInstanceId()))
      .request()
      .accept(getContentType())
      .get();
  SchemaTopicProduceRequest produceRequest = new SchemaTopicProduceRequest(getProduceRecords(),
      getKeySchema().canonicalString(), null, getValueSchema().canonicalString(), null);
  ProduceResponse produceResponse = testEnv.kafkaRest().target()
      .path(String.format("/topics/%s", TOPIC))
      .request()
      .post(Entity.entity(produceRequest, getContentType()))
      .readEntity(ProduceResponse.class);
  assertEquals(Status.OK, produceResponse.getRequestStatus());
  Response readRecordsResponse = testEnv.kafkaRest().target()
      .path(String.format(
          "/consumers/%s/instances/%s/records", CONSUMER_GROUP, createConsumerInstance.getInstanceId()))
      .request()
      .accept(getContentType())
      .get();
  assertEquals(Status.OK.getStatusCode(), readRecordsResponse.getStatus());
  List<SchemaConsumerRecord> readRecords =
      readRecordsResponse.readEntity(new GenericType<List<SchemaConsumerRecord>>() {});
  assertMapEquals(producedToMap(getProduceRecords()), consumedToMap(readRecords));
}
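producedToMap and consumedToMap are not shown in this snippet. A hypothetical reconstruction of the consumer side, assuming SchemaConsumerRecord exposes getKey() and getValue() accessors returning Jackson JsonNode (both names and types are assumptions), indexes records by key so the two sides compare order-independently:

// Hypothetical helper: map each consumed record's key node to its value node.
static Map<JsonNode, JsonNode> consumedToMap(List<SchemaConsumerRecord> records) {
  Map<JsonNode, JsonNode> map = new HashMap<>();
  for (SchemaConsumerRecord record : records) {
    map.put(record.getKey(), record.getValue()); // accessor names assumed
  }
  return map;
}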