Use of io.confluent.kafkarest.response.StreamingResponseFactory in the kafka-rest project by confluentinc.
From the class ProduceActionTest, method getProduceAction.
private static ProduceAction getProduceAction(
    ProduceRateLimiters produceRateLimiters, ChunkedOutputFactory chunkedOutputFactory,
    int times, int producerId, boolean errorSchemaRegistry) {
  // Schema manager whose lookup fails, simulating a missing Schema Registry.
  Provider<SchemaManager> schemaManagerProvider = mock(Provider.class);
  SchemaManager schemaManagerMock = mock(SchemaManager.class);
  expect(schemaManagerProvider.get()).andReturn(schemaManagerMock);
  expect(
          schemaManagerMock.getSchema(
              "topicName", Optional.of(EmbeddedFormat.AVRO), Optional.empty(), Optional.empty(),
              Optional.empty(), Optional.empty(), Optional.of("bob"), true))
      .andThrow(
          Errors.invalidPayloadException("Schema Registry must be configured when using schemas."));
  replay(schemaManagerProvider, schemaManagerMock);

  Provider<ProducerMetrics> producerMetricsProvider = mock(Provider.class);
  getProducerMetricsProvider(producerMetricsProvider);
  Provider<RecordSerializer> recordSerializerProvider = getRecordSerializerProvider(errorSchemaRegistry);
  Provider<ProduceController> produceControllerProvider = mock(Provider.class);
  ProduceController produceController = getProduceControllerMock(produceControllerProvider);
  setupExpectsMockCallsForProduce(produceController, times, producerId);
  replay(producerMetricsProvider, produceControllerProvider, produceController);

  StreamingResponseFactory streamingResponseFactory = new StreamingResponseFactory(chunkedOutputFactory);

  // Use a direct executor so produce calls run on the current thread and EasyMock can see the call counts.
  ExecutorService executorService = MoreExecutors.newDirectExecutorService();
  ProduceAction produceAction =
      new ProduceAction(
          schemaManagerProvider, recordSerializerProvider, produceControllerProvider,
          producerMetricsProvider, streamingResponseFactory, produceRateLimiters, executorService);
  produceRateLimiters.clear();
  return produceAction;
}
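For readers unfamiliar with the EasyMock pattern used above, the sketch below is a minimal, self-contained illustration; it is not from the kafka-rest sources, and the Lookup interface and all names in it are invented for this example. It shows the same expect/replay/verify cycle, and why the helper uses MoreExecutors.newDirectExecutorService(): a direct executor runs submitted tasks on the calling thread, so the mocked call has already completed by the time verify(...) checks the recorded call counts.

// Hypothetical sketch of the EasyMock expect/replay pattern with a direct executor.
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import com.google.common.util.concurrent.MoreExecutors;
import java.util.concurrent.ExecutorService;

public class DirectExecutorMockSketch {

  // Hypothetical collaborator, standing in for the real SchemaManager/Provider mocks.
  interface Lookup {
    String find(String key);
  }

  public static void main(String[] args) {
    Lookup lookup = mock(Lookup.class);
    // Record the expected interaction before the mock is put into replay mode.
    expect(lookup.find("topicName")).andReturn("schema-1");
    replay(lookup);

    // The direct executor runs the task synchronously on the calling thread,
    // so the expected call has happened before verify(...) is reached.
    ExecutorService executor = MoreExecutors.newDirectExecutorService();
    executor.submit(() -> lookup.find("topicName"));

    verify(lookup);
  }
}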