Use of org.zalando.nakadi.service.EventStreamConfig in project nakadi by zalando.
From the class EventStreamController, method streamEvents.
@RequestMapping(value = "/event-types/{name}/events", method = RequestMethod.GET)
public StreamingResponseBody streamEvents(
        @PathVariable("name") final String eventTypeName,
        @Nullable @RequestParam(value = "batch_limit", required = false) final Integer batchLimit,
        @Nullable @RequestParam(value = "stream_limit", required = false) final Integer streamLimit,
        @Nullable @RequestParam(value = "batch_flush_timeout", required = false) final Integer batchTimeout,
        @Nullable @RequestParam(value = "stream_timeout", required = false) final Integer streamTimeout,
        @Nullable @RequestParam(value = "stream_keep_alive_limit", required = false) final Integer streamKeepAliveLimit,
        @Nullable @RequestHeader(name = "X-nakadi-cursors", required = false) final String cursorsStr,
        final HttpServletRequest request,
        final HttpServletResponse response,
        final Client client) {
    final String flowId = FlowIdUtils.peek();
    return outputStream -> {
        FlowIdUtils.push(flowId);
        if (blacklistService.isConsumptionBlocked(eventTypeName, client.getClientId())) {
            writeProblemResponse(response, outputStream,
                    Problem.valueOf(Response.Status.FORBIDDEN, "Application or event type is blocked"));
            return;
        }
        final AtomicBoolean connectionReady = closedConnectionsCrutch.listenForConnectionClose(request);
        Counter consumerCounter = null;
        EventStream eventStream = null;
        List<ConnectionSlot> connectionSlots = ImmutableList.of();
        final AtomicBoolean needCheckAuthorization = new AtomicBoolean(false);
        LOG.info("[X-NAKADI-CURSORS] \"{}\" {}", eventTypeName, Optional.ofNullable(cursorsStr).orElse("-"));
        try (Closeable ignore = eventTypeChangeListener.registerListener(
                et -> needCheckAuthorization.set(true), Collections.singletonList(eventTypeName))) {
            final EventType eventType = eventTypeRepository.findByName(eventTypeName);
            authorizeStreamRead(eventTypeName);
            // validate parameters
            final EventStreamConfig streamConfig = EventStreamConfig.builder()
                    .withBatchLimit(batchLimit)
                    .withStreamLimit(streamLimit)
                    .withBatchTimeout(batchTimeout)
                    .withStreamTimeout(streamTimeout)
                    .withStreamKeepAliveLimit(streamKeepAliveLimit)
                    .withEtName(eventTypeName)
                    .withConsumingClient(client)
                    .withCursors(getStreamingStart(eventType, cursorsStr))
                    .withMaxMemoryUsageBytes(maxMemoryUsageBytes)
                    .build();
            // acquire connection slots to limit the number of simultaneous connections from one client
            if (featureToggleService.isFeatureEnabled(LIMIT_CONSUMERS_NUMBER)) {
                final List<String> partitions = streamConfig.getCursors().stream()
                        .map(NakadiCursor::getPartition)
                        .collect(Collectors.toList());
                connectionSlots = consumerLimitingService.acquireConnectionSlots(
                        client.getClientId(), eventTypeName, partitions);
            }
            consumerCounter = metricRegistry.counter(metricNameFor(eventTypeName, CONSUMERS_COUNT_METRIC_NAME));
            consumerCounter.inc();
            final String kafkaQuotaClientId = getKafkaQuotaClientId(eventTypeName, client);
            response.setStatus(HttpStatus.OK.value());
            response.setHeader("Warning", "299 - nakadi - the Low-level API is deprecated and will "
                    + "be removed from a future release. Please consider migrating to the Subscriptions API.");
            response.setContentType("application/x-json-stream");
            final EventConsumer eventConsumer = timelineService.createEventConsumer(
                    kafkaQuotaClientId, streamConfig.getCursors());
            final String bytesFlushedMetricName = MetricUtils.metricNameForLoLAStream(
                    client.getClientId(), eventTypeName);
            final Meter bytesFlushedMeter = this.streamMetrics.meter(bytesFlushedMetricName);
            eventStream = eventStreamFactory.createEventStream(
                    outputStream, eventConsumer, streamConfig, bytesFlushedMeter);
            // Flush status code to client
            outputStream.flush();
            eventStream.streamEvents(connectionReady, () -> {
                if (needCheckAuthorization.getAndSet(false)) {
                    authorizeStreamRead(eventTypeName);
                }
            });
        } catch (final UnparseableCursorException e) {
            LOG.debug("Incorrect syntax of X-nakadi-cursors header: {}. Respond with BAD_REQUEST.",
                    e.getCursors(), e);
            writeProblemResponse(response, outputStream, BAD_REQUEST, e.getMessage());
        } catch (final NoSuchEventTypeException e) {
            writeProblemResponse(response, outputStream, NOT_FOUND, "topic not found");
        } catch (final NoConnectionSlotsException e) {
            LOG.debug("Connection creation failed due to exceeding max connection count");
            writeProblemResponse(response, outputStream, e.asProblem());
        } catch (final NakadiException e) {
            LOG.error("Error while trying to stream events.", e);
            writeProblemResponse(response, outputStream, e.asProblem());
        } catch (final InvalidCursorException e) {
            writeProblemResponse(response, outputStream, PRECONDITION_FAILED, e.getMessage());
        } catch (final AccessDeniedException e) {
            writeProblemResponse(response, outputStream, FORBIDDEN, e.explain());
        } catch (final Exception e) {
            LOG.error("Error while trying to stream events. Respond with INTERNAL_SERVER_ERROR.", e);
            writeProblemResponse(response, outputStream, INTERNAL_SERVER_ERROR, e.getMessage());
        } finally {
            connectionReady.set(false);
            consumerLimitingService.releaseConnectionSlots(connectionSlots);
            if (consumerCounter != null) {
                consumerCounter.dec();
            }
            if (eventStream != null) {
                eventStream.close();
            }
            try {
                outputStream.flush();
            } finally {
                outputStream.close();
            }
        }
    };
}
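The controller builds its EventStreamConfig entirely through the fluent builder, with one withXxx call per query parameter of the low-level API. Below is a minimal sketch of the same chain in isolation; the helper name buildSampleConfig is hypothetical, the parameter values are illustrative, the NakadiCursor package is assumed from its use in this project, and the cursor list is expected to come from a resolver such as getStreamingStart above.

import java.util.List;

import org.zalando.nakadi.domain.NakadiCursor;       // package assumed
import org.zalando.nakadi.service.EventStreamConfig;

// Sketch: assemble a config for an explicit set of streaming parameters.
static EventStreamConfig buildSampleConfig(final List<NakadiCursor> cursors) {
    return EventStreamConfig.builder()
            .withCursors(cursors)           // starting position per partition
            .withBatchLimit(1)              // "batch_limit" query parameter
            .withStreamLimit(2)             // "stream_limit"
            .withBatchTimeout(3)            // "batch_flush_timeout"
            .withStreamTimeout(4)           // "stream_timeout"
            .withStreamKeepAliveLimit(5)    // "stream_keep_alive_limit"
            .build();
}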
Use of org.zalando.nakadi.service.EventStreamConfig in project nakadi by zalando.
From the class EventStreamControllerTest, method whenNoParamsThenDefaultsAreUsed.
@Test
@SuppressWarnings("unchecked")
public void whenNoParamsThenDefaultsAreUsed() throws Exception {
    final ArgumentCaptor<EventStreamConfig> configCaptor = ArgumentCaptor.forClass(EventStreamConfig.class);
    final EventConsumer.LowLevelConsumer eventConsumerMock = mock(EventConsumer.LowLevelConsumer.class);
    when(topicRepositoryMock.createEventConsumer(any(), any())).thenReturn(eventConsumerMock);
    final EventStream eventStreamMock = mock(EventStream.class);
    when(eventStreamFactoryMock.createEventStream(any(), any(), configCaptor.capture(), any()))
            .thenReturn(eventStreamMock);
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

    mockMvc.perform(get(String.format("/event-types/%s/events", TEST_EVENT_TYPE_NAME))
            .header("X-nakadi-cursors", "[{\"partition\":\"0\",\"offset\":\"000000000000000000\"}]"))
            .andExpect(status().isOk());

    // we have to retry here as mockMvc exits at the very beginning, before the body starts streaming
    TestUtils.waitFor(() -> {
        final EventStreamConfig actualConfig = configCaptor.getValue();
        assertThat(actualConfig.getBatchLimit(), equalTo(1));
        assertThat(actualConfig.getBatchTimeout(), equalTo(30));
        assertThat(actualConfig.getCursors(),
                equalTo(ImmutableList.of(NakadiCursor.of(timeline, "0", "000000000000000000"))));
        assertThat(actualConfig.getStreamKeepAliveLimit(), equalTo(0));
        assertThat(actualConfig.getStreamLimit(), equalTo(0));
        assertThat(actualConfig.getStreamTimeout(),
                greaterThanOrEqualTo(EventStreamConfig.MAX_STREAM_TIMEOUT - 1200));
        assertThat(actualConfig.getStreamTimeout(), lessThanOrEqualTo(EventStreamConfig.MAX_STREAM_TIMEOUT));
    }, 2000, 50, MockitoException.class);
}
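Stripped of the waitFor retry, the assertions pin down the defaults used when no query parameters are sent: batch limit 1, batch flush timeout 30, stream limit and keep-alive limit 0, and a stream timeout within 1200 of EventStreamConfig.MAX_STREAM_TIMEOUT. A hedged sketch of the same observation against the builder directly, assuming (this is not shown above) that the defaults are supplied by the builder rather than by the controller:

// Sketch only: whether the builder or the controller fills in these defaults is an assumption.
final EventStreamConfig defaults = EventStreamConfig.builder()
        .withCursors(cursors)   // cursors resolved elsewhere, as in the controller
        .build();
// Expected, per the assertions above:
//   defaults.getBatchLimit()           == 1
//   defaults.getBatchTimeout()         == 30
//   defaults.getStreamLimit()          == 0
//   defaults.getStreamKeepAliveLimit() == 0
//   defaults.getStreamTimeout()        in [MAX_STREAM_TIMEOUT - 1200, MAX_STREAM_TIMEOUT]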
Use of org.zalando.nakadi.service.EventStreamConfig in project nakadi by zalando.
From the class EventStreamControllerTest, method whenNoCursorsThenLatestOffsetsAreUsed.
@Test
public void whenNoCursorsThenLatestOffsetsAreUsed() throws NakadiException, IOException, InvalidCursorException {
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
    final List<PartitionStatistics> tps2 = ImmutableList.of(
            new KafkaPartitionStatistics(timeline, 0, 0, 87),
            new KafkaPartitionStatistics(timeline, 1, 0, 34));
    when(timelineService.getActiveTimeline(any(EventType.class))).thenReturn(timeline);
    when(topicRepositoryMock.loadTopicStatistics(eq(Collections.singletonList(timeline)))).thenReturn(tps2);

    final ArgumentCaptor<EventStreamConfig> configCaptor = ArgumentCaptor.forClass(EventStreamConfig.class);
    final EventStream eventStreamMock = mock(EventStream.class);
    when(eventStreamFactoryMock.createEventStream(any(), any(), configCaptor.capture(), any()))
            .thenReturn(eventStreamMock);

    final StreamingResponseBody responseBody = createStreamingResponseBody(1, 0, 1, 1, 0, null);
    responseBody.writeTo(new ByteArrayOutputStream());

    final EventStreamConfig streamConfig = configCaptor.getValue();
    assertThat(streamConfig.getCursors(),
            equalTo(tps2.stream().map(PartitionStatistics::getLast).collect(Collectors.toList())));
}
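The final assertion states the fallback precisely: when no X-nakadi-cursors header is given (the last argument to createStreamingResponseBody is null), the config's cursors are the newest cursor of each partition reported by the topic repository. A sketch of just that mapping, reusing only the calls exercised in the test:

// Sketch of the latest-offset fallback asserted above.
final List<PartitionStatistics> stats =
        topicRepositoryMock.loadTopicStatistics(Collections.singletonList(timeline));
final List<NakadiCursor> latestCursors = stats.stream()
        .map(PartitionStatistics::getLast)
        .collect(Collectors.toList());
// latestCursors is what EventStreamConfig.getCursors() ends up holding when no cursors are supplied.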
Use of org.zalando.nakadi.service.EventStreamConfig in project nakadi by zalando.
From the class EventStreamControllerTest, method whenNormalCaseThenParametersArePassedToConfigAndStreamStarted.
@Test
public void whenNormalCaseThenParametersArePassedToConfigAndStreamStarted() throws Exception {
    final EventConsumer eventConsumerMock = mock(EventConsumer.class);
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
    when(timelineService.createEventConsumer(
            eq(KAFKA_CLIENT_ID),
            eq(ImmutableList.of(NakadiCursor.of(timeline, "0", "000000000000000000")))))
            .thenReturn(eventConsumerMock);
    when(timelineService.getActiveTimeline(eq(EVENT_TYPE))).thenReturn(timeline);

    final ArgumentCaptor<Integer> statusCaptor = getStatusCaptor();
    final ArgumentCaptor<String> contentTypeCaptor = getContentTypeCaptor();
    final ArgumentCaptor<EventStreamConfig> configCaptor = ArgumentCaptor.forClass(EventStreamConfig.class);
    final EventStream eventStreamMock = mock(EventStream.class);
    when(eventStreamFactoryMock.createEventStream(any(), any(), configCaptor.capture(), any()))
            .thenReturn(eventStreamMock);

    final StreamingResponseBody responseBody = createStreamingResponseBody(
            1, 2, 3, 4, 5, "[{\"partition\":\"0\",\"offset\":\"000000000000000000\"}]");
    final OutputStream outputStream = mock(OutputStream.class);
    responseBody.writeTo(outputStream);

    final EventStreamConfig streamConfig = configCaptor.getValue();
    assertThat(streamConfig, equalTo(EventStreamConfig.builder()
            .withCursors(ImmutableList.of(NakadiCursor.of(timeline, "0", "000000000000000000")))
            .withBatchLimit(1)
            .withStreamLimit(2)
            .withBatchTimeout(3)
            .withStreamTimeout(4)
            .withStreamKeepAliveLimit(5)
            .build()));
    assertThat(statusCaptor.getValue(), equalTo(HttpStatus.OK.value()));
    assertThat(contentTypeCaptor.getValue(), equalTo("application/x-json-stream"));
    verify(timelineService, times(1)).createEventConsumer(
            eq(KAFKA_CLIENT_ID),
            eq(ImmutableList.of(NakadiCursor.of(timeline, "0", "000000000000000000"))));
    verify(eventStreamFactoryMock, times(1))
            .createEventStream(eq(outputStream), eq(eventConsumerMock), eq(streamConfig), any());
    verify(eventStreamMock, times(1)).streamEvents(any(), any());
    verify(outputStream, times(2)).flush();
    verify(outputStream, times(1)).close();
}
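Two details carry this test: the equalTo check on streamConfig only passes if EventStreamConfig compares by value, so a config rebuilt in the test with the same builder chain matches the one captured from the factory; and the verified interactions on the mocked OutputStream (two flush calls, one close) line up with streamEvents above, which flushes once right after the status and headers are written and then flushes and closes the stream in its finally block.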