Use of io.confluent.ksql.util.KeyValueMetadata in project ksql by confluentinc.
From the class StreamedQueryResourceTest, method shouldStreamRowsCorrectly:
@Test
public void shouldStreamRowsCorrectly() throws Throwable {
  final int NUM_ROWS = 5;
  final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
  final Thread.UncaughtExceptionHandler threadExceptionHandler =
      (thread, exception) -> threadException.compareAndSet(null, exception);
  final String queryString = "SELECT * FROM test_stream;";
  final SynchronousQueue<KeyValueMetadata<List<?>, GenericRow>> rowQueue = new SynchronousQueue<>();
  final LinkedList<GenericRow> writtenRows = new LinkedList<>();

  // Produce rows on a separate thread, recording each one so the reader below
  // can compare what was written against what was streamed back.
  final Thread rowQueuePopulatorThread = new Thread(() -> {
    try {
      for (int i = 0; i != NUM_ROWS; i++) {
        final GenericRow value = genericRow(i);
        synchronized (writtenRows) {
          writtenRows.add(value);
        }
        // Push-query rows have no key, so wrap a value-only KeyValue.
        rowQueue.put(new KeyValueMetadata<>(KeyValue.keyValue(null, value)));
      }
    } catch (final InterruptedException exception) {
      // Expected: the test interrupts this thread once it has read all rows.
    }
  }, "Row Queue Populator");
  rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
  rowQueuePopulatorThread.start();

  final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
  when(mockStatementParser.<Query>parseSingleStatement(queryString)).thenReturn(query);
  final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
  final KafkaStreamsBuilder kafkaStreamsBuilder = mock(KafkaStreamsBuilder.class);
  when(kafkaStreamsBuilder.build(any(), any())).thenReturn(mockKafkaStreams);

  final MutableBoolean closed = new MutableBoolean(false);
  when(mockKafkaStreams.close(any())).thenAnswer(i -> {
    closed.setValue(true);
    return true;
  });
  when(mockKafkaStreams.state())
      .thenAnswer(i -> closed.getValue() ? State.NOT_RUNNING : State.RUNNING);

  final TransientQueryMetadata transientQueryMetadata = new TransientQueryMetadata(
      queryString,
      SOME_SCHEMA,
      Collections.emptySet(),
      "",
      new TestRowQueue(rowQueue),
      queryId,
      "appId",
      mock(Topology.class),
      kafkaStreamsBuilder,
      Collections.emptyMap(),
      Collections.emptyMap(),
      closeTimeout,
      10,
      ResultType.STREAM,
      0L,
      0L,
      listener);
  transientQueryMetadata.initialize();
  when(queryMetadataHolder.getPushQueryMetadata()).thenReturn(Optional.of(transientQueryMetadata));

  final EndpointResponse response = testResource.streamQuery(
      securityContext,
      new KsqlRequest(queryString, requestStreamsProperties, Collections.emptyMap(), null),
      new CompletableFuture<>(),
      Optional.empty(),
      new MetricsCallbackHolder(),
      context);

  final PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
  final PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
  final StreamingOutput responseStream = (StreamingOutput) response.getEntity();

  // Write the streaming response on its own thread so this thread can read it.
  final Thread queryWriterThread = new Thread(() -> {
    try {
      responseStream.write(responseOutputStream);
    } catch (final EOFException exception) {
      // Expected once the stream is closed; it's fine
    } catch (final IOException exception) {
      throw new RuntimeException(exception);
    }
  }, "Query Writer");
  queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
  queryWriterThread.start();

  final Scanner responseScanner = new Scanner(responseInputStream, "UTF-8");
  final ObjectMapper objectMapper = ApiJsonMapper.INSTANCE.get();
  for (int i = 0; i != NUM_ROWS; i++) {
    if (!responseScanner.hasNextLine()) {
      throw new Exception("Response input stream failed to have expected line available");
    }
    final String responseLine = responseScanner.nextLine();

    // The response is a JSON array written one element per line; strip the
    // array framing so each line parses as a single StreamedRow.
    String jsonLine = StringUtils.stripStart(responseLine, "[");
    jsonLine = StringUtils.stripEnd(jsonLine, ",");
    jsonLine = StringUtils.stripEnd(jsonLine, "]");
    if (jsonLine.isEmpty()) {
      i--;
      continue;
    }

    if (i == 0) {
      // The first element is the header, not a data row.
      assertThat(jsonLine, is("{\"header\":{\"queryId\":\"queryId\",\"schema\":\"`f1` INTEGER\"}}"));
      continue;
    }

    final GenericRow expectedRow;
    synchronized (writtenRows) {
      expectedRow = writtenRows.poll();
    }
    final DataRow testRow = objectMapper.readValue(jsonLine, StreamedRow.class).getRow().get();
    assertThat(testRow.getColumns(), is(expectedRow.values()));
  }

  responseOutputStream.close();
  queryWriterThread.join();
  rowQueuePopulatorThread.interrupt();
  rowQueuePopulatorThread.join();

  // Make sure the Kafka Streams instance has been closed and cleaned up.
  verify(mockKafkaStreams).start();
  // Called once on init and once when setting the uncaught exception handler manually.
  verify(mockKafkaStreams, times(2))
      .setUncaughtExceptionHandler(any(StreamsUncaughtExceptionHandler.class));
  verify(mockKafkaStreams).cleanUp();
  verify(mockKafkaStreams).close(Duration.ofMillis(closeTimeout));

  // If one of the other threads threw an exception without breaking anything up
  // to this point, rethrow it now so the test fails in the main thread.
  final Throwable exception = threadException.get();
  if (exception != null) {
    throw exception;
  }
}
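Both snippets on this page reduce to the same small round trip: a producer wraps a keyless row in a KeyValueMetadata before queueing it, and a consumer unwraps it again. A minimal sketch of just that pattern, using only the calls already visible above (the row contents are illustrative):

  // Wrap: transient (push) query rows carry no key, so the KeyValue is
  // built with a null key and the GenericRow as the value.
  final GenericRow row = genericRow(42); // illustrative row, as in the test above
  final KeyValueMetadata<List<?>, GenericRow> wrapped =
      new KeyValueMetadata<>(KeyValue.keyValue(null, row));

  // Unwrap: consumers reach through the wrapper to the underlying row.
  final GenericRow unwrapped = wrapped.getKeyValue().value();
  assertThat(unwrapped, is(row));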
From the class EndToEndIntegrationTest, method verifyAvailableRows:
private static List<GenericRow> verifyAvailableRows(
    final TransientQueryMetadata queryMetadata,
    final int expectedRows
) {
  final BlockingRowQueue rowQueue = queryMetadata.getRowQueue();
  assertThatEventually(
      expectedRows + " rows were not available after 30 seconds",
      () -> rowQueue.size() >= expectedRows,
      is(true),
      30,
      TimeUnit.SECONDS);

  // Drain the queued KeyValueMetadata entries and unwrap the GenericRow values.
  final List<KeyValueMetadata<List<?>, GenericRow>> rows = new ArrayList<>();
  rowQueue.drainTo(rows);
  return rows.stream()
      .map(kvm -> kvm.getKeyValue().value())
      .collect(Collectors.toList());
}
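As a usage note, a hypothetical call site for this helper (the queryMetadata variable, the expected count, and the hasSize matcher are assumptions, not taken from the source):

  // Hypothetical: block until at least 3 rows are queued by the push query,
  // then work with the unwrapped GenericRow values.
  final List<GenericRow> rows = verifyAvailableRows(queryMetadata, 3);
  assertThat(rows, hasSize(3));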