Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
From the class EndToEndIntegrationTest, method shouldSelectAllFromUsers.
@Test
public void shouldSelectAllFromUsers() throws Exception {
  final QueuedQueryMetadata queryMetadata = executeQuery("SELECT * from %s;", userTable);
  final BlockingQueue<KeyValue<String, GenericRow>> rowQueue = queryMetadata.getRowQueue();

  final Set<String> actualUsers = new HashSet<>();
  final Set<String> expectedUsers = Utils.mkSet("USER_0", "USER_1", "USER_2", "USER_3", "USER_4");

  // Busy-poll the query's row queue until a row has been seen for every expected user.
  while (actualUsers.size() < expectedUsers.size()) {
    final KeyValue<String, GenericRow> nextRow = rowQueue.poll();
    if (nextRow != null) {
      final List<Object> columns = nextRow.value.getColumns();
      assertEquals(6, columns.size());
      actualUsers.add((String) columns.get(1));
    }
  }

  assertEquals(expectedUsers, actualUsers);
}
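
These integration tests call an executeQuery helper defined on the test class, which is not shown on this page. A minimal sketch of what such a helper might look like, assuming a ksqlEngine field on the test class, the buildMultipleQueries(String, Map) signature used in the StreamedQueryResourceTest example further down, and a getKafkaStreams() accessor on QueryMetadata; the real helper may differ:

private QueuedQueryMetadata executeQuery(final String queryFormat, final Object... args)
    throws Exception {
  // Hypothetical helper: format the statement, run it through the engine,
  // and start the resulting transient query so rows flow into its row queue.
  final String queryString = String.format(queryFormat, args);
  final QueryMetadata queryMetadata =
      ksqlEngine.buildMultipleQueries(queryString, Collections.emptyMap()).get(0);
  queryMetadata.getKafkaStreams().start();
  return (QueuedQueryMetadata) queryMetadata;
}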
Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
From the class EndToEndIntegrationTest, method shouldCreateStreamUsingLikeClause.
@Test
public void shouldCreateStreamUsingLikeClause() throws Exception {
  // The '%%' is escaped because the statement goes through String.format;
  // the LIKE pattern that reaches KSQL is '%_5'.
  executeStatement(
      "CREATE STREAM pageviews_like_p5"
          + " WITH (kafka_topic='pageviews_enriched_r0', value_format='DELIMITED')"
          + " AS SELECT * FROM %s"
          + " WHERE pageId LIKE '%%_5';",
      pageViewStream);

  final QueuedQueryMetadata queryMetadata =
      executeQuery("SELECT userid, pageid from pageviews_like_p5;");

  final List<Object> columns = waitForFirstRow(queryMetadata);
  assertThat(columns.get(1), is("PAGE_5"));
}
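
waitForFirstRow is another helper from the same test class. A plausible sketch, assuming it simply blocks until the query's row queue yields its first row (the 30-second timeout is illustrative, not taken from the source):

private static List<Object> waitForFirstRow(final QueuedQueryMetadata queryMetadata)
    throws InterruptedException {
  // Wait for the transient query to produce a row, then return its column values.
  final KeyValue<String, GenericRow> row =
      queryMetadata.getRowQueue().poll(30, TimeUnit.SECONDS);
  assertThat("Expected the query to produce at least one row", row, notNullValue());
  return row.value.getColumns();
}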
Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
From the class EndToEndIntegrationTest, method shouldRetainSelectedColumnsInPartitionBy.
@Test
public void shouldRetainSelectedColumnsInPartitionBy() throws Exception {
  executeStatement(
      "CREATE STREAM pageviews_by_viewtime"
          + " AS SELECT viewtime, pageid, userid"
          + " FROM %s"
          + " PARTITION BY viewtime;",
      pageViewStream);

  final QueuedQueryMetadata queryMetadata =
      executeQuery("SELECT * from pageviews_by_viewtime;");

  final List<Object> columns = waitForFirstRow(queryMetadata);

  // After the PARTITION BY, the selected pageid and userid columns must still be present.
  assertThat(columns.get(3).toString(), startsWith("PAGE_"));
  assertThat(columns.get(4).toString(), startsWith("USER_"));
}
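
Both stream-creating tests above also rely on an executeStatement helper. Under the same assumptions as the executeQuery sketch, it could look like the following, starting any persistent queries the statement creates:

private void executeStatement(final String statementFormat, final Object... args)
    throws Exception {
  // Hypothetical helper: run a DDL/DML statement and start any resulting queries.
  final String statement = String.format(statementFormat, args);
  final List<QueryMetadata> queries =
      ksqlEngine.buildMultipleQueries(statement, Collections.emptyMap());
  for (final QueryMetadata queryMetadata : queries) {
    queryMetadata.getKafkaStreams().start();
  }
}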
Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
From the class StreamedQueryResourceTest, method testStreamQuery.
@Test
public void testStreamQuery() throws Throwable {
  final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
  final Thread.UncaughtExceptionHandler threadExceptionHandler =
      (thread, exception) -> threadException.compareAndSet(null, exception);

  final String queryString = "SELECT * FROM test_stream;";

  final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
  final LinkedList<GenericRow> writtenRows = new LinkedList<>();

  // Continuously feed rows into the queue, recording each one so the test can
  // later verify that the streamed response matches what was written.
  final Thread rowQueuePopulatorThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        for (int i = 0; ; i++) {
          final String key = Integer.toString(i);
          final GenericRow value = new GenericRow(Collections.singletonList(i));
          synchronized (writtenRows) {
            writtenRows.add(value);
          }
          rowQueue.put(new KeyValue<>(key, value));
        }
      } catch (InterruptedException exception) {
        // Expected when the test interrupts this thread during cleanup
      }
    }
  }, "Row Queue Populator");
  rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
  rowQueuePopulatorThread.start();

  // Record the expected lifecycle calls on the Kafka Streams instance.
  final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
  mockKafkaStreams.start();
  expectLastCall();
  mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
  expectLastCall();
  expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
  mockKafkaStreams.close();
  expectLastCall();
  mockKafkaStreams.cleanUp();
  expectLastCall();

  final OutputNode mockOutputNode = mock(OutputNode.class);
  expect(mockOutputNode.getSchema())
      .andReturn(SchemaBuilder.struct().field("f1", Schema.INT32_SCHEMA));

  final Map<String, Object> requestStreamsProperties = Collections.emptyMap();

  final KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
  final KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
  expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);

  final QueuedQueryMetadata queuedQueryMetadata = new QueuedQueryMetadata(
      queryString, mockKafkaStreams, mockOutputNode, "", rowQueue,
      DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
  expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties))
      .andReturn(Collections.singletonList(queuedQueryMetadata));
  mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
  expectLastCall();

  final StatementParser mockStatementParser = mock(StatementParser.class);
  expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));

  replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);

  final StreamedQueryResource testResource =
      new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);

  final Response response =
      testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
  final PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
  // A one-byte buffer keeps the writer thread blocked until the test reads each byte.
  final PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
  final StreamingOutput responseStream = (StreamingOutput) response.getEntity();

  // Stream the query results into the pipe on a separate thread.
  final Thread queryWriterThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        responseStream.write(responseOutputStream);
      } catch (EOFException exception) {
        // Expected once the test closes the output stream; it's fine
      } catch (IOException exception) {
        throw new RuntimeException(exception);
      }
    }
  }, "Query Writer");
  queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
  queryWriterThread.start();

  // Read five rows off the response and check each against the rows that were written.
  final Scanner responseScanner = new Scanner(responseInputStream);
  final ObjectMapper objectMapper = new ObjectMapper();
  for (int i = 0; i < 5; i++) {
    if (!responseScanner.hasNextLine()) {
      throw new Exception("Response input stream failed to have expected line available");
    }
    final String responseLine = responseScanner.nextLine();
    if (responseLine.trim().isEmpty()) {
      // Skip blank lines without counting them against the five expected rows.
      i--;
    } else {
      final GenericRow expectedRow;
      synchronized (writtenRows) {
        expectedRow = writtenRows.poll();
      }
      final GenericRow testRow =
          objectMapper.readValue(responseLine, StreamedRow.class).getRow();
      assertEquals(expectedRow, testRow);
    }
  }

  responseOutputStream.close();
  queryWriterThread.join();
  rowQueuePopulatorThread.interrupt();
  rowQueuePopulatorThread.join();

  // Make sure the Kafka Streams instance has been closed and cleaned up.
  verify(mockKafkaStreams);

  // If one of the other threads threw an exception without otherwise breaking the
  // test, rethrow it here in the main thread so the test fails.
  final Throwable exception = threadException.get();
  if (exception != null) {
    throw exception;
  }
}
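
EOFPipedOutputStream is a small helper class defined in StreamedQueryResourceTest but not shown on this page. A plausible sketch, assuming its only job is to surface writes after close() as EOFException so the query writer thread can treat a closed response pipe as a normal end of stream:

private static class EOFPipedOutputStream extends PipedOutputStream {
  private boolean closed = false;

  @Override
  public void write(final int b) throws IOException {
    throwIfClosed();
    try {
      super.write(b);
    } catch (final IOException exception) {
      // The read side may have been closed mid-write; report that as EOF too.
      throwIfClosed();
      throw exception;
    }
  }

  @Override
  public void close() throws IOException {
    closed = true;
    super.close();
  }

  private void throwIfClosed() throws EOFException {
    if (closed) {
      throw new EOFException();
    }
  }
}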