Use of io.confluent.ksql.parser.tree.Query in project ksql by confluentinc.
From the class QueryAnalyzerTest, the method shouldThrowExceptionIfAggregateAnalysisDoesntHaveGroupBy:
@Test
public void shouldThrowExceptionIfAggregateAnalysisDoesntHaveGroupBy() {
  final List<Statement> statements = ksqlParser.buildAst(
      "select itemid, sum(orderunits) from orders window TUMBLING ( size 30 second) "
          + "where orderunits > 5;",
      metaStore);
  final Query query = (Query) statements.get(0);
  final Analysis analysis = queryAnalyzer.analyze("sqlExpression", query);
  try {
    queryAnalyzer.analyzeAggregate(query, analysis);
    fail("should have thrown KsqlException as aggregate query doesn't have a groupby clause");
  } catch (KsqlException e) {
    // expected
  }
}
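With JUnit 4.13 or later (or JUnit 5's Assertions variant), the try/fail/catch idiom above can be expressed more directly with assertThrows. A minimal sketch of the same test under that assumption; the query string and analyzer setup are unchanged from the original:

import static org.junit.Assert.assertThrows;

@Test
public void shouldThrowExceptionIfAggregateAnalysisDoesntHaveGroupBy() {
  final List<Statement> statements = ksqlParser.buildAst(
      "select itemid, sum(orderunits) from orders window TUMBLING ( size 30 second) "
          + "where orderunits > 5;",
      metaStore);
  final Query query = (Query) statements.get(0);
  final Analysis analysis = queryAnalyzer.analyze("sqlExpression", query);
  // assertThrows fails the test automatically if no KsqlException is thrown
  assertThrows(KsqlException.class, () -> queryAnalyzer.analyzeAggregate(query, analysis));
}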
Use of io.confluent.ksql.parser.tree.Query in project ksql by confluentinc.
From the class StreamedQueryResourceTest, the method testStreamQuery:
@Test
public void testStreamQuery() throws Throwable {
  final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
  final Thread.UncaughtExceptionHandler threadExceptionHandler =
      (thread, exception) -> threadException.compareAndSet(null, exception);
  final String queryString = "SELECT * FROM test_stream;";
  final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
  final LinkedList<GenericRow> writtenRows = new LinkedList<>();

  final Thread rowQueuePopulatorThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        for (int i = 0; ; i++) {
          String key = Integer.toString(i);
          GenericRow value = new GenericRow(Collections.singletonList(i));
          synchronized (writtenRows) {
            writtenRows.add(value);
          }
          rowQueue.put(new KeyValue<>(key, value));
        }
      } catch (InterruptedException exception) {
        // The test interrupts this thread when it's done, so this is expected
      }
    }
  }, "Row Queue Populator");
  rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
  rowQueuePopulatorThread.start();

  final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
  mockKafkaStreams.start();
  expectLastCall();
  mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
  expectLastCall();
  expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
  mockKafkaStreams.close();
  expectLastCall();
  mockKafkaStreams.cleanUp();
  expectLastCall();

  final OutputNode mockOutputNode = mock(OutputNode.class);
  expect(mockOutputNode.getSchema())
      .andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));

  final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
  KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
  KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
  expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);
  final QueuedQueryMetadata queuedQueryMetadata = new QueuedQueryMetadata(
      queryString, mockKafkaStreams, mockOutputNode, "", rowQueue,
      DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
  expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties))
      .andReturn(Collections.singletonList(queuedQueryMetadata));
  mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
  expectLastCall();

  StatementParser mockStatementParser = mock(StatementParser.class);
  expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));
  replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);

  StreamedQueryResource testResource =
      new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
  Response response =
      testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
  PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
  PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
  StreamingOutput responseStream = (StreamingOutput) response.getEntity();

  final Thread queryWriterThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        responseStream.write(responseOutputStream);
      } catch (EOFException exception) {
        // The reader closes the pipe when it's done, so this is expected
      } catch (IOException exception) {
        throw new RuntimeException(exception);
      }
    }
  }, "Query Writer");
  queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
  queryWriterThread.start();

  Scanner responseScanner = new Scanner(responseInputStream);
  ObjectMapper objectMapper = new ObjectMapper();
  for (int i = 0; i < 5; i++) {
    if (!responseScanner.hasNextLine()) {
      throw new Exception("Response input stream failed to have expected line available");
    }
    String responseLine = responseScanner.nextLine();
    if (responseLine.trim().isEmpty()) {
      i--;
    } else {
      GenericRow expectedRow;
      synchronized (writtenRows) {
        expectedRow = writtenRows.poll();
      }
      GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
      assertEquals(expectedRow, testRow);
    }
  }

  responseOutputStream.close();
  queryWriterThread.join();
  rowQueuePopulatorThread.interrupt();
  rowQueuePopulatorThread.join();

  // Make sure that the Kafka Streams instance has been closed and cleaned up
  verify(mockKafkaStreams);

  // If one of the other threads threw an exception without breaking things up until this
  // point, rethrow it in the main thread so the test fails
  Throwable exception = threadException.get();
  if (exception != null) {
    throw exception;
  }
}
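The EOFPipedOutputStream instantiated above is a helper defined elsewhere in the test class, so its body is not shown here. Its apparent role is to let the "Query Writer" thread see a deliberately closed pipe as end-of-stream (the EOFException it catches) rather than as an error. A minimal sketch of that idea, assuming only java.io; the name matches the test, but this implementation is illustrative, not the project's actual code:

import java.io.EOFException;
import java.io.IOException;
import java.io.PipedOutputStream;

// Illustrative only: converts writes against a closed pipe into EOFException,
// which the writer thread treats as a normal shutdown signal.
private static class EOFPipedOutputStream extends PipedOutputStream {
  private boolean closed = false;

  @Override
  public void write(int b) throws IOException {
    if (closed) {
      throw new EOFException();
    }
    try {
      super.write(b);
    } catch (IOException exception) {
      // A broken pipe here means the reader closed its end
      throw new EOFException();
    }
  }

  @Override
  public void close() throws IOException {
    closed = true;
    super.close();
  }
}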
Use of io.confluent.ksql.parser.tree.Query in project ksql by confluentinc.
From the class StreamedQueryResource, the method streamQuery:
@POST
@Consumes(MediaType.APPLICATION_JSON)
public Response streamQuery(KsqlRequest request) throws Exception {
  String ksql = Objects.requireNonNull(request.getKsql(), "\"ksql\" field must be given");
  Map<String, Object> clientLocalProperties =
      Optional.ofNullable(request.getStreamsProperties()).orElse(Collections.emptyMap());
  Statement statement = statementParser.parseSingleStatement(ksql);
  if (statement instanceof Query) {
    QueryStreamWriter queryStreamWriter =
        new QueryStreamWriter(ksqlEngine, disconnectCheckInterval, ksql, clientLocalProperties);
    log.info("Streaming query '{}'", ksql);
    return Response.ok().entity(queryStreamWriter).build();
  } else if (statement instanceof PrintTopic) {
    TopicStreamWriter topicStreamWriter =
        getTopicStreamWriter(clientLocalProperties, (PrintTopic) statement);
    return Response.ok().entity(topicStreamWriter).build();
  } else {
    throw new Exception(String.format(
        "Statement type `%s' not supported for this resource", statement.getClass().getName()));
  }
}
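For context on how a client consumes this resource: the request body is a JSON KsqlRequest with a ksql field and optional streamsProperties, and rows come back as newline-delimited JSON until the connection closes. A hedged sketch of a caller, assuming the resource is mounted at /query on localhost:8088; neither the path nor the port appears in the snippet above:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public final class StreamQueryClient {
  public static void main(String[] args) throws Exception {
    // Assumed endpoint; the snippet above doesn't show the resource's @Path
    URL url = new URL("http://localhost:8088/query");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("POST");
    connection.setRequestProperty("Content-Type", "application/json");
    connection.setDoOutput(true);

    // Matches the shape streamQuery expects: a "ksql" field plus optional properties
    String body = "{\"ksql\": \"SELECT * FROM test_stream;\", \"streamsProperties\": {}}";
    try (OutputStream out = connection.getOutputStream()) {
      out.write(body.getBytes(StandardCharsets.UTF_8));
    }

    // Rows are streamed back one JSON object per line until the connection is closed
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        if (!line.trim().isEmpty()) {
          System.out.println(line);
        }
      }
    }
  }
}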
Use of io.confluent.ksql.parser.tree.Query in project ksql by confluentinc.
From the class StatementExecutor, the method handleCreateAsSelect:
private String handleCreateAsSelect(
    final CreateAsSelect statement,
    final Command command,
    final CommandId commandId,
    final Map<QueryId, CommandId> terminatedQueries,
    final String statementStr,
    final boolean wasDropped) throws Exception {
  QuerySpecification querySpecification = (QuerySpecification) statement.getQuery().getQueryBody();
  Query query = ksqlEngine.addInto(
      statement.getQuery(),
      querySpecification,
      statement.getName().getSuffix(),
      statement.getProperties(),
      statement.getPartitionByColumn());
  if (startQuery(statementStr, query, commandId, terminatedQueries, command, wasDropped)) {
    return statement instanceof CreateTableAsSelect
        ? "Table created and running"
        : "Stream created and running";
  }
  return null;
}
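handleCreateAsSelect is private, so it would be reached from a dispatch over statement types elsewhere in StatementExecutor: addInto rewrites the SELECT portion into a Query whose sink is the new stream or table, and startQuery launches it. A hedged sketch of such a call site; the enclosing control flow and the log call are assumptions, not taken from the snippet above:

// Illustrative dispatch only; the enclosing method is assumed, not shown in the source
if (statement instanceof CreateAsSelect) {
  final String successMessage = handleCreateAsSelect(
      (CreateAsSelect) statement, command, commandId, terminatedQueries, statementStr, wasDropped);
  if (successMessage != null) {
    // Either "Table created and running" or "Stream created and running"
    log.info(successMessage);
  }
}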