Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
In class KsqlResourceTest, method setUp:
@Before
public void setUp() throws IOException, RestClientException {
  // Back the engine with an in-memory Schema Registry and mock topic client
  // so the test needs no external services.
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  registerSchema(schemaRegistryClient);
  ksqlRestConfig = new KsqlRestConfig(TestKsqlResourceUtil.getDefaultKsqlConfig());
  KsqlConfig ksqlConfig = new KsqlConfig(ksqlRestConfig.getKsqlConfigProperties());
  ksqlEngine = new KsqlEngine(ksqlConfig, new MockKafkaTopicClient(), schemaRegistryClient, new MetaStoreImpl());
}
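The registerSchema helper is referenced above but not shown. A minimal sketch of what it might do, assuming the test registers an Avro value schema with the mock client; the subject name and schema fields here are illustrative, not from the source:

private static void registerSchema(SchemaRegistryClient schemaRegistryClient)
    throws IOException, RestClientException {
  // Register a simple Avro value schema under a hypothetical subject so that
  // statements against the test topic can resolve it.
  String schemaString = "{\"type\": \"record\", "
      + "\"name\": \"test_record\", "
      + "\"fields\": [{\"name\": \"f1\", \"type\": \"int\"}]}";
  org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(schemaString);
  schemaRegistryClient.register("test_topic-value", avroSchema);
}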
Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
In class StreamedQueryResourceTest, method testStreamQuery:
@Test
public void testStreamQuery() throws Throwable {
  final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
  final Thread.UncaughtExceptionHandler threadExceptionHandler =
      (thread, exception) -> threadException.compareAndSet(null, exception);
  final String queryString = "SELECT * FROM test_stream;";
  final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
  final LinkedList<GenericRow> writtenRows = new LinkedList<>();
  // Continuously feed rows into the queue that the streamed query drains,
  // remembering each row so it can be checked against the response later.
  final Thread rowQueuePopulatorThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        for (int i = 0; ; i++) {
          String key = Integer.toString(i);
          GenericRow value = new GenericRow(Collections.singletonList(i));
          synchronized (writtenRows) {
            writtenRows.add(value);
          }
          rowQueue.put(new KeyValue<>(key, value));
        }
      } catch (InterruptedException exception) {
        // This should happen during the test, so it's fine
      }
    }
  }, "Row Queue Populator");
  rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
  rowQueuePopulatorThread.start();

  // Record the expected lifecycle of the underlying Kafka Streams instance.
  final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
  mockKafkaStreams.start();
  expectLastCall();
  mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
  expectLastCall();
  expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
  mockKafkaStreams.close();
  expectLastCall();
  mockKafkaStreams.cleanUp();
  expectLastCall();

  final OutputNode mockOutputNode = mock(OutputNode.class);
  expect(mockOutputNode.getSchema())
      .andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));

  final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
  KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
  KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
  expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);
  final QueuedQueryMetadata queuedQueryMetadata =
      new QueuedQueryMetadata(queryString, mockKafkaStreams, mockOutputNode, "",
          rowQueue, DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
  expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties))
      .andReturn(Collections.singletonList(queuedQueryMetadata));
  mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
  expectLastCall();

  StatementParser mockStatementParser = mock(StatementParser.class);
  expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));

  replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);

  StreamedQueryResource testResource =
      new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
  Response response =
      testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
  PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
  PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
  StreamingOutput responseStream = (StreamingOutput) response.getEntity();

  // Drive the StreamingOutput on its own thread, since write() blocks until
  // the response stream is closed.
  final Thread queryWriterThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        responseStream.write(responseOutputStream);
      } catch (EOFException exception) {
        // It's fine
      } catch (IOException exception) {
        throw new RuntimeException(exception);
      }
    }
  }, "Query Writer");
  queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
  queryWriterThread.start();

  // Read five non-empty lines from the response and check each one against
  // the corresponding row the populator thread wrote.
  Scanner responseScanner = new Scanner(responseInputStream);
  ObjectMapper objectMapper = new ObjectMapper();
  for (int i = 0; i < 5; i++) {
    if (!responseScanner.hasNextLine()) {
      throw new Exception("Response input stream failed to have expected line available");
    }
    String responseLine = responseScanner.nextLine();
    if (responseLine.trim().isEmpty()) {
      i--;
    } else {
      GenericRow expectedRow;
      synchronized (writtenRows) {
        expectedRow = writtenRows.poll();
      }
      GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
      assertEquals(expectedRow, testRow);
    }
  }

  responseOutputStream.close();
  queryWriterThread.join();
  rowQueuePopulatorThread.interrupt();
  rowQueuePopulatorThread.join();

  // Make sure that the Kafka Streams instance has been closed and cleaned up.
  verify(mockKafkaStreams);

  // If one of the other threads threw an exception without failing the test
  // up to this point, rethrow it in the main thread so the test fails.
  Throwable exception = threadException.get();
  if (exception != null) {
    throw exception;
  }
}
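The test writes the response into an EOFPipedOutputStream, a helper the test class defines but that is not shown here. A minimal sketch of how such a helper might behave, assuming its job is to let the blocked "Query Writer" thread exit cleanly once the main thread closes the stream; the implementation details are an assumption, not from the source:

// Assumed helper: a PipedOutputStream that reports writes after close() as
// end-of-stream (EOFException) rather than a generic "Pipe closed" IOException,
// so the writer thread's EOFException handler treats shutdown as normal.
private static class EOFPipedOutputStream extends PipedOutputStream {
  private volatile boolean closed = false;

  @Override
  public void write(int b) throws IOException {
    if (closed) {
      throw new EOFException();
    }
    try {
      super.write(b);
    } catch (IOException exception) {
      // The pipe was most likely closed mid-write; report it as end-of-stream.
      if (closed) {
        throw new EOFException();
      }
      throw exception;
    }
  }

  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    if (closed) {
      throw new EOFException();
    }
    try {
      super.write(b, off, len);
    } catch (IOException exception) {
      if (closed) {
        throw new EOFException();
      }
      throw exception;
    }
  }

  @Override
  public void close() throws IOException {
    closed = true;
    super.close();
  }
}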
Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
In class StandaloneExecutor, method create:
public static StandaloneExecutor create(final Properties properties, final String queriesFile) {
  final KsqlConfig ksqlConfig = new KsqlConfig(properties);
  Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
  // Fall back to the default KSQL service ID if no application ID was configured.
  if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
    streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, KsqlConfig.KSQL_SERVICE_ID_DEFAULT);
  }
  final KsqlEngine ksqlEngine = new KsqlEngine(
      ksqlConfig,
      new KafkaTopicClientImpl(AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps())));
  return new StandaloneExecutor(ksqlEngine, queriesFile);
}
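A hedged usage sketch (not from the source) of how create might be wired into an entry point. The command-line handling is illustrative, and the start() call is an assumption about what StandaloneExecutor exposes; java.util.Properties and java.io.FileInputStream imports are assumed:

public static void main(final String[] args) throws Exception {
  // Load server properties from a file given on the command line (illustrative).
  final Properties properties = new Properties();
  try (FileInputStream input = new FileInputStream(args[0])) {
    properties.load(input);
  }
  // args[1] is assumed to be the path to the file of KSQL statements to run.
  final StandaloneExecutor executor = StandaloneExecutor.create(properties, args[1]);
  executor.start();  // assumed entry point that parses and executes the queries file
}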