
Example 6 with KsqlEngine

Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.

From the class KsqlResourceTest, method setUp:

@Before
public void setUp() throws IOException, RestClientException {
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    // registerSchema(...) and TestKsqlResourceUtil are helpers defined elsewhere in the test code (not shown in this excerpt).
    registerSchema(schemaRegistryClient);
    ksqlRestConfig = new KsqlRestConfig(TestKsqlResourceUtil.getDefaultKsqlConfig());
    KsqlConfig ksqlConfig = new KsqlConfig(ksqlRestConfig.getKsqlConfigProperties());
    ksqlEngine = new KsqlEngine(ksqlConfig, new MockKafkaTopicClient(), schemaRegistryClient, new MetaStoreImpl());
}
Also used: KsqlEngine(io.confluent.ksql.KsqlEngine) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlRestConfig(io.confluent.ksql.rest.server.KsqlRestConfig) MockKafkaTopicClient(io.confluent.ksql.rest.server.mock.MockKafkaTopicClient) MetaStoreImpl(io.confluent.ksql.metastore.MetaStoreImpl) KsqlConfig(io.confluent.ksql.util.KsqlConfig) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Before(org.junit.Before)
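
The engine constructed in setUp holds the topic client, schema registry client and metastore handed to it, so a test class normally releases it once each test is done. A minimal teardown sketch, not part of the excerpt above, assuming the ksqlEngine field from setUp, KsqlEngine's Closeable contract, and an org.junit.After import:

@After
public void tearDown() throws Exception {
    // Release the engine created in setUp so every test starts from a clean state.
    ksqlEngine.close();
}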

Example 7 with KsqlEngine

Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.

From the class StreamedQueryResourceTest, method testStreamQuery:

@Test
public void testStreamQuery() throws Throwable {
    final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
    final Thread.UncaughtExceptionHandler threadExceptionHandler = (thread, exception) -> threadException.compareAndSet(null, exception);
    final String queryString = "SELECT * FROM test_stream;";
    final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
    final LinkedList<GenericRow> writtenRows = new LinkedList<>();
    final Thread rowQueuePopulatorThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                // Produce rows indefinitely; the test interrupts this thread when it is finished.
                for (int i = 0; ; i++) {
                    String key = Integer.toString(i);
                    GenericRow value = new GenericRow(Collections.singletonList(i));
                    synchronized (writtenRows) {
                        writtenRows.add(value);
                    }
                    rowQueue.put(new KeyValue<>(key, value));
                }
            } catch (InterruptedException exception) {
            // This should happen during the test, so it's fine
            }
        }
    }, "Row Queue Populator");
    rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
    rowQueuePopulatorThread.start();
    final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
    mockKafkaStreams.start();
    expectLastCall();
    mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
    expectLastCall();
    expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
    mockKafkaStreams.close();
    expectLastCall();
    mockKafkaStreams.cleanUp();
    expectLastCall();
    final OutputNode mockOutputNode = mock(OutputNode.class);
    expect(mockOutputNode.getSchema()).andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));
    final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
    KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
    KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
    expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);
    final QueuedQueryMetadata queuedQueryMetadata = new QueuedQueryMetadata(queryString, mockKafkaStreams, mockOutputNode, "", rowQueue, DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
    expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties)).andReturn(Collections.singletonList(queuedQueryMetadata));
    mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
    expectLastCall();
    StatementParser mockStatementParser = mock(StatementParser.class);
    expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));
    replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);
    StreamedQueryResource testResource = new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
    Response response = testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
    // EOFPipedOutputStream is a helper defined elsewhere in this test class (its definition is not part of this excerpt).
    PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
    PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
    StreamingOutput responseStream = (StreamingOutput) response.getEntity();
    final Thread queryWriterThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                responseStream.write(responseOutputStream);
            } catch (EOFException exception) {
            // It's fine
            } catch (IOException exception) {
                throw new RuntimeException(exception);
            }
        }
    }, "Query Writer");
    queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
    queryWriterThread.start();
    Scanner responseScanner = new Scanner(responseInputStream);
    ObjectMapper objectMapper = new ObjectMapper();
    for (int i = 0; i < 5; i++) {
        if (!responseScanner.hasNextLine()) {
            throw new Exception("Response input stream failed to have expected line available");
        }
        String responseLine = responseScanner.nextLine();
        if (responseLine.trim().isEmpty()) {
            // Don't count blank lines toward the five rows being verified.
            i--;
        } else {
            GenericRow expectedRow;
            synchronized (writtenRows) {
                expectedRow = writtenRows.poll();
            }
            GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
            assertEquals(expectedRow, testRow);
        }
    }
    responseOutputStream.close();
    queryWriterThread.join();
    rowQueuePopulatorThread.interrupt();
    rowQueuePopulatorThread.join();
    // Definitely want to make sure that the Kafka Streams instance has been closed and cleaned up
    verify(mockKafkaStreams);
    // If one of the other threads has somehow managed to throw an exception without breaking things up until this
    // point, we throw that exception now in the main thread and cause the test to fail
    Throwable exception = threadException.get();
    if (exception != null) {
        throw exception;
    }
}
Also used: Query(io.confluent.ksql.parser.tree.Query) StreamedQueryResource(io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource) StreamedRow(io.confluent.ksql.rest.entity.StreamedRow) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) Scanner(java.util.Scanner) EasyMock.mock(org.easymock.EasyMock.mock) KsqlEngine(io.confluent.ksql.KsqlEngine) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) AtomicReference(java.util.concurrent.atomic.AtomicReference) PipedInputStream(java.io.PipedInputStream) Map(java.util.Map) EasyMock.replay(org.easymock.EasyMock.replay) LinkedList(java.util.LinkedList) EasyMock.anyObject(org.easymock.EasyMock.anyObject) OutputNode(io.confluent.ksql.planner.plan.OutputNode) SynchronousQueue(java.util.concurrent.SynchronousQueue) StatementParser(io.confluent.ksql.rest.server.StatementParser) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test) StreamingOutput(javax.ws.rs.core.StreamingOutput) IOException(java.io.IOException) PipedOutputStream(java.io.PipedOutputStream) EasyMock.expect(org.easymock.EasyMock.expect) QueuedQueryMetadata(io.confluent.ksql.util.QueuedQueryMetadata) EOFException(java.io.EOFException) DataSource(io.confluent.ksql.serde.DataSource) EasyMock.expectLastCall(org.easymock.EasyMock.expectLastCall) Response(javax.ws.rs.core.Response) GenericRow(io.confluent.ksql.GenericRow) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) KsqlRequest(io.confluent.ksql.rest.entity.KsqlRequest) EasyMock.verify(org.easymock.EasyMock.verify) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
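
The scanner loop above leans on StreamedRow's Jackson mapping to turn each response line back into a GenericRow. A minimal round-trip sketch of that mapping, not part of the original test, assuming StreamedRow exposes a single-argument GenericRow constructor and serializes symmetrically (the test itself only exercises the deserialization side):

@Test
public void testStreamedRowRoundTrip() throws Exception {
    ObjectMapper objectMapper = new ObjectMapper();
    GenericRow original = new GenericRow(Collections.singletonList(42));
    // Write a row the way the streaming endpoint would, then read it back through
    // StreamedRow exactly as the scanner loop in the test does.
    String json = objectMapper.writeValueAsString(new StreamedRow(original));
    GenericRow roundTripped = objectMapper.readValue(json, StreamedRow.class).getRow();
    assertEquals(original, roundTripped);
}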

Example 8 with KsqlEngine

Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.

From the class StandaloneExecutor, method create:

public static StandaloneExecutor create(final Properties properties, final String queriesFile) {
    final KsqlConfig ksqlConfig = new KsqlConfig(properties);
    Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
    // Default the Streams application id to the KSQL service id if the user did not set one.
    if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
        streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, KsqlConfig.KSQL_SERVICE_ID_DEFAULT);
    }
    final KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps())));
    return new StandaloneExecutor(ksqlEngine, queriesFile);
}
Also used: KsqlEngine(io.confluent.ksql.KsqlEngine) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl)
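
A hypothetical caller sketch for the factory above, not taken from the project: it loads server properties from a placeholder file and pairs them with a placeholder queries file. The step that actually starts the returned executor is omitted rather than guessing at its signature.

public static void main(final String[] args) throws IOException {
    final Properties properties = new Properties();
    // Placeholder paths; a real deployment points at its own config and query files.
    try (InputStream input = new FileInputStream("/etc/ksql/ksql-server.properties")) {
        properties.load(input);
    }
    final StandaloneExecutor executor =
            StandaloneExecutor.create(properties, "/etc/ksql/queries.sql");
    // The server entry point is responsible for starting 'executor'; that call is left out here.
}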

Aggregations

Classes used together with KsqlEngine across these examples, with usage counts:

KsqlEngine (io.confluent.ksql.KsqlEngine): 8 usages
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 7 usages
KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl): 6 usages
Before (org.junit.Before): 4 usages
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient): 3 usages
HashMap (java.util.HashMap): 3 usages
StatementParser (io.confluent.ksql.rest.server.StatementParser): 2 usages
MockKafkaTopicClient (io.confluent.ksql.rest.server.mock.MockKafkaTopicClient): 2 usages
StreamedQueryResource (io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource): 2 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1 usage
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 1 usage
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 1 usage
GenericRow (io.confluent.ksql.GenericRow): 1 usage
CreateStreamCommand (io.confluent.ksql.ddl.commands.CreateStreamCommand): 1 usage
RegisterTopicCommand (io.confluent.ksql.ddl.commands.RegisterTopicCommand): 1 usage
MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl): 1 usage
CreateStream (io.confluent.ksql.parser.tree.CreateStream): 1 usage
Expression (io.confluent.ksql.parser.tree.Expression): 1 usage
Query (io.confluent.ksql.parser.tree.Query): 1 usage
RegisterTopic (io.confluent.ksql.parser.tree.RegisterTopic): 1 usage