Search in sources:

Example 6 with QueuedQueryMetadata

Use of io.confluent.ksql.util.QueuedQueryMetadata in the ksql project by confluentinc.

From the class EndToEndIntegrationTest, method shouldSelectAllFromUsers. The test runs a transient SELECT against the users table and drains the query's row queue until every expected user id has been observed.

@Test
public void shouldSelectAllFromUsers() throws Exception {
    final QueuedQueryMetadata queryMetadata = executeQuery("SELECT * from %s;", userTable);
    final BlockingQueue<KeyValue<String, GenericRow>> rowQueue = queryMetadata.getRowQueue();
    final Set<String> actualUsers = new HashSet<>();
    final Set<String> expectedUsers = Utils.mkSet("USER_0", "USER_1", "USER_2", "USER_3", "USER_4");
    // Drain the row queue until every expected user id has been observed.
    while (actualUsers.size() < expectedUsers.size()) {
        final KeyValue<String, GenericRow> nextRow = rowQueue.poll();
        if (nextRow != null) {
            final List<Object> columns = nextRow.value.getColumns();
            // Each row has six columns; the user id is in column 1.
            assertEquals(6, columns.size());
            actualUsers.add((String) columns.get(1));
        }
    }
    assertEquals(expectedUsers, actualUsers);
}
Also used: GenericRow(io.confluent.ksql.GenericRow) KeyValue(org.apache.kafka.streams.KeyValue) QueuedQueryMetadata(io.confluent.ksql.util.QueuedQueryMetadata) HashSet(java.util.HashSet) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
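
The loop above spins on rowQueue.poll() with no timeout. A bounded variant, shown here only as a sketch and not part of EndToEndIntegrationTest (the helper name drainUserIds is hypothetical and java.util.concurrent.TimeUnit is assumed to be imported), uses BlockingQueue.poll with a timeout so a stalled query fails the test instead of hanging it:

private static Set<String> drainUserIds(
        final BlockingQueue<KeyValue<String, GenericRow>> rowQueue,
        final int expectedCount,
        final long timeoutMs) throws InterruptedException {
    final Set<String> userIds = new HashSet<>();
    final long deadline = System.currentTimeMillis() + timeoutMs;
    while (userIds.size() < expectedCount && System.currentTimeMillis() < deadline) {
        // Wait up to 100 ms per row instead of spinning on poll().
        final KeyValue<String, GenericRow> nextRow = rowQueue.poll(100, TimeUnit.MILLISECONDS);
        if (nextRow != null) {
            // Column 1 holds the user id, as asserted in the test above.
            userIds.add((String) nextRow.value.getColumns().get(1));
        }
    }
    return userIds;
}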

Example 7 with QueuedQueryMetadata

Use of io.confluent.ksql.util.QueuedQueryMetadata in the ksql project by confluentinc.

From the class EndToEndIntegrationTest, method shouldCreateStreamUsingLikeClause. The test creates a derived stream filtered with a LIKE clause and checks that the first row returned by a SELECT on it contains the expected page id.

@Test
public void shouldCreateStreamUsingLikeClause() throws Exception {
    executeStatement("CREATE STREAM pageviews_like_p5" + " WITH (kafka_topic='pageviews_enriched_r0', value_format='DELIMITED')" + " AS SELECT * FROM %s" + " WHERE pageId LIKE '%%_5';", pageViewStream);
    final QueuedQueryMetadata queryMetadata = executeQuery("SELECT userid, pageid from pageviews_like_p5;");
    final List<Object> columns = waitForFirstRow(queryMetadata);
    assertThat(columns.get(1), is("PAGE_5"));
}
Also used: QueuedQueryMetadata(io.confluent.ksql.util.QueuedQueryMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
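
The waitForFirstRow helper used here and in Example 8 is defined elsewhere in EndToEndIntegrationTest and is not shown in this listing. A minimal sketch of what it might look like, assuming only the QueuedQueryMetadata.getRowQueue() and GenericRow.getColumns() methods already used in Example 6 (the real helper may differ and likely bounds the wait):

private static List<Object> waitForFirstRow(final QueuedQueryMetadata queryMetadata)
        throws InterruptedException {
    final BlockingQueue<KeyValue<String, GenericRow>> rowQueue = queryMetadata.getRowQueue();
    while (true) {
        // Block briefly for the first row produced by the transient query.
        final KeyValue<String, GenericRow> row = rowQueue.poll(100, TimeUnit.MILLISECONDS);
        if (row != null) {
            return row.value.getColumns();
        }
    }
}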

Example 8 with QueuedQueryMetadata

Use of io.confluent.ksql.util.QueuedQueryMetadata in the ksql project by confluentinc.

From the class EndToEndIntegrationTest, method shouldRetainSelectedColumnsInPartitionBy. The test creates a stream with a PARTITION BY clause and checks that the selected page id and user id columns are still present in the output rows.

@Test
public void shouldRetainSelectedColumnsInPartitionBy() throws Exception {
    executeStatement("CREATE STREAM pageviews_by_viewtime " + "AS SELECT viewtime, pageid, userid " + "from %s " + "partition by viewtime;", pageViewStream);
    final QueuedQueryMetadata queryMetadata = executeQuery("SELECT * from pageviews_by_viewtime;");
    final List<Object> columns = waitForFirstRow(queryMetadata);
    assertThat(columns.get(3).toString(), startsWith("PAGE_"));
    assertThat(columns.get(4).toString(), startsWith("USER_"));
}
Also used: QueuedQueryMetadata(io.confluent.ksql.util.QueuedQueryMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
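
The executeQuery and executeStatement helpers used in Examples 6 through 8 are likewise not shown. A hypothetical sketch of executeQuery, assuming a ksqlEngine field on the test class and the same buildMultipleQueries call that is mocked in Example 9 below (the real helper also starts the query and registers it for cleanup, which this sketch omits):

private QueuedQueryMetadata executeQuery(final String queryFormat, final Object... args)
        throws Exception {
    final String query = String.format(queryFormat, args);
    // buildMultipleQueries(...) is the engine call mocked in Example 9; the first
    // (and only) query it returns for a transient SELECT is the queued query.
    return (QueuedQueryMetadata) ksqlEngine
            .buildMultipleQueries(query, Collections.emptyMap())
            .get(0);
}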

Example 9 with QueuedQueryMetadata

Use of io.confluent.ksql.util.QueuedQueryMetadata in the ksql project by confluentinc.

From the class StreamedQueryResourceTest, method testStreamQuery. The test wires a mocked KsqlEngine and KafkaStreams instance into StreamedQueryResource and verifies that rows placed on the query's row queue are streamed back to the client as JSON lines.

@Test
public void testStreamQuery() throws Throwable {
    final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
    final Thread.UncaughtExceptionHandler threadExceptionHandler = (thread, exception) -> threadException.compareAndSet(null, exception);
    final String queryString = "SELECT * FROM test_stream;";
    // A SynchronousQueue has no capacity, so the populator thread blocks until each row is consumed.
    final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
    final LinkedList<GenericRow> writtenRows = new LinkedList<>();
    final Thread rowQueuePopulatorThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                for (int i = 0; ; i++) {
                    String key = Integer.toString(i);
                    GenericRow value = new GenericRow(Collections.singletonList(i));
                    synchronized (writtenRows) {
                        writtenRows.add(value);
                    }
                    rowQueue.put(new KeyValue<>(key, value));
                }
            } catch (InterruptedException exception) {
                // This should happen during the test, so it's fine
            }
        }
    }, "Row Queue Populator");
    rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
    rowQueuePopulatorThread.start();
    // The mocked Kafka Streams instance is expected to be started, given an exception handler,
    // and later closed and cleaned up when the streamed query ends.
    final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
    mockKafkaStreams.start();
    expectLastCall();
    mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
    expectLastCall();
    expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
    mockKafkaStreams.close();
    expectLastCall();
    mockKafkaStreams.cleanUp();
    expectLastCall();
    final OutputNode mockOutputNode = mock(OutputNode.class);
    expect(mockOutputNode.getSchema()).andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));
    final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
    KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
    KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
    expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);
    // A queued query whose rows come from the synchronous queue populated above.
    final QueuedQueryMetadata queuedQueryMetadata = new QueuedQueryMetadata(queryString, mockKafkaStreams, mockOutputNode, "", rowQueue, DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
    expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties)).andReturn(Collections.singletonList(queuedQueryMetadata));
    mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
    expectLastCall();
    StatementParser mockStatementParser = mock(StatementParser.class);
    expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));
    replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);
    StreamedQueryResource testResource = new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
    Response response = testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
    // EOFPipedOutputStream is a helper class defined in this test (a sketch follows this example);
    // the one-byte pipe buffer keeps the writer and reader threads in lockstep.
    PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
    PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
    StreamingOutput responseStream = (StreamingOutput) response.getEntity();
    final Thread queryWriterThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                responseStream.write(responseOutputStream);
            } catch (EOFException exception) {
                // Expected once the test closes the response output stream
            } catch (IOException exception) {
                throw new RuntimeException(exception);
            }
        }
    }, "Query Writer");
    queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
    queryWriterThread.start();
    Scanner responseScanner = new Scanner(responseInputStream);
    ObjectMapper objectMapper = new ObjectMapper();
    // Read the first five non-empty JSON lines and check them against the rows written
    // by the populator thread, in order.
    for (int i = 0; i < 5; i++) {
        if (!responseScanner.hasNextLine()) {
            throw new Exception("Response input stream failed to have expected line available");
        }
        String responseLine = responseScanner.nextLine();
        if (responseLine.trim().isEmpty()) {
            i--;
        } else {
            GenericRow expectedRow;
            synchronized (writtenRows) {
                expectedRow = writtenRows.poll();
            }
            GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
            assertEquals(expectedRow, testRow);
        }
    }
    responseOutputStream.close();
    queryWriterThread.join();
    rowQueuePopulatorThread.interrupt();
    rowQueuePopulatorThread.join();
    // Definitely want to make sure that the Kafka Streams instance has been closed and cleaned up
    verify(mockKafkaStreams);
    // If one of the other threads has somehow managed to throw an exception without breaking things up until this
    // point, we throw that exception now in the main thread and cause the test to fail
    Throwable exception = threadException.get();
    if (exception != null) {
        throw exception;
    }
}
Also used: Query(io.confluent.ksql.parser.tree.Query) StreamedQueryResource(io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource) StreamedRow(io.confluent.ksql.rest.entity.StreamedRow) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) Scanner(java.util.Scanner) EasyMock.mock(org.easymock.EasyMock.mock) KsqlEngine(io.confluent.ksql.KsqlEngine) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) AtomicReference(java.util.concurrent.atomic.AtomicReference) PipedInputStream(java.io.PipedInputStream) Map(java.util.Map) EasyMock.replay(org.easymock.EasyMock.replay) LinkedList(java.util.LinkedList) EasyMock.anyObject(org.easymock.EasyMock.anyObject) OutputNode(io.confluent.ksql.planner.plan.OutputNode) SynchronousQueue(java.util.concurrent.SynchronousQueue) StatementParser(io.confluent.ksql.rest.server.StatementParser) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test) StreamingOutput(javax.ws.rs.core.StreamingOutput) IOException(java.io.IOException) PipedOutputStream(java.io.PipedOutputStream) EasyMock.expect(org.easymock.EasyMock.expect) QueuedQueryMetadata(io.confluent.ksql.util.QueuedQueryMetadata) EOFException(java.io.EOFException) DataSource(io.confluent.ksql.serde.DataSource) EasyMock.expectLastCall(org.easymock.EasyMock.expectLastCall) Response(javax.ws.rs.core.Response) GenericRow(io.confluent.ksql.GenericRow) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) KsqlRequest(io.confluent.ksql.rest.entity.KsqlRequest) EasyMock.verify(org.easymock.EasyMock.verify) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
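
EOFPipedOutputStream, instantiated above, is an inner helper class of StreamedQueryResourceTest that is not shown in this listing. A minimal sketch of what such a class might look like, written as an assumption rather than the project's actual code:

private static class EOFPipedOutputStream extends PipedOutputStream {

    private volatile boolean closed = false;

    @Override
    public void write(final int b) throws IOException {
        try {
            super.write(b);
        } catch (IOException exception) {
            // Once the test has closed the pipe, surface further writes as EOF so the
            // "Query Writer" thread exits through its EOFException branch.
            if (closed) {
                throw new EOFException();
            }
            throw exception;
        }
    }

    @Override
    public void write(final byte[] b, final int off, final int len) throws IOException {
        try {
            super.write(b, off, len);
        } catch (IOException exception) {
            if (closed) {
                throw new EOFException();
            }
            throw exception;
        }
    }

    @Override
    public void close() throws IOException {
        closed = true;
        super.close();
    }
}

Converting post-close write failures into EOFException is what lets the writer thread shut down cleanly instead of tripping the uncaught-exception handler and failing the test.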

Aggregations

QueuedQueryMetadata (io.confluent.ksql.util.QueuedQueryMetadata): 9
Test (org.junit.Test): 7
IntegrationTest (org.apache.kafka.test.IntegrationTest): 6
GenericRow (io.confluent.ksql.GenericRow): 4
KeyValue (org.apache.kafka.streams.KeyValue): 4
ArrayList (java.util.ArrayList): 2
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
KsqlEngine (io.confluent.ksql.KsqlEngine): 1
Query (io.confluent.ksql.parser.tree.Query): 1
OutputNode (io.confluent.ksql.planner.plan.OutputNode): 1
KsqlRequest (io.confluent.ksql.rest.entity.KsqlRequest): 1
StreamedRow (io.confluent.ksql.rest.entity.StreamedRow): 1
StatementParser (io.confluent.ksql.rest.server.StatementParser): 1
StreamedQueryResource (io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource): 1
DataSource (io.confluent.ksql.serde.DataSource): 1
QueuedSchemaKStream (io.confluent.ksql.structured.QueuedSchemaKStream): 1
SchemaKStream (io.confluent.ksql.structured.SchemaKStream): 1
SchemaKTable (io.confluent.ksql.structured.SchemaKTable): 1
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient): 1