
Example 1 with OutputNode

Use of io.confluent.ksql.planner.plan.OutputNode in the ksql project by confluentinc.

From the class PhysicalPlanBuilder, method buildPhysicalPlan:

public QueryMetadata buildPhysicalPlan(final Pair<String, PlanNode> statementPlanPair) throws Exception {
    final SchemaKStream resultStream = statementPlanPair.getRight().buildStream(
        builder,
        ksqlConfig,
        kafkaTopicClient,
        functionRegistry,
        overriddenStreamsProperties,
        schemaRegistryClient);
    final OutputNode outputNode = resultStream.outputNode();
    boolean isBareQuery = outputNode instanceof KsqlBareOutputNode;
    // Check that the logical and physical plans match up; important to do this
    // before actually starting up the corresponding Kafka Streams job
    if (isBareQuery && !(resultStream instanceof QueuedSchemaKStream)) {
        throw new Exception(String.format(
            "Mismatch between logical and physical output; "
                + "expected a QueuedSchemaKStream based on logical "
                + "KsqlBareOutputNode, found a %s instead",
            resultStream.getClass().getCanonicalName()));
    }
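    // Query name prefixes come from the KSQL config; persistent and transient
    // queries are prefixed differently when they are built below.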
    String serviceId = getServiceId();
    String persistanceQueryPrefix = ksqlConfig.get(KsqlConfig.KSQL_PERSISTENT_QUERY_NAME_PREFIX_CONFIG).toString();
    String transientQueryPrefix = ksqlConfig.get(KsqlConfig.KSQL_TRANSIENT_QUERY_NAME_PREFIX_CONFIG).toString();
    if (isBareQuery) {
        return buildPlanForBareQuery(
            (QueuedSchemaKStream) resultStream,
            (KsqlBareOutputNode) outputNode,
            serviceId,
            transientQueryPrefix,
            statementPlanPair.getLeft());
    } else if (outputNode instanceof KsqlStructuredDataOutputNode) {
        return buildPlanForStructuredOutputNode(
            statementPlanPair.getLeft(),
            resultStream,
            (KsqlStructuredDataOutputNode) outputNode,
            serviceId,
            persistanceQueryPrefix,
            statementPlanPair.getLeft());
    } else {
        throw new KsqlException("Sink data source of type: " + outputNode.getClass() + " is not supported.");
    }
}
Also used : KsqlBareOutputNode(io.confluent.ksql.planner.plan.KsqlBareOutputNode) KsqlStructuredDataOutputNode(io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) OutputNode(io.confluent.ksql.planner.plan.OutputNode) SchemaKStream(io.confluent.ksql.structured.SchemaKStream) QueuedSchemaKStream(io.confluent.ksql.structured.QueuedSchemaKStream) KsqlException(io.confluent.ksql.util.KsqlException)
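
For orientation, a minimal caller sketch, not taken from the project: the method name runPlan and its parameters are illustrative, and it assumes the PhysicalPlanBuilder and the logical PlanNode have already been produced by the engine's parser and logical planner. It shows how a statement/plan pair might be handed to buildPhysicalPlan and how a transient result could be distinguished from a persistent one.

public QueryMetadata runPlan(
        final PhysicalPlanBuilder physicalPlanBuilder,
        final String statementText,
        final PlanNode logicalPlan) throws Exception {
    // Pair the original statement text with its logical plan, as buildPhysicalPlan expects.
    final Pair<String, PlanNode> statementPlanPair = new Pair<>(statementText, logicalPlan);
    final QueryMetadata queryMetadata = physicalPlanBuilder.buildPhysicalPlan(statementPlanPair);
    if (queryMetadata instanceof QueuedQueryMetadata) {
        // Bare (transient) query: rows are handed back to the caller through the row queue
        // carried by QueuedQueryMetadata instead of being written to a sink topic.
        return queryMetadata;
    }
    // Persistent query: the returned metadata describes a continuous job that writes
    // its results to the sink declared by the KsqlStructuredDataOutputNode.
    return queryMetadata;
}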

Example 2 with OutputNode

Use of io.confluent.ksql.planner.plan.OutputNode in the ksql project by confluentinc.

From the class StreamedQueryResourceTest, method testStreamQuery:

@Test
public void testStreamQuery() throws Throwable {
    final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
    final Thread.UncaughtExceptionHandler threadExceptionHandler = (thread, exception) -> threadException.compareAndSet(null, exception);
    final String queryString = "SELECT * FROM test_stream;";
    final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
    final LinkedList<GenericRow> writtenRows = new LinkedList<>();
    final Thread rowQueuePopulatorThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                for (int i = 0; ; i++) {
                    String key = Integer.toString(i);
                    GenericRow value = new GenericRow(Collections.singletonList(i));
                    synchronized (writtenRows) {
                        writtenRows.add(value);
                    }
                    rowQueue.put(new KeyValue<>(key, value));
                }
            } catch (InterruptedException exception) {
                // Expected: the test interrupts this thread once it has read enough rows
            }
        }
    }, "Row Queue Populator");
    rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
    rowQueuePopulatorThread.start();
    final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
    mockKafkaStreams.start();
    expectLastCall();
    mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
    expectLastCall();
    expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
    mockKafkaStreams.close();
    expectLastCall();
    mockKafkaStreams.cleanUp();
    expectLastCall();
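    // Mock the physical plan's output node; the streamed query's schema is taken
    // from it (see the getSchema() expectation below).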
    final OutputNode mockOutputNode = mock(OutputNode.class);
    expect(mockOutputNode.getSchema()).andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));
    final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
    KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
    KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
    expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);
    final QueuedQueryMetadata queuedQueryMetadata = new QueuedQueryMetadata(
        queryString,
        mockKafkaStreams,
        mockOutputNode,
        "",
        rowQueue,
        DataSource.DataSourceType.KSTREAM,
        "",
        mockKafkaTopicClient,
        null);
    expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties))
        .andReturn(Collections.singletonList(queuedQueryMetadata));
    mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
    expectLastCall();
    StatementParser mockStatementParser = mock(StatementParser.class);
    expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));
    replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);
    StreamedQueryResource testResource = new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
    Response response = testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
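    // EOFPipedOutputStream is a test-local PipedOutputStream subclass that is not part
    // of this excerpt; a possible sketch of it follows after this example.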
    PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
    PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
    StreamingOutput responseStream = (StreamingOutput) response.getEntity();
    final Thread queryWriterThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                responseStream.write(responseOutputStream);
            } catch (EOFException exception) {
                // Expected: the test closes the response output stream when it is finished
            } catch (IOException exception) {
                throw new RuntimeException(exception);
            }
        }
    }, "Query Writer");
    queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
    queryWriterThread.start();
    Scanner responseScanner = new Scanner(responseInputStream);
    ObjectMapper objectMapper = new ObjectMapper();
    for (int i = 0; i < 5; i++) {
        if (!responseScanner.hasNextLine()) {
            throw new Exception("Response input stream failed to have expected line available");
        }
        String responseLine = responseScanner.nextLine();
        if (responseLine.trim().isEmpty()) {
            i--;
        } else {
            GenericRow expectedRow;
            synchronized (writtenRows) {
                expectedRow = writtenRows.poll();
            }
            GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
            assertEquals(expectedRow, testRow);
        }
    }
    responseOutputStream.close();
    queryWriterThread.join();
    rowQueuePopulatorThread.interrupt();
    rowQueuePopulatorThread.join();
    // Definitely want to make sure that the Kafka Streams instance has been closed and cleaned up
    verify(mockKafkaStreams);
    // If one of the other threads has somehow managed to throw an exception without breaking things up until this
    // point, we throw that exception now in the main thread and cause the test to fail
    Throwable exception = threadException.get();
    if (exception != null) {
        throw exception;
    }
}
Also used : Query(io.confluent.ksql.parser.tree.Query) StreamedQueryResource(io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource) StreamedRow(io.confluent.ksql.rest.entity.StreamedRow) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) Scanner(java.util.Scanner) EasyMock.mock(org.easymock.EasyMock.mock) KsqlEngine(io.confluent.ksql.KsqlEngine) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) AtomicReference(java.util.concurrent.atomic.AtomicReference) PipedInputStream(java.io.PipedInputStream) Map(java.util.Map) EasyMock.replay(org.easymock.EasyMock.replay) LinkedList(java.util.LinkedList) EasyMock.anyObject(org.easymock.EasyMock.anyObject) OutputNode(io.confluent.ksql.planner.plan.OutputNode) SynchronousQueue(java.util.concurrent.SynchronousQueue) StatementParser(io.confluent.ksql.rest.server.StatementParser) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test) StreamingOutput(javax.ws.rs.core.StreamingOutput) IOException(java.io.IOException) PipedOutputStream(java.io.PipedOutputStream) EasyMock.expect(org.easymock.EasyMock.expect) QueuedQueryMetadata(io.confluent.ksql.util.QueuedQueryMetadata) EOFException(java.io.EOFException) DataSource(io.confluent.ksql.serde.DataSource) EasyMock.expectLastCall(org.easymock.EasyMock.expectLastCall) Response(javax.ws.rs.core.Response) GenericRow(io.confluent.ksql.GenericRow) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) KsqlRequest(io.confluent.ksql.rest.entity.KsqlRequest) EasyMock.verify(org.easymock.EasyMock.verify) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
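
The EOFPipedOutputStream used above is a helper defined elsewhere in the test class and is not shown in this excerpt. A possible sketch, assuming its only job is to surface an EOFException to the writer thread once the stream or its reader has gone away:

private static class EOFPipedOutputStream extends PipedOutputStream {

    private boolean closed = false;

    @Override
    public void write(final int b) throws IOException {
        throwEofIfClosed();
        try {
            super.write(b);
        } catch (IOException exception) {
            // The reader side has disappeared; report it as end-of-stream.
            throw new EOFException();
        }
    }

    @Override
    public void write(final byte[] b, final int off, final int len) throws IOException {
        throwEofIfClosed();
        try {
            super.write(b, off, len);
        } catch (IOException exception) {
            throw new EOFException();
        }
    }

    @Override
    public void close() throws IOException {
        closed = true;
        super.close();
    }

    private void throwEofIfClosed() throws IOException {
        if (closed) {
            throw new EOFException();
        }
    }
}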

Aggregations

OutputNode (io.confluent.ksql.planner.plan.OutputNode) 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 1
GenericRow (io.confluent.ksql.GenericRow) 1
KsqlEngine (io.confluent.ksql.KsqlEngine) 1
Query (io.confluent.ksql.parser.tree.Query) 1
KsqlBareOutputNode (io.confluent.ksql.planner.plan.KsqlBareOutputNode) 1
KsqlStructuredDataOutputNode (io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) 1
KsqlRequest (io.confluent.ksql.rest.entity.KsqlRequest) 1
StreamedRow (io.confluent.ksql.rest.entity.StreamedRow) 1
StatementParser (io.confluent.ksql.rest.server.StatementParser) 1
StreamedQueryResource (io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource) 1
DataSource (io.confluent.ksql.serde.DataSource) 1
QueuedSchemaKStream (io.confluent.ksql.structured.QueuedSchemaKStream) 1
SchemaKStream (io.confluent.ksql.structured.SchemaKStream) 1
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient) 1
KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl) 1
KsqlException (io.confluent.ksql.util.KsqlException) 1
QueuedQueryMetadata (io.confluent.ksql.util.QueuedQueryMetadata) 1
EOFException (java.io.EOFException) 1
IOException (java.io.IOException) 1