Search in sources:

Example 1 with TransientQueryMetadata

Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.

From class QueryBuilderTest, method shouldBuildTransientQueryCorrectly:

@Test
public void shouldBuildTransientQueryCorrectly() {
    // Given: a transient query has been configured on the builder.
    givenTransientQuery();

    // When: the query is built and initialized.
    final TransientQueryMetadata transientQuery = queryBuilder.buildTransientQuery(
        STATEMENT_TEXT,
        QUERY_ID,
        SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet()),
        physicalPlan,
        SUMMARY,
        TRANSIENT_SINK_SCHEMA,
        LIMIT,
        Optional.empty(),
        false,
        queryListener,
        streamsBuilder,
        Optional.empty(),
        new MetricCollectors());
    transientQuery.initialize();

    // Then: the resulting metadata reflects everything that was passed in.
    assertThat(transientQuery.getStatementString(), is(STATEMENT_TEXT));
    assertThat(
        transientQuery.getSourceNames(),
        is(SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet())));
    assertThat(transientQuery.getExecutionPlan(), is(SUMMARY));
    assertThat(transientQuery.getTopology(), is(topology));
    assertThat(transientQuery.getOverriddenProperties(), is(OVERRIDES));

    // And: the streams properties handed to Kafka Streams are exposed unchanged.
    verify(kafkaStreamsBuilder).build(any(), propertyCaptor.capture());
    assertThat(transientQuery.getStreamsProperties(), is(propertyCaptor.getValue()));
}
Also used : MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) DataSource(io.confluent.ksql.metastore.model.DataSource) Test(org.junit.Test)

Example 2 with TransientQueryMetadata

Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.

From class StreamedQueryResourceTest, method shouldStreamRowsCorrectly:

@Test
public void shouldStreamRowsCorrectly() throws Throwable {
    final int NUM_ROWS = 5;
    // Captures the first uncaught exception from either helper thread so it can be
    // re-thrown from the main thread at the end, failing the test.
    final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
    final Thread.UncaughtExceptionHandler threadExceptionHandler = (thread, exception) -> threadException.compareAndSet(null, exception);
    final String queryString = "SELECT * FROM test_stream;";
    // Zero-capacity hand-off queue: each put() blocks until the streaming endpoint
    // consumes the row, keeping producer and consumer in lock-step.
    final SynchronousQueue<KeyValueMetadata<List<?>, GenericRow>> rowQueue = new SynchronousQueue<>();
    // Records every row pushed into the queue so the streamed output can be
    // compared against it. Guarded by synchronized(writtenRows) on both threads.
    final LinkedList<GenericRow> writtenRows = new LinkedList<>();
    // Producer thread: feeds NUM_ROWS rows into the query's row queue.
    final Thread rowQueuePopulatorThread = new Thread(() -> {
        try {
            for (int i = 0; i != NUM_ROWS; i++) {
                final GenericRow value = genericRow(i);
                synchronized (writtenRows) {
                    writtenRows.add(value);
                }
                rowQueue.put(new KeyValueMetadata<>(KeyValue.keyValue(null, value)));
            }
        } catch (final InterruptedException exception) {
        // This should happen during the test, so it's fine
        }
    }, "Row Queue Populator");
    rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
    rowQueuePopulatorThread.start();
    // Mocked Kafka Streams instance whose state flips to NOT_RUNNING once closed,
    // so the query can observe its own shutdown.
    final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
    when(mockStatementParser.<Query>parseSingleStatement(queryString)).thenReturn(query);
    final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
    final KafkaStreamsBuilder kafkaStreamsBuilder = mock(KafkaStreamsBuilder.class);
    when(kafkaStreamsBuilder.build(any(), any())).thenReturn(mockKafkaStreams);
    MutableBoolean closed = new MutableBoolean(false);
    when(mockKafkaStreams.close(any())).thenAnswer(i -> {
        closed.setValue(true);
        return true;
    });
    when(mockKafkaStreams.state()).thenAnswer(i -> closed.getValue() ? State.NOT_RUNNING : State.RUNNING);
    // Build a real TransientQueryMetadata backed by the mocked streams and the
    // test row queue, and register it as the query the resource will stream.
    final TransientQueryMetadata transientQueryMetadata = new TransientQueryMetadata(queryString, SOME_SCHEMA, Collections.emptySet(), "", new TestRowQueue(rowQueue), queryId, "appId", mock(Topology.class), kafkaStreamsBuilder, Collections.emptyMap(), Collections.emptyMap(), closeTimeout, 10, ResultType.STREAM, 0L, 0L, listener);
    transientQueryMetadata.initialize();
    when(queryMetadataHolder.getPushQueryMetadata()).thenReturn(Optional.of(transientQueryMetadata));
    final EndpointResponse response = testResource.streamQuery(securityContext, new KsqlRequest(queryString, requestStreamsProperties, Collections.emptyMap(), null), new CompletableFuture<>(), Optional.empty(), new MetricsCallbackHolder(), context);
    // Pipe the streaming response through a 1-byte buffer so the writer thread and
    // the scanner below stay tightly synchronized.
    final PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
    final PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
    final StreamingOutput responseStream = (StreamingOutput) response.getEntity();
    // Writer thread: drives the endpoint's StreamingOutput into the pipe.
    final Thread queryWriterThread = new Thread(() -> {
        try {
            responseStream.write(responseOutputStream);
        } catch (final EOFException exception) {
        // It's fine
        } catch (final IOException exception) {
            throw new RuntimeException(exception);
        }
    }, "Query Writer");
    queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
    queryWriterThread.start();
    final Scanner responseScanner = new Scanner(responseInputStream, "UTF-8");
    final ObjectMapper objectMapper = ApiJsonMapper.INSTANCE.get();
    // Consume NUM_ROWS JSON lines (header first, then data rows) and check each
    // data row against what the populator thread recorded.
    for (int i = 0; i != NUM_ROWS; i++) {
        if (!responseScanner.hasNextLine()) {
            throw new Exception("Response input stream failed to have expected line available");
        }
        final String responseLine = responseScanner.nextLine();
        // Strip the JSON-array framing ("[", trailing "," or "]") around each row.
        String jsonLine = StringUtils.stripStart(responseLine, "[");
        jsonLine = StringUtils.stripEnd(jsonLine, ",");
        jsonLine = StringUtils.stripEnd(jsonLine, "]");
        if (jsonLine.isEmpty()) {
            // Framing-only line; does not count toward the expected row total.
            i--;
            continue;
        }
        if (i == 0) {
            // Header:
            assertThat(jsonLine, is("{\"header\":{\"queryId\":\"queryId\",\"schema\":\"`f1` INTEGER\"}}"));
            continue;
        }
        final GenericRow expectedRow;
        synchronized (writtenRows) {
            expectedRow = writtenRows.poll();
        }
        final DataRow testRow = objectMapper.readValue(jsonLine, StreamedRow.class).getRow().get();
        assertThat(testRow.getColumns(), is(expectedRow.values()));
    }
    // Closing the pipe signals EOF to the writer; then tear down both helpers.
    responseOutputStream.close();
    queryWriterThread.join();
    rowQueuePopulatorThread.interrupt();
    rowQueuePopulatorThread.join();
    // Definitely want to make sure that the Kafka Streams instance has been closed and cleaned up
    verify(mockKafkaStreams).start();
    // called on init and when setting uncaught exception handler manually
    verify(mockKafkaStreams, times(2)).setUncaughtExceptionHandler(any(StreamsUncaughtExceptionHandler.class));
    verify(mockKafkaStreams).cleanUp();
    verify(mockKafkaStreams).close(Duration.ofMillis(closeTimeout));
    // If one of the other threads has somehow managed to throw an exception without breaking things up until this
    // point, we throw that exception now in the main thread and cause the test to fail
    final Throwable exception = threadException.get();
    if (exception != null) {
        throw exception;
    }
}
Also used : ERROR_CODE_BAD_STATEMENT(io.confluent.ksql.rest.Errors.ERROR_CODE_BAD_STATEMENT) ColumnName(io.confluent.ksql.name.ColumnName) ArgumentMatchers.eq(org.mockito.ArgumentMatchers.eq) KsqlTopicAuthorizationException(io.confluent.ksql.exception.KsqlTopicAuthorizationException) KsqlErrorMessage(io.confluent.ksql.rest.entity.KsqlErrorMessage) KsqlRestExceptionMatchers.exceptionStatementErrorMessage(io.confluent.ksql.rest.server.resources.KsqlRestExceptionMatchers.exceptionStatementErrorMessage) Mockito.doThrow(org.mockito.Mockito.doThrow) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) PipedInputStream(java.io.PipedInputStream) Duration(java.time.Duration) Map(java.util.Map) QueryId(io.confluent.ksql.query.QueryId) QueryMetadata(io.confluent.ksql.util.QueryMetadata) CommonClientConfigs(org.apache.kafka.clients.CommonClientConfigs) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) State(org.apache.kafka.streams.KafkaStreams.State) ApiJsonMapper(io.confluent.ksql.rest.ApiJsonMapper) ActivenessRegistrar(io.confluent.ksql.version.metrics.ActivenessRegistrar) KsqlRestConfig(io.confluent.ksql.rest.server.KsqlRestConfig) SERVICE_UNAVAILABLE(io.netty.handler.codec.http.HttpResponseStatus.SERVICE_UNAVAILABLE) KsqlRestException(io.confluent.ksql.rest.server.resources.KsqlRestException) KsqlException(io.confluent.ksql.util.KsqlException) Matchers.is(org.hamcrest.Matchers.is) KsqlRequest(io.confluent.ksql.rest.entity.KsqlRequest) Matchers.containsString(org.hamcrest.Matchers.containsString) MockitoJUnitRunner(org.mockito.junit.MockitoJUnitRunner) PullQueryResult(io.confluent.ksql.physical.pull.PullQueryResult) Topology(org.apache.kafka.streams.Topology) Mockito.mock(org.mockito.Mockito.mock) StreamedRow(io.confluent.ksql.rest.entity.StreamedRow) Mock(org.mockito.Mock) RunWith(org.junit.runner.RunWith) 
KsqlRestExceptionMatchers.exceptionErrorMessage(io.confluent.ksql.rest.server.resources.KsqlRestExceptionMatchers.exceptionErrorMessage) ArgumentMatchers.anyBoolean(org.mockito.ArgumentMatchers.anyBoolean) KsqlSecurityContext(io.confluent.ksql.security.KsqlSecurityContext) SessionConfig(io.confluent.ksql.config.SessionConfig) KeyValueMetadata(io.confluent.ksql.util.KeyValueMetadata) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) CommandQueue(io.confluent.ksql.rest.server.computation.CommandQueue) KsqlEngine(io.confluent.ksql.engine.KsqlEngine) Mockito.times(org.mockito.Mockito.times) IOException(java.io.IOException) Test(org.junit.Test) PipedOutputStream(java.io.PipedOutputStream) AclOperation(org.apache.kafka.common.acl.AclOperation) StreamingOutput(io.confluent.ksql.api.server.StreamingOutput) Mockito.never(org.mockito.Mockito.never) GenericRow(io.confluent.ksql.GenericRow) QueryExecutor(io.confluent.ksql.rest.server.query.QueryExecutor) ERROR_CODE_FORBIDDEN_KAFKA_ACCESS(io.confluent.ksql.rest.Errors.ERROR_CODE_FORBIDDEN_KAFKA_ACCESS) KafkaStreams(org.apache.kafka.streams.KafkaStreams) SqlTypes(io.confluent.ksql.schema.ksql.types.SqlTypes) Assert.assertEquals(org.junit.Assert.assertEquals) PreparedStatement(io.confluent.ksql.parser.KsqlParser.PreparedStatement) Query(io.confluent.ksql.parser.tree.Query) KsqlStatementErrorMessageMatchers.statement(io.confluent.ksql.rest.entity.KsqlStatementErrorMessageMatchers.statement) CoreMatchers(org.hamcrest.CoreMatchers) SessionProperties(io.confluent.ksql.rest.SessionProperties) KeyValue(io.confluent.ksql.util.KeyValue) BlockingRowQueue(io.confluent.ksql.query.BlockingRowQueue) ServiceContext(io.confluent.ksql.services.ServiceContext) Scanner(java.util.Scanner) TimeoutException(java.util.concurrent.TimeoutException) DataRow(io.confluent.ksql.rest.entity.StreamedRow.DataRow) Context(io.vertx.core.Context) GenericRow.genericRow(io.confluent.ksql.GenericRow.genericRow) 
DenyListPropertyValidator(io.confluent.ksql.properties.DenyListPropertyValidator) KsqlAuthorizationValidator(io.confluent.ksql.security.KsqlAuthorizationValidator) LimitHandler(io.confluent.ksql.query.LimitHandler) ImmutableSet(com.google.common.collect.ImmutableSet) StringUtils(org.codehaus.plexus.util.StringUtils) ImmutableMap(com.google.common.collect.ImmutableMap) Errors(io.confluent.ksql.rest.Errors) SynchronousQueue(java.util.concurrent.SynchronousQueue) StatementParser(io.confluent.ksql.rest.server.StatementParser) Collection(java.util.Collection) KsqlConfig(io.confluent.ksql.util.KsqlConfig) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) EOFException(java.io.EOFException) Objects(java.util.Objects) List(java.util.List) CompletionHandler(io.confluent.ksql.query.CompletionHandler) PrintTopic(io.confluent.ksql.parser.tree.PrintTopic) KsqlErrorMessageMatchers.errorCode(io.confluent.ksql.rest.entity.KsqlErrorMessageMatchers.errorCode) Optional(java.util.Optional) Statement(io.confluent.ksql.parser.tree.Statement) MutableBoolean(org.apache.commons.lang3.mutable.MutableBoolean) PullQueryQueue(io.confluent.ksql.query.PullQueryQueue) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) StreamsConfig(org.apache.kafka.streams.StreamsConfig) QueryMetadataHolder(io.confluent.ksql.rest.server.query.QueryMetadataHolder) ArgumentMatchers.anyLong(org.mockito.ArgumentMatchers.anyLong) FORBIDDEN(io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN) Assert.assertThrows(org.junit.Assert.assertThrows) KafkaStreamsBuilder(io.confluent.ksql.query.KafkaStreamsBuilder) ResultType(io.confluent.ksql.util.PushQueryMetadata.ResultType) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) AtomicReference(java.util.concurrent.atomic.AtomicReference) Captor(org.mockito.Captor) BAD_REQUEST(io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST) 
ArgumentCaptor(org.mockito.ArgumentCaptor) KsqlErrorMessageMatchers.errorMessage(io.confluent.ksql.rest.entity.KsqlErrorMessageMatchers.errorMessage) CustomValidators(io.confluent.ksql.rest.server.validation.CustomValidators) LinkedList(java.util.LinkedList) StreamsUncaughtExceptionHandler(org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) EndpointResponse(io.confluent.ksql.rest.EndpointResponse) Matchers(org.hamcrest.Matchers) Mockito.when(org.mockito.Mockito.when) MetricsCallbackHolder(io.confluent.ksql.api.server.MetricsCallbackHolder) KsqlRestExceptionMatchers.exceptionStatusCode(io.confluent.ksql.rest.server.resources.KsqlRestExceptionMatchers.exceptionStatusCode) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) Errors.badRequest(io.confluent.ksql.rest.Errors.badRequest) Collections(java.util.Collections) Scanner(java.util.Scanner) Query(io.confluent.ksql.parser.tree.Query) MetricsCallbackHolder(io.confluent.ksql.api.server.MetricsCallbackHolder) KeyValueMetadata(io.confluent.ksql.util.KeyValueMetadata) PipedOutputStream(java.io.PipedOutputStream) StreamingOutput(io.confluent.ksql.api.server.StreamingOutput) Matchers.containsString(org.hamcrest.Matchers.containsString) DataRow(io.confluent.ksql.rest.entity.StreamedRow.DataRow) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) GenericRow(io.confluent.ksql.GenericRow) EndpointResponse(io.confluent.ksql.rest.EndpointResponse) KsqlRequest(io.confluent.ksql.rest.entity.KsqlRequest) SynchronousQueue(java.util.concurrent.SynchronousQueue) EOFException(java.io.EOFException) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) KafkaStreams(org.apache.kafka.streams.KafkaStreams) StreamsUncaughtExceptionHandler(org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler) MutableBoolean(org.apache.commons.lang3.mutable.MutableBoolean) 
AtomicReference(java.util.concurrent.atomic.AtomicReference) Topology(org.apache.kafka.streams.Topology) PipedInputStream(java.io.PipedInputStream) IOException(java.io.IOException) LinkedList(java.util.LinkedList) KsqlTopicAuthorizationException(io.confluent.ksql.exception.KsqlTopicAuthorizationException) KsqlRestException(io.confluent.ksql.rest.server.resources.KsqlRestException) KsqlException(io.confluent.ksql.util.KsqlException) IOException(java.io.IOException) TimeoutException(java.util.concurrent.TimeoutException) EOFException(java.io.EOFException) KafkaStreamsBuilder(io.confluent.ksql.query.KafkaStreamsBuilder) Test(org.junit.Test)

Example 3 with TransientQueryMetadata

Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.

From class TerminateQueryExecutorTest, method shouldTerminateTransientQuery:

@Test
public void shouldTerminateTransientQuery() {
    // Given: a running transient query, served up by a mocked engine.
    final ConfiguredStatement<TerminateQuery> statement =
        (ConfiguredStatement<TerminateQuery>) engine.configure("TERMINATE TRANSIENT_QUERY;");
    final TransientQueryMetadata transientQuery =
        givenTransientQuery("TRANSIENT_QUERY", RUNNING_QUERY_STATE);
    final QueryId queryId = transientQuery.getQueryId();
    // Renamed from the original's shadowing local `engine`: this mock is what the
    // executor terminates against, while the `engine` field supplies the context.
    final KsqlEngine mockedEngine = mock(KsqlEngine.class);
    when(mockedEngine.getQuery(queryId)).thenReturn(Optional.of(transientQuery));

    // When: the terminate statement is executed.
    final Optional<KsqlEntity> entity = CUSTOM_EXECUTORS.terminateQuery()
        .execute(statement, mock(SessionProperties.class), mockedEngine, engine.getServiceContext())
        .getEntity();

    // Then: the entity reports the terminated query id with wasTerminated=true.
    assertThat(entity, is(Optional.of(
        new TerminateQueryEntity(statement.getStatementText(), queryId.toString(), true))));
}
Also used : ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) KsqlEngine(io.confluent.ksql.engine.KsqlEngine) TerminateQuery(io.confluent.ksql.parser.tree.TerminateQuery) TerminateQueryEntity(io.confluent.ksql.rest.entity.TerminateQueryEntity) QueryId(io.confluent.ksql.query.QueryId) KsqlEntity(io.confluent.ksql.rest.entity.KsqlEntity) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) Test(org.junit.Test)

Example 4 with TransientQueryMetadata

Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.

From class KsqlEngine, method createStreamPullQuery:

/**
 * Creates and executes a stream pull query (a pull query against a stream that
 * reads from earliest up to the end offsets captured at submission time).
 *
 * @param serviceContext used to look up the input topics' end offsets
 * @param analysis the analysis of the query, identifying its input sources
 * @param statementOrig the configured query statement as submitted
 * @param excludeTombstones whether tombstone rows should be filtered out
 * @return the transient query metadata paired with the captured end offsets
 * @throws KsqlStatementException if stream pull queries are disabled via config
 */
public StreamPullQueryMetadata createStreamPullQuery(final ServiceContext serviceContext, final ImmutableAnalysis analysis, final ConfiguredStatement<Query> statementOrig, final boolean excludeTombstones) {
    final boolean enabled = statementOrig.getSessionConfig()
        .getConfig(true)
        .getBoolean(KsqlConfig.KSQL_QUERY_STREAM_PULL_QUERY_ENABLED);
    if (!enabled) {
        throw new KsqlStatementException(
            "Pull queries on streams are disabled. To create a push query on the stream,"
                + " add EMIT CHANGES to the end. To enable pull queries on streams, set"
                + " the " + KsqlConfig.KSQL_QUERY_STREAM_PULL_QUERY_ENABLED
                + " config to 'true'.",
            statementOrig.getStatementText());
    }

    // Stream pull query overrides, layered on top of the user's session overrides.
    final Map<String, Object> streamPullOverrides =
        new HashMap<>(statementOrig.getSessionConfig().getOverrides());
    // Reading from earliest is semantically required for a stream pull query.
    streamPullOverrides.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // A single thread keeps these queries as lightweight as possible, since they
    // are not counted against the transient query limit.
    streamPullOverrides.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
    // EOS buys nothing here, since this query only produces side effects.
    streamPullOverrides.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.AT_LEAST_ONCE);
    final ConfiguredStatement<Query> configured = statementOrig.withConfigOverrides(streamPullOverrides);

    // Snapshot the input end offsets now; the query runs until it reaches them.
    final ImmutableMap<TopicPartition, Long> endOffsets =
        getQueryInputEndOffsets(analysis, serviceContext.getAdminClient());
    final TransientQueryMetadata transientQuery = EngineExecutor
        .create(primaryContext, serviceContext, configured.getSessionConfig())
        .executeStreamPullQuery(configured, excludeTombstones, endOffsets);
    QueryLogger.info(
        "Streaming stream pull query results '{}' from earliest to " + endOffsets,
        configured.getStatementText());
    return new StreamPullQueryMetadata(transientQuery, endOffsets);
}
Also used : Query(io.confluent.ksql.parser.tree.Query) HashMap(java.util.HashMap) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) StreamPullQueryMetadata(io.confluent.ksql.util.StreamPullQueryMetadata) TopicPartition(org.apache.kafka.common.TopicPartition) KsqlStatementException(io.confluent.ksql.util.KsqlStatementException)

Example 5 with TransientQueryMetadata

Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.

From class QueryRegistryImplTest, method givenStreamPull:

/**
 * Registers a stream pull query with the registry, backed by a mocked
 * {@link TransientQueryMetadata}, and returns that mock for verification.
 */
private TransientQueryMetadata givenStreamPull(final QueryRegistry registry, final String id) {
    final QueryId queryId = new QueryId(id);

    // A mock transient query that reports itself as already initialized.
    final TransientQueryMetadata streamPull = mock(TransientQueryMetadata.class);
    when(streamPull.getQueryId()).thenReturn(queryId);
    when(streamPull.isInitialized()).thenReturn(true);

    // Any transient-query build on the shared queryBuilder yields this mock.
    when(queryBuilder.buildTransientQuery(
        any(), any(), any(), any(), any(), any(), any(), any(),
        anyBoolean(), any(), any(), any(), any()))
        .thenReturn(streamPull);

    registry.createStreamPullQuery(
        config,
        serviceContext,
        logContext,
        metaStore,
        "sql",
        queryId,
        ImmutableSet.of(SourceName.of("some-source")),
        mock(ExecutionStep.class),
        "plan-summary",
        mock(LogicalSchema.class),
        OptionalInt.of(123),
        Optional.empty(),
        false,
        ImmutableMap.<TopicPartition, Long>builder().build());
    return streamPull;
}
Also used : ExecutionStep(io.confluent.ksql.execution.plan.ExecutionStep) TopicPartition(org.apache.kafka.common.TopicPartition) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata)

Aggregations

TransientQueryMetadata (io.confluent.ksql.util.TransientQueryMetadata)22 Test (org.junit.Test)10 QueryId (io.confluent.ksql.query.QueryId)5 LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema)5 QueryMetadata (io.confluent.ksql.util.QueryMetadata)5 GenericRow (io.confluent.ksql.GenericRow)4 KsqlConfig (io.confluent.ksql.util.KsqlConfig)4 PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata)4 IntegrationTest (org.apache.kafka.test.IntegrationTest)4 Before (org.junit.Before)4 ImmutableMap (com.google.common.collect.ImmutableMap)3 Query (io.confluent.ksql.parser.tree.Query)3 BlockingRowQueue (io.confluent.ksql.query.BlockingRowQueue)3 KeyValue (io.confluent.ksql.util.KeyValue)3 KeyValueMetadata (io.confluent.ksql.util.KeyValueMetadata)3 Collection (java.util.Collection)3 List (java.util.List)3 Map (java.util.Map)3 Objects (java.util.Objects)3 SuppressFBWarnings (edu.umd.cs.findbugs.annotations.SuppressFBWarnings)2