Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.
Example from class QueryBuilderTest, method shouldBuildTransientQueryCorrectly:
@Test
public void shouldBuildTransientQueryCorrectly() {
  // Given: a transient query has been set up on the builder's collaborators.
  givenTransientQuery();

  // When: the builder is asked to construct the transient query metadata.
  final TransientQueryMetadata queryMetadata = queryBuilder.buildTransientQuery(
      STATEMENT_TEXT,
      QUERY_ID,
      SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet()),
      physicalPlan,
      SUMMARY,
      TRANSIENT_SINK_SCHEMA,
      LIMIT,
      Optional.empty(),
      false,
      queryListener,
      streamsBuilder,
      Optional.empty(),
      new MetricCollectors());
  queryMetadata.initialize();

  // Then: the metadata reflects the statement, sources, plan and topology,
  // and the streams properties are exactly those passed to the streams builder.
  assertThat(queryMetadata.getStatementString(), is(STATEMENT_TEXT));
  assertThat(
      queryMetadata.getSourceNames(),
      is(SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet())));
  assertThat(queryMetadata.getExecutionPlan(), is(SUMMARY));
  assertThat(queryMetadata.getTopology(), is(topology));
  assertThat(queryMetadata.getOverriddenProperties(), is(OVERRIDES));
  verify(kafkaStreamsBuilder).build(any(), propertyCaptor.capture());
  assertThat(queryMetadata.getStreamsProperties(), is(propertyCaptor.getValue()));
}
Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.
Example from class StreamedQueryResourceTest, method shouldStreamRowsCorrectly:
@Test
public void shouldStreamRowsCorrectly() throws Throwable {
final int NUM_ROWS = 5;
// Captures the first exception thrown on either background thread so it can be
// rethrown from the main thread at the end of the test.
final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
final Thread.UncaughtExceptionHandler threadExceptionHandler = (thread, exception) -> threadException.compareAndSet(null, exception);
final String queryString = "SELECT * FROM test_stream;";
// SynchronousQueue has no capacity: each put() blocks until the query writer
// thread takes the row, giving a strict one-at-a-time handoff.
final SynchronousQueue<KeyValueMetadata<List<?>, GenericRow>> rowQueue = new SynchronousQueue<>();
// Rows recorded here as they are produced; guarded by synchronized blocks
// because the producer and the main (asserting) thread both touch it.
final LinkedList<GenericRow> writtenRows = new LinkedList<>();
// Producer thread: feeds NUM_ROWS rows into the queue, remembering each one.
final Thread rowQueuePopulatorThread = new Thread(() -> {
try {
for (int i = 0; i != NUM_ROWS; i++) {
final GenericRow value = genericRow(i);
synchronized (writtenRows) {
writtenRows.add(value);
}
rowQueue.put(new KeyValueMetadata<>(KeyValue.keyValue(null, value)));
}
} catch (final InterruptedException exception) {
// This should happen during the test, so it's fine
}
}, "Row Queue Populator");
rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
rowQueuePopulatorThread.start();
final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
when(mockStatementParser.<Query>parseSingleStatement(queryString)).thenReturn(query);
final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
final KafkaStreamsBuilder kafkaStreamsBuilder = mock(KafkaStreamsBuilder.class);
when(kafkaStreamsBuilder.build(any(), any())).thenReturn(mockKafkaStreams);
// Track close() so state() can flip from RUNNING to NOT_RUNNING afterwards.
MutableBoolean closed = new MutableBoolean(false);
when(mockKafkaStreams.close(any())).thenAnswer(i -> {
closed.setValue(true);
return true;
});
when(mockKafkaStreams.state()).thenAnswer(i -> closed.getValue() ? State.NOT_RUNNING : State.RUNNING);
final TransientQueryMetadata transientQueryMetadata = new TransientQueryMetadata(queryString, SOME_SCHEMA, Collections.emptySet(), "", new TestRowQueue(rowQueue), queryId, "appId", mock(Topology.class), kafkaStreamsBuilder, Collections.emptyMap(), Collections.emptyMap(), closeTimeout, 10, ResultType.STREAM, 0L, 0L, listener);
transientQueryMetadata.initialize();
when(queryMetadataHolder.getPushQueryMetadata()).thenReturn(Optional.of(transientQueryMetadata));
final EndpointResponse response = testResource.streamQuery(securityContext, new KsqlRequest(queryString, requestStreamsProperties, Collections.emptyMap(), null), new CompletableFuture<>(), Optional.empty(), new MetricsCallbackHolder(), context);
// Pipe the streamed response through a 1-byte buffer so the writer thread is
// forced to stay in lock-step with the reader below.
final PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
final PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
final StreamingOutput responseStream = (StreamingOutput) response.getEntity();
// Writer thread: drives the endpoint's StreamingOutput into the pipe.
final Thread queryWriterThread = new Thread(() -> {
try {
responseStream.write(responseOutputStream);
} catch (final EOFException exception) {
// It's fine
} catch (final IOException exception) {
throw new RuntimeException(exception);
}
}, "Query Writer");
queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
queryWriterThread.start();
final Scanner responseScanner = new Scanner(responseInputStream, "UTF-8");
final ObjectMapper objectMapper = ApiJsonMapper.INSTANCE.get();
for (int i = 0; i != NUM_ROWS; i++) {
if (!responseScanner.hasNextLine()) {
throw new Exception("Response input stream failed to have expected line available");
}
final String responseLine = responseScanner.nextLine();
// Strip the JSON-array framing ("[", trailing "," or "]") the endpoint emits
// around each streamed row so the remainder parses as a single JSON object.
String jsonLine = StringUtils.stripStart(responseLine, "[");
jsonLine = StringUtils.stripEnd(jsonLine, ",");
jsonLine = StringUtils.stripEnd(jsonLine, "]");
if (jsonLine.isEmpty()) {
// Framing-only line; retry this iteration without consuming a row.
i--;
continue;
}
if (i == 0) {
// Header:
assertThat(jsonLine, is("{\"header\":{\"queryId\":\"queryId\",\"schema\":\"`f1` INTEGER\"}}"));
continue;
}
// Each subsequent line must match the next row the producer recorded.
final GenericRow expectedRow;
synchronized (writtenRows) {
expectedRow = writtenRows.poll();
}
final DataRow testRow = objectMapper.readValue(jsonLine, StreamedRow.class).getRow().get();
assertThat(testRow.getColumns(), is(expectedRow.values()));
}
// Closing the pipe makes the writer see EOF and finish; then tear down the
// producer, which is expected to be blocked in put() and to exit on interrupt.
responseOutputStream.close();
queryWriterThread.join();
rowQueuePopulatorThread.interrupt();
rowQueuePopulatorThread.join();
// Definitely want to make sure that the Kafka Streams instance has been closed and cleaned up
verify(mockKafkaStreams).start();
// called on init and when setting uncaught exception handler manually
verify(mockKafkaStreams, times(2)).setUncaughtExceptionHandler(any(StreamsUncaughtExceptionHandler.class));
verify(mockKafkaStreams).cleanUp();
verify(mockKafkaStreams).close(Duration.ofMillis(closeTimeout));
// If one of the other threads has somehow managed to throw an exception without breaking things up until this
// point, we throw that exception now in the main thread and cause the test to fail
final Throwable exception = threadException.get();
if (exception != null) {
throw exception;
}
}
Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.
Example from class TerminateQueryExecutorTest, method shouldTerminateTransientQuery:
@Test
public void shouldTerminateTransientQuery() {
  // Given: a TERMINATE statement targeting a running transient query.
  // Note: `engine` here resolves to the test fixture's engine field; the mock
  // engine below is deliberately named `mockEngine` to avoid shadowing it.
  final ConfiguredStatement<TerminateQuery> terminateTransient =
      (ConfiguredStatement<TerminateQuery>) engine.configure("TERMINATE TRANSIENT_QUERY;");
  final TransientQueryMetadata transientQueryMetadata =
      givenTransientQuery("TRANSIENT_QUERY", RUNNING_QUERY_STATE);
  final QueryId transientQueryId = transientQueryMetadata.getQueryId();
  final KsqlEngine mockEngine = mock(KsqlEngine.class);
  when(mockEngine.getQuery(transientQueryId)).thenReturn(Optional.of(transientQueryMetadata));

  // When: the terminate executor runs against the mocked engine.
  final Optional<KsqlEntity> ksqlEntity = CUSTOM_EXECUTORS.terminateQuery()
      .execute(
          terminateTransient,
          mock(SessionProperties.class),
          mockEngine,
          this.engine.getServiceContext())
      .getEntity();

  // Then: a successful TerminateQueryEntity is returned for that query id.
  assertThat(
      ksqlEntity,
      is(Optional.of(new TerminateQueryEntity(
          terminateTransient.getStatementText(), transientQueryId.toString(), true))));
}
Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.
Example from class KsqlEngine, method createStreamPullQuery:
/**
 * Executes a pull query against a stream by running a bounded transient query
 * from the earliest offset up to the input topics' current end offsets.
 *
 * @param serviceContext provides the admin client used to look up end offsets
 * @param analysis the analyzed query, used to determine the input topics
 * @param statementOrig the configured pull-query statement
 * @param excludeTombstones whether tombstone rows are filtered from the results
 * @return the transient query plus the end offsets that bound it
 * @throws KsqlStatementException if stream pull queries are disabled by config
 */
public StreamPullQueryMetadata createStreamPullQuery(
    final ServiceContext serviceContext,
    final ImmutableAnalysis analysis,
    final ConfiguredStatement<Query> statementOrig,
    final boolean excludeTombstones) {
  // Reject outright when the feature flag is off.
  if (!statementOrig.getSessionConfig().getConfig(true)
      .getBoolean(KsqlConfig.KSQL_QUERY_STREAM_PULL_QUERY_ENABLED)) {
    throw new KsqlStatementException(
        "Pull queries on streams are disabled. To create a push query on the stream,"
            + " add EMIT CHANGES to the end. To enable pull queries on streams, set"
            + " the " + KsqlConfig.KSQL_QUERY_STREAM_PULL_QUERY_ENABLED
            + " config to 'true'.",
        statementOrig.getStatementText());
  }

  // Stream pull query overrides.
  final Map<String, Object> overrides =
      new HashMap<>(statementOrig.getSessionConfig().getOverrides());
  // Starting from earliest is semantically necessary.
  overrides.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  // Using a single thread keeps these queries as lightweight as possible, since we are
  // not counting them against the transient query limit.
  overrides.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
  // There's no point in EOS, since this query only produces side effects.
  overrides.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.AT_LEAST_ONCE);

  final ConfiguredStatement<Query> statement = statementOrig.withConfigOverrides(overrides);

  // The current end offsets bound the query: it terminates once they are reached.
  final ImmutableMap<TopicPartition, Long> endOffsets =
      getQueryInputEndOffsets(analysis, serviceContext.getAdminClient());

  final TransientQueryMetadata transientQueryMetadata =
      EngineExecutor.create(primaryContext, serviceContext, statement.getSessionConfig())
          .executeStreamPullQuery(statement, excludeTombstones, endOffsets);

  QueryLogger.info(
      "Streaming stream pull query results '{}' from earliest to " + endOffsets,
      statement.getStatementText());

  return new StreamPullQueryMetadata(transientQueryMetadata, endOffsets);
}
Use of io.confluent.ksql.util.TransientQueryMetadata in the ksql project by confluentinc.
Example from class QueryRegistryImplTest, method givenStreamPull:
/**
 * Registers a mock stream pull query with the given id on the registry.
 *
 * <p>The query builder is stubbed to hand back an initialized mock
 * {@link TransientQueryMetadata}, which is returned for further stubbing
 * or verification by the caller.
 */
private TransientQueryMetadata givenStreamPull(final QueryRegistry registry, final String id) {
  final QueryId queryId = new QueryId(id);

  final TransientQueryMetadata streamPullQuery = mock(TransientQueryMetadata.class);
  when(streamPullQuery.getQueryId()).thenReturn(queryId);
  when(streamPullQuery.isInitialized()).thenReturn(true);

  // Any transient-query build request yields our mock.
  when(queryBuilder.buildTransientQuery(
      any(), any(), any(), any(), any(), any(), any(), any(),
      anyBoolean(), any(), any(), any(), any()))
      .thenReturn(streamPullQuery);

  registry.createStreamPullQuery(
      config,
      serviceContext,
      logContext,
      metaStore,
      "sql",
      queryId,
      ImmutableSet.of(SourceName.of("some-source")),
      mock(ExecutionStep.class),
      "plan-summary",
      mock(LogicalSchema.class),
      OptionalInt.of(123),
      Optional.empty(),
      false,
      ImmutableMap.<TopicPartition, Long>builder().build());

  return streamPullQuery;
}
Aggregations