Example usage of io.confluent.ksql.parser.KsqlParser.PreparedStatement in the ksql project by confluentinc.
From the class StreamedQueryResource, method handleStatement:
/**
 * Routes a single prepared statement to the appropriate streaming handler.
 *
 * <p>Supported statement types are {@code Query} (streamed query results) and
 * {@code PrintTopic}; any other type yields a bad-request response. Authorization
 * and config-override deny-list validation run before dispatch.
 *
 * @param securityContext security context of the calling user
 * @param request the original KSQL request (source of config overrides and request properties)
 * @param statement the parsed and prepared statement to execute
 * @param connectionClosedFuture completes when the client connection closes, so
 *     streaming handlers can stop early
 * @param isInternalRequest whether the request originated from within the cluster, if known
 * @param metricsCallbackHolder holder used to report per-request metrics
 * @param context the Vert.x context for this request
 * @return the endpoint response for the statement, or an error response
 */
@SuppressWarnings("unchecked")
private EndpointResponse handleStatement(
    final KsqlSecurityContext securityContext,
    final KsqlRequest request,
    final PreparedStatement<?> statement,
    final CompletableFuture<Void> connectionClosedFuture,
    final Optional<Boolean> isInternalRequest,
    final MetricsCallbackHolder metricsCallbackHolder,
    final Context context) {
  try {
    // Fail fast if the caller is not authorized to run this statement.
    authorizationValidator.ifPresent(validator ->
        validator.checkAuthorization(
            securityContext, ksqlEngine.getMetaStore(), statement.getStatement()));

    // Read the overrides once and reuse the local below (previously the getter
    // was re-invoked in two places after this assignment).
    final Map<String, Object> configProperties = request.getConfigOverrides();
    denyListPropertyValidator.validateAll(configProperties);

    if (statement.getStatement() instanceof Query) {
      // Hand off to the next handler when this query should be served by the
      // newer query-stream endpoint instead.
      if (shouldMigrateToQueryStream(configProperties)) {
        return EndpointResponse.ok(new NextHandlerOutput());
      }
      final QueryMetadataHolder queryMetadataHolder = queryExecutor.handleStatement(
          securityContext.getServiceContext(),
          configProperties,
          request.getRequestProperties(),
          statement,
          isInternalRequest,
          metricsCallbackHolder,
          context,
          false);
      return handleQuery(
          (PreparedStatement<Query>) statement, connectionClosedFuture, queryMetadataHolder);
    } else if (statement.getStatement() instanceof PrintTopic) {
      return handlePrintTopic(
          securityContext.getServiceContext(),
          configProperties,
          (PreparedStatement<PrintTopic>) statement,
          connectionClosedFuture);
    } else {
      return Errors.badRequest(String.format(
          "Statement type `%s' not supported for this resource",
          statement.getClass().getName()));
    }
  } catch (final TopicAuthorizationException e) {
    // Kafka-level ACL denial gets a dedicated access-denied response.
    return errorHandler.accessDeniedFromKafkaResponse(e);
  } catch (final KsqlStatementException e) {
    return Errors.badStatement(e.getRawMessage(), e.getSqlStatement());
  } catch (final KsqlException e) {
    return errorHandler.generateResponse(e, Errors.badRequest(e));
  }
}
Example usage of io.confluent.ksql.parser.KsqlParser.PreparedStatement in the ksql project by confluentinc.
From the class DefaultSchemaInjector, method buildElements:
/**
 * Assembles the complete list of table elements for a CREATE statement:
 * key columns first, then value columns. For each side, columns come from the
 * resolved schema when one is present; otherwise the columns declared on the
 * statement itself are used.
 *
 * @param preparedStatement the configured CREATE statement being injected
 * @param keySchema schema resolved for the key, if any
 * @param valueSchema schema resolved for the value, if any
 * @return the combined key and value table elements
 */
private static TableElements buildElements(
    final ConfiguredStatement<CreateSource> preparedStatement,
    final Optional<SchemaAndId> keySchema,
    final Optional<SchemaAndId> valueSchema) {
  final List<TableElement> combined = new ArrayList<>();

  if (!keySchema.isPresent()) {
    // No resolved key schema: fall back to the key columns on the statement.
    getKeyColumns(preparedStatement).forEach(combined::add);
  } else {
    // All key columns derived from the resolved schema share the same constraints.
    final ColumnConstraints keyConstraints =
        getKeyConstraints(preparedStatement.getStatement());
    keySchema.get().columns.forEach(column ->
        combined.add(new TableElement(column.name(), new Type(column.type()), keyConstraints)));
  }

  if (!valueSchema.isPresent()) {
    // No resolved value schema: fall back to the value columns on the statement.
    getValueColumns(preparedStatement).forEach(combined::add);
  } else {
    valueSchema.get().columns.forEach(column ->
        combined.add(new TableElement(column.name(), new Type(column.type()))));
  }

  return TableElements.of(combined);
}
Example usage of io.confluent.ksql.parser.KsqlParser.PreparedStatement in the ksql project by confluentinc.
From the class DistributingExecutorTest, method shouldThrowExceptionWhenInsertIntoReadOnlyTopic:
@Test
public void shouldThrowExceptionWhenInsertIntoReadOnlyTopic() {
  // Given: an INSERT INTO whose target source is backed by the internal
  // command topic, which user statements must never write to.
  final String readOnlyTopic = "_confluent-ksql-default__command-topic";
  final PreparedStatement<Statement> insertStatement =
      PreparedStatement.of("", new InsertInto(SourceName.of("s1"), mock(Query.class)));
  final ConfiguredStatement<Statement> configuredInsert =
      ConfiguredStatement.of(insertStatement, SessionConfig.of(KSQL_CONFIG, ImmutableMap.of()));
  final DataSource source = mock(DataSource.class);
  doReturn(source).when(metaStore).getSource(SourceName.of("s1"));
  when(source.getKafkaTopicName()).thenReturn(readOnlyTopic);

  // When:
  final Exception e = assertThrows(
      KsqlException.class,
      () -> distributor.execute(
          configuredInsert, executionContext, mock(KsqlSecurityContext.class)));

  // Then:
  assertThat(e.getMessage(),
      containsString("Cannot insert into read-only topic: " + readOnlyTopic));
}
Example usage of io.confluent.ksql.parser.KsqlParser.PreparedStatement in the ksql project by confluentinc.
From the class DistributingExecutorTest, method shouldThrowExceptionWhenInsertIntoProcessingLogTopic:
@Test
public void shouldThrowExceptionWhenInsertIntoProcessingLogTopic() {
  // Given: an INSERT INTO whose target source is backed by the processing-log
  // topic, which is read-only from the perspective of user statements.
  final String readOnlyTopic = "default_ksql_processing_log";
  final PreparedStatement<Statement> insertStatement =
      PreparedStatement.of("", new InsertInto(SourceName.of("s1"), mock(Query.class)));
  final ConfiguredStatement<Statement> configuredInsert =
      ConfiguredStatement.of(insertStatement, SessionConfig.of(KSQL_CONFIG, ImmutableMap.of()));
  final DataSource source = mock(DataSource.class);
  doReturn(source).when(metaStore).getSource(SourceName.of("s1"));
  when(source.getKafkaTopicName()).thenReturn(readOnlyTopic);

  // When:
  final Exception e = assertThrows(
      KsqlException.class,
      () -> distributor.execute(
          configuredInsert, executionContext, mock(KsqlSecurityContext.class)));

  // Then:
  assertThat(e.getMessage(),
      containsString("Cannot insert into read-only topic: " + readOnlyTopic));
}
Example usage of io.confluent.ksql.parser.KsqlParser.PreparedStatement in the ksql project by confluentinc.
From the class KsqlParserTest, method shouldParseSimpleComment:
@Test
public void shouldParseSimpleComment() {
  // A line comment preceding a statement must be skipped by the parser,
  // leaving exactly one parsed statement.
  final String sql = "--this is a comment.\n" + "SHOW STREAMS;";

  final List<PreparedStatement<?>> parsed = KsqlParserTestUtil.buildAst(sql, metaStore);

  assertThat(parsed, hasSize(1));
  assertThat(parsed.get(0).getStatement(), is(instanceOf(ListStreams.class)));
}
Aggregations