Example usage of io.confluent.ksql.parser.KsqlParser.ParsedStatement in the ksql project by confluentinc, taken from the execute method of the KsqlTesterTest class.
// Prepares, configures, and executes one parsed statement in the test harness.
// INSERT VALUES and SET/UNSET PROPERTY statements are handled inline; any other
// statement is executed by the engine, and a resulting persistent query is
// wired up to an in-process TopologyTestDriver.
// The unchecked casts below are safe because each is guarded by the matching
// instanceof check on the prepared statement.
@SuppressWarnings("unchecked")
private void execute(final ParsedStatement parsedStatement) {
final PreparedStatement<?> engineStatement = engine.prepare(parsedStatement);
final ConfiguredStatement<?> configured = ConfiguredStatement.of(engineStatement, SessionConfig.of(config, overrides));
// Make sure the statement's topics exist before the statement runs.
createTopics(engineStatement);
if (engineStatement.getStatement() instanceof InsertValues) {
// INSERT VALUES: feed the rows straight into the driver pipeline; no engine execution.
pipeInput((ConfiguredStatement<InsertValues>) configured);
return;
} else if (engineStatement.getStatement() instanceof SetProperty) {
// SET: record the override in the mutable session properties.
PropertyOverrider.set((ConfiguredStatement<SetProperty>) configured, overrides);
return;
} else if (engineStatement.getStatement() instanceof UnsetProperty) {
// UNSET: remove the override again.
PropertyOverrider.unset((ConfiguredStatement<UnsetProperty>) configured, overrides);
return;
}
// Inject key/value formats before handing the statement to the engine.
final ConfiguredStatement<?> injected = formatInjector.inject(configured);
final ExecuteResult result = engine.execute(serviceContext, injected);
// is DDL statement — no query was started, so there is no driver to set up
if (!result.getQuery().isPresent()) {
return;
}
final PersistentQueryMetadata query = (PersistentQueryMetadata) result.getQuery().get();
final Topology topology = query.getTopology();
// Run the query's topology in-process, with state isolated in a temp folder.
final Properties properties = new Properties();
properties.putAll(query.getStreamsProperties());
properties.put(StreamsConfig.STATE_DIR_CONFIG, tmpFolder.getRoot().getAbsolutePath());
final TopologyTestDriver driver = new TopologyTestDriver(topology, properties);
// Resolve each source name to its backing Kafka topic plus key/value serdes.
final List<TopicInfo> inputTopics = query.getSourceNames().stream().map(sn -> engine.getMetaStore().getSource(sn)).map(ds -> new TopicInfo(ds.getKafkaTopicName(), keySerde(ds), valueSerde(ds))).collect(Collectors.toList());
// Sink may be Optional for source tables. Once source table query execution is supported, then
// we would need to have a condition to not create an output topic info
final DataSource output = engine.getMetaStore().getSource(query.getSinkName().get());
final TopicInfo outputInfo = new TopicInfo(output.getKafkaTopicName(), keySerde(output), valueSerde(output));
driverPipeline.addDriver(driver, inputTopics, outputInfo);
// Track the driver by query id so it can be closed/cleaned up later.
drivers.put(query.getQueryId(), new DriverAndProperties(driver, properties));
}
Example usage of io.confluent.ksql.parser.KsqlParser.ParsedStatement in the ksql project by confluentinc, taken from the parseExpression method of the ExpressionParseTestUtil class.
/**
 * Parses a single KSQL expression by embedding it in a minimal
 * {@code SELECT ... FROM test1;} statement and extracting the resulting
 * select-item expression.
 *
 * @param asText the expression text, e.g. {@code "1 + 2"}
 * @param metaStore the metastore used to resolve sources during preparation
 * @return the parsed {@link Expression}
 */
public static Expression parseExpression(final String asText, final MetaStore metaStore) {
  final KsqlParser parser = new DefaultKsqlParser();
  // Wrap the expression in a full query so the statement parser accepts it.
  final String ksql = String.format("SELECT %s FROM test1;", asText);
  final ParsedStatement parsedStatement = parser.parse(ksql).get(0);
  // Wildcard type instead of the raw PreparedStatement the original used.
  final PreparedStatement<?> preparedStatement = parser.prepare(parsedStatement, metaStore);
  // The single select item carries exactly the expression embedded above.
  final SingleColumn singleColumn =
      (SingleColumn) ((Query) preparedStatement.getStatement()).getSelect().getSelectItems().get(0);
  return singleColumn.getExpression();
}
Example usage of io.confluent.ksql.parser.KsqlParser.ParsedStatement in the ksql project by confluentinc, taken from the shouldUseCustomExecutor test of the RequestHandlerTest class.
@Test
public void shouldUseCustomExecutor() {
  // Given: a handler configured with a custom executor for CreateStream statements
  final KsqlEntity expectedEntity = mock(KsqlEntity.class);
  final StatementExecutor<CreateStream> createStreamExecutor =
      givenReturningExecutor(CreateStream.class, expectedEntity);
  givenRequestHandler(ImmutableMap.of(CreateStream.class, createStreamExecutor));

  // When: a CREATE STREAM statement is executed through the handler
  final List<ParsedStatement> parsed = KSQL_PARSER.parse(SOME_STREAM_SQL);
  final KsqlEntityList results = handler.execute(securityContext, parsed, sessionProperties);

  // Then: the custom executor handled the statement and produced the entity
  assertThat(results, contains(expectedEntity));
  verify(createStreamExecutor, times(1)).execute(
      argThat(is(configured(
          preparedStatement(instanceOf(CreateStream.class)),
          ImmutableMap.of(),
          ksqlConfig))),
      eq(sessionProperties),
      eq(ksqlEngine),
      eq(serviceContext));
}
Example usage of io.confluent.ksql.parser.KsqlParser.ParsedStatement in the ksql project by confluentinc, taken from the shouldDistributeProperties test of the RequestHandlerTest class.
@Test
public void shouldDistributeProperties() {
  // Given: no custom executors, and a session with one mutable scoped property
  givenRequestHandler(ImmutableMap.of());
  when(sessionProperties.getMutableScopedProperties())
      .thenReturn(ImmutableMap.of("x", "y"));

  // When: the statements are executed through the handler
  final List<ParsedStatement> parsed = KSQL_PARSER.parse(SOME_STREAM_SQL);
  final KsqlEntityList results = handler.execute(securityContext, parsed, sessionProperties);

  // Then: the session's scoped properties were forwarded to the distributor
  assertThat(results, contains(entity));
  verify(distributor, times(2)).execute(
      argThat(is(configured(
          preparedStatement(instanceOf(CreateStream.class)),
          ImmutableMap.of("x", "y"),
          ksqlConfig))),
      eq(ksqlEngine),
      eq(securityContext));
}
Example usage of io.confluent.ksql.parser.KsqlParser.ParsedStatement in the ksql project by confluentinc, taken from the shouldThrowIfNoValidatorAvailable test of the RequestValidatorTest class.
@Test
public void shouldThrowIfNoValidatorAvailable() {
  // Given: a statement type for which no validator is registered
  final List<ParsedStatement> parsed = givenParsed("EXPLAIN X;");

  // When: validation is attempted
  final Exception e = assertThrows(
      KsqlStatementException.class,
      () -> validator.validate(serviceContext, parsed, sessionProperties, "sql"));

  // Then: validation fails with a descriptive error
  assertThat(e.getMessage(), containsString("Do not know how to validate statement"));
}
Aggregations