Use of io.confluent.ksql.parser.KsqlParser.PreparedStatement in project ksql by confluentinc.
Class KsqlEngineTest, method shouldHandleMultipleStatements.
@SuppressWarnings("unchecked")
@Test
public void shouldHandleMultipleStatements() {
  // Given:
  final String sql = ""
      + "-- single line comment\n"
      + "/*\n Multi-line comment\n*/\n"
      + "CREATE STREAM S0 (a INT, b VARCHAR) "
      + " WITH (kafka_topic='s0_topic', value_format='DELIMITED', key_format='KAFKA');\n\n"
      + "CREATE TABLE T1 (f0 BIGINT PRIMARY KEY, f1 DOUBLE) "
      + " WITH (kafka_topic='t1_topic', value_format='JSON', key_format='KAFKA');\n\n"
      + "CREATE STREAM S1 AS SELECT * FROM S0;\n\n"
      + "CREATE STREAM S2 AS SELECT * FROM S0;\n\n"
      + "DROP TABLE T1;";
  givenTopicsExist("s0_topic", "t1_topic");
  final List<QueryMetadata> queries = new ArrayList<>();
  // When:
  final List<PreparedStatement<?>> preparedStatements = ksqlEngine.parse(sql).stream()
      .map(stmt -> {
        final PreparedStatement<?> prepared = ksqlEngine.prepare(stmt);
        final ExecuteResult result = ksqlEngine.execute(
            serviceContext,
            ConfiguredStatement.of(prepared, SessionConfig.of(ksqlConfig, new HashMap<>())));
        result.getQuery().ifPresent(queries::add);
        return prepared;
      })
      .collect(Collectors.toList());
  // Then:
  final List<?> statements = preparedStatements.stream()
      .map(PreparedStatement::getStatement)
      .collect(Collectors.toList());
  assertThat(statements, contains(
      instanceOf(CreateStream.class), instanceOf(CreateTable.class),
      instanceOf(CreateStreamAsSelect.class), instanceOf(CreateStreamAsSelect.class),
      instanceOf(DropTable.class)));
  assertThat(queries, hasSize(2));
}
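For reference, the snippet below is a minimal sketch (not part of the ksql test sources) of the same parse, prepare, and execute flow for a single statement. It assumes the same fixtures the test above uses (ksqlEngine, serviceContext, ksqlConfig); the stream name S3 and topic s3_topic are placeholders, and, as in the test, the backing topic would have to exist before execution.
// Illustrative sketch only: execute one statement with the fixtures from the test above.
final String singleSql =
    "CREATE STREAM S3 (id INT, name VARCHAR) "
        + "WITH (kafka_topic='s3_topic', value_format='JSON', key_format='KAFKA');";
final ParsedStatement parsedStatement = ksqlEngine.parse(singleSql).get(0);
final PreparedStatement<?> preparedStatement = ksqlEngine.prepare(parsedStatement);
final ExecuteResult executeResult = ksqlEngine.execute(
    serviceContext,
    ConfiguredStatement.of(preparedStatement, SessionConfig.of(ksqlConfig, new HashMap<>())));
// A plain CREATE STREAM is DDL, so no query is returned; a CSAS/CTAS statement
// would expose the started query via executeResult.getQuery().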
Use of io.confluent.ksql.parser.KsqlParser.PreparedStatement in project ksql by confluentinc.
Class KsqlTesterTest, method execute.
@SuppressWarnings("unchecked")
private void execute(final ParsedStatement parsedStatement) {
  final PreparedStatement<?> engineStatement = engine.prepare(parsedStatement);
  final ConfiguredStatement<?> configured =
      ConfiguredStatement.of(engineStatement, SessionConfig.of(config, overrides));
  createTopics(engineStatement);

  if (engineStatement.getStatement() instanceof InsertValues) {
    pipeInput((ConfiguredStatement<InsertValues>) configured);
    return;
  } else if (engineStatement.getStatement() instanceof SetProperty) {
    PropertyOverrider.set((ConfiguredStatement<SetProperty>) configured, overrides);
    return;
  } else if (engineStatement.getStatement() instanceof UnsetProperty) {
    PropertyOverrider.unset((ConfiguredStatement<UnsetProperty>) configured, overrides);
    return;
  }

  final ConfiguredStatement<?> injected = formatInjector.inject(configured);
  final ExecuteResult result = engine.execute(serviceContext, injected);
  // DDL statements do not start a query, so there is nothing further to run.
  if (!result.getQuery().isPresent()) {
    return;
  }

  final PersistentQueryMetadata query = (PersistentQueryMetadata) result.getQuery().get();
  final Topology topology = query.getTopology();
  final Properties properties = new Properties();
  properties.putAll(query.getStreamsProperties());
  properties.put(StreamsConfig.STATE_DIR_CONFIG, tmpFolder.getRoot().getAbsolutePath());

  final TopologyTestDriver driver = new TopologyTestDriver(topology, properties);
  final List<TopicInfo> inputTopics = query.getSourceNames().stream()
      .map(sn -> engine.getMetaStore().getSource(sn))
      .map(ds -> new TopicInfo(ds.getKafkaTopicName(), keySerde(ds), valueSerde(ds)))
      .collect(Collectors.toList());
  // The sink may be absent for source tables. Once source-table query execution is
  // supported, a condition will be needed here to skip creating the output topic info.
  final DataSource output = engine.getMetaStore().getSource(query.getSinkName().get());
  final TopicInfo outputInfo =
      new TopicInfo(output.getKafkaTopicName(), keySerde(output), valueSerde(output));

  driverPipeline.addDriver(driver, inputTopics, outputInfo);
  drivers.put(query.getQueryId(), new DriverAndProperties(driver, properties));
}
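For comparison, here is a minimal sketch of the dispatch pattern the method above relies on: the underlying AST node of a PreparedStatement is obtained with getStatement() and branched on by type. The classify helper is hypothetical, added only to illustrate the pattern; it is not part of the ksql sources.
// Hypothetical helper for illustration only; mirrors the instanceof dispatch above.
private static String classify(final PreparedStatement<?> statement) {
  final Object ast = statement.getStatement();
  if (ast instanceof InsertValues) {
    return "insert values (piped into the topology test driver)";
  }
  if (ast instanceof SetProperty || ast instanceof UnsetProperty) {
    return "session property change (applied to the overrides map)";
  }
  return "DDL or persistent query (handed to engine.execute)";
}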