Use of io.confluent.ksql.parser.KsqlParser.ParsedStatement in project ksql by confluentinc.
The class AstSanitizerTest, method givenQuery.

private static Statement givenQuery(final String sql) {
  final List<ParsedStatement> statements = new DefaultKsqlParser().parse(sql);
  assertThat(statements, hasSize(1));
  return new AstBuilder(META_STORE).buildStatement(statements.get(0).getStatement());
}
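A hypothetical caller of this helper might look like the following sketch; the SQL text and the expected AST type are illustrative assumptions, not taken from AstSanitizerTest itself.

@Test
public void shouldBuildSingleStatement() {
  // When: parse and build a single statement via the helper above
  // (assumes a source TEST1 is registered in META_STORE).
  final Statement result = givenQuery("SELECT * FROM TEST1;");

  // Then: a bare SELECT should produce a Query AST node.
  assertThat(result, is(instanceOf(Query.class)));
}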
Use of io.confluent.ksql.parser.KsqlParser.ParsedStatement in project ksql by confluentinc.
The class KsqlEngineTest, method shouldFailDDLStatementIfTopicDoesNotExist.

@Test
public void shouldFailDDLStatementIfTopicDoesNotExist() {
  // Given:
  final ParsedStatement stmt = parse(
      "CREATE STREAM S1_NOTEXIST (COL1 BIGINT, COL2 VARCHAR) "
          + "WITH (KAFKA_TOPIC = 'S1_NOTEXIST', VALUE_FORMAT = 'JSON', KEY_FORMAT = 'KAFKA');")
      .get(0);
  final PreparedStatement<?> prepared = prepare(stmt);

  // When:
  final Exception e = assertThrows(
      KsqlStatementException.class,
      () -> sandbox.execute(
          sandboxServiceContext,
          ConfiguredStatement.of(prepared, SessionConfig.of(ksqlConfig, Collections.emptyMap()))));

  // Then:
  assertThat(e.getMessage(), containsString("Kafka topic does not exist: S1_NOTEXIST"));
}
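The test calls parse(...) and prepare(...) helpers that are not shown on this page. A minimal sketch of plausible shapes for them, assuming they delegate to the ksqlEngine field used in the other KsqlEngineTest examples:

// Sketch only: the delegation to ksqlEngine is an assumption based on
// the engine API used elsewhere on this page, not the actual helpers.
private List<ParsedStatement> parse(final String sql) {
  return ksqlEngine.parse(sql);
}

private PreparedStatement<?> prepare(final ParsedStatement stmt) {
  return ksqlEngine.prepare(stmt);
}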
Use of io.confluent.ksql.parser.KsqlParser.ParsedStatement in project ksql by confluentinc.
The class KsqlEngineTest, method shouldThrowWhenPreparingUnknownSource.

@Test
public void shouldThrowWhenPreparingUnknownSource() {
  // Given:
  final ParsedStatement stmt = ksqlEngine
      .parse("CREATE STREAM FOO AS SELECT * FROM UNKNOWN;")
      .get(0);

  // When:
  final KsqlStatementException e = assertThrows(
      KsqlStatementException.class,
      () -> ksqlEngine.prepare(stmt));

  // Then:
  assertThat(e.getMessage(), containsString("UNKNOWN does not exist."));
  assertThat(e, statementText(is("CREATE STREAM FOO AS SELECT * FROM UNKNOWN;")));
}
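This test illustrates the two-phase engine API: parse(...) performs syntactic analysis only, while prepare(...) resolves source names against the metastore, which is where the unknown source is caught. A minimal happy-path sketch of the same flow, assuming a stream ORDERS is already registered:

// Happy-path sketch; the ORDERS source is an assumption for illustration.
final ParsedStatement parsed = ksqlEngine
    .parse("CREATE STREAM BAR AS SELECT * FROM ORDERS;")
    .get(0);
// Unlike the negative test above, prepare() succeeds here because ORDERS
// resolves in the metastore.
final PreparedStatement<?> prepared = ksqlEngine.prepare(parsed);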
Use of io.confluent.ksql.parser.KsqlParser.ParsedStatement in project ksql by confluentinc.
The class KsqlEngineTest, method shouldExecuteInsertIntoStreamOnSandBox.

@Test
public void shouldExecuteInsertIntoStreamOnSandBox() {
  // Given:
  final List<ParsedStatement> statements = parse(
      "create stream bar as select * from orders;"
          + "insert into bar select * from orders;");
  givenStatementAlreadyExecuted(statements.get(0));

  // When:
  final ExecuteResult result = sandbox.execute(
      sandboxServiceContext,
      ConfiguredStatement.of(
          sandbox.prepare(statements.get(1)),
          SessionConfig.of(ksqlConfig, Collections.emptyMap())));

  // Then:
  assertThat(result.getQuery(), is(not(Optional.empty())));
}
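givenStatementAlreadyExecuted(...) is another helper that is not shown here. One plausible implementation, assuming it runs the statement through the real engine so the sandbox later sees stream bar as existing (the serviceContext field is an assumption):

// Sketch only: field names and the execute call shape are assumptions,
// mirroring the sandbox.execute call in the test above.
private void givenStatementAlreadyExecuted(final ParsedStatement statement) {
  ksqlEngine.execute(
      serviceContext,
      ConfiguredStatement.of(
          ksqlEngine.prepare(statement),
          SessionConfig.of(ksqlConfig, Collections.emptyMap())));
}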
Use of io.confluent.ksql.parser.KsqlParser.ParsedStatement in project ksql by confluentinc.
The class TestCaseBuilderUtil, method createTopicFromStatement.

private static Topic createTopicFromStatement(
    final String sql,
    final MutableMetaStore metaStore,
    final KsqlConfig ksqlConfig
) {
  final KsqlParser parser = new DefaultKsqlParser();

  final Function<ConfiguredStatement<?>, Topic> extractTopic = (ConfiguredStatement<?> stmt) -> {
    final CreateSource statement = (CreateSource) stmt.getStatement();
    final CreateSourceProperties props = statement.getProperties();
    final LogicalSchema logicalSchema = statement.getElements().toLogicalSchema();

    final FormatInfo keyFormatInfo = SourcePropertiesUtil.getKeyFormat(props, statement.getName());
    final Format keyFormat = FormatFactory.fromName(keyFormatInfo.getFormat());
    final SerdeFeatures keySerdeFeats = buildKeyFeatures(keyFormat, logicalSchema);
    final Optional<ParsedSchema> keySchema =
        keyFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE)
            ? buildSchema(sql, logicalSchema.key(), keyFormatInfo, keyFormat, keySerdeFeats)
            : Optional.empty();

    final FormatInfo valFormatInfo = SourcePropertiesUtil.getValueFormat(props);
    final Format valFormat = FormatFactory.fromName(valFormatInfo.getFormat());
    final SerdeFeatures valSerdeFeats = buildValueFeatures(ksqlConfig, props, valFormat, logicalSchema);
    final Optional<ParsedSchema> valueSchema =
        valFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE)
            ? buildSchema(sql, logicalSchema.value(), valFormatInfo, valFormat, valSerdeFeats)
            : Optional.empty();

    final int partitions = props.getPartitions().orElse(Topic.DEFAULT_PARTITIONS);
    final short rf = props.getReplicas().orElse(Topic.DEFAULT_RF);

    return new Topic(props.getKafkaTopic(), partitions, rf, keySchema, valueSchema,
        keySerdeFeats, valSerdeFeats);
  };

  try {
    final List<ParsedStatement> parsed = parser.parse(sql);
    if (parsed.size() > 1) {
      throw new IllegalArgumentException("SQL contains more than one statement: " + sql);
    }

    final List<Topic> topics = new ArrayList<>();
    for (ParsedStatement stmt : parsed) {
      // in order to extract the topics, we may need to also register type statements
      if (stmt.getStatement().statement() instanceof SqlBaseParser.RegisterTypeContext) {
        final PreparedStatement<?> prepare = parser.prepare(stmt, metaStore);
        registerType(prepare, metaStore);
      }

      if (isCsOrCT(stmt)) {
        final PreparedStatement<?> prepare = parser.prepare(stmt, metaStore);
        final ConfiguredStatement<?> configured =
            ConfiguredStatement.of(prepare, SessionConfig.of(ksqlConfig, Collections.emptyMap()));
        final ConfiguredStatement<?> withFormats = new DefaultFormatInjector().inject(configured);
        topics.add(extractTopic.apply(withFormats));
      }
    }

    return topics.isEmpty() ? null : topics.get(0);
  } catch (final Exception e) {
    // Statement won't parse: this will be detected/handled later.
    System.out.println("Error parsing statement (which may be expected): " + sql);
    e.printStackTrace(System.out);
    return null;
  }
}
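A hypothetical call site for this utility; the SQL text and the surrounding handling are illustrative assumptions, not taken from TestCaseBuilderUtil:

// Illustrative only: the statement text and null handling are assumptions.
final Topic topic = createTopicFromStatement(
    "CREATE STREAM INPUT (ID BIGINT KEY, NAME STRING) "
        + "WITH (KAFKA_TOPIC = 'input', KEY_FORMAT = 'KAFKA', VALUE_FORMAT = 'JSON');",
    metaStore,
    ksqlConfig);
// A null result means the SQL failed to parse or was not a
// CREATE STREAM / CREATE TABLE statement.
if (topic != null) {
  System.out.println("Extracted topic: " + topic);
}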