Example usage of io.confluent.ksql.parser.DefaultKsqlParser in the ksql project by confluentinc: the handleRunScript method of the SqlTestReader class.
/**
 * Handles a {@code RUN SCRIPT '<file>'} test statement: loads the referenced
 * script (from the classpath if present, otherwise from the file system),
 * parses its contents, queues the parsed statements, and returns the first.
 *
 * @return the first parsed statement of the script, wrapped as a TestStatement
 * @throws IllegalArgumentException if the script cannot be read or is empty
 * @throws ParseFailedException if the script contents do not parse
 */
private TestStatement handleRunScript() {
  final String script = ParserUtil.unquote(testStatement.runScript().STRING().getText(), "'");
  final List<String> lines;
  // Look the resource up once (the original called getResource() twice), and
  // resolve it via toURI() rather than getPath(): getPath() returns a
  // percent-encoded string, which breaks for locations containing spaces or
  // other characters that URL-encode.
  final java.net.URL resource = getClass().getResource(script);
  try {
    lines = resource != null
        ? Files.readAllLines(Paths.get(resource.toURI()))
        : Files.readAllLines(Paths.get(script));
  } catch (final IOException | java.net.URISyntaxException e) {
    throw new IllegalArgumentException("Could not read " + script, e);
  }
  try {
    cachedRunScript.addAll(new DefaultKsqlParser().parse(String.join("\n", lines)));
  } catch (final ParseFailedException e) {
    // Re-wrap so the failure points at the RUN SCRIPT statement, not the script body.
    throw new ParseFailedException("Failed to parse contents of RUN SCRIPT", "RUN SCRIPT '" + script + "';", e);
  }
  if (cachedRunScript.isEmpty()) {
    throw new IllegalArgumentException("Empty run script: " + script);
  }
  // Return the first statement now; the remainder stay queued in cachedRunScript.
  return TestStatement.of(cachedRunScript.removeFirst());
}
Example usage of io.confluent.ksql.parser.DefaultKsqlParser in the ksql project by confluentinc: the getSqlStatements method of the KsqlTestingTool class.
/**
 * Reads the query file at {@code queryFilePath} as UTF-8, parses it, and
 * returns the text of each individual SQL statement it contains.
 *
 * @param queryFilePath path of the file holding the SQL statements
 * @return the statement texts, in file order
 * @throws KsqlException if the file cannot be read
 */
private static List<String> getSqlStatements(final Path queryFilePath) {
  try {
    final String fileContents = new String(Files.readAllBytes(queryFilePath), UTF_8);
    // Parse the whole file in one pass, then extract each statement's text.
    return new DefaultKsqlParser()
        .parse(fileContents)
        .stream()
        .map(ParsedStatement::getStatementText)
        .collect(Collectors.toList());
  } catch (final IOException e) {
    throw new KsqlException(String.format("Could not read the query file: %s. Details: %s", queryFilePath, e.getMessage()), e);
  }
}
Example usage of io.confluent.ksql.parser.DefaultKsqlParser in the ksql project by confluentinc: the givenQuery method of the DataSourceExtractorTest class.
/**
 * Parses {@code sql}, asserts it contains exactly one statement, and builds
 * the corresponding AST node against the test meta store.
 */
private static AstNode givenQuery(final String sql) {
  final List<ParsedStatement> parsed = new DefaultKsqlParser().parse(sql);
  assertThat(parsed, hasSize(1));
  final ParsedStatement onlyStatement = parsed.get(0);
  return new AstBuilder(META_STORE).buildStatement(onlyStatement.getStatement());
}
Example usage of io.confluent.ksql.parser.DefaultKsqlParser in the ksql project by confluentinc: the givenQuery method of the AstSanitizerTest class.
/**
 * Parses {@code sql} into a single statement and returns its AST built
 * against the test meta store; fails the test if the statement count differs.
 */
private static Statement givenQuery(final String sql) {
  final AstBuilder builder = new AstBuilder(META_STORE);
  final List<ParsedStatement> result = new DefaultKsqlParser().parse(sql);
  assertThat(result, hasSize(1));
  return builder.buildStatement(result.get(0).getStatement());
}
Example usage of io.confluent.ksql.parser.DefaultKsqlParser in the ksql project by confluentinc: the createTopicFromStatement method of the TestCaseBuilderUtil class.
/**
 * Parses {@code sql} and, when it is a single CREATE STREAM/TABLE statement,
 * derives the Topic it declares (kafka topic name, partitions, replicas,
 * serde features, and optional key/value schemas). Returns {@code null} when
 * the SQL declares no source or fails to parse — parse failures here are
 * tolerated because they are detected and reported by a later stage.
 */
private static Topic createTopicFromStatement(final String sql, final MutableMetaStore metaStore, final KsqlConfig ksqlConfig) {
final KsqlParser parser = new DefaultKsqlParser();
// Maps a prepared, format-injected CREATE statement to the Topic it declares.
final Function<ConfiguredStatement<?>, Topic> extractTopic = (ConfiguredStatement<?> stmt) -> {
final CreateSource statement = (CreateSource) stmt.getStatement();
final CreateSourceProperties props = statement.getProperties();
final LogicalSchema logicalSchema = statement.getElements().toLogicalSchema();
final FormatInfo keyFormatInfo = SourcePropertiesUtil.getKeyFormat(props, statement.getName());
final Format keyFormat = FormatFactory.fromName(keyFormatInfo.getFormat());
final SerdeFeatures keySerdeFeats = buildKeyFeatures(keyFormat, logicalSchema);
// A parsed key/value schema is only built for formats that support schema
// inference; other formats carry no registered schema.
final Optional<ParsedSchema> keySchema = keyFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE) ? buildSchema(sql, logicalSchema.key(), keyFormatInfo, keyFormat, keySerdeFeats) : Optional.empty();
final FormatInfo valFormatInfo = SourcePropertiesUtil.getValueFormat(props);
final Format valFormat = FormatFactory.fromName(valFormatInfo.getFormat());
final SerdeFeatures valSerdeFeats = buildValueFeatures(ksqlConfig, props, valFormat, logicalSchema);
final Optional<ParsedSchema> valueSchema = valFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE) ? buildSchema(sql, logicalSchema.value(), valFormatInfo, valFormat, valSerdeFeats) : Optional.empty();
// Fall back to the framework defaults when the WITH clause omits
// PARTITIONS / REPLICAS.
final int partitions = props.getPartitions().orElse(Topic.DEFAULT_PARTITIONS);
final short rf = props.getReplicas().orElse(Topic.DEFAULT_RF);
return new Topic(props.getKafkaTopic(), partitions, rf, keySchema, valueSchema, keySerdeFeats, valSerdeFeats);
};
try {
final List<ParsedStatement> parsed = parser.parse(sql);
if (parsed.size() > 1) {
throw new IllegalArgumentException("SQL contains more than one statement: " + sql);
}
final List<Topic> topics = new ArrayList<>();
for (ParsedStatement stmt : parsed) {
// in order to extract the topics, we may need to also register type statements
if (stmt.getStatement().statement() instanceof SqlBaseParser.RegisterTypeContext) {
final PreparedStatement<?> prepare = parser.prepare(stmt, metaStore);
registerType(prepare, metaStore);
}
// Only CREATE STREAM / CREATE TABLE statements declare a topic.
if (isCsOrCT(stmt)) {
final PreparedStatement<?> prepare = parser.prepare(stmt, metaStore);
final ConfiguredStatement<?> configured = ConfiguredStatement.of(prepare, SessionConfig.of(ksqlConfig, Collections.emptyMap()));
// Inject default key/value formats before extracting, so the extractor
// always sees explicit format info.
final ConfiguredStatement<?> withFormats = new DefaultFormatInjector().inject(configured);
topics.add(extractTopic.apply(withFormats));
}
}
// At most one statement was parsed, so topics holds zero or one entry.
return topics.isEmpty() ? null : topics.get(0);
} catch (final Exception e) {
// Statement won't parse: this will be detected/handled later.
System.out.println("Error parsing statement (which may be expected): " + sql);
e.printStackTrace(System.out);
return null;
}
}
Aggregations