Use of io.confluent.ksql.parser.tree.CreateSource in project ksql by confluentinc.
Example from class DefaultFormatInjector, method injectForCreateStatement.
/**
 * Injects default key/value formats into a CREATE statement when the user
 * did not specify them explicitly.
 *
 * @param original the configured CREATE statement to inspect
 * @return a rebuilt statement with formats filled in, or {@code Optional.empty()}
 *         when both formats were already present and no injection is needed
 */
private Optional<ConfiguredStatement<CreateSource>> injectForCreateStatement(
    final ConfiguredStatement<CreateSource> original) {
  final CreateSource createSource = original.getStatement();
  final CreateSourceProperties props = createSource.getProperties();

  final Optional<FormatInfo> explicitKeyFormat = props.getKeyFormat(createSource.getName());
  final Optional<FormatInfo> explicitValueFormat = props.getValueFormat();

  // Nothing to do when both formats are user-supplied.
  if (explicitKeyFormat.isPresent() && explicitValueFormat.isPresent()) {
    return Optional.empty();
  }

  final KsqlConfig config = getConfig(original);

  // Fall back to the configured defaults for whichever format is missing.
  final CreateSourceProperties withDefaults = props.withFormats(
      explicitKeyFormat.map(FormatInfo::getFormat).orElseGet(() -> getDefaultKeyFormat(config)),
      explicitValueFormat.map(FormatInfo::getFormat).orElseGet(() -> getDefaultValueFormat(config)));

  final CreateSource rewritten = createSource.copyWith(createSource.getElements(), withDefaults);
  final PreparedStatement<CreateSource> prepared = buildPreparedStatement(rewritten);
  return Optional.of(ConfiguredStatement.of(prepared, original.getSessionConfig()));
}
Use of io.confluent.ksql.parser.tree.CreateSource in project ksql by confluentinc.
Example from class KsqlParserTest, method testNegativeInWith.
@Test
public void testNegativeInWith() {
  // Given: a WITH clause containing a negative numeric property value.
  final String sql = "CREATE STREAM foozball (id VARCHAR) WITH "
      + "(kafka_topic='foozball', value_format='json', partitions=1, replicas=-1);";

  // When:
  final CreateSource stmt =
      (CreateSource) KsqlParserTestUtil.buildSingleAst(sql, metaStore).getStatement();

  // Then: the negative literal round-trips through the parser.
  assertThat(stmt.getProperties().getReplicas(), is(Optional.of((short) -1)));
}
Use of io.confluent.ksql.parser.tree.CreateSource in project ksql by confluentinc.
Example from class DefaultSchemaInjector, method addSchemaFields.
/**
 * Returns a copy of the CREATE statement with the resolved columns added and,
 * when schema ids were supplied explicitly, the raw schema names recorded in
 * the source properties.
 *
 * @param preparedStatement the original configured CREATE statement
 * @param keySchema the fetched key schema, if any
 * @param valueSchema the fetched value schema, if any
 * @return a rebuilt {@code CreateSource} carrying the new elements and properties
 */
private static CreateSource addSchemaFields(
    final ConfiguredStatement<CreateSource> preparedStatement,
    final Optional<SchemaAndId> keySchema,
    final Optional<SchemaAndId> valueSchema) {
  final TableElements elements = buildElements(preparedStatement, keySchema, valueSchema);
  final CreateSource statement = preparedStatement.getStatement();
  final CreateSourceProperties properties = statement.getProperties();

  // Only populate key and value schema names when schema ids are explicitly provided.
  final Optional<String> keySchemaName =
      properties.getKeySchemaId().isPresent() && keySchema.isPresent()
          ? Optional.ofNullable(keySchema.get().rawSchema.name())
          : Optional.empty();
  final Optional<String> valueSchemaName =
      properties.getValueSchemaId().isPresent() && valueSchema.isPresent()
          ? Optional.ofNullable(valueSchema.get().rawSchema.name())
          : Optional.empty();

  return statement.copyWith(
      elements, properties.withKeyValueSchemaName(keySchemaName, valueSchemaName));
}
Use of io.confluent.ksql.parser.tree.CreateSource in project ksql by confluentinc.
Example from class DefaultSchemaInjector, method buildElements.
/**
 * Assembles the column list for the statement: key columns first (taken from
 * the fetched key schema when present, otherwise from the statement itself),
 * followed by value columns resolved the same way.
 *
 * @param preparedStatement the configured CREATE statement being injected
 * @param keySchema the fetched key schema, if any
 * @param valueSchema the fetched value schema, if any
 * @return the combined key + value table elements
 */
private static TableElements buildElements(
    final ConfiguredStatement<CreateSource> preparedStatement,
    final Optional<SchemaAndId> keySchema,
    final Optional<SchemaAndId> valueSchema) {
  final List<TableElement> columns = new ArrayList<>();

  if (keySchema.isPresent()) {
    // Key columns fetched from the registry carry the statement's key constraints.
    final ColumnConstraints constraints = getKeyConstraints(preparedStatement.getStatement());
    keySchema.get().columns.forEach(col ->
        columns.add(new TableElement(col.name(), new Type(col.type()), constraints)));
  } else {
    getKeyColumns(preparedStatement).forEach(columns::add);
  }

  if (valueSchema.isPresent()) {
    valueSchema.get().columns.forEach(col ->
        columns.add(new TableElement(col.name(), new Type(col.type()))));
  } else {
    getValueColumns(preparedStatement).forEach(columns::add);
  }

  return TableElements.of(columns);
}
Use of io.confluent.ksql.parser.tree.CreateSource in project ksql by confluentinc.
Example from class TestCaseBuilderUtil, method createTopicFromStatement.
/**
 * Parses {@code sql} and, when it contains a single CREATE STREAM/TABLE
 * statement, builds the {@link Topic} it refers to, including key/value
 * schemas (when the format supports schema inference) and serde features.
 *
 * <p>Returns {@code null} when the SQL does not parse or yields no CS/CT
 * statement; parse failures are expected for some test inputs and are only
 * logged to stdout.
 *
 * @param sql the SQL text to parse (expected to hold one statement)
 * @param metaStore mutable metastore used to prepare statements and register types
 * @param ksqlConfig config used for format injection and serde-feature resolution
 * @return the extracted topic, or {@code null} if none could be derived
 */
private static Topic createTopicFromStatement(
    final String sql, final MutableMetaStore metaStore, final KsqlConfig ksqlConfig) {
  final KsqlParser parser = new DefaultKsqlParser();

  final Function<ConfiguredStatement<?>, Topic> toTopic = (ConfiguredStatement<?> cs) -> {
    final CreateSource createSource = (CreateSource) cs.getStatement();
    final CreateSourceProperties props = createSource.getProperties();
    final LogicalSchema schema = createSource.getElements().toLogicalSchema();

    final FormatInfo keyFormatInfo =
        SourcePropertiesUtil.getKeyFormat(props, createSource.getName());
    final Format keyFormat = FormatFactory.fromName(keyFormatInfo.getFormat());
    final SerdeFeatures keyFeatures = buildKeyFeatures(keyFormat, schema);
    // Only formats with schema inference get a concrete key schema attached.
    final Optional<ParsedSchema> keySchema =
        keyFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE)
            ? buildSchema(sql, schema.key(), keyFormatInfo, keyFormat, keyFeatures)
            : Optional.empty();

    final FormatInfo valueFormatInfo = SourcePropertiesUtil.getValueFormat(props);
    final Format valueFormat = FormatFactory.fromName(valueFormatInfo.getFormat());
    final SerdeFeatures valueFeatures = buildValueFeatures(ksqlConfig, props, valueFormat, schema);
    final Optional<ParsedSchema> valueSchema =
        valueFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE)
            ? buildSchema(sql, schema.value(), valueFormatInfo, valueFormat, valueFeatures)
            : Optional.empty();

    return new Topic(
        props.getKafkaTopic(),
        props.getPartitions().orElse(Topic.DEFAULT_PARTITIONS),
        props.getReplicas().orElse(Topic.DEFAULT_RF),
        keySchema,
        valueSchema,
        keyFeatures,
        valueFeatures);
  };

  try {
    final List<ParsedStatement> statements = parser.parse(sql);
    if (statements.size() > 1) {
      throw new IllegalArgumentException("SQL contains more than one statement: " + sql);
    }

    final List<Topic> topics = new ArrayList<>();
    for (final ParsedStatement parsedStatement : statements) {
      // in order to extract the topics, we may need to also register type statements
      if (parsedStatement.getStatement().statement()
          instanceof SqlBaseParser.RegisterTypeContext) {
        registerType(parser.prepare(parsedStatement, metaStore), metaStore);
      }
      if (isCsOrCT(parsedStatement)) {
        final PreparedStatement<?> prepared = parser.prepare(parsedStatement, metaStore);
        final ConfiguredStatement<?> configured = ConfiguredStatement.of(
            prepared, SessionConfig.of(ksqlConfig, Collections.emptyMap()));
        topics.add(toTopic.apply(new DefaultFormatInjector().inject(configured)));
      }
    }

    return topics.isEmpty() ? null : topics.get(0);
  } catch (final Exception e) {
    // Statement won't parse: this will be detected/handled later.
    System.out.println("Error parsing statement (which may be expected): " + sql);
    e.printStackTrace(System.out);
    return null;
  }
}
Aggregations