Use of io.confluent.ksql.parser.properties.with.CreateSourceProperties in project ksql by confluentinc.
From the class SqlFormatterTest, method shouldFormatCreateOrReplaceStreamStatement.
@Test
public void shouldFormatCreateOrReplaceStreamStatement() {
  // Given: a CREATE STREAM statement marked as OR REPLACE (second ctor flag),
  // built from a copy of the shared WITH-clause properties.
  final CreateSourceProperties props = CreateSourceProperties.from(
      new ImmutableMap.Builder<String, Literal>()
          .putAll(SOME_WITH_PROPS.copyOfOriginalLiterals())
          .build());
  // Renamed from 'createTable': the statement under test is a CreateStream.
  final CreateStream createStream =
      new CreateStream(TEST, ELEMENTS_WITHOUT_KEY, true, false, props, false);

  // When:
  final String sql = SqlFormatter.formatSql(createStream);

  // Then: formatter emits the OR REPLACE keyword plus the WITH clause.
  assertThat(sql, is(
      "CREATE OR REPLACE STREAM TEST (`Foo` STRING, `Bar` STRING) "
          + "WITH (KAFKA_TOPIC='topic_test', VALUE_FORMAT='JSON');"));
}
Use of io.confluent.ksql.parser.properties.with.CreateSourceProperties in project ksql by confluentinc.
From the class TopicCreateInjector, method injectForCreateSource.
/**
 * Ensures the Kafka topic backing a CREATE STREAM/TABLE statement exists, creating it
 * if necessary.
 *
 * @param statement the CREATE statement being injected
 * @param topicPropertiesBuilder builder used to accumulate the topic configuration
 * @return the original statement, unchanged (injection here only creates the topic)
 * @throws KsqlException if the topic does not exist and no PARTITIONS property was given
 */
private ConfiguredStatement<? extends CreateSource> injectForCreateSource(
    final ConfiguredStatement<? extends CreateSource> statement,
    final TopicProperties.Builder topicPropertiesBuilder) {
  final CreateSource createSource = statement.getStatement();
  final CreateSourceProperties properties = createSource.getProperties();
  final String topicName = properties.getKafkaTopic();

  if (topicClient.isTopicExists(topicName)) {
    // Topic already exists: take partition/replica defaults from its live description.
    topicPropertiesBuilder.withSource(() -> topicClient.describeTopic(topicName));
  } else if (!properties.getPartitions().isPresent()) {
    // Topic is missing and the user supplied no partition count: fail with an example
    // statement (partitions hard-coded to 2 purely for illustration) showing the fix.
    final CreateSource example =
        createSource.copyWith(createSource.getElements(), properties.withPartitions(2));
    throw new KsqlException(
        "Topic '" + topicName + "' does not exist. If you want to create a new topic for the "
            + "stream/table please re-run the statement providing the required '"
            + CommonCreateConfigs.SOURCE_NUMBER_OF_PARTITIONS + "' configuration in the WITH "
            + "clause (and optionally '" + CommonCreateConfigs.SOURCE_NUMBER_OF_REPLICAS + "'). "
            + "For example: " + SqlFormatter.formatSql(example));
  }

  // Reuse topicName rather than re-reading the same property a second time.
  topicPropertiesBuilder
      .withName(topicName)
      .withWithClause(
          Optional.of(topicName),
          properties.getPartitions(),
          properties.getReplicas());

  // Tables are backed by compacted topics; streams use the default delete policy.
  final String topicCleanUpPolicy = createSource instanceof CreateTable
      ? TopicConfig.CLEANUP_POLICY_COMPACT
      : TopicConfig.CLEANUP_POLICY_DELETE;

  createTopic(topicPropertiesBuilder, topicCleanUpPolicy);
  return statement;
}
Use of io.confluent.ksql.parser.properties.with.CreateSourceProperties in project ksql by confluentinc.
From the class DefaultSchemaInjector, method getKeySchema.
/**
 * Resolves the key schema for a CREATE statement, if key schema inference applies.
 *
 * @param statement the CREATE STREAM/TABLE statement under inspection
 * @return the inferred key schema and id, or {@link Optional#empty()} when
 *         inference does not apply to this statement
 */
private Optional<SchemaAndId> getKeySchema(final ConfiguredStatement<CreateSource> statement) {
  final CreateSource createSource = statement.getStatement();
  final CreateSourceProperties sourceProps = createSource.getProperties();
  final FormatInfo format =
      SourcePropertiesUtil.getKeyFormat(sourceProps, createSource.getName());

  if (shouldInferSchema(sourceProps.getKeySchemaId(), statement, format, true)) {
    // Key schema inference always results in an unwrapped key.
    final SchemaAndId schemaAndId = getSchema(
        Optional.of(sourceProps.getKafkaTopic()),
        sourceProps.getKeySchemaId(),
        format,
        SerdeFeaturesFactory.buildKeyFeatures(FormatFactory.of(format), true),
        statement.getStatementText(),
        true);
    return Optional.of(schemaAndId);
  }

  return Optional.empty();
}
Use of io.confluent.ksql.parser.properties.with.CreateSourceProperties in project ksql by confluentinc.
From the class DefaultSchemaInjectorTest, method givenFormatsAndProps.
/**
 * Stubs the mocked CREATE statements with a WITH-clause built from {@code BASE_PROPS}
 * plus the given formats and any additional properties.
 *
 * @param keyFormat key format name, or {@code null} to omit KEY_FORMAT
 * @param valueFormat value format name, or {@code null} to omit VALUE_FORMAT
 * @param additionalProps extra WITH-clause properties, merged last so they may
 *        override the base/format entries
 */
private void givenFormatsAndProps(
    final String keyFormat,
    final String valueFormat,
    final Map<String, Literal> additionalProps) {
  final Map<String, Literal> props = new HashMap<>(BASE_PROPS);
  if (keyFormat != null) {
    props.put("KEY_FORMAT", new StringLiteral(keyFormat));
  }
  if (valueFormat != null) {
    props.put("VALUE_FORMAT", new StringLiteral(valueFormat));
  }
  props.putAll(additionalProps);

  // The same property map backs both the CS/CT and CSAS/CTAS mocks.
  final CreateSourceProperties csProps = CreateSourceProperties.from(props);
  final CreateSourceAsProperties casProps = CreateSourceAsProperties.from(props);
  when(cs.getProperties()).thenReturn(csProps);
  when(ct.getProperties()).thenReturn(csProps);
  when(csas.getProperties()).thenReturn(casProps);
  when(ctas.getProperties()).thenReturn(casProps);
}
Use of io.confluent.ksql.parser.properties.with.CreateSourceProperties in project ksql by confluentinc.
From the class TestCaseBuilderUtil, method createTopicFromStatement.
/**
 * Parses {@code sql} and, if it is a single CREATE STREAM/TABLE statement, builds the
 * {@link Topic} it implies (name, partitions, replicas, and key/value schemas).
 *
 * <p>Returns {@code null} when the statement is not a CS/CT, or when parsing fails —
 * parse failures are expected for some test inputs and are detected/handled later.
 */
private static Topic createTopicFromStatement(final String sql, final MutableMetaStore metaStore, final KsqlConfig ksqlConfig) {
final KsqlParser parser = new DefaultKsqlParser();
// Maps a fully-configured CS/CT statement to the Topic it describes.
final Function<ConfiguredStatement<?>, Topic> extractTopic = (ConfiguredStatement<?> stmt) -> {
final CreateSource statement = (CreateSource) stmt.getStatement();
final CreateSourceProperties props = statement.getProperties();
final LogicalSchema logicalSchema = statement.getElements().toLogicalSchema();
// Key side: format, serde features, and (if the format supports inference) a schema.
final FormatInfo keyFormatInfo = SourcePropertiesUtil.getKeyFormat(props, statement.getName());
final Format keyFormat = FormatFactory.fromName(keyFormatInfo.getFormat());
final SerdeFeatures keySerdeFeats = buildKeyFeatures(keyFormat, logicalSchema);
final Optional<ParsedSchema> keySchema = keyFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE) ? buildSchema(sql, logicalSchema.key(), keyFormatInfo, keyFormat, keySerdeFeats) : Optional.empty();
// Value side: same derivation as the key side.
final FormatInfo valFormatInfo = SourcePropertiesUtil.getValueFormat(props);
final Format valFormat = FormatFactory.fromName(valFormatInfo.getFormat());
final SerdeFeatures valSerdeFeats = buildValueFeatures(ksqlConfig, props, valFormat, logicalSchema);
final Optional<ParsedSchema> valueSchema = valFormat.supportsFeature(SerdeFeature.SCHEMA_INFERENCE) ? buildSchema(sql, logicalSchema.value(), valFormatInfo, valFormat, valSerdeFeats) : Optional.empty();
// Fall back to test defaults when the WITH clause omits partitions/replicas.
final int partitions = props.getPartitions().orElse(Topic.DEFAULT_PARTITIONS);
final short rf = props.getReplicas().orElse(Topic.DEFAULT_RF);
return new Topic(props.getKafkaTopic(), partitions, rf, keySchema, valueSchema, keySerdeFeats, valSerdeFeats);
};
try {
final List<ParsedStatement> parsed = parser.parse(sql);
if (parsed.size() > 1) {
throw new IllegalArgumentException("SQL contains more than one statement: " + sql);
}
final List<Topic> topics = new ArrayList<>();
for (ParsedStatement stmt : parsed) {
// in order to extract the topics, we may need to also register type statements
if (stmt.getStatement().statement() instanceof SqlBaseParser.RegisterTypeContext) {
final PreparedStatement<?> prepare = parser.prepare(stmt, metaStore);
registerType(prepare, metaStore);
}
if (isCsOrCT(stmt)) {
final PreparedStatement<?> prepare = parser.prepare(stmt, metaStore);
final ConfiguredStatement<?> configured = ConfiguredStatement.of(prepare, SessionConfig.of(ksqlConfig, Collections.emptyMap()));
// Inject default key/value formats before extracting the topic description.
final ConfiguredStatement<?> withFormats = new DefaultFormatInjector().inject(configured);
topics.add(extractTopic.apply(withFormats));
}
}
return topics.isEmpty() ? null : topics.get(0);
} catch (final Exception e) {
// Statement won't parse: this will be detected/handled later.
System.out.println("Error parsing statement (which may be expected): " + sql);
e.printStackTrace(System.out);
return null;
}
}
Aggregations