Use of io.confluent.ksql.schema.ksql.inference.TopicSchemaSupplier.SchemaAndId in project ksql by confluentinc.
Class DefaultSchemaInjector, method buildElements:
private static TableElements buildElements(
    final ConfiguredStatement<CreateSource> preparedStatement,
    final Optional<SchemaAndId> keySchema,
    final Optional<SchemaAndId> valueSchema
) {
  final List<TableElement> elements = new ArrayList<>();

  if (keySchema.isPresent()) {
    final ColumnConstraints constraints = getKeyConstraints(preparedStatement.getStatement());
    keySchema.get().columns.stream()
        .map(col -> new TableElement(col.name(), new Type(col.type()), constraints))
        .forEach(elements::add);
  } else {
    getKeyColumns(preparedStatement).forEach(elements::add);
  }

  if (valueSchema.isPresent()) {
    valueSchema.get().columns.stream()
        .map(col -> new TableElement(col.name(), new Type(col.type())))
        .forEach(elements::add);
  } else {
    getValueColumns(preparedStatement).forEach(elements::add);
  }

  return TableElements.of(elements);
}
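buildElements prefers columns resolved from Schema Registry (SchemaAndId.columns) over those declared in the original statement: key columns first, then value columns. Below is a minimal, self-contained sketch of that merge pattern; the Column record and the merge method are hypothetical stand-ins, not ksql types.

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

public final class ColumnMergeSketch {

  // Hypothetical stand-in for ksql's SimpleColumn: a name plus its SQL type as text.
  record Column(String name, String type) {}

  // Mirrors the branching in buildElements: use inferred columns when a schema was
  // fetched, otherwise fall back to the columns declared in the statement.
  static List<Column> merge(
      final Optional<List<Column>> inferredKey,
      final Optional<List<Column>> inferredValue,
      final List<Column> declaredKey,
      final List<Column> declaredValue
  ) {
    final List<Column> elements = new ArrayList<>();
    elements.addAll(inferredKey.orElse(declaredKey));
    elements.addAll(inferredValue.orElse(declaredValue));
    return elements;
  }

  public static void main(final String[] args) {
    final List<Column> merged = merge(
        Optional.of(List.of(new Column("ID", "INT"))),  // key schema fetched from the registry
        Optional.empty(),                               // no value schema available
        List.of(new Column("ID", "STRING")),            // declared key columns (ignored)
        List.of(new Column("NAME", "STRING")));         // declared value columns (kept)
    System.out.println(merged);
  }
}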
Use of io.confluent.ksql.schema.ksql.inference.TopicSchemaSupplier.SchemaAndId in project ksql by confluentinc.
Class DefaultSchemaInjector, method forCreateStatement:
private Optional<ConfiguredStatement<CreateSource>> forCreateStatement(
    final ConfiguredStatement<CreateSource> statement
) {
  final Optional<SchemaAndId> keySchema = getKeySchema(statement);
  final Optional<SchemaAndId> valueSchema = getValueSchema(statement);
  if (!keySchema.isPresent() && !valueSchema.isPresent()) {
    return Optional.empty();
  }

  final CreateSource withSchema = addSchemaFields(statement, keySchema, valueSchema);
  final PreparedStatement<CreateSource> prepared = buildPreparedStatement(withSchema);

  final ImmutableMap.Builder<String, Object> overrideBuilder = ImmutableMap.builder();

  // Only store raw schema if schema id is provided by user
  if (withSchema.getProperties().getKeySchemaId().isPresent()) {
    keySchema.map(schemaAndId ->
        overrideBuilder.put(CommonCreateConfigs.KEY_SCHEMA_ID, schemaAndId));
  }
  if (withSchema.getProperties().getValueSchemaId().isPresent()) {
    valueSchema.map(schemaAndId ->
        overrideBuilder.put(CommonCreateConfigs.VALUE_SCHEMA_ID, schemaAndId));
  }

  final ConfiguredStatement<CreateSource> configured = ConfiguredStatement.of(
      prepared,
      statement.getSessionConfig().copyWith(overrideBuilder.build()));
  return Optional.of(configured);
}
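forCreateStatement only records the fetched schema objects as session-config overrides when the user explicitly supplied KEY_SCHEMA_ID or VALUE_SCHEMA_ID in the statement. A self-contained sketch of that conditional-override pattern follows, assuming Guava's ImmutableMap and placeholder string keys instead of the CommonCreateConfigs constants; the sketch uses Optional.ifPresent where the original uses map purely for its side effect.

import com.google.common.collect.ImmutableMap;
import java.util.Optional;

public final class SchemaOverrideSketch {

  // Placeholder keys; the real code references constants from CommonCreateConfigs.
  private static final String KEY_SCHEMA_ID = "KEY_SCHEMA_ID";
  private static final String VALUE_SCHEMA_ID = "VALUE_SCHEMA_ID";

  // Build overrides only for the sides where the user pinned an explicit schema id.
  static ImmutableMap<String, Object> buildOverrides(
      final Optional<Integer> userKeySchemaId, final Optional<Object> keySchema,
      final Optional<Integer> userValueSchemaId, final Optional<Object> valueSchema
  ) {
    final ImmutableMap.Builder<String, Object> overrides = ImmutableMap.builder();
    if (userKeySchemaId.isPresent()) {
      keySchema.ifPresent(schema -> overrides.put(KEY_SCHEMA_ID, schema));
    }
    if (userValueSchemaId.isPresent()) {
      valueSchema.ifPresent(schema -> overrides.put(VALUE_SCHEMA_ID, schema));
    }
    return overrides.build();
  }

  public static void main(final String[] args) {
    System.out.println(buildOverrides(
        Optional.of(42), Optional.of("fetched key schema"),
        Optional.empty(), Optional.of("fetched value schema")));
    // prints {KEY_SCHEMA_ID=fetched key schema}
  }
}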
Use of io.confluent.ksql.schema.ksql.inference.TopicSchemaSupplier.SchemaAndId in project ksql by confluentinc.
Class DefaultSchemaInjector, method getCreateAsKeySchema:
private Optional<SchemaAndId> getCreateAsKeySchema(
    final ConfiguredStatement<CreateAsSelect> statement,
    final CreateSourceCommand createSourceCommand
) {
  final CreateAsSelect csStmt = statement.getStatement();
  final CreateSourceAsProperties props = csStmt.getProperties();
  final FormatInfo keyFormat = createSourceCommand.getFormats().getKeyFormat();

  if (!shouldInferSchema(props.getKeySchemaId(), statement, keyFormat, true)) {
    return Optional.empty();
  }

  // until we support user-configuration of single key wrapping/unwrapping, we choose
  // to have key schema inference always result in an unwrapped key
  final SerdeFeatures serdeFeatures =
      SerdeFeaturesFactory.buildKeyFeatures(FormatFactory.of(keyFormat), true);

  final SchemaAndId schemaAndId = getSchema(
      props.getKafkaTopic(),
      props.getKeySchemaId(),
      keyFormat,
      serdeFeatures,
      statement.getStatementText(),
      true);

  final List<Column> tableColumns = createSourceCommand.getSchema().key();
  checkColumnsCompatibility(props.getKeySchemaId(), tableColumns, schemaAndId.columns, true);

  return Optional.of(schemaAndId);
}
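The SchemaAndId itself is resolved through the TopicSchemaSupplier, either by an explicit schema id or from the subject registered for the topic's key. A rough sketch of such a lookup against Confluent Schema Registry, assuming a recent schema-registry-client and the default TopicNameStrategy ("<topic>-key" subjects); this is not ksql's actual TopicSchemaSupplier implementation.

import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import java.util.Optional;

public final class KeySchemaLookupSketch {

  // Resolve an explicit schema id when given; otherwise take the latest schema
  // registered under the topic's "-key" subject.
  static ParsedSchema fetchKeySchema(
      final SchemaRegistryClient client,
      final String topic,
      final Optional<Integer> explicitId
  ) throws Exception {
    if (explicitId.isPresent()) {
      return client.getSchemaById(explicitId.get());
    }
    final int latestId = client.getLatestSchemaMetadata(topic + "-key").getId();
    return client.getSchemaById(latestId);
  }
}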
Use of io.confluent.ksql.schema.ksql.inference.TopicSchemaSupplier.SchemaAndId in project ksql by confluentinc.
Class DefaultSchemaInjector, method getCreateAsValueSchema:
private Optional<SchemaAndId> getCreateAsValueSchema(
    final ConfiguredStatement<CreateAsSelect> statement,
    final CreateSourceCommand createSourceCommand
) {
  final CreateAsSelect csStmt = statement.getStatement();
  final CreateSourceAsProperties props = csStmt.getProperties();
  final FormatInfo valueFormat = createSourceCommand.getFormats().getValueFormat();

  if (!shouldInferSchema(props.getValueSchemaId(), statement, valueFormat, false)) {
    return Optional.empty();
  }

  final SchemaAndId schemaAndId = getSchema(
      props.getKafkaTopic(),
      props.getValueSchemaId(),
      valueFormat,
      createSourceCommand.getFormats().getValueFeatures(),
      statement.getStatementText(),
      false);

  final List<Column> tableColumns = createSourceCommand.getSchema().value();
  checkColumnsCompatibility(props.getValueSchemaId(), tableColumns, schemaAndId.columns, false);

  return Optional.of(schemaAndId);
}
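checkColumnsCompatibility guards against a user-supplied schema id whose columns do not line up with the schema ksql derived for the query. The exact rules live in DefaultSchemaInjector; the sketch below is only a guess at the spirit of the check (every derived column must appear in the fetched schema with the same type), using a hypothetical Column record rather than ksql's column types.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public final class ColumnCompatibilitySketch {

  // Hypothetical stand-in for a ksql column: name plus SQL type rendered as text.
  record Column(String name, String type) {}

  // Illustrative only: the real ksql check may be stricter or more lenient.
  static void checkCompatible(final List<Column> queryColumns, final List<Column> schemaColumns) {
    final Map<String, String> schemaTypesByName = schemaColumns.stream()
        .collect(Collectors.toMap(Column::name, Column::type));
    for (final Column col : queryColumns) {
      final String schemaType = schemaTypesByName.get(col.name());
      if (schemaType == null || !schemaType.equals(col.type())) {
        throw new IllegalStateException(
            "Schema id is incompatible with query schema: " + col.name() + " is "
                + col.type() + " in the query but " + schemaType + " in the registered schema");
      }
    }
  }
}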
Use of io.confluent.ksql.schema.ksql.inference.TopicSchemaSupplier.SchemaAndId in project ksql by confluentinc.
Class DefaultSchemaInjector, method forCreateAsStatement:
private Optional<ConfiguredStatement<CreateAsSelect>> forCreateAsStatement(
    final ConfiguredStatement<CreateAsSelect> statement
) {
  final CreateAsSelect csStmt = statement.getStatement();
  final CreateSourceAsProperties properties = csStmt.getProperties();

  // No schema injection is needed if neither a key schema id nor a value schema id was provided
  if (!properties.getKeySchemaId().isPresent() && !properties.getValueSchemaId().isPresent()) {
    return Optional.empty();
  }

  final CreateSourceCommand createSourceCommand;
  try {
    final ServiceContext sandboxServiceContext = SandboxedServiceContext.create(serviceContext);
    createSourceCommand = (CreateSourceCommand) executionContext
        .createSandbox(sandboxServiceContext)
        .plan(sandboxServiceContext, statement)
        .getDdlCommand()
        .get();
  } catch (final Exception e) {
    throw new KsqlStatementException(
        "Could not determine output schema for query due to error: " + e.getMessage(),
        statement.getStatementText(),
        e);
  }

  final Optional<SchemaAndId> keySchema = getCreateAsKeySchema(statement, createSourceCommand);
  final Optional<SchemaAndId> valueSchema = getCreateAsValueSchema(statement, createSourceCommand);

  final CreateAsSelect withSchema = addSchemaFieldsCas(statement, keySchema, valueSchema);
  final PreparedStatement<CreateAsSelect> prepared = buildPreparedStatement(withSchema);

  final ImmutableMap.Builder<String, Object> overrideBuilder = ImmutableMap.builder();

  // Only store raw schema if schema id is provided by user
  if (properties.getKeySchemaId().isPresent()) {
    keySchema.map(schemaAndId ->
        overrideBuilder.put(CommonCreateConfigs.KEY_SCHEMA_ID, schemaAndId));
  }
  if (properties.getValueSchemaId().isPresent()) {
    valueSchema.map(schemaAndId ->
        overrideBuilder.put(CommonCreateConfigs.VALUE_SCHEMA_ID, schemaAndId));
  }

  final ConfiguredStatement<CreateAsSelect> configured = ConfiguredStatement.of(
      prepared,
      statement.getSessionConfig().copyWith(overrideBuilder.build()));
  return Optional.of(configured);
}
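forCreateAsStatement only does work when the statement carries an explicit schema id; otherwise it returns Optional.empty() and the CSAS passes through unchanged. The snippet below shows the kind of statement that exercises this path, with made-up stream and topic names; KEY_SCHEMA_ID and VALUE_SCHEMA_ID are the ksqlDB WITH-clause properties the code reads via getKeySchemaId() and getValueSchemaId().

public final class SchemaIdStatementExample {

  public static void main(final String[] args) {
    // A CSAS statement that pins both schemas to registered ids; planning it in the
    // sandbox yields the CreateSourceCommand whose schema is validated above.
    final String csas =
        "CREATE STREAM ENRICHED WITH ("
        + " KAFKA_TOPIC='enriched',"
        + " FORMAT='AVRO',"
        + " KEY_SCHEMA_ID=1,"
        + " VALUE_SCHEMA_ID=2"
        + ") AS SELECT * FROM SOURCE_STREAM EMIT CHANGES;";
    System.out.println(csas);
  }
}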