Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class ScalablePushQueryExecutionUtil, method findQuery.
static PersistentQueryMetadata findQuery(
    final EngineContext engineContext, final ImmutableAnalysis analysis) {
  final DataSource source = analysis.getFrom().getDataSource();
  final SourceName sourceName = source.getName();
  final Set<QueryId> queries = engineContext.getQueryRegistry().getQueriesWithSink(sourceName);

  if (queries.isEmpty()) {
    throw new IllegalStateException("Scalable push queries require a query that has a sink. "
        + "Source Name: " + sourceName);
  }
  if (queries.size() > 1) {
    throw new IllegalStateException("Scalable push queries only work on sources that have a "
        + "single writer query. Source Name: " + sourceName + " Queries: " + queries);
  }

  final QueryId queryId = Iterables.getOnlyElement(queries);
  return engineContext.getQueryRegistry().getPersistentQuery(queryId)
      .orElseThrow(() -> new KsqlException("Persistent query has been stopped: " + queryId));
}
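Below is a minimal, self-contained sketch of the same single-writer lookup idiom. The registry map and query ids are hypothetical stand-ins rather than the ksql API; only Guava's Iterables.getOnlyElement is the real call used above.

// Hypothetical stand-in for the query registry: sink name -> ids of queries writing to it.
import com.google.common.collect.Iterables;
import java.util.Map;
import java.util.Set;

public class SingleWriterLookupSketch {

  static String findWriterQuery(final Map<String, Set<String>> queriesBySink, final String sourceName) {
    final Set<String> queries = queriesBySink.getOrDefault(sourceName, Set.of());
    if (queries.isEmpty()) {
      throw new IllegalStateException("No writer query for source: " + sourceName);
    }
    if (queries.size() > 1) {
      throw new IllegalStateException("More than one writer query for source: " + sourceName + " " + queries);
    }
    // Same idiom as the ksql code: getOnlyElement returns the single element of a singleton set.
    return Iterables.getOnlyElement(queries);
  }

  public static void main(final String[] args) {
    final Map<String, Set<String>> queriesBySink =
        Map.of("PAGEVIEWS_ENRICHED", Set.of("CSAS_PAGEVIEWS_ENRICHED_0"));
    System.out.println(findWriterQuery(queriesBySink, "PAGEVIEWS_ENRICHED"));
  }
}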
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class EngineExecutor, method validateExistingSink.
private void validateExistingSink(final KsqlStructuredDataOutputNode outputNode) {
  final SourceName name = outputNode.getSinkName().get();
  final DataSource existing = engineContext.getMetaStore().getSource(name);

  if (existing == null) {
    throw new KsqlException(String.format("%s does not exist.", outputNode));
  }

  if (existing.getDataSourceType() != outputNode.getNodeOutputType()) {
    throw new KsqlException(String.format(
        "Incompatible data sink and query result. Data sink (%s) type is %s "
            + "but select query result is %s.",
        name.text(), existing.getDataSourceType(), outputNode.getNodeOutputType()));
  }

  final LogicalSchema resultSchema = outputNode.getSchema();
  final LogicalSchema existingSchema = existing.getSchema();

  if (!resultSchema.compatibleSchema(existingSchema)) {
    throw new KsqlException("Incompatible schema between results and sink."
        + System.lineSeparator() + "Result schema is " + resultSchema
        + System.lineSeparator() + "Sink schema is " + existingSchema);
  }
}
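A simplified, self-contained sketch of the same validation shape follows: the sink must exist, its type must match the query result, and the schemas must be compatible. SinkInfo and ResultInfo are hypothetical stand-ins (Java 16+ records), not ksql classes, and exact column equality stands in for LogicalSchema.compatibleSchema.

import java.util.List;
import java.util.Objects;

public class SinkValidationSketch {

  enum SourceType { KSTREAM, KTABLE }

  // Hypothetical stand-ins for the existing sink and the query result.
  record SinkInfo(String name, SourceType type, List<String> columns) {}
  record ResultInfo(SourceType type, List<String> columns) {}

  static void validateExistingSink(final SinkInfo existing, final ResultInfo result) {
    if (existing == null) {
      throw new IllegalStateException("Sink does not exist.");
    }
    if (existing.type() != result.type()) {
      throw new IllegalStateException(String.format(
          "Incompatible data sink and query result. Data sink (%s) type is %s but select query result is %s.",
          existing.name(), existing.type(), result.type()));
    }
    // Stand-in for the schema compatibility check: here, exact column equality.
    if (!Objects.equals(existing.columns(), result.columns())) {
      throw new IllegalStateException("Incompatible schema between results and sink.");
    }
  }

  public static void main(final String[] args) {
    validateExistingSink(
        new SinkInfo("OUTPUT", SourceType.KSTREAM, List.of("ID", "NAME")),
        new ResultInfo(SourceType.KSTREAM, List.of("ID", "NAME")));
    System.out.println("sink is compatible");
  }
}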
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class AlterSourceFactory, method create.
public AlterSourceCommand create(final AlterSource statement) {
  final DataSource dataSource = metaStore.getSource(statement.getName());
  final String dataSourceType = statement.getDataSourceType().getKsqlType();

  if (dataSource != null && dataSource.isSource()) {
    throw new KsqlException(String.format(
        "Cannot alter %s '%s': ALTER operations are not supported on source %s.",
        dataSourceType.toLowerCase(), statement.getName().text(), dataSourceType.toLowerCase() + "s"));
  }

  final List<Column> newColumns = statement.getAlterOptions().stream()
      .map(alterOption -> Column.of(ColumnName.of(alterOption.getColumnName()),
          alterOption.getType().getSqlType(), Namespace.VALUE, 0))
      .collect(Collectors.toList());

  return new AlterSourceCommand(statement.getName(), dataSourceType, newColumns);
}
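The guard in the method above, which rejects ALTER on read-only "source" streams and tables, can be illustrated with a small stand-alone sketch; ExistingSource is a hypothetical stand-in for the metastore lookup result, not a ksql type.

public class AlterGuardSketch {

  // Hypothetical view of an existing source: its name, its KSQL type, and whether it is read-only.
  record ExistingSource(String name, String ksqlType, boolean readOnlySource) {}

  static void checkAlterAllowed(final ExistingSource existing) {
    if (existing != null && existing.readOnlySource()) {
      throw new IllegalStateException(String.format(
          "Cannot alter %s '%s': ALTER operations are not supported on source %ss.",
          existing.ksqlType().toLowerCase(), existing.name(), existing.ksqlType().toLowerCase()));
    }
  }

  public static void main(final String[] args) {
    checkAlterAllowed(new ExistingSource("RIDERLOCATIONS", "STREAM", false)); // allowed
    try {
      checkAlterAllowed(new ExistingSource("RIDERLOCATIONS_SRC", "STREAM", true)); // rejected
    } catch (final IllegalStateException e) {
      System.out.println(e.getMessage());
    }
  }
}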
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class CreateSourceFactory, method createStreamCommand.
// This method is called by simple CREATE statements
public CreateStreamCommand createStreamCommand(final CreateStream statement, final KsqlConfig ksqlConfig) {
  final SourceName sourceName = statement.getName();
  final CreateSourceProperties props = statement.getProperties();
  final String topicName = ensureTopicExists(props, serviceContext);
  final LogicalSchema schema = buildSchema(statement.getElements(), ksqlConfig);
  final Optional<TimestampColumn> timestampColumn = buildTimestampColumn(ksqlConfig, props, schema);
  final DataSource dataSource = metaStore.getSource(sourceName);

  if (dataSource != null && !statement.isOrReplace() && !statement.isNotExists()) {
    final String sourceType = dataSource.getDataSourceType().getKsqlType();
    throw new KsqlException(String.format(
        "Cannot add stream '%s': A %s with the same name already exists",
        sourceName.text(), sourceType.toLowerCase()));
  }

  throwIfCreateOrReplaceOnSourceStreamOrTable(statement, dataSource);

  return new CreateStreamCommand(
      sourceName, schema, timestampColumn, topicName,
      buildFormats(statement.getName(), schema, props, ksqlConfig),
      getWindowInfo(props),
      Optional.of(statement.isOrReplace()),
      Optional.of(statement.isSource()));
}
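The name-collision rule applied above (and again in createTableCommand below) can be sketched on its own: a plain CREATE fails when a source with the same name already exists, while OR REPLACE and IF NOT EXISTS suppress the error. The two boolean flags mirror statement.isOrReplace() and statement.isNotExists(); everything else here is hypothetical.

public class CreateCollisionSketch {

  // existingType is null when no source with that name exists, otherwise "STREAM" or "TABLE".
  static void checkNameCollision(final String existingType, final String newName,
      final boolean orReplace, final boolean ifNotExists) {
    if (existingType != null && !orReplace && !ifNotExists) {
      throw new IllegalStateException(String.format(
          "Cannot add stream '%s': A %s with the same name already exists",
          newName, existingType.toLowerCase()));
    }
  }

  public static void main(final String[] args) {
    checkNameCollision(null, "PAGEVIEWS", false, false);      // no existing source: ok
    checkNameCollision("STREAM", "PAGEVIEWS", true, false);   // CREATE OR REPLACE: ok
    checkNameCollision("STREAM", "PAGEVIEWS", false, true);   // IF NOT EXISTS: ok
    try {
      checkNameCollision("TABLE", "PAGEVIEWS", false, false); // plain CREATE: rejected
    } catch (final IllegalStateException e) {
      System.out.println(e.getMessage());
    }
  }
}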
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class CreateSourceFactory, method createTableCommand.
// This method is called by simple CREATE statements
public CreateTableCommand createTableCommand(final CreateTable statement, final KsqlConfig ksqlConfig) {
  final SourceName sourceName = statement.getName();
  final CreateSourceProperties props = statement.getProperties();
  final String topicName = ensureTopicExists(props, serviceContext);
  final LogicalSchema schema = buildSchema(statement.getElements(), ksqlConfig);
  final DataSource dataSource = metaStore.getSource(sourceName);

  if (dataSource != null && !statement.isOrReplace() && !statement.isNotExists()) {
    final String sourceType = dataSource.getDataSourceType().getKsqlType();
    throw new KsqlException(String.format(
        "Cannot add table '%s': A %s with the same name already exists",
        sourceName.text(), sourceType.toLowerCase()));
  }

  if (schema.key().isEmpty()) {
    final boolean usingSchemaInference = props.getValueSchemaId().isPresent();
    final String additional = usingSchemaInference
        ? System.lineSeparator()
            + "Use a partial schema to define the primary key and still load the value columns from "
            + "the Schema Registry, for example:"
            + System.lineSeparator()
            + "\tCREATE TABLE " + sourceName.text() + " (ID INT PRIMARY KEY) WITH (...);"
        : "";
    throw new KsqlException("Tables require a PRIMARY KEY. Please define the PRIMARY KEY." + additional);
  }

  throwIfCreateOrReplaceOnSourceStreamOrTable(statement, dataSource);
  final Optional<TimestampColumn> timestampColumn = buildTimestampColumn(ksqlConfig, props, schema);

  return new CreateTableCommand(
      sourceName, schema, timestampColumn, topicName,
      buildFormats(statement.getName(), schema, props, ksqlConfig),
      getWindowInfo(props),
      Optional.of(statement.isOrReplace()),
      Optional.of(statement.isSource()));
}
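The PRIMARY KEY requirement enforced above can likewise be sketched in isolation; the schema here is just a list of key column names, and the hint text mirrors the message built in the method.

import java.util.List;

public class PrimaryKeyCheckSketch {

  static void requirePrimaryKey(final String tableName, final List<String> keyColumns,
      final boolean usingSchemaInference) {
    if (keyColumns.isEmpty()) {
      // When value columns come from schema inference, suggest a partial schema with just the key.
      final String additional = usingSchemaInference
          ? System.lineSeparator()
              + "Use a partial schema to define the primary key and still load the value columns from "
              + "the Schema Registry, for example:"
              + System.lineSeparator()
              + "\tCREATE TABLE " + tableName + " (ID INT PRIMARY KEY) WITH (...);"
          : "";
      throw new IllegalStateException("Tables require a PRIMARY KEY. Please define the PRIMARY KEY." + additional);
    }
  }

  public static void main(final String[] args) {
    requirePrimaryKey("USERS", List.of("ID"), false); // ok
    try {
      requirePrimaryKey("USERS", List.of(), true);    // rejected, with the schema-inference hint
    } catch (final IllegalStateException e) {
      System.out.println(e.getMessage());
    }
  }
}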