Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class SourceTopicsExtractor, method visitAliasedRelation:
@Override
protected AstNode visitAliasedRelation(final AliasedRelation node, final Void context) {
  final SourceName structuredDataSourceName = ((Table) node.getRelation()).getName();
  final DataSource source = metaStore.getSource(structuredDataSourceName);
  if (source == null) {
    throw new KsqlException(structuredDataSourceName.text() + " does not exist.");
  }
  // This method is called first with the primary kafka topic (or the node.getFrom() node)
  if (primarySourceTopic == null) {
    primarySourceTopic = source.getKsqlTopic();
  }
  sourceTopics.add(source.getKsqlTopic());
  return node;
}
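The extractor above is an AST visitor, so a caller walks a parsed query with it and then reads the collected topics back out. The sketch below shows that pattern; the constructor argument and the accessor names (getPrimarySourceTopic, getSourceTopics) are assumptions made for illustration, not confirmed API.

// Hypothetical usage sketch: collect the Kafka topics referenced by a parsed query.
// The constructor argument and the getter names are assumed for illustration only.
final SourceTopicsExtractor extractor = new SourceTopicsExtractor(metaStore);
extractor.process(query, null);  // triggers visitAliasedRelation for each relation in the query

final KsqlTopic primaryTopic = extractor.getPrimarySourceTopic();  // topic of the FROM relation
final Set<KsqlTopic> allTopics = extractor.getSourceTopics();      // topics of every referenced source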
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class TopicDeleteInjector, method inject:
@SuppressWarnings({ "unchecked", "UnstableApiUsage" })
@Override
public <T extends Statement> ConfiguredStatement<T> inject(final ConfiguredStatement<T> statement) {
  if (!(statement.getStatement() instanceof DropStatement)) {
    return statement;
  }
  final DropStatement dropStatement = (DropStatement) statement.getStatement();
  if (!dropStatement.isDeleteTopic()) {
    return statement;
  }
  final SourceName sourceName = dropStatement.getName();
  final DataSource source = metastore.getSource(sourceName);
  if (source != null) {
    if (source.isSource()) {
      throw new KsqlException("Cannot delete topic for read-only source: " + sourceName.text());
    }
    checkTopicRefs(source);
    deleteTopic(source);
    final Closer closer = Closer.create();
    closer.register(() -> deleteKeySubject(source));
    closer.register(() -> deleteValueSubject(source));
    try {
      closer.close();
    } catch (final KsqlException e) {
      throw e;
    } catch (final Exception e) {
      throw new KsqlException(e);
    }
  } else if (!dropStatement.getIfExists()) {
    throw new KsqlException("Could not find source to delete topic for: " + statement);
  }
  final T withoutDelete = (T) dropStatement.withoutDeleteClause();
  final String withoutDeleteText = SqlFormatter.formatSql(withoutDelete) + ";";
  return statement.withStatement(withoutDeleteText, withoutDelete);
}
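In the injection pipeline, inject is called with the configured DROP statement; the topic and subject deletions happen as a side effect, and the caller continues with the rewritten statement. A minimal sketch, assuming an already-constructed injector and a configured "DROP STREAM FOO DELETE TOPIC;" statement:

// Hypothetical usage sketch; how the TopicDeleteInjector is constructed is out of scope here.
private static <T extends Statement> ConfiguredStatement<T> dropAndCleanUp(
    final TopicDeleteInjector injector,
    final ConfiguredStatement<T> dropWithDeleteTopic) {
  // Deletes the backing topic (and, where applicable, the key/value subjects) as a side effect,
  // then returns the same DROP statement with the DELETE TOPIC clause stripped.
  return injector.inject(dropWithDeleteTopic);
}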
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class KsqlResourceTest, method givenSource:
private void givenSource(final DataSourceType type, final String sourceName, final String topicName,
    final LogicalSchema schema, final Set<SourceName> sourceReferences) {
  final KsqlTopic ksqlTopic = new KsqlTopic(topicName,
      KeyFormat.nonWindowed(FormatInfo.of(FormatFactory.KAFKA.name()), SerdeFeatures.of()),
      ValueFormat.of(FormatInfo.of(FormatFactory.JSON.name()), SerdeFeatures.of()));
  givenKafkaTopicExists(topicName);
  final DataSource source;
  switch (type) {
    case KSTREAM:
      source = new KsqlStream<>("statementText", SourceName.of(sourceName), schema, Optional.empty(),
          false, ksqlTopic, false);
      break;
    case KTABLE:
      source = new KsqlTable<>("statementText", SourceName.of(sourceName), schema, Optional.empty(),
          false, ksqlTopic, false);
      break;
    default:
      throw new IllegalArgumentException(type.toString());
  }
  metaStore.putSource(source, false);
  metaStore.addSourceReferences(source.getName(), sourceReferences);
}
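A call to this helper from a test might look like the sketch below; the schema, source name, and topic name are made up for illustration, and the LogicalSchema.builder()/SqlTypes usage is assumed to be available through the test's existing imports.

// Hypothetical call of the helper above with an illustrative schema.
final LogicalSchema schema = LogicalSchema.builder()
    .keyColumn(ColumnName.of("ID"), SqlTypes.STRING)
    .valueColumn(ColumnName.of("VAL"), SqlTypes.BIGINT)
    .build();
givenSource(DataSourceType.KSTREAM, "ORDERS", "orders_topic", schema, ImmutableSet.of());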
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class ListSourceExecutorTest, method assertSourceListWithWarning:
private static void assertSourceListWithWarning(final KsqlEntity entity, final DataSource... sources) {
  assertThat(entity, instanceOf(SourceDescriptionList.class));
  final SourceDescriptionList listing = (SourceDescriptionList) entity;
  assertThat(listing.getSourceDescriptions(), containsInAnyOrder(Arrays.stream(sources)
      .map(s -> equalTo(SourceDescriptionFactory.create(s, true, ImmutableList.of(), ImmutableList.of(),
          Optional.empty(), ImmutableList.of(), ImmutableList.of(), new MetricCollectors())))
      .collect(Collectors.toList())));
  assertThat(listing.getWarnings(), containsInAnyOrder(Arrays.stream(sources)
      .map(s -> equalTo(new KsqlWarning("Error from Kafka: unknown topic: " + s.getKafkaTopicName())))
      .collect(Collectors.toList())));
}
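The helper is meant to be called with the entity produced by a SHOW ... EXTENDED execution whose underlying Kafka topics are missing, together with the same DataSource instances the engine knows about, so that the expected descriptions and warnings line up. A sketch, where returnedEntity and the source names are assumptions for illustration:

// Hypothetical usage sketch of the assertion helper above.
final DataSource pageViews = engine.getEngine().getMetaStore().getSource(SourceName.of("PAGE_VIEWS"));
final DataSource orders = engine.getEngine().getMetaStore().getSource(SourceName.of("ORDERS"));
assertSourceListWithWarning(returnedEntity, pageViews, orders);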
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class ListSourceExecutorTest, method shouldShowColumnsSource:
@Test
public void shouldShowColumnsSource() {
  // Given:
  engine.givenSource(DataSourceType.KSTREAM, "SOURCE");
  final ExecuteResult result = engine.getEngine().execute(engine.getServiceContext(),
      engine.configure("CREATE STREAM SINK AS SELECT * FROM source;"));
  final PersistentQueryMetadata metadata =
      (PersistentQueryMetadata) result.getQuery().orElseThrow(IllegalArgumentException::new);
  final DataSource stream = engine.getEngine().getMetaStore().getSource(SourceName.of("SINK"));
  // When:
  final SourceDescriptionEntity sourceDescription = (SourceDescriptionEntity) CUSTOM_EXECUTORS.showColumns()
      .execute(ConfiguredStatement.of(
              PreparedStatement.of("DESCRIBE SINK;", new ShowColumns(SourceName.of("SINK"), false)),
              SessionConfig.of(engine.getKsqlConfig(), ImmutableMap.of())),
          SESSION_PROPERTIES, engine.getEngine(), engine.getServiceContext())
      .getEntity().orElseThrow(IllegalStateException::new);
  // Then:
  final QueryStatusCount queryStatusCount =
      QueryStatusCount.fromStreamsStateCounts(Collections.singletonMap(metadata.getState(), 1));
  assertThat(sourceDescription.getSourceDescription(), equalTo(SourceDescriptionFactory.create(stream, false,
      ImmutableList.of(), ImmutableList.of(new RunningQuery(metadata.getStatementString(),
          ImmutableSet.of(metadata.getSinkName().get().toString(FormatOptions.noEscape())),
          ImmutableSet.of(metadata.getResultTopic().get().getKafkaTopicName()),
          metadata.getQueryId(), queryStatusCount, KsqlConstants.KsqlQueryType.PERSISTENT)),
      Optional.empty(), ImmutableList.of(), ImmutableList.of(), new MetricCollectors())));
}