Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class ListSourceExecutor, method describeSource.
private static SourceDescriptionWithWarnings describeSource(
    final KsqlConfig ksqlConfig,
    final KsqlExecutionContext ksqlExecutionContext,
    final ServiceContext serviceContext,
    final SourceName name,
    final boolean extended,
    final ConfiguredStatement<? extends StatementWithExtendedClause> statement,
    final SessionProperties sessionProperties,
    final Collection<SourceDescription> remoteSourceDescriptions
) {
  final DataSource dataSource = ksqlExecutionContext.getMetaStore().getSource(name);
  if (dataSource == null) {
    throw new KsqlStatementException(
        String.format("Could not find STREAM/TABLE '%s' in the Metastore", name.text()),
        statement.getStatementText());
  }

  final List<RunningQuery> readQueries = getQueries(ksqlExecutionContext,
      q -> q.getSourceNames().contains(dataSource.getName()));
  final List<RunningQuery> writeQueries = getQueries(ksqlExecutionContext,
      q -> q.getSinkName().equals(Optional.of(dataSource.getName())));

  Optional<TopicDescription> topicDescription = Optional.empty();
  List<QueryOffsetSummary> queryOffsetSummaries = Collections.emptyList();
  List<String> sourceConstraints = Collections.emptyList();

  // Kafka failures are surfaced as warnings rather than failing the whole DESCRIBE.
  final List<KsqlWarning> warnings = new LinkedList<>();
  try {
    topicDescription = Optional.of(
        serviceContext.getTopicClient().describeTopic(dataSource.getKafkaTopicName()));
    sourceConstraints = getSourceConstraints(name, ksqlExecutionContext.getMetaStore());
  } catch (final KafkaException | KafkaResponseGetFailedException e) {
    warnings.add(new KsqlWarning("Error from Kafka: " + e.getMessage()));
  }

  if (extended) {
    // Extended descriptions additionally include per-query offsets and remote cluster stats.
    queryOffsetSummaries = queryOffsetSummaries(ksqlConfig, serviceContext, writeQueries);
    return new SourceDescriptionWithWarnings(
        warnings,
        SourceDescriptionFactory.create(
            dataSource,
            extended,
            readQueries,
            writeQueries,
            topicDescription,
            queryOffsetSummaries,
            sourceConstraints,
            remoteSourceDescriptions.stream().flatMap(sd -> sd.getClusterStatistics().stream()),
            remoteSourceDescriptions.stream().flatMap(sd -> sd.getClusterErrorStats().stream()),
            sessionProperties.getKsqlHostInfo(),
            ksqlExecutionContext.metricCollectors()));
  }

  return new SourceDescriptionWithWarnings(
      warnings,
      SourceDescriptionFactory.create(
          dataSource,
          extended,
          readQueries,
          writeQueries,
          topicDescription,
          queryOffsetSummaries,
          sourceConstraints,
          java.util.stream.Stream.empty(),
          java.util.stream.Stream.empty(),
          sessionProperties.getKsqlHostInfo(),
          ksqlExecutionContext.metricCollectors()));
}
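The lookup at the top of this method is the canonical DataSource access pattern: MetaStore.getSource returns null for unknown names, so every caller must check before dereferencing. A minimal sketch of that idiom on its own, assuming the same imports as above (requireSource is an illustrative helper name, not a ksql API):

// Minimal sketch of the metastore lookup-or-fail idiom used above.
// requireSource is an illustrative name, not part of the ksql codebase.
private static DataSource requireSource(
    final MetaStore metaStore,
    final SourceName name,
    final String statementText
) {
  // getSource returns null (rather than throwing) when the name is unknown.
  final DataSource dataSource = metaStore.getSource(name);
  if (dataSource == null) {
    throw new KsqlStatementException(
        String.format("Could not find STREAM/TABLE '%s' in the Metastore", name.text()),
        statementText);
  }
  return dataSource;
}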
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class ClusterTerminatorTest, method givenSourceRegisteredWithTopic.
private void givenSourceRegisteredWithTopic(
    final Format format,
    final String kafkaTopicName,
    final boolean sink
) {
  final String sourceName = "SOURCE_" + kafkaTopicName;

  final KsqlTopic topic = mock(KsqlTopic.class);
  when(topic.getKafkaTopicName()).thenReturn(kafkaTopicName);
  when(topic.getKeyFormat())
      .thenReturn(KeyFormat.of(FormatInfo.of(format.name()), SerdeFeatures.of(), Optional.empty()));
  when(topic.getValueFormat())
      .thenReturn(ValueFormat.of(FormatInfo.of(format.name()), SerdeFeatures.of()));

  final DataSource source = mock(DataSource.class);
  when(source.getKafkaTopicName()).thenReturn(kafkaTopicName);
  when(source.getKsqlTopic()).thenReturn(topic);
  when(source.isCasTarget()).thenReturn(sink);

  assertThat("topic already registered",
      dataSources.put(SourceName.of(sourceName), source), is(nullValue()));
}
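A hedged usage sketch: a test could register a JSON-formatted sink source on an illustrative topic name like this (FormatFactory.JSON is assumed to be the Format constant in scope; "orders" is made up):

// Hypothetical call site; the topic name and format choice are illustrative.
givenSourceRegisteredWithTopic(FormatFactory.JSON, "orders", true);
// A second registration against the same topic name would trip the
// "topic already registered" assertion above, since Map.put would return
// the previously stored mock instead of null.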
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class EngineExecutor, method maybeCreateSinkDdl.
private Optional<DdlCommand> maybeCreateSinkDdl(
    final ConfiguredStatement<?> cfgStatement,
    final KsqlStructuredDataOutputNode outputNode
) {
  if (!outputNode.createInto()) {
    validateExistingSink(outputNode);
    return Optional.empty();
  }

  final Statement statement = cfgStatement.getStatement();
  final SourceName intoSource = outputNode.getSinkName().get();
  final boolean orReplace = statement instanceof CreateAsSelect
      && ((CreateAsSelect) statement).isOrReplace();
  final boolean ifNotExists = statement instanceof CreateAsSelect
      && ((CreateAsSelect) statement).isNotExists();

  final DataSource dataSource = engineContext.getMetaStore().getSource(intoSource);
  if (dataSource != null && !ifNotExists && !orReplace) {
    final String failedSourceType = outputNode.getNodeOutputType().getKsqlType();
    final String foundSourceType = dataSource.getDataSourceType().getKsqlType();
    throw new KsqlException(String.format(
        "Cannot add %s '%s': A %s with the same name already exists",
        failedSourceType.toLowerCase(), intoSource.text(), foundSourceType.toLowerCase()));
  }

  return Optional.of(engineContext.createDdlCommand(outputNode));
}
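The name-clash check in the middle is worth isolating: a pre-existing source is only an error when neither IF NOT EXISTS nor OR REPLACE was specified. A sketch of just that guard under the same imports (ensureSinkIsCreatable is an illustrative name, not a ksql API):

// Sketch of the duplicate-sink guard above in isolation; illustrative helper name.
private static void ensureSinkIsCreatable(
    final DataSource existing,          // null when the sink name is unused
    final SourceName intoSource,
    final String newSourceKsqlType,     // e.g. "STREAM" or "TABLE"
    final boolean ifNotExists,
    final boolean orReplace
) {
  // IF NOT EXISTS and OR REPLACE each make a pre-existing source acceptable.
  if (existing != null && !ifNotExists && !orReplace) {
    throw new KsqlException(String.format(
        "Cannot add %s '%s': A %s with the same name already exists",
        newSourceKsqlType.toLowerCase(),
        intoSource.text(),
        existing.getDataSourceType().getKsqlType().toLowerCase()));
  }
}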
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class EngineExecutor, method getSources.
private Set<DataSource> getSources(final QueryPlan queryPlan) {
  final ImmutableSet.Builder<DataSource> sources = ImmutableSet.builder();
  for (final SourceName name : queryPlan.getSources()) {
    final DataSource dataSource = engineContext.getMetaStore().getSource(name);
    if (dataSource == null) {
      throw new KsqlException("Unknown source: " + name.toString(FormatOptions.noEscape()));
    }
    sources.add(dataSource);
  }
  return sources.build();
}
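The same resolve-or-fail loop can also be written as a stream pipeline. A behaviorally equivalent sketch under the same imports plus Guava's ImmutableSet collector, illustrative only:

// Stream-based equivalent of the loop above; same failure behavior for unknown names.
final ImmutableSet<DataSource> resolved = queryPlan.getSources().stream()
    .map(name -> {
      final DataSource dataSource = engineContext.getMetaStore().getSource(name);
      if (dataSource == null) {
        throw new KsqlException("Unknown source: " + name.toString(FormatOptions.noEscape()));
      }
      return dataSource;
    })
    .collect(ImmutableSet.toImmutableSet());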
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class EngineExecutor, method execute.
ExecuteResult execute(final KsqlPlan plan, final boolean restoreInProgress) {
  if (!plan.getQueryPlan().isPresent()) {
    final String ddlResult = plan.getDdlCommand()
        .map(ddl -> executeDdl(
            ddl, plan.getStatementText(), false, Collections.emptySet(), restoreInProgress))
        .orElseThrow(() -> new IllegalStateException(
            "DdlResult should be present if there is no physical plan."));
    return ExecuteResult.of(ddlResult);
  }

  final QueryPlan queryPlan = plan.getQueryPlan().get();
  final KsqlConstants.PersistentQueryType persistentQueryType =
      plan.getPersistentQueryType().get();

  // CREATE_SOURCE queries do not write to any topic, so check for a read-only sink
  // only for queries that attempt to write to a sink (i.e. INSERT or CREATE_AS).
  if (persistentQueryType != KsqlConstants.PersistentQueryType.CREATE_SOURCE) {
    final DataSource sinkSource =
        engineContext.getMetaStore().getSource(queryPlan.getSink().get());
    if (sinkSource != null && sinkSource.isSource()) {
      throw new KsqlException(String.format(
          "Cannot insert into read-only %s: %s",
          sinkSource.getDataSourceType().getKsqlType().toLowerCase(),
          sinkSource.getName().text()));
    }
  }

  final Optional<String> ddlResult = plan.getDdlCommand()
      .map(ddl -> executeDdl(
          ddl, plan.getStatementText(), true, queryPlan.getSources(), restoreInProgress));

  // Return if the source to create already exists.
  if (ddlResult.isPresent() && ddlResult.get().contains("already exists")) {
    return ExecuteResult.of(ddlResult.get());
  }

  // A source table query is materialized only when the feature is enabled;
  // otherwise only the DDL command must be executed.
  if (persistentQueryType == KsqlConstants.PersistentQueryType.CREATE_SOURCE
      && !isSourceTableMaterializationEnabled()) {
    LOG.info(String.format(
        "Source table query '%s' won't be materialized because '%s' is disabled.",
        plan.getStatementText(),
        KsqlConfig.KSQL_SOURCE_TABLE_MATERIALIZATION_ENABLED));
    return ExecuteResult.of(ddlResult.get());
  }

  return ExecuteResult.of(
      executePersistentQuery(queryPlan, plan.getStatementText(), persistentQueryType));
}
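The read-only check near the top is another recurring DataSource pattern: DataSource.isSource() flags sources created with CREATE SOURCE STREAM/TABLE, which may not be used as write targets. A sketch of that guard in isolation (rejectReadOnlySink is an illustrative name, not a ksql API):

// Sketch of the read-only-sink guard above; illustrative helper name.
private static void rejectReadOnlySink(final DataSource sinkSource) {
  // isSource() is true for read-only sources (CREATE SOURCE STREAM/TABLE).
  if (sinkSource != null && sinkSource.isSource()) {
    throw new KsqlException(String.format(
        "Cannot insert into read-only %s: %s",
        sinkSource.getDataSourceType().getKsqlType().toLowerCase(),
        sinkSource.getName().text()));
  }
}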