Use of io.confluent.ksql.name.SourceName in project ksql by confluentinc.
In the class CreateSourceFactory, the method createTableCommand:
// This method is called by simple CREATE statements
public CreateTableCommand createTableCommand(final CreateTable statement, final KsqlConfig ksqlConfig) {
  final SourceName sourceName = statement.getName();
  final CreateSourceProperties props = statement.getProperties();
  final String topicName = ensureTopicExists(props, serviceContext);
  final LogicalSchema schema = buildSchema(statement.getElements(), ksqlConfig);
  final DataSource dataSource = metaStore.getSource(sourceName);
  if (dataSource != null && !statement.isOrReplace() && !statement.isNotExists()) {
    final String sourceType = dataSource.getDataSourceType().getKsqlType();
    throw new KsqlException(String.format(
        "Cannot add table '%s': A %s with the same name already exists",
        sourceName.text(), sourceType.toLowerCase()));
  }
  if (schema.key().isEmpty()) {
    final boolean usingSchemaInference = props.getValueSchemaId().isPresent();
    final String additional = usingSchemaInference
        ? System.lineSeparator()
            + "Use a partial schema to define the primary key and still load the value columns from "
            + "the Schema Registry, for example:" + System.lineSeparator()
            + "\tCREATE TABLE " + sourceName.text() + " (ID INT PRIMARY KEY) WITH (...);"
        : "";
    throw new KsqlException(
        "Tables require a PRIMARY KEY. Please define the PRIMARY KEY." + additional);
  }
  throwIfCreateOrReplaceOnSourceStreamOrTable(statement, dataSource);
  final Optional<TimestampColumn> timestampColumn = buildTimestampColumn(ksqlConfig, props, schema);
  return new CreateTableCommand(
      sourceName, schema, timestampColumn, topicName,
      buildFormats(statement.getName(), schema, props, ksqlConfig),
      getWindowInfo(props),
      Optional.of(statement.isOrReplace()),
      Optional.of(statement.isSource()));
}
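The duplicate-name check above only needs SourceName.of and SourceName.text to build its error message. Below is a minimal standalone sketch of that message construction, assuming the ksql common module is on the classpath; the class name, the ORDERS source, and the hard-coded source type are illustrative only.
import io.confluent.ksql.name.SourceName;

public class DuplicateSourceMessageExample {

  public static void main(final String[] args) {
    // Hypothetical inputs; the real createTableCommand reads these from the statement and metastore.
    final SourceName sourceName = SourceName.of("ORDERS");
    final String sourceType = "TABLE";

    // Same message format as the check in createTableCommand above.
    System.out.println(String.format(
        "Cannot add table '%s': A %s with the same name already exists",
        sourceName.text(), sourceType.toLowerCase()));
  }
}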
Use of io.confluent.ksql.name.SourceName in project ksql by confluentinc.
In the class DefaultSchemaInjectorTest, the method setupCopy:
private static Object setupCopy(final InvocationOnMock inv, final CreateAsSelect source, final CreateAsSelect mock) {
  final SourceName name = source.getName();
  when(mock.getName()).thenReturn(name);
  when(mock.accept(any(), any())).thenCallRealMethod();
  when(mock.getProperties()).thenReturn(inv.getArgument(0));
  when(mock.getQuery()).thenReturn(new Query(
      Optional.empty(),
      new Select(ImmutableList.of(new AllColumns(Optional.empty()))),
      new Table(SourceName.of("sink")),
      Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(),
      Optional.empty(), Optional.empty(),
      false,
      OptionalInt.empty()));
  return mock;
}
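A reduced, hedged sketch of the same stubbing pattern follows. It uses a hypothetical NamedStatement interface in place of CreateAsSelect so the example stays self-contained, and the SINK name is made up; only SourceName and plain Mockito are real here.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import io.confluent.ksql.name.SourceName;

public class MockNameCopyExample {

  // Hypothetical stand-in for CreateAsSelect: just the piece of the API the copy step touches.
  interface NamedStatement {
    SourceName getName();
  }

  public static void main(final String[] args) {
    // The "original" statement whose name should be carried over.
    final NamedStatement source = mock(NamedStatement.class);
    when(source.getName()).thenReturn(SourceName.of("SINK"));

    // The copy: stub getName() to return the original's SourceName, as setupCopy does.
    final NamedStatement copy = mock(NamedStatement.class);
    when(copy.getName()).thenReturn(source.getName());

    System.out.println(copy.getName().text()); // prints SINK
  }
}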
Use of io.confluent.ksql.name.SourceName in project ksql by confluentinc.
In the class TopicDeleteInjector, the method checkTopicRefs:
private void checkTopicRefs(final DataSource source) {
  final String topicName = source.getKafkaTopicName();
  final SourceName sourceName = source.getName();
  final Map<SourceName, DataSource> sources = metastore.getAllDataSources();
  final String using = sources.values().stream()
      .filter(s -> s.getKafkaTopicName().equals(topicName))
      .map(DataSource::getName)
      .filter(name -> !sourceName.equals(name))
      .map(SourceName::text)
      .sorted()
      .collect(Collectors.joining(", "));
  if (!using.isEmpty()) {
    throw new RuntimeException(String.format(
        "Refusing to delete topic. Found other data sources (%s) using topic %s",
        using, topicName));
  }
}
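A standalone hedged sketch of the same reference check follows, with a plain Map from SourceName to topic name standing in for the metastore; all source names and topics below are invented for illustration.
import io.confluent.ksql.name.SourceName;
import java.util.Map;
import java.util.stream.Collectors;

public class TopicRefCheckExample {

  public static void main(final String[] args) {
    final SourceName toDelete = SourceName.of("ORDERS");
    final String topicName = "orders";

    // Source name -> Kafka topic name; stands in for metastore.getAllDataSources().
    final Map<SourceName, String> sources = Map.of(
        SourceName.of("ORDERS"), "orders",
        SourceName.of("ORDERS_COPY"), "orders",
        SourceName.of("USERS"), "users");

    // Same pipeline shape as checkTopicRefs: other sources on the same topic, sorted by name.
    final String using = sources.entrySet().stream()
        .filter(e -> e.getValue().equals(topicName))
        .map(Map.Entry::getKey)
        .filter(name -> !toDelete.equals(name))
        .map(SourceName::text)
        .sorted()
        .collect(Collectors.joining(", "));

    if (!using.isEmpty()) {
      System.out.println(String.format(
          "Refusing to delete topic. Found other data sources (%s) using topic %s",
          using, topicName));
    }
  }
}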
Use of io.confluent.ksql.name.SourceName in project ksql by confluentinc.
In the class ColumnReferenceValidatorTest, the method shouldGetSourceForQualifiedColumnRef:
@Test
public void shouldGetSourceForQualifiedColumnRef() {
  // Given:
  final QualifiedColumnReferenceExp expression =
      new QualifiedColumnReferenceExp(SourceName.of("something"), ColumnName.of("else"));
  when(sourceSchemas.sourcesWithField(any(), any()))
      .thenReturn(ImmutableSet.of(SourceName.of("something")));

  // When:
  final Set<SourceName> columnRefs = analyzer.analyzeExpression(expression, CLAUSE_TYPE);

  // Then:
  verify(sourceSchemas)
      .sourcesWithField(Optional.of(expression.getQualifier()), expression.getColumnName());
  assertThat(Iterables.getOnlyElement(columnRefs), is(SourceName.of("something")));
}
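For reference, a hedged standalone sketch that builds the same qualified column reference and reads back its qualifier and column name; the import packages follow the usual ksql module layout but are assumptions here, and the example class is not part of the project.
// Assumed import paths for QualifiedColumnReferenceExp and ColumnName.
import io.confluent.ksql.execution.expression.tree.QualifiedColumnReferenceExp;
import io.confluent.ksql.name.ColumnName;
import io.confluent.ksql.name.SourceName;

public class QualifiedColumnRefExample {

  public static void main(final String[] args) {
    // Same qualifier and column name as the test above.
    final QualifiedColumnReferenceExp expression =
        new QualifiedColumnReferenceExp(SourceName.of("something"), ColumnName.of("else"));

    System.out.println(expression.getQualifier().text());  // something
    System.out.println(expression.getColumnName().text()); // else
  }
}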
Use of io.confluent.ksql.name.SourceName in project ksql by confluentinc.
In the class ScalablePushUtil, the method isScalablePushQuery:
@SuppressWarnings({ "BooleanExpressionComplexity", "CyclomaticComplexity" })
public static boolean isScalablePushQuery(
    final Statement statement,
    final KsqlEngine ksqlEngine,
    final KsqlConfig ksqlConfig,
    final Map<String, Object> overrides
) {
  if (!isPushV2Enabled(ksqlConfig, overrides)) {
    return false;
  }
  if (!(statement instanceof Query)) {
    return false;
  }
  final Query query = (Query) statement;
  final SourceFinder sourceFinder = new SourceFinder();
  sourceFinder.process(query.getFrom(), null);
  // It will be present if it's not a join, which we don't handle
  if (!sourceFinder.getSourceName().isPresent()) {
    return false;
  }
  // Find all of the writers to this particular source.
  final SourceName sourceName = sourceFinder.getSourceName().get();
  final Set<QueryId> upstreamQueries = ksqlEngine.getQueriesWithSink(sourceName);
  // See if the config or override have set the stream to be "latest"
  final boolean isLatest = isLatest(ksqlConfig, overrides);
  // Cannot be a pull query, i.e. must be a push
  return !query.isPullQuery()
      // Group by is not supported
      && !query.getGroupBy().isPresent()
      // Windowing is not supported
      && !query.getWindow().isPresent()
      // Having clause is not supported
      && !query.getHaving().isPresent()
      // Partition by is not supported
      && !query.getPartitionBy().isPresent()
      // There must be an EMIT CHANGES clause
      && (query.getRefinement().isPresent()
          && query.getRefinement().get().getOutputRefinement() == OutputRefinement.CHANGES)
      // Must be reading from "latest"
      && isLatest
      // We only handle a single sink source at the moment from a CTAS/CSAS
      && upstreamQueries.size() == 1
      // ROWPARTITION and ROWOFFSET are not currently supported in SPQs
      && !containsDisallowedColumns(query, ksqlConfig);
}
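The return expression above is a chain of guards. A hedged restatement over plain booleans follows, just to make the guard order easy to scan; the helper and all of its inputs are hypothetical and not part of ksql.
public class ScalablePushEligibilityExample {

  // Mirrors the conditions in isScalablePushQuery, with each guard reduced to a boolean input.
  static boolean isEligible(
      final boolean isPullQuery,
      final boolean hasGroupBy,
      final boolean hasWindow,
      final boolean hasHaving,
      final boolean hasPartitionBy,
      final boolean emitsChanges,
      final boolean readsFromLatest,
      final int upstreamQueryCount,
      final boolean usesDisallowedColumns) {
    return !isPullQuery              // must be a push query, not a pull query
        && !hasGroupBy               // GROUP BY is not supported
        && !hasWindow                // windowing is not supported
        && !hasHaving                // HAVING is not supported
        && !hasPartitionBy           // PARTITION BY is not supported
        && emitsChanges              // there must be an EMIT CHANGES clause
        && readsFromLatest           // must be reading from "latest"
        && upstreamQueryCount == 1   // exactly one upstream CTAS/CSAS writing to the source
        && !usesDisallowedColumns;   // ROWPARTITION and ROWOFFSET are not supported
  }

  public static void main(final String[] args) {
    System.out.println(isEligible(false, false, false, false, false, true, true, 1, false)); // true
  }
}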