Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class SchemaKSourceFactory, method buildTable:
private static SchemaKTable<?> buildTable(
    final PlanBuildContext buildContext,
    final DataSource dataSource,
    final Stacker contextStacker
) {
  final KeyFormat keyFormat = dataSource.getKsqlTopic().getKeyFormat();
  if (keyFormat.isWindowed()) {
    throw new IllegalArgumentException("windowed");
  }

  final SourceStep<KTableHolder<GenericKey>> step;

  final int pseudoColumnVersionToUse = determinePseudoColumnVersionToUse(buildContext);

  // If the old query has a v1 table step, continue to use it.
  // See https://github.com/confluentinc/ksql/pull/7990
  boolean useOldExecutionStepVersion = false;
  if (buildContext.getPlanInfo().isPresent()) {
    final Set<ExecutionStep<?>> sourceSteps = buildContext.getPlanInfo().get().getSources();
    useOldExecutionStepVersion = sourceSteps.stream()
        .anyMatch(executionStep -> executionStep instanceof TableSourceV1);
  }
  if (useOldExecutionStepVersion
      && pseudoColumnVersionToUse != SystemColumns.LEGACY_PSEUDOCOLUMN_VERSION_NUMBER) {
    throw new IllegalStateException("TableSourceV2 was released in conjunction with pseudocolumn "
        + "version 1. Something has gone very wrong");
  }
  if (buildContext.getKsqlConfig().getBoolean(KsqlConfig.KSQL_ROWPARTITION_ROWOFFSET_ENABLED)
      && !useOldExecutionStepVersion) {
    step = ExecutionStepFactory.tableSource(
        contextStacker,
        dataSource.getSchema(),
        dataSource.getKafkaTopicName(),
        Formats.from(dataSource.getKsqlTopic()),
        dataSource.getTimestampColumn(),
        InternalFormats.of(keyFormat, Formats.from(dataSource.getKsqlTopic()).getValueFormat()),
        pseudoColumnVersionToUse
    );
  } else {
    step = ExecutionStepFactory.tableSourceV1(
        contextStacker,
        dataSource.getSchema(),
        dataSource.getKafkaTopicName(),
        Formats.from(dataSource.getKsqlTopic()),
        dataSource.getTimestampColumn(),
        pseudoColumnVersionToUse
    );
  }

  return schemaKTable(
      buildContext,
      resolveSchema(buildContext, step, dataSource),
      dataSource.getKsqlTopic().getKeyFormat(),
      step
  );
}
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class InsertValuesExecutor, method getDataSource:
private static DataSource getDataSource(
    final KsqlConfig ksqlConfig,
    final MetaStore metaStore,
    final InsertValues insertValues
) {
  final DataSource dataSource = metaStore.getSource(insertValues.getTarget());
  if (dataSource == null) {
    throw new KsqlException(
        "Cannot insert values into an unknown stream/table: " + insertValues.getTarget());
  }

  if (dataSource.getKsqlTopic().getKeyFormat().isWindowed()) {
    throw new KsqlException("Cannot insert values into windowed stream/table!");
  }

  final ReservedInternalTopics internalTopics = new ReservedInternalTopics(ksqlConfig);
  if (internalTopics.isReadOnly(dataSource.getKafkaTopicName())) {
    throw new KsqlException(
        "Cannot insert values into read-only topic: " + dataSource.getKafkaTopicName());
  }

  if (dataSource.isSource()) {
    throw new KsqlException(String.format(
        "Cannot insert values into read-only %s: %s",
        dataSource.getDataSourceType().getKsqlType().toLowerCase(),
        dataSource.getName().text()));
  }

  return dataSource;
}
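getDataSource is a fail-fast guard chain: each check rejects one class of invalid insert target, in declaration order, before the source is returned. A sketch of the same pattern with hypothetical Target and Check types (none of these are ksql classes):

import java.util.List;
import java.util.function.Function;
import java.util.function.Predicate;

public final class GuardChain {

  // Hypothetical insert target, reduced to the four properties the guards test.
  record Target(boolean known, boolean windowed, boolean readOnlyTopic, boolean sourceOnly) {}

  record Check(Predicate<Target> invalid, Function<Target, String> message) {}

  static final List<Check> CHECKS = List.of(
      new Check(t -> !t.known(), t -> "unknown stream/table"),
      new Check(Target::windowed, t -> "windowed stream/table"),
      new Check(Target::readOnlyTopic, t -> "read-only topic"),
      new Check(Target::sourceOnly, t -> "read-only source"));

  // Returns the target if every guard passes, otherwise throws on the
  // first failing guard, in order, like getDataSource does.
  static Target validate(final Target target) {
    for (Check check : CHECKS) {
      if (check.invalid().test(target)) {
        throw new IllegalArgumentException(
            "Cannot insert values into " + check.message().apply(target));
      }
    }
    return target;
  }

  public static void main(String[] args) {
    System.out.println(validate(new Target(true, false, false, false))); // passes all guards
    try {
      validate(new Target(true, true, false, false));
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // Cannot insert values into windowed stream/table
    }
  }
}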
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class ListSourceExecutor, method sourceDescriptionList:
private static Optional<KsqlEntity> sourceDescriptionList(
    final ConfiguredStatement<? extends StatementWithExtendedClause> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext,
    final List<? extends DataSource> sources,
    final boolean extended
) {
  final RemoteHostExecutor remoteHostExecutor = RemoteHostExecutor.create(
      statement, sessionProperties, executionContext, serviceContext.getKsqlClient());

  final Multimap<String, SourceDescription> remoteSourceDescriptions = extended
      ? RemoteSourceDescriptionExecutor.fetchSourceDescriptions(remoteHostExecutor)
      : ImmutableMultimap.of();

  final List<SourceDescriptionWithWarnings> descriptions = sources.stream()
      .map(s -> describeSource(
          statement.getSessionConfig().getConfig(false),
          executionContext,
          serviceContext,
          s.getName(),
          extended,
          statement,
          sessionProperties,
          remoteSourceDescriptions.get(s.getName().toString())))
      .collect(Collectors.toList());

  return Optional.of(new SourceDescriptionList(
      statement.getStatementText(),
      descriptions.stream().map(d -> d.description).collect(Collectors.toList()),
      descriptions.stream().flatMap(d -> d.warnings.stream()).collect(Collectors.toList())));
}
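The Multimap keyed by source name is what lets sourceDescriptionList hand describeSource every remote host's view of the same source at once. A small sketch of that grouping using Guava's ImmutableMultimap; the source names and description strings here are made up:

import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;

public final class RemoteDescriptionsDemo {
  public static void main(String[] args) {
    // One entry per (source name, per-host description). Duplicate keys are
    // exactly the point: every remote host contributes its own value.
    final Multimap<String, String> remote = ImmutableMultimap.<String, String>builder()
        .put("ORDERS", "description from host-1")
        .put("ORDERS", "description from host-2")
        .put("USERS", "description from host-1")
        .build();

    // remote.get("ORDERS") hands the describeSource call both host views at once.
    System.out.println(remote.get("ORDERS")); // [description from host-1, description from host-2]
  }
}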
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class KsqlTesterTest, method pipeInput:
private void pipeInput(final ConfiguredStatement<InsertValues> statement) {
  final InsertValues insertValues = statement.getStatement();
  final DataSource dataSource = engine.getMetaStore().getSource(insertValues.getTarget());
  if (dataSource == null) {
    throw new KsqlException("Unknown data source " + insertValues.getTarget());
  }

  final KsqlGenericRecord record = new GenericRecordFactory(
      config, engine.getMetaStore(), System::currentTimeMillis
  ).build(
      insertValues.getColumns(),
      insertValues.getValues(),
      dataSource.getSchema(),
      dataSource.getDataSourceType()
  );
  driverPipeline.pipeInput(dataSource.getKafkaTopicName(), record.key, record.value, record.ts);
}
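driverPipeline ultimately feeds a Kafka Streams TopologyTestDriver. A sketch of the equivalent plain Kafka Streams calls, with a placeholder topic, a trivial topology, and String serdes standing in for what ksql derives from the DataSource:

import java.time.Instant;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;

public final class PipeInputDemo {
  public static void main(String[] args) {
    // A trivial topology that just consumes the topic, so the driver has
    // something to run; ksql would supply the query's real topology here.
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream("source_topic", Consumed.with(Serdes.String(), Serdes.String()))
        .foreach((k, v) -> { });

    try (TopologyTestDriver driver = new TopologyTestDriver(builder.build())) {
      final TestInputTopic<String, String> input = driver.createInputTopic(
          "source_topic", Serdes.String().serializer(), Serdes.String().serializer());

      // The equivalent of driverPipeline.pipeInput(topic, key, value, ts):
      // key, value, and an explicit record timestamp.
      input.pipeInput("key-1", "value-1", Instant.ofEpochMilli(1234L));
    }
  }
}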
Use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.
The class KsqlTesterTest, method execute:
@SuppressWarnings("unchecked")
private void execute(final ParsedStatement parsedStatement) {
  final PreparedStatement<?> engineStatement = engine.prepare(parsedStatement);
  final ConfiguredStatement<?> configured = ConfiguredStatement.of(
      engineStatement, SessionConfig.of(config, overrides));

  createTopics(engineStatement);

  if (engineStatement.getStatement() instanceof InsertValues) {
    pipeInput((ConfiguredStatement<InsertValues>) configured);
    return;
  } else if (engineStatement.getStatement() instanceof SetProperty) {
    PropertyOverrider.set((ConfiguredStatement<SetProperty>) configured, overrides);
    return;
  } else if (engineStatement.getStatement() instanceof UnsetProperty) {
    PropertyOverrider.unset((ConfiguredStatement<UnsetProperty>) configured, overrides);
    return;
  }

  final ConfiguredStatement<?> injected = formatInjector.inject(configured);
  final ExecuteResult result = engine.execute(serviceContext, injected);
  // DDL statements do not create a query, so there is nothing more to run.
  if (!result.getQuery().isPresent()) {
    return;
  }
  final PersistentQueryMetadata query = (PersistentQueryMetadata) result.getQuery().get();
  final Topology topology = query.getTopology();
  final Properties properties = new Properties();
  properties.putAll(query.getStreamsProperties());
  properties.put(StreamsConfig.STATE_DIR_CONFIG, tmpFolder.getRoot().getAbsolutePath());

  final TopologyTestDriver driver = new TopologyTestDriver(topology, properties);

  final List<TopicInfo> inputTopics = query.getSourceNames().stream()
      .map(sn -> engine.getMetaStore().getSource(sn))
      .map(ds -> new TopicInfo(ds.getKafkaTopicName(), keySerde(ds), valueSerde(ds)))
      .collect(Collectors.toList());
  // The sink may be absent for source tables. Once source-table query execution
  // is supported, we will need a condition here to skip creating the output topic info.
  final DataSource output = engine.getMetaStore().getSource(query.getSinkName().get());
  final TopicInfo outputInfo = new TopicInfo(
      output.getKafkaTopicName(), keySerde(output), valueSerde(output));

  driverPipeline.addDriver(driver, inputTopics, outputInfo);
  drivers.put(query.getQueryId(), new DriverAndProperties(driver, properties));
}
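The one non-obvious piece of driver setup above is pointing StreamsConfig.STATE_DIR_CONFIG at a per-test directory so stateful queries do not share RocksDB state between runs. A minimal standalone sketch of that setup against a toy stateful topology (the topic name and topology are placeholders, not ksql's):

import java.nio.file.Files;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;

public final class DriverSetupDemo {
  public static void main(String[] args) throws Exception {
    // A stateful toy topology: counting forces a state store, which is
    // what makes the state directory matter in the first place.
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream("in", Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey()
        .count();

    final Properties properties = new Properties();
    // Isolate this driver's state files, as KsqlTesterTest does with its
    // JUnit TemporaryFolder.
    properties.put(StreamsConfig.STATE_DIR_CONFIG,
        Files.createTempDirectory("driver-state").toString());

    try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), properties)) {
      System.out.println("state dir in use: " + properties.get(StreamsConfig.STATE_DIR_CONFIG));
    }
  }
}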