Usage of io.confluent.ksql.KsqlExecutionContext.ExecuteResult in the ksql project by confluentinc.
Example from class TestExecutorUtil, method executePlan.
/**
 * Executes the supplied plan against the execution context and bundles the
 * result together with the data sources referenced by the plan's query plan
 * (empty when the plan carries no query plan, e.g. pure DDL).
 *
 * @param executionContext the context used both to execute the plan and to
 *        resolve sources from its metastore
 * @param plan the configured plan to execute
 * @return the execution result paired with the (optional) resolved sources
 */
private static ExecuteResultAndSources executePlan(
    final KsqlExecutionContext executionContext,
    final ConfiguredKsqlPlan plan
) {
  // Execute first: execution may register new sources in the metastore,
  // which the lookup below depends on.
  final ExecuteResult result =
      executionContext.execute(executionContext.getServiceContext(), plan);
  final Optional<List<DataSource>> resolvedSources = plan.getPlan()
      .getQueryPlan()
      .map(qp -> getSources(qp.getSources(), executionContext.getMetaStore()));
  return new ExecuteResultAndSources(result, resolvedSources);
}
Usage of io.confluent.ksql.KsqlExecutionContext.ExecuteResult in the ksql project by confluentinc.
Example from class KsqlTesterTest, method execute.
@SuppressWarnings("unchecked")
// Prepares and executes a single parsed test statement. Non-query statements
// (INSERT VALUES, SET/UNSET property) are handled inline and return early;
// anything producing a persistent query gets a TopologyTestDriver wired up
// with its input/output topics for the test pipeline.
private void execute(final ParsedStatement parsedStatement) {
final PreparedStatement<?> engineStatement = engine.prepare(parsedStatement);
final ConfiguredStatement<?> configured = ConfiguredStatement.of(engineStatement, SessionConfig.of(config, overrides));
// Topics are created up front so that every statement kind below can assume
// its source topics exist.
createTopics(engineStatement);
// Dispatch on statement type. The casts are safe because each branch is
// guarded by the corresponding instanceof check (hence @SuppressWarnings).
if (engineStatement.getStatement() instanceof InsertValues) {
pipeInput((ConfiguredStatement<InsertValues>) configured);
return;
} else if (engineStatement.getStatement() instanceof SetProperty) {
PropertyOverrider.set((ConfiguredStatement<SetProperty>) configured, overrides);
return;
} else if (engineStatement.getStatement() instanceof UnsetProperty) {
PropertyOverrider.unset((ConfiguredStatement<UnsetProperty>) configured, overrides);
return;
}
final ConfiguredStatement<?> injected = formatInjector.inject(configured);
final ExecuteResult result = engine.execute(serviceContext, injected);
// is DDL statement: no query was produced, so there is nothing to drive.
if (!result.getQuery().isPresent()) {
return;
}
final PersistentQueryMetadata query = (PersistentQueryMetadata) result.getQuery().get();
final Topology topology = query.getTopology();
// Build the Streams config for the driver; the state dir is redirected to a
// temp folder so test runs do not interfere with each other.
final Properties properties = new Properties();
properties.putAll(query.getStreamsProperties());
properties.put(StreamsConfig.STATE_DIR_CONFIG, tmpFolder.getRoot().getAbsolutePath());
final TopologyTestDriver driver = new TopologyTestDriver(topology, properties);
// Resolve each source name to its topic plus key/value serdes for the driver.
final List<TopicInfo> inputTopics = query.getSourceNames().stream().map(sn -> engine.getMetaStore().getSource(sn)).map(ds -> new TopicInfo(ds.getKafkaTopicName(), keySerde(ds), valueSerde(ds))).collect(Collectors.toList());
// Sink may be Optional for source tables. Once source table query execution is supported,
// we would need to add a condition here to not create an output topic info.
final DataSource output = engine.getMetaStore().getSource(query.getSinkName().get());
final TopicInfo outputInfo = new TopicInfo(output.getKafkaTopicName(), keySerde(output), valueSerde(output));
driverPipeline.addDriver(driver, inputTopics, outputInfo);
// Keep the driver (and its properties) addressable by query id for later
// inspection/teardown.
drivers.put(query.getQueryId(), new DriverAndProperties(driver, properties));
}
Usage of io.confluent.ksql.KsqlExecutionContext.ExecuteResult in the ksql project by confluentinc.
Example from class ListSourceExecutorTest, method shouldShowColumnsSource.
@Test
// Verifies that DESCRIBE on a sink stream returns a source description that
// matches the one built by SourceDescriptionFactory, including the running
// query's sink/topic info and its status count.
public void shouldShowColumnsSource() {
// Given: a source stream and a persistent CSAS query writing to SINK.
engine.givenSource(DataSourceType.KSTREAM, "SOURCE");
final ExecuteResult result = engine.getEngine().execute(engine.getServiceContext(), engine.configure("CREATE STREAM SINK AS SELECT * FROM source;"));
// orElseThrow: the CSAS must have produced a query, otherwise the test setup is broken.
final PersistentQueryMetadata metadata = (PersistentQueryMetadata) result.getQuery().orElseThrow(IllegalArgumentException::new);
final DataSource stream = engine.getEngine().getMetaStore().getSource(SourceName.of("SINK"));
// When: DESCRIBE SINK is executed through the custom showColumns executor.
final SourceDescriptionEntity sourceDescription = (SourceDescriptionEntity) CUSTOM_EXECUTORS.showColumns().execute(ConfiguredStatement.of(PreparedStatement.of("DESCRIBE SINK;", new ShowColumns(SourceName.of("SINK"), false)), SessionConfig.of(engine.getKsqlConfig(), ImmutableMap.of())), SESSION_PROPERTIES, engine.getEngine(), engine.getServiceContext()).getEntity().orElseThrow(IllegalStateException::new);
// Then: the description equals one built directly from the stream's metadata,
// with exactly one query counted in the state the query is currently in.
final QueryStatusCount queryStatusCount = QueryStatusCount.fromStreamsStateCounts(Collections.singletonMap(metadata.getState(), 1));
assertThat(sourceDescription.getSourceDescription(), equalTo(SourceDescriptionFactory.create(stream, false, ImmutableList.of(), ImmutableList.of(new RunningQuery(metadata.getStatementString(), ImmutableSet.of(metadata.getSinkName().get().toString(FormatOptions.noEscape())), ImmutableSet.of(metadata.getResultTopic().get().getKafkaTopicName()), metadata.getQueryId(), queryStatusCount, KsqlConstants.KsqlQueryType.PERSISTENT)), Optional.empty(), ImmutableList.of(), ImmutableList.of(), new MetricCollectors())));
}
Usage of io.confluent.ksql.KsqlExecutionContext.ExecuteResult in the ksql project by confluentinc.
Example from class InteractiveStatementExecutor, method executePlan.
/**
 * Executes a ksql plan for a command from the command topic: publishes an
 * EXECUTING status, runs the plan, bumps the query-id generator past this
 * command's offset, starts any resulting query (in EXECUTE mode only), and
 * finally publishes a SUCCESS status carrying the query id, if any.
 *
 * @param command the command whose properties are merged into the config
 * @param commandId id used to key the status updates
 * @param commandStatusFuture optional future notified of status transitions
 * @param plan the plan to execute
 * @param mode EXECUTE starts the query; other modes only build it
 * @param offset the command's offset; next query id is offset + 1
 * @param restoreInProgress whether this execution is part of a restore
 */
private void executePlan(
    final Command command,
    final CommandId commandId,
    final Optional<CommandStatusFuture> commandStatusFuture,
    final KsqlPlan plan,
    final Mode mode,
    final long offset,
    final boolean restoreInProgress
) {
  final KsqlConfig mergedConfig = buildMergedConfig(command);
  final ConfiguredKsqlPlan configuredPlan = ConfiguredKsqlPlan.of(
      plan,
      SessionConfig.of(mergedConfig, command.getOverwriteProperties())
  );
  putStatus(
      commandId,
      commandStatusFuture,
      new CommandStatus(CommandStatus.Status.EXECUTING, "Executing statement")
  );
  final ExecuteResult executeResult =
      ksqlEngine.execute(serviceContext, configuredPlan, restoreInProgress);
  // Ensure future queries get ids strictly after this command's offset.
  queryIdGenerator.setNextId(offset + 1);
  // Only EXECUTE mode actually starts the query; other modes just build it.
  if (mode == Mode.EXECUTE) {
    executeResult.getQuery().ifPresent(QueryMetadata::start);
  }
  final String successMessage = getSuccessMessage(executeResult);
  final Optional<QueryId> queryId =
      executeResult.getQuery().map(QueryMetadata::getQueryId);
  putFinalStatus(
      commandId,
      commandStatusFuture,
      new CommandStatus(CommandStatus.Status.SUCCESS, successMessage, queryId)
  );
}
Aggregations