Use of io.confluent.ksql.parser.tree.Sink in the ksql project by confluentinc.
Class EngineExecutor, method sourceTablePlan:
@SuppressFBWarnings(value = "NP_NULL_PARAM_DEREF_NONVIRTUAL")
private KsqlPlan sourceTablePlan(final ConfiguredStatement<?> statement) {
  final CreateTable createTableStmt = (CreateTable) statement.getStatement();
  final CreateTableCommand createCommand = (CreateTableCommand) engineContext.createDdlCommand(
      statement.getStatementText(),
      (ExecutableDdlStatement) statement.getStatement(),
      config);

  final Relation sourceRelation =
      new AliasedRelation(new Table(createTableStmt.getName()), createTableStmt.getName());

  // Project only the VALUE/HEADER columns of the source table. Key columns are
  // appended automatically when a pull query selects all columns.
  final Select projection = new Select(createTableStmt.getElements().stream()
      .filter(col -> !col.getConstraints().isKey() && !col.getConstraints().isPrimaryKey())
      .map(col -> new SingleColumn(
          new UnqualifiedColumnReferenceExp(col.getName()),
          Optional.of(col.getName())))
      .collect(Collectors.toList()));

  // EMIT CHANGES keeps the query running so every new record is materialized
  // and available to pull queries.
  final RefinementInfo emitChanges = RefinementInfo.of(OutputRefinement.CHANGES);

  // Equivalent to `SELECT ... FROM <source-table> EMIT CHANGES` with no sink
  // topic for the results: the query exists solely to materialize the source table.
  final Query materializeQuery = new Query(
      Optional.empty(),
      projection,
      sourceRelation,
      Optional.empty(),
      Optional.empty(),
      Optional.empty(),
      Optional.empty(),
      Optional.empty(),
      Optional.of(emitChanges),
      false,
      OptionalInt.empty());

  // The source table does not exist in the live metastore yet, so plan against
  // a throwaway metastore that contains only this table. It is handed to
  // planQuery below to build the ExecutorPlans.
  final MutableMetaStore scratchMetastore = new MetaStoreImpl(new InternalFunctionRegistry());
  final Formats formats = createCommand.getFormats();
  scratchMetastore.putSource(
      new KsqlTable<>(
          statement.getStatementText(),
          createTableStmt.getName(),
          createCommand.getSchema(),
          Optional.empty(),
          false,
          new KsqlTopic(
              createCommand.getTopicName(),
              KeyFormat.of(formats.getKeyFormat(), formats.getKeyFeatures(), Optional.empty()),
              ValueFormat.of(formats.getValueFormat(), formats.getValueFeatures())),
          true),
      false);

  final ExecutorPlans plans =
      planQuery(statement, materializeQuery, Optional.empty(), Optional.empty(), scratchMetastore);
  final KsqlBareOutputNode outputNode = (KsqlBareOutputNode) plans.logicalPlan.getNode().get();
  final QueryPlan queryPlan = new QueryPlan(
      getSourceNames(outputNode),
      Optional.empty(),
      plans.physicalPlan.getPhysicalPlan(),
      plans.physicalPlan.getQueryId(),
      getApplicationId(plans.physicalPlan.getQueryId(), getSourceNames(outputNode)));

  engineContext.createQueryValidator().validateQuery(
      config, plans.physicalPlan, engineContext.getQueryRegistry().getAllLiveQueries());

  return KsqlPlan.queryPlanCurrent(
      statement.getStatementText(), Optional.of(createCommand), queryPlan);
}
Use of io.confluent.ksql.parser.tree.Sink in the ksql project by confluentinc.
Class EngineExecutor, method execute:
/**
 * Executes a previously-built {@link KsqlPlan}: runs its DDL command (if any) and,
 * when a query plan is present, starts the corresponding persistent query.
 *
 * @param plan the plan to execute; may contain a DDL command, a query plan, or both
 * @param restoreInProgress whether this execution is part of a command-topic restore
 * @return the DDL result message when there is no query plan, otherwise the started query
 * @throws KsqlException if the statement attempts to write to a read-only (source) table/stream
 * @throws IllegalStateException if the plan is malformed (missing a component it must carry)
 */
ExecuteResult execute(final KsqlPlan plan, final boolean restoreInProgress) {
  if (!plan.getQueryPlan().isPresent()) {
    // Pure DDL: no physical query to start.
    final String ddlResult = plan
        .getDdlCommand()
        .map(ddl -> executeDdl(
            ddl, plan.getStatementText(), false, Collections.emptySet(), restoreInProgress))
        .orElseThrow(() -> new IllegalStateException(
            "DdlResult should be present if there is no physical plan."));
    return ExecuteResult.of(ddlResult);
  }

  final QueryPlan queryPlan = plan.getQueryPlan().get();
  // Fix: replace bare Optional.get() with orElseThrow so a malformed plan fails
  // with a descriptive error rather than an opaque NoSuchElementException.
  final KsqlConstants.PersistentQueryType persistentQueryType = plan.getPersistentQueryType()
      .orElseThrow(() -> new IllegalStateException(
          "PersistentQueryType should be present if there is a query plan."));

  // Reject statements that attempt to write to a read-only source table/stream
  // (i.e. INSERT or CREATE_AS targeting a SOURCE-created object).
  if (persistentQueryType != KsqlConstants.PersistentQueryType.CREATE_SOURCE) {
    // Fix: orElseThrow instead of unchecked get() — a sink must exist for any
    // non-CREATE_SOURCE persistent query.
    final DataSource sinkSource = engineContext.getMetaStore().getSource(
        queryPlan.getSink().orElseThrow(() -> new IllegalStateException(
            "Sink should be present for non-CREATE_SOURCE persistent queries.")));
    if (sinkSource != null && sinkSource.isSource()) {
      throw new KsqlException(String.format("Cannot insert into read-only %s: %s",
          sinkSource.getDataSourceType().getKsqlType().toLowerCase(),
          sinkSource.getName().text()));
    }
  }

  final Optional<String> ddlResult = plan.getDdlCommand().map(ddl -> executeDdl(
      ddl, plan.getStatementText(), true, queryPlan.getSources(), restoreInProgress));

  // Return if the source to create already exists.
  if (ddlResult.isPresent() && ddlResult.get().contains("already exists")) {
    return ExecuteResult.of(ddlResult.get());
  }

  // Skip materialization of source tables when the feature flag is disabled;
  // only the DDL part of the statement must be executed.
  if (persistentQueryType == KsqlConstants.PersistentQueryType.CREATE_SOURCE
      && !isSourceTableMaterializationEnabled()) {
    LOG.info(String.format(
        "Source table query '%s' won't be materialized because '%s' is disabled.",
        plan.getStatementText(),
        KsqlConfig.KSQL_SOURCE_TABLE_MATERIALIZATION_ENABLED));
    // Fix: orElseThrow instead of unchecked get() — a CREATE_SOURCE plan must
    // carry a DDL command; fail loudly if it does not.
    return ExecuteResult.of(ddlResult.orElseThrow(() -> new IllegalStateException(
        "DdlResult should be present for a CREATE_SOURCE query plan.")));
  }

  return ExecuteResult.of(
      executePersistentQuery(queryPlan, plan.getStatementText(), persistentQueryType));
}
Use of io.confluent.ksql.parser.tree.Sink in the ksql project by confluentinc.
Class QueryAnalyzerFunctionalTest, method shouldHandleValueFormat:
@Test
public void shouldHandleValueFormat() {
  // Given: a CSAS whose WITH clause overrides the value format to DELIMITED.
  final PreparedStatement<CreateStreamAsSelect> prepared = KsqlParserTestUtil.buildSingleAst(
      "create stream s with(value_format='delimited') as select * from test1;",
      metaStore,
      ROWPARTITION_ROWOFFSET_ENABLED);
  final CreateStreamAsSelect csas = prepared.getStatement();

  // When:
  final Analysis analysis =
      queryAnalyzer.analyze(csas.getQuery(), Optional.of(csas.getSink()));

  // Then: the sink topic's value format reflects the WITH-clause override.
  assertThat(
      analysis.getInto().get().getNewTopic().get().getValueFormat().getFormat(),
      is(FormatFactory.DELIMITED.name()));
}
Aggregations