Use of io.confluent.ksql.parser.tree.DropTopic in project ksql by confluentinc.
From the class KsqlEngine, method buildSingleQueryAst:
private Pair<String, Statement> buildSingleQueryAst(
    final Statement statement,
    final String statementString,
    final MetaStore tempMetaStore,
    final MetaStore tempMetaStoreForParser,
    final Map<String, Object> overriddenProperties) {
  log.info("Building AST for {}.", statementString);
  if (statement instanceof Query) {
    return new Pair<>(statementString, statement);
  } else if (statement instanceof CreateAsSelect) {
    CreateAsSelect createAsSelect = (CreateAsSelect) statement;
    QuerySpecification querySpecification =
        (QuerySpecification) createAsSelect.getQuery().getQueryBody();
    Query query = addInto(
        createAsSelect.getQuery(),
        querySpecification,
        createAsSelect.getName().getSuffix(),
        createAsSelect.getProperties(),
        createAsSelect.getPartitionByColumn());
    tempMetaStoreForParser.putSource(
        queryEngine.getResultDatasource(
            querySpecification.getSelect(),
            createAsSelect.getName().getSuffix()).cloneWithTimeKeyColumns());
    return new Pair<>(statementString, query);
  } else if (statement instanceof RegisterTopic) {
    ddlCommandExec.tryExecute(
        new RegisterTopicCommand((RegisterTopic) statement), tempMetaStoreForParser);
    ddlCommandExec.tryExecute(
        new RegisterTopicCommand((RegisterTopic) statement), tempMetaStore);
    return new Pair<>(statementString, statement);
  } else if (statement instanceof CreateStream) {
    ddlCommandExec.tryExecute(
        new CreateStreamCommand(
            statementString, (CreateStream) statement, overriddenProperties, topicClient, false),
        tempMetaStoreForParser);
    ddlCommandExec.tryExecute(
        new CreateStreamCommand(
            statementString, (CreateStream) statement, overriddenProperties, topicClient, false),
        tempMetaStore);
    return new Pair<>(statementString, statement);
  } else if (statement instanceof CreateTable) {
    ddlCommandExec.tryExecute(
        new CreateTableCommand(
            statementString, (CreateTable) statement, overriddenProperties, topicClient, false),
        tempMetaStoreForParser);
    ddlCommandExec.tryExecute(
        new CreateTableCommand(
            statementString, (CreateTable) statement, overriddenProperties, topicClient, false),
        tempMetaStore);
    return new Pair<>(statementString, statement);
  } else if (statement instanceof DropStream) {
    ddlCommandExec.tryExecute(
        new DropSourceCommand((DropStream) statement, DataSource.DataSourceType.KSTREAM, this),
        tempMetaStore);
    ddlCommandExec.tryExecute(
        new DropSourceCommand((DropStream) statement, DataSource.DataSourceType.KSTREAM, this),
        tempMetaStoreForParser);
    return new Pair<>(statementString, statement);
  } else if (statement instanceof DropTable) {
    ddlCommandExec.tryExecute(
        new DropSourceCommand((DropTable) statement, DataSource.DataSourceType.KTABLE, this),
        tempMetaStore);
    ddlCommandExec.tryExecute(
        new DropSourceCommand((DropTable) statement, DataSource.DataSourceType.KTABLE, this),
        tempMetaStoreForParser);
    return new Pair<>(statementString, statement);
  } else if (statement instanceof DropTopic) {
    ddlCommandExec.tryExecute(new DropTopicCommand((DropTopic) statement), tempMetaStore);
    ddlCommandExec.tryExecute(new DropTopicCommand((DropTopic) statement), tempMetaStoreForParser);
    return new Pair<>(statementString, statement);
  } else if (statement instanceof SetProperty) {
    return new Pair<>(statementString, statement);
  }
  return null;
}
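Every DDL branch above applies its command to both temporary metastores, so that both the parser and the AST builder see the new or removed source when later statements in the same script are processed. The sketch below factors out that repeated pattern as a helper; it is only an illustration, relying on the types already in scope in KsqlEngine plus java.util.function.Supplier, and the helper name and Supplier-based signature are hypothetical (the original simply constructs each command twice inline).

// Hypothetical helper sketching the "apply to both temporary metastores"
// pattern repeated in each DDL branch of buildSingleQueryAst. A fresh
// command is built per call, matching the original, which constructs the
// command separately for each tryExecute invocation.
private Pair<String, Statement> executeOnTempStores(
    final String statementString,
    final Statement statement,
    final Supplier<DdlCommand> commandSupplier,
    final MetaStore tempMetaStore,
    final MetaStore tempMetaStoreForParser) {
  // The original varies the order: the REGISTER/CREATE branches hit
  // tempMetaStoreForParser first, the DROP branches hit tempMetaStore
  // first. This sketch follows the REGISTER/CREATE order.
  ddlCommandExec.tryExecute(commandSupplier.get(), tempMetaStoreForParser);
  ddlCommandExec.tryExecute(commandSupplier.get(), tempMetaStore);
  return new Pair<>(statementString, statement);
}

With such a helper, the DropTopic branch would reduce to a single call, e.g. executeOnTempStores(statementString, statement, () -> new DropTopicCommand((DropTopic) statement), tempMetaStore, tempMetaStoreForParser).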
From the class CommandFactoriesTest, method shouldCreateCommandForDropTopic:
@Test
public void shouldCreateCommandForDropTopic() {
  final DdlCommand result = commandFactories.create(
      sqlExpression, new DropTopic(QualifiedName.of("foo"), true), Collections.emptyMap());
  assertThat(result, instanceOf(DropTopicCommand.class));
}
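The command returned by the factory is the same kind of object that buildSingleQueryAst executes above. As a minimal sketch of how the two pieces fit together, assuming a DdlCommandExec and a MetaStore are already available (the variable names ddlCommandExec and metaStore here are illustrative), the created command could be applied like this:

// Sketch: hand the factory-created command to a DdlCommandExec, mirroring
// the DropTopic branch of buildSingleQueryAst shown above. The arguments to
// DropTopic are copied from the test; ddlCommandExec and metaStore are
// assumed to be configured elsewhere.
final DdlCommand dropTopicCommand = commandFactories.create(
    sqlExpression,
    new DropTopic(QualifiedName.of("foo"), true),
    Collections.emptyMap());
ddlCommandExec.tryExecute(dropTopicCommand, metaStore);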