
Example 1 with RegisterTopic

Use of io.confluent.ksql.parser.tree.RegisterTopic in project ksql by confluentinc.

From the class CommandFactoriesTest, method shouldCreateDDLCommandForRegisterTopic:

@Test
public void shouldCreateDDLCommandForRegisterTopic() {
    final DdlCommand result = commandFactories.create(sqlExpression, new RegisterTopic(QualifiedName.of("blah"), true, properties), Collections.emptyMap());
    assertThat(result, instanceOf(RegisterTopicCommand.class));
}
Also used : RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) Test(org.junit.Test)
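
The properties and sqlExpression fixtures used above are defined elsewhere in CommandFactoriesTest and do not appear on this page. As a hedged sketch only, the snippet below shows one plausible way such a fixture could be populated; the DdlConfig keys and StringLiteral values mirror the command-topic properties built in Example 4, while the concrete format and topic name are made up for illustration.

// Hedged sketch, not part of CommandFactoriesTest: a plausible properties fixture.
// The keys and StringLiteral values follow the usage shown in Example 4 below.
private final Map<String, Expression> properties = new HashMap<>();

{
    properties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("json"));
    properties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral("blah_topic"));
}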

Example 2 with RegisterTopic

Use of io.confluent.ksql.parser.tree.RegisterTopic in project ksql by confluentinc.

From the class KsqlParserTest, method testRegisterTopic:

@Test
public void testRegisterTopic() throws Exception {
    String queryStr = "REGISTER TOPIC orders_topic WITH (value_format = 'avro', " + "avroschemafile='/Users/hojjat/avro_order_schema.avro',kafka_topic='orders_topic');";
    Statement statement = KSQL_PARSER.buildAst(queryStr, metaStore).get(0);
    Assert.assertTrue("testRegisterTopic failed.", statement instanceof RegisterTopic);
    RegisterTopic registerTopic = (RegisterTopic) statement;
    Assert.assertTrue("testRegisterTopic failed.", registerTopic.getName().toString().equalsIgnoreCase("ORDERS_TOPIC"));
    Assert.assertTrue("testRegisterTopic failed.", registerTopic.getProperties().size() == 3);
    Assert.assertTrue("testRegisterTopic failed.", registerTopic.getProperties().get(DdlConfig.VALUE_FORMAT_PROPERTY).toString().equalsIgnoreCase("'avro'"));
}
Also used : RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) Statement(io.confluent.ksql.parser.tree.Statement) Test(org.junit.Test)
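
For comparison, the same checks can be written with the Hamcrest matchers already used in Example 1, which report the actual and expected values on failure instead of the fixed "testRegisterTopic failed." message. This is a sketch rather than the project's test code, and it assumes the usual static imports from org.hamcrest.MatcherAssert and org.hamcrest.Matchers.

// Sketch only: the assertions above rewritten with Hamcrest matchers.
// Static imports assumed: assertThat, instanceOf, equalTo, equalToIgnoringCase.
assertThat(statement, instanceOf(RegisterTopic.class));
RegisterTopic registerTopic = (RegisterTopic) statement;
assertThat(registerTopic.getName().toString(), equalToIgnoringCase("ORDERS_TOPIC"));
assertThat(registerTopic.getProperties().size(), equalTo(3));
assertThat(registerTopic.getProperties().get(DdlConfig.VALUE_FORMAT_PROPERTY).toString(), equalToIgnoringCase("'avro'"));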

Example 3 with RegisterTopic

Use of io.confluent.ksql.parser.tree.RegisterTopic in project ksql by confluentinc.

From the class KsqlParserTest, method testCreateTopicFormatWithoutQuotes:

@Test
/*
      TODO: Handle so-called identifier expressions as values in table properties (right now, the lack of
      single quotes around the variables <format> and <kafkaTopic> causes things to break).
   */
@Ignore
public void testCreateTopicFormatWithoutQuotes() throws Exception {
    String ksqlTopic = "unquoted_topic";
    String format = "json";
    String kafkaTopic = "case_insensitive_kafka_topic";
    String queryStr = String.format("REGISTER TOPIC %s WITH (value_format = %s, kafka_topic = %s);", ksqlTopic, format, kafkaTopic);
    Statement statement = KSQL_PARSER.buildAst(queryStr, metaStore).get(0);
    Assert.assertTrue(statement instanceof RegisterTopic);
    RegisterTopic registerTopic = (RegisterTopic) statement;
    Assert.assertTrue(registerTopic.getName().toString().equalsIgnoreCase(ksqlTopic));
    Assert.assertTrue(registerTopic.getProperties().size() == 2);
    Assert.assertTrue(registerTopic.getProperties().get(DdlConfig.VALUE_FORMAT_PROPERTY).toString().equalsIgnoreCase(format));
    Assert.assertTrue(registerTopic.getProperties().get(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY).toString().equalsIgnoreCase(kafkaTopic));
}
Also used : RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) Statement(io.confluent.ksql.parser.tree.Statement) Ignore(org.junit.Ignore) Test(org.junit.Test)
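
Until that TODO is addressed, the quoted form already parses; Example 2 exercises it with 'avro'. A minimal workaround sketch: single-quote the two property values in queryStr so the parser sees string literals rather than bare identifiers.

// Workaround sketch: quoting the values makes them string literals, which the
// grammar already accepts (compare Example 2). Note that the parsed values then
// keep their surrounding quotes, so the assertions would compare against "'json'".
String queryStr = String.format(
    "REGISTER TOPIC %s WITH (value_format = '%s', kafka_topic = '%s');",
    ksqlTopic, format, kafkaTopic);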

Example 4 with RegisterTopic

Use of io.confluent.ksql.parser.tree.RegisterTopic in project ksql by confluentinc.

From the class KsqlRestApplication, method buildApplication:

public static KsqlRestApplication buildApplication(KsqlRestConfig restConfig, boolean isUiEnabled, VersionCheckerAgent versionCheckerAgent) throws Exception {
    Map<String, Object> ksqlConfProperties = new HashMap<>();
    ksqlConfProperties.putAll(restConfig.getKsqlConfigProperties());
    KsqlConfig ksqlConfig = new KsqlConfig(ksqlConfProperties);
    adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
    KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(adminClient));
    KafkaTopicClient topicClient = ksqlEngine.getTopicClient();
    final String kafkaClusterId;
    try {
        kafkaClusterId = adminClient.describeCluster().clusterId().get();
    } catch (final UnsupportedVersionException e) {
        throw new KsqlException("The Kafka brokers are incompatible with KSQL. " + "KSQL requires broker versions >= 0.10.1.x.");
    }
    String commandTopic = restConfig.getCommandTopic(ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG));
    ensureCommandTopic(restConfig, topicClient, commandTopic);
    Map<String, Expression> commandTopicProperties = new HashMap<>();
    commandTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("json"));
    commandTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(commandTopic));
    ksqlEngine.getDdlCommandExec().execute(new RegisterTopicCommand(
        new RegisterTopic(QualifiedName.of(COMMANDS_KSQL_TOPIC_NAME), false, commandTopicProperties)));
    ksqlEngine.getDdlCommandExec().execute(new CreateStreamCommand(
        "statementText",
        new CreateStream(
            QualifiedName.of(COMMANDS_STREAM_NAME),
            Collections.singletonList(new TableElement("STATEMENT", "STRING")),
            false,
            Collections.singletonMap(DdlConfig.TOPIC_NAME_PROPERTY, new StringLiteral(COMMANDS_KSQL_TOPIC_NAME))),
        Collections.emptyMap(),
        ksqlEngine.getTopicClient(),
        true));
    Map<String, Object> commandConsumerProperties = restConfig.getCommandConsumerProperties();
    KafkaConsumer<CommandId, Command> commandConsumer = new KafkaConsumer<>(commandConsumerProperties, getJsonDeserializer(CommandId.class, true), getJsonDeserializer(Command.class, false));
    KafkaProducer<CommandId, Command> commandProducer = new KafkaProducer<>(restConfig.getCommandProducerProperties(), getJsonSerializer(true), getJsonSerializer(false));
    CommandStore commandStore = new CommandStore(commandTopic, commandConsumer, commandProducer, new CommandIdAssigner(ksqlEngine.getMetaStore()));
    StatementParser statementParser = new StatementParser(ksqlEngine);
    StatementExecutor statementExecutor = new StatementExecutor(ksqlEngine, statementParser);
    CommandRunner commandRunner = new CommandRunner(statementExecutor, commandStore);
    RootDocument rootDocument = new RootDocument(isUiEnabled, restConfig.getList(RestConfig.LISTENERS_CONFIG).get(0));
    StatusResource statusResource = new StatusResource(statementExecutor);
    StreamedQueryResource streamedQueryResource = new StreamedQueryResource(ksqlEngine, statementParser, restConfig.getLong(KsqlRestConfig.STREAMED_QUERY_DISCONNECT_CHECK_MS_CONFIG));
    KsqlResource ksqlResource = new KsqlResource(ksqlEngine, commandStore, statementExecutor, restConfig.getLong(KsqlRestConfig.DISTRIBUTED_COMMAND_RESPONSE_TIMEOUT_MS_CONFIG));
    commandRunner.processPriorCommands();
    return new KsqlRestApplication(ksqlEngine, restConfig, commandRunner, rootDocument, statusResource, streamedQueryResource, ksqlResource, isUiEnabled, versionCheckerAgent, new ServerInfo(Version.getVersion(), kafkaClusterId));
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) KsqlEngine(io.confluent.ksql.KsqlEngine) HashMap(java.util.HashMap) RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) KsqlResource(io.confluent.ksql.rest.server.resources.KsqlResource) ServerInfo(io.confluent.ksql.rest.entity.ServerInfo) CommandStore(io.confluent.ksql.rest.server.computation.CommandStore) KsqlException(io.confluent.ksql.util.KsqlException) StatementExecutor(io.confluent.ksql.rest.server.computation.StatementExecutor) StatusResource(io.confluent.ksql.rest.server.resources.StatusResource) TableElement(io.confluent.ksql.parser.tree.TableElement) StreamedQueryResource(io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource) CreateStreamCommand(io.confluent.ksql.ddl.commands.CreateStreamCommand) CommandRunner(io.confluent.ksql.rest.server.computation.CommandRunner) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) RootDocument(io.confluent.ksql.rest.server.resources.RootDocument) KsqlConfig(io.confluent.ksql.util.KsqlConfig) CreateStream(io.confluent.ksql.parser.tree.CreateStream) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) RegisterTopicCommand(io.confluent.ksql.ddl.commands.RegisterTopicCommand) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) StringLiteral(io.confluent.ksql.parser.tree.StringLiteral) Expression(io.confluent.ksql.parser.tree.Expression) Command(io.confluent.ksql.rest.server.computation.Command) CommandIdAssigner(io.confluent.ksql.rest.server.computation.CommandIdAssigner) CommandId(io.confluent.ksql.rest.server.computation.CommandId) UnsupportedVersionException(org.apache.kafka.common.errors.UnsupportedVersionException)
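
A hedged usage sketch of buildApplication wired into an entry point. Nothing below is taken from the project: the property keys and values are hypothetical, and it is assumed that KsqlRestConfig can be constructed from a plain property map, that a null VersionCheckerAgent is tolerated, and that the returned application exposes a start() method from its REST framework base class.

// Hypothetical entry point, not from the project; see the assumptions above.
public static void main(final String[] args) throws Exception {
    final Map<String, Object> props = new HashMap<>();
    props.put("bootstrap.servers", "localhost:9092");  // hypothetical broker address
    props.put("listeners", "http://localhost:8088");   // hypothetical REST listener
    final KsqlRestApplication app =
        KsqlRestApplication.buildApplication(new KsqlRestConfig(props), false, null);
    app.start();
}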

Example 5 with RegisterTopic

Use of io.confluent.ksql.parser.tree.RegisterTopic in project ksql by confluentinc.

From the class KsqlEngine, method buildSingleQueryAst:

private Pair<String, Statement> buildSingleQueryAst(final Statement statement, final String statementString, final MetaStore tempMetaStore, final MetaStore tempMetaStoreForParser, final Map<String, Object> overriddenProperties) {
    log.info("Building AST for {}.", statementString);
    if (statement instanceof Query) {
        return new Pair<>(statementString, statement);
    } else if (statement instanceof CreateAsSelect) {
        CreateAsSelect createAsSelect = (CreateAsSelect) statement;
        QuerySpecification querySpecification = (QuerySpecification) createAsSelect.getQuery().getQueryBody();
        Query query = addInto(createAsSelect.getQuery(), querySpecification, createAsSelect.getName().getSuffix(), createAsSelect.getProperties(), createAsSelect.getPartitionByColumn());
        tempMetaStoreForParser.putSource(queryEngine.getResultDatasource(querySpecification.getSelect(), createAsSelect.getName().getSuffix()).cloneWithTimeKeyColumns());
        return new Pair<>(statementString, query);
    } else if (statement instanceof RegisterTopic) {
        ddlCommandExec.tryExecute(new RegisterTopicCommand((RegisterTopic) statement), tempMetaStoreForParser);
        ddlCommandExec.tryExecute(new RegisterTopicCommand((RegisterTopic) statement), tempMetaStore);
        return new Pair<>(statementString, statement);
    } else if (statement instanceof CreateStream) {
        ddlCommandExec.tryExecute(new CreateStreamCommand(statementString, (CreateStream) statement, overriddenProperties, topicClient, false), tempMetaStoreForParser);
        ddlCommandExec.tryExecute(new CreateStreamCommand(statementString, (CreateStream) statement, overriddenProperties, topicClient, false), tempMetaStore);
        return new Pair<>(statementString, statement);
    } else if (statement instanceof CreateTable) {
        ddlCommandExec.tryExecute(new CreateTableCommand(statementString, (CreateTable) statement, overriddenProperties, topicClient, false), tempMetaStoreForParser);
        ddlCommandExec.tryExecute(new CreateTableCommand(statementString, (CreateTable) statement, overriddenProperties, topicClient, false), tempMetaStore);
        return new Pair<>(statementString, statement);
    } else if (statement instanceof DropStream) {
        ddlCommandExec.tryExecute(new DropSourceCommand((DropStream) statement, DataSource.DataSourceType.KSTREAM, this), tempMetaStore);
        ddlCommandExec.tryExecute(new DropSourceCommand((DropStream) statement, DataSource.DataSourceType.KSTREAM, this), tempMetaStoreForParser);
        return new Pair<>(statementString, statement);
    } else if (statement instanceof DropTable) {
        ddlCommandExec.tryExecute(new DropSourceCommand((DropTable) statement, DataSource.DataSourceType.KTABLE, this), tempMetaStore);
        ddlCommandExec.tryExecute(new DropSourceCommand((DropTable) statement, DataSource.DataSourceType.KTABLE, this), tempMetaStoreForParser);
        return new Pair<>(statementString, statement);
    } else if (statement instanceof DropTopic) {
        ddlCommandExec.tryExecute(new DropTopicCommand((DropTopic) statement), tempMetaStore);
        ddlCommandExec.tryExecute(new DropTopicCommand((DropTopic) statement), tempMetaStoreForParser);
        return new Pair<>(statementString, statement);
    } else if (statement instanceof SetProperty) {
        return new Pair<>(statementString, statement);
    }
    return null;
}
Also used : DropTopicCommand(io.confluent.ksql.ddl.commands.DropTopicCommand) Query(io.confluent.ksql.parser.tree.Query) RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) DropSourceCommand(io.confluent.ksql.ddl.commands.DropSourceCommand) CreateTable(io.confluent.ksql.parser.tree.CreateTable) CreateStream(io.confluent.ksql.parser.tree.CreateStream) CreateAsSelect(io.confluent.ksql.parser.tree.CreateAsSelect) RegisterTopicCommand(io.confluent.ksql.ddl.commands.RegisterTopicCommand) DropTable(io.confluent.ksql.parser.tree.DropTable) CreateTableCommand(io.confluent.ksql.ddl.commands.CreateTableCommand) QuerySpecification(io.confluent.ksql.parser.tree.QuerySpecification) CreateStreamCommand(io.confluent.ksql.ddl.commands.CreateStreamCommand) DropTopic(io.confluent.ksql.parser.tree.DropTopic) DropStream(io.confluent.ksql.parser.tree.DropStream) SetProperty(io.confluent.ksql.parser.tree.SetProperty) Pair(io.confluent.ksql.util.Pair)
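
Each DDL branch above applies the same command twice, once against tempMetaStoreForParser and once against tempMetaStore. The helper below is hypothetical and not part of KsqlEngine; it only names the pattern the branches repeat.

// Hypothetical helper, not in KsqlEngine: applies one DDL command to both
// temporary metastores, as every branch of buildSingleQueryAst does inline.
private void tryExecuteOnBoth(final DdlCommand command, final MetaStore tempMetaStore, final MetaStore tempMetaStoreForParser) {
    ddlCommandExec.tryExecute(command, tempMetaStoreForParser);
    ddlCommandExec.tryExecute(command, tempMetaStore);
}

// The RegisterTopic branch, for instance, would then read:
// tryExecuteOnBoth(new RegisterTopicCommand((RegisterTopic) statement), tempMetaStore, tempMetaStoreForParser);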

Aggregations

RegisterTopic (io.confluent.ksql.parser.tree.RegisterTopic): 7
Test (org.junit.Test): 4
CreateStreamCommand (io.confluent.ksql.ddl.commands.CreateStreamCommand): 2
RegisterTopicCommand (io.confluent.ksql.ddl.commands.RegisterTopicCommand): 2
CreateStream (io.confluent.ksql.parser.tree.CreateStream): 2
Expression (io.confluent.ksql.parser.tree.Expression): 2
Statement (io.confluent.ksql.parser.tree.Statement): 2
StringLiteral (io.confluent.ksql.parser.tree.StringLiteral): 2
CommandId (io.confluent.ksql.rest.server.computation.CommandId): 2
HashMap (java.util.HashMap): 2
KsqlEngine (io.confluent.ksql.KsqlEngine): 1
CreateTableCommand (io.confluent.ksql.ddl.commands.CreateTableCommand): 1
DropSourceCommand (io.confluent.ksql.ddl.commands.DropSourceCommand): 1
DropTopicCommand (io.confluent.ksql.ddl.commands.DropTopicCommand): 1
AstBuilder (io.confluent.ksql.parser.AstBuilder): 1
CreateAsSelect (io.confluent.ksql.parser.tree.CreateAsSelect): 1
CreateTable (io.confluent.ksql.parser.tree.CreateTable): 1
DropStream (io.confluent.ksql.parser.tree.DropStream): 1
DropTable (io.confluent.ksql.parser.tree.DropTable): 1
DropTopic (io.confluent.ksql.parser.tree.DropTopic): 1