
Example 6 with KafkaTopicClient

use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.

the class KsqlContextTest method shouldRunSimpleStatements.

@Test
public void shouldRunSimpleStatements() throws Exception {
    // Mock out the clients and engine so the test covers only KsqlContext's delegation to KsqlEngine.
    AdminClient adminClient = mock(AdminClient.class);
    KafkaTopicClient kafkaTopicClient = mock(KafkaTopicClientImpl.class);
    KsqlEngine ksqlEngine = mock(KsqlEngine.class);
    Map<QueryId, PersistentQueryMetadata> liveQueryMap = new HashMap<>();
    KsqlContext ksqlContext = new KsqlContext(adminClient, kafkaTopicClient, ksqlEngine);
    // statement1 compiles to no queries; statement2 compiles to a persistent CSAS query.
    expect(ksqlEngine.buildMultipleQueries(statement1, Collections.emptyMap())).andReturn(Collections.emptyList());
    expect(ksqlEngine.buildMultipleQueries(statement2, Collections.emptyMap())).andReturn(getQueryMetadata(new QueryId("CSAS_BIGORDERS"), DataSource.DataSourceType.KSTREAM));
    expect(ksqlEngine.getPersistentQueries()).andReturn(liveQueryMap);
    replay(ksqlEngine);
    ksqlContext.sql(statement1);
    ksqlContext.sql(statement2);
    verify(ksqlEngine);
}
Also used : KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) HashMap(java.util.HashMap) QueryId(io.confluent.ksql.query.QueryId) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) AdminClient(org.apache.kafka.clients.admin.AdminClient) Test(org.junit.Test)
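The test above follows EasyMock's record/replay/verify lifecycle: expectations are recorded on the mock, replay switches it to playback, and verify fails the test if an expected call never happened. The sketch below shows that lifecycle in isolation; the Greeter interface and the greeting strings are made up for illustration, and only the EasyMock calls (mock, expect, replay, verify) mirror the example.

import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

public class EasyMockLifecycleSketch {

    // Hypothetical collaborator, standing in for KsqlEngine in the test above.
    interface Greeter {
        String greet(String name);
    }

    public static void main(String[] args) {
        Greeter greeter = mock(Greeter.class);
        // Record phase: declare the expected call and its return value.
        expect(greeter.greet("ksql")).andReturn("hello, ksql");
        // Replay phase: switch the mock from recording to playback.
        replay(greeter);
        // Exercise the code that uses the collaborator.
        System.out.println(greeter.greet("ksql"));
        // Verify phase: fail if any recorded expectation was not satisfied.
        verify(greeter);
    }
}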

Example 7 with KafkaTopicClient

use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.

the class KsqlRestApplication method buildApplication.

public static KsqlRestApplication buildApplication(KsqlRestConfig restConfig, boolean isUiEnabled, VersionCheckerAgent versionCheckerAgent) throws Exception {
    Map<String, Object> ksqlConfProperties = new HashMap<>();
    ksqlConfProperties.putAll(restConfig.getKsqlConfigProperties());
    KsqlConfig ksqlConfig = new KsqlConfig(ksqlConfProperties);
    adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
    KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(adminClient));
    KafkaTopicClient topicClient = ksqlEngine.getTopicClient();
    final String kafkaClusterId;
    try {
        kafkaClusterId = adminClient.describeCluster().clusterId().get();
    } catch (final UnsupportedVersionException e) {
        throw new KsqlException("The Kafka brokers are incompatible with this version of KSQL. " + "KSQL requires broker versions >= 0.10.1.x", e);
    }
    String commandTopic = restConfig.getCommandTopic(ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG));
    // Make sure the command topic exists before it is registered as a KSQL topic and stream below.
    ensureCommandTopic(restConfig, topicClient, commandTopic);
    Map<String, Expression> commandTopicProperties = new HashMap<>();
    commandTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("json"));
    commandTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(commandTopic));
    ksqlEngine.getDdlCommandExec().execute(new RegisterTopicCommand(new RegisterTopic(QualifiedName.of(COMMANDS_KSQL_TOPIC_NAME), false, commandTopicProperties)));
    ksqlEngine.getDdlCommandExec().execute(new CreateStreamCommand("statementText", new CreateStream(QualifiedName.of(COMMANDS_STREAM_NAME), Collections.singletonList(new TableElement("STATEMENT", "STRING")), false, Collections.singletonMap(DdlConfig.TOPIC_NAME_PROPERTY, new StringLiteral(COMMANDS_KSQL_TOPIC_NAME))), Collections.emptyMap(), ksqlEngine.getTopicClient(), true));
    // The command topic is read and written as JSON-serialized CommandId/Command pairs.
    Map<String, Object> commandConsumerProperties = restConfig.getCommandConsumerProperties();
    KafkaConsumer<CommandId, Command> commandConsumer = new KafkaConsumer<>(commandConsumerProperties, getJsonDeserializer(CommandId.class, true), getJsonDeserializer(Command.class, false));
    KafkaProducer<CommandId, Command> commandProducer = new KafkaProducer<>(restConfig.getCommandProducerProperties(), getJsonSerializer(true), getJsonSerializer(false));
    CommandStore commandStore = new CommandStore(commandTopic, commandConsumer, commandProducer, new CommandIdAssigner(ksqlEngine.getMetaStore()));
    StatementParser statementParser = new StatementParser(ksqlEngine);
    StatementExecutor statementExecutor = new StatementExecutor(ksqlEngine, statementParser);
    CommandRunner commandRunner = new CommandRunner(statementExecutor, commandStore);
    RootDocument rootDocument = new RootDocument(isUiEnabled, restConfig.getList(RestConfig.LISTENERS_CONFIG).get(0));
    StatusResource statusResource = new StatusResource(statementExecutor);
    StreamedQueryResource streamedQueryResource = new StreamedQueryResource(ksqlEngine, statementParser, restConfig.getLong(KsqlRestConfig.STREAMED_QUERY_DISCONNECT_CHECK_MS_CONFIG));
    KsqlResource ksqlResource = new KsqlResource(ksqlEngine, commandStore, statementExecutor, restConfig.getLong(KsqlRestConfig.DISTRIBUTED_COMMAND_RESPONSE_TIMEOUT_MS_CONFIG));
    commandRunner.processPriorCommands();
    return new KsqlRestApplication(ksqlEngine, restConfig, commandRunner, rootDocument, statusResource, streamedQueryResource, ksqlResource, isUiEnabled, versionCheckerAgent, new ServerInfo(Version.getVersion(), kafkaClusterId));
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) KsqlEngine(io.confluent.ksql.KsqlEngine) HashMap(java.util.HashMap) RegisterTopic(io.confluent.ksql.parser.tree.RegisterTopic) KsqlResource(io.confluent.ksql.rest.server.resources.KsqlResource) ServerInfo(io.confluent.ksql.rest.entity.ServerInfo) CommandStore(io.confluent.ksql.rest.server.computation.CommandStore) KsqlException(io.confluent.ksql.util.KsqlException) StatementExecutor(io.confluent.ksql.rest.server.computation.StatementExecutor) StatusResource(io.confluent.ksql.rest.server.resources.StatusResource) TableElement(io.confluent.ksql.parser.tree.TableElement) StreamedQueryResource(io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource) CreateStreamCommand(io.confluent.ksql.ddl.commands.CreateStreamCommand) CommandRunner(io.confluent.ksql.rest.server.computation.CommandRunner) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) RootDocument(io.confluent.ksql.rest.server.resources.RootDocument) KsqlConfig(io.confluent.ksql.util.KsqlConfig) CreateStream(io.confluent.ksql.parser.tree.CreateStream) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) RegisterTopicCommand(io.confluent.ksql.ddl.commands.RegisterTopicCommand) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) StringLiteral(io.confluent.ksql.parser.tree.StringLiteral) Expression(io.confluent.ksql.parser.tree.Expression) Command(io.confluent.ksql.rest.server.computation.Command) CommandIdAssigner(io.confluent.ksql.rest.server.computation.CommandIdAssigner) CommandId(io.confluent.ksql.rest.server.computation.CommandId) UnsupportedVersionException(org.apache.kafka.common.errors.UnsupportedVersionException)
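ensureCommandTopic is called in the example but its body is not shown. Below is a rough sketch of what such a helper could look like on top of KafkaTopicClient; the body, the isTopicExists check, and the single-partition/replication-factor choices are assumptions for illustration, not the project's actual implementation. Only the four-argument createTopic signature is taken from Examples 8 and 9.

import java.util.Collections;

import io.confluent.ksql.util.KafkaTopicClient;

public final class CommandTopicBootstrapSketch {

    private CommandTopicBootstrapSketch() {
    }

    // Hypothetical stand-in for the ensureCommandTopic helper used above.
    public static void ensureCommandTopic(final KafkaTopicClient topicClient,
                                          final String commandTopic,
                                          final short replicationFactor) {
        // isTopicExists is assumed to be part of the KafkaTopicClient interface.
        if (topicClient.isTopicExists(commandTopic)) {
            return;
        }
        // A single partition keeps the command log totally ordered.
        topicClient.createTopic(commandTopic, 1, replicationFactor, Collections.emptyMap());
    }
}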

Example 8 with KafkaTopicClient

use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.

the class KsqlStructuredDataOutputNodeTest method shouldCreateSinkWithCorrectCleanupPolicyNonWindowedTable.

@Test
public void shouldCreateSinkWithCorrectCleanupPolicyNonWindowedTable() {
    KafkaTopicClient topicClientForNonWindowTable = EasyMock.mock(KafkaTopicClient.class);
    KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(false);
    StreamsBuilder streamsBuilder = new StreamsBuilder();
    // Recording phase: a non-windowed table sink is expected to be created with cleanup.policy=compact.
    Map<String, String> topicConfig = ImmutableMap.of(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
    topicClientForNonWindowTable.createTopic("output", 4, (short) 3, topicConfig);
    EasyMock.replay(topicClientForNonWindowTable);
    SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig, topicClientForNonWindowTable, new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
    assertThat(schemaKStream, instanceOf(SchemaKTable.class));
    EasyMock.verify(topicClientForNonWindowTable);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) SchemaKTable(io.confluent.ksql.structured.SchemaKTable) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SchemaKStream(io.confluent.ksql.structured.SchemaKStream) Test(org.junit.Test)
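Against a real cluster, the same createTopic call would go through KafkaTopicClientImpl rather than a mock. A minimal sketch follows, assuming a broker reachable at localhost:9092 and a cluster large enough for replication factor 3; the KafkaTopicClientImpl(adminClient) construction is taken from Examples 7 and 10 and the createTopic arguments from Example 8.

import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.common.config.TopicConfig;

import com.google.common.collect.ImmutableMap;

import io.confluent.ksql.util.KafkaTopicClient;
import io.confluent.ksql.util.KafkaTopicClientImpl;

public final class CompactedSinkTopicExample {

    public static void main(String[] args) {
        // Assumed broker address; adjust for your environment.
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        AdminClient adminClient = AdminClient.create(props);

        KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);

        // The same call the mock in Example 8 expects: a compacted sink topic
        // with 4 partitions and replication factor 3.
        Map<String, String> topicConfig =
                ImmutableMap.of(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
        topicClient.createTopic("output", 4, (short) 3, topicConfig);

        adminClient.close();
    }
}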

Example 9 with KafkaTopicClient

use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.

the class KsqlStructuredDataOutputNodeTest method shouldCreateSinkWithCorrectCleanupPolicyWindowedTable.

@Test
public void shouldCreateSinkWithCorrectCleanupPolicyWindowedTable() {
    KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
    KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(true);
    StreamsBuilder streamsBuilder = new StreamsBuilder();
    // Recording phase: a windowed table sink is expected to keep the broker's default cleanup policy (empty config map).
    topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
    EasyMock.replay(topicClientForWindowTable);
    SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig, topicClientForWindowTable, new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
    assertThat(schemaKStream, instanceOf(SchemaKTable.class));
    EasyMock.verify(topicClientForWindowTable);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) SchemaKTable(io.confluent.ksql.structured.SchemaKTable) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SchemaKStream(io.confluent.ksql.structured.SchemaKStream) Test(org.junit.Test)
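Read together, Examples 8 and 9 pin down a behavior of KsqlStructuredDataOutputNode.buildStream: the sink topic for a non-windowed table is created with cleanup.policy=compact, while the sink topic for a windowed table is created with an empty config map and therefore keeps the broker's default cleanup policy. Both tests also confirm that the resulting stream is a SchemaKTable rather than a plain SchemaKStream.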

Example 10 with KafkaTopicClient

use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.

the class KsqlContext method create.

public static KsqlContext create(KsqlConfig ksqlConfig, SchemaRegistryClient schemaRegistryClient) {
    if (ksqlConfig == null) {
        ksqlConfig = new KsqlConfig(Collections.emptyMap());
    }
    Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
    // Fall back to a default application id and bootstrap servers when the caller did not set them.
    if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
        streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID_OPTION_DEFAULT);
    }
    if (!streamsProperties.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BOOTSTRAP_SERVER_OPTION_DEFAULT);
    }
    AdminClient adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    if (schemaRegistryClient == null) {
        return new KsqlContext(adminClient, topicClient, new KsqlEngine(ksqlConfig, topicClient));
    } else {
        return new KsqlContext(adminClient, topicClient, new KsqlEngine(ksqlConfig, topicClient, schemaRegistryClient, new MetaStoreImpl()));
    }
}
Also used : KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MetaStoreImpl(io.confluent.ksql.metastore.MetaStoreImpl) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient)
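A minimal usage sketch of this factory, tying it back to the sql(...) entry point from Example 6. The application id, broker address, statement text, and the io.confluent.ksql.KsqlContext import path are assumptions for illustration; passing null for the SchemaRegistryClient simply takes the first branch above.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.streams.StreamsConfig;

import io.confluent.ksql.KsqlContext;   // import path assumed from the project layout
import io.confluent.ksql.util.KsqlConfig;

public final class EmbeddedKsqlSketch {

    public static void main(String[] args) throws Exception {
        // Both keys would otherwise be defaulted by create(), as shown in Example 10.
        Map<String, Object> props = new HashMap<>();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "embedded-ksql-example");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        // A null SchemaRegistryClient takes the first branch in Example 10.
        KsqlContext ksqlContext = KsqlContext.create(new KsqlConfig(props), null);

        // Same entry point the test in Example 6 exercises; the statement text is illustrative only.
        ksqlContext.sql("CREATE STREAM pageviews (viewtime BIGINT, userid VARCHAR) "
                + "WITH (kafka_topic='pageviews', value_format='JSON');");
    }
}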

Aggregations

KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient)13 Test (org.junit.Test)10 KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl)8 HashMap (java.util.HashMap)7 AdminClient (org.apache.kafka.clients.admin.AdminClient)7 GenericRow (io.confluent.ksql.GenericRow)5 IntegrationTest (io.confluent.common.utils.IntegrationTest)4 MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient)4 FunctionRegistry (io.confluent.ksql.function.FunctionRegistry)4 KsqlConfig (io.confluent.ksql.util.KsqlConfig)4 QueryMetadata (io.confluent.ksql.util.QueryMetadata)4 StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)4 Schema (org.apache.kafka.connect.data.Schema)4 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)4 KsqlEngine (io.confluent.ksql.KsqlEngine)3 SchemaKStream (io.confluent.ksql.structured.SchemaKStream)3 StreamedQueryResource (io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource)2 SchemaKTable (io.confluent.ksql.structured.SchemaKTable)2 KsqlException (io.confluent.ksql.util.KsqlException)2 Map (java.util.Map)2