Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
The class JsonFormatTest, method before.
@Before
public void before() throws Exception {
  Map<String, Object> configMap = new HashMap<>();
  configMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
  configMap.put("application.id", "KSQL");
  configMap.put("commit.interval.ms", 0);
  configMap.put("cache.max.bytes.buffering", 0);
  configMap.put("auto.offset.reset", "earliest");
  KsqlConfig ksqlConfig = new KsqlConfig(configMap);
  adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
  topicClient = new KafkaTopicClientImpl(adminClient);
  ksqlEngine = new KsqlEngine(ksqlConfig, topicClient);
  metaStore = ksqlEngine.getMetaStore();
  createInitTopics();
  produceInitData();
  execInitCreateStreamQueries();
}
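A test set up this way typically needs a matching teardown so the engine and admin client do not leak between test methods. A minimal sketch, assuming the ksqlEngine and adminClient fields assigned above are the only resources to release and that KsqlEngine exposes a close() method:

@After
public void after() throws Exception {
  // Release the resources opened in before(), in reverse order of creation.
  if (ksqlEngine != null) {
    ksqlEngine.close();
  }
  if (adminClient != null) {
    adminClient.close();
  }
}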
Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
The class KsqlRestApplication, method buildApplication.
public static KsqlRestApplication buildApplication(KsqlRestConfig restConfig, boolean isUiEnabled, VersionCheckerAgent versionCheckerAgent) throws Exception {
  Map<String, Object> ksqlConfProperties = new HashMap<>();
  ksqlConfProperties.putAll(restConfig.getKsqlConfigProperties());
  KsqlConfig ksqlConfig = new KsqlConfig(ksqlConfProperties);
  adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
  KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(adminClient));
  KafkaTopicClient topicClient = ksqlEngine.getTopicClient();
  final String kafkaClusterId;
  try {
    kafkaClusterId = adminClient.describeCluster().clusterId().get();
  } catch (final UnsupportedVersionException e) {
    throw new KsqlException(
        "The Kafka brokers are incompatible with this version of KSQL. "
        + "KSQL requires broker versions >= 0.10.1.x");
  }
  String commandTopic = restConfig.getCommandTopic(ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG));
  ensureCommandTopic(restConfig, topicClient, commandTopic);
  Map<String, Expression> commandTopicProperties = new HashMap<>();
  commandTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("json"));
  commandTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(commandTopic));
  ksqlEngine.getDdlCommandExec().execute(new RegisterTopicCommand(
      new RegisterTopic(QualifiedName.of(COMMANDS_KSQL_TOPIC_NAME), false, commandTopicProperties)));
  ksqlEngine.getDdlCommandExec().execute(new CreateStreamCommand(
      "statementText",
      new CreateStream(
          QualifiedName.of(COMMANDS_STREAM_NAME),
          Collections.singletonList(new TableElement("STATEMENT", "STRING")),
          false,
          Collections.singletonMap(DdlConfig.TOPIC_NAME_PROPERTY, new StringLiteral(COMMANDS_KSQL_TOPIC_NAME))),
      Collections.emptyMap(),
      ksqlEngine.getTopicClient(),
      true));
  Map<String, Object> commandConsumerProperties = restConfig.getCommandConsumerProperties();
  KafkaConsumer<CommandId, Command> commandConsumer = new KafkaConsumer<>(commandConsumerProperties,
      getJsonDeserializer(CommandId.class, true), getJsonDeserializer(Command.class, false));
  KafkaProducer<CommandId, Command> commandProducer = new KafkaProducer<>(restConfig.getCommandProducerProperties(),
      getJsonSerializer(true), getJsonSerializer(false));
  CommandStore commandStore = new CommandStore(commandTopic, commandConsumer, commandProducer,
      new CommandIdAssigner(ksqlEngine.getMetaStore()));
  StatementParser statementParser = new StatementParser(ksqlEngine);
  StatementExecutor statementExecutor = new StatementExecutor(ksqlEngine, statementParser);
  CommandRunner commandRunner = new CommandRunner(statementExecutor, commandStore);
  RootDocument rootDocument = new RootDocument(isUiEnabled, restConfig.getList(RestConfig.LISTENERS_CONFIG).get(0));
  StatusResource statusResource = new StatusResource(statementExecutor);
  StreamedQueryResource streamedQueryResource = new StreamedQueryResource(ksqlEngine, statementParser,
      restConfig.getLong(KsqlRestConfig.STREAMED_QUERY_DISCONNECT_CHECK_MS_CONFIG));
  KsqlResource ksqlResource = new KsqlResource(ksqlEngine, commandStore, statementExecutor,
      restConfig.getLong(KsqlRestConfig.DISTRIBUTED_COMMAND_RESPONSE_TIMEOUT_MS_CONFIG));
  commandRunner.processPriorCommands();
  return new KsqlRestApplication(ksqlEngine, restConfig, commandRunner, rootDocument, statusResource,
      streamedQueryResource, ksqlResource, isUiEnabled, versionCheckerAgent,
      new ServerInfo(Version.getVersion(), kafkaClusterId));
}
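A caller, typically the server's main method, constructs the rest config from a property map and then starts the application that buildApplication returns. A rough sketch, assuming KsqlRestConfig accepts a property map, that a VersionCheckerAgent instance is obtained elsewhere, and that KsqlRestApplication inherits start() from the Confluent REST Application base class; listener and broker addresses are placeholders:

Map<String, Object> props = new HashMap<>();
props.put(RestConfig.LISTENERS_CONFIG, "http://localhost:8088"); // placeholder listener
props.put("bootstrap.servers", "localhost:9092");                // placeholder broker
KsqlRestConfig restConfig = new KsqlRestConfig(props);
KsqlRestApplication app =
    KsqlRestApplication.buildApplication(restConfig, true, versionCheckerAgent); // agent obtained elsewhere
app.start();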
Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
The class StatementExecutorTest, method setUp.
@Before
public void setUp() {
  Map<String, Object> props = new HashMap<>();
  props.put("application.id", "ksqlStatementExecutorTest");
  props.put("bootstrap.servers", CLUSTER.bootstrapServers());
  ksqlEngine = new KsqlEngine(new KsqlConfig(props), new MockKafkaTopicClient());
  StatementParser statementParser = new StatementParser(ksqlEngine);
  statementExecutor = new StatementExecutor(ksqlEngine, statementParser);
}
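The raw string keys used here correspond to the standard Kafka Streams configuration constants; an equivalent setup that avoids the literals could look like this (a sketch that only swaps in the StreamsConfig constants):

Map<String, Object> props = new HashMap<>();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ksqlStatementExecutorTest");   // "application.id"
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); // "bootstrap.servers"
ksqlEngine = new KsqlEngine(new KsqlConfig(props), new MockKafkaTopicClient());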
Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
The class EndToEndIntegrationTest, method before.
@Before
public void before() throws Exception {
  testHarness = new IntegrationTestHarness();
  testHarness.start();
  Map<String, Object> streamsConfig = testHarness.ksqlConfig.getKsqlStreamConfigProps();
  streamsConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  KsqlConfig ksqlConfig = new KsqlConfig(streamsConfig);
  AdminClient adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
  KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
  ksqlEngine = new KsqlEngine(ksqlConfig, topicClient);
  testHarness.createTopic(pageViewTopic);
  testHarness.createTopic(usersTopic);
  pageViewDataProvider = new PageViewDataProvider();
  testHarness.publishTestData(usersTopic, new UserDataProvider(), System.currentTimeMillis() - 10000);
  testHarness.publishTestData(pageViewTopic, pageViewDataProvider, System.currentTimeMillis());
  ksqlEngine.buildMultipleQueries(format(
      "CREATE TABLE %s (registertime bigint, gender varchar, regionid varchar, userid varchar) "
      + "WITH (kafka_topic='%s', value_format='JSON', key = 'userid');",
      userTable, usersTopic), Collections.emptyMap());
  ksqlEngine.buildMultipleQueries(format(
      "CREATE STREAM %s (viewtime bigint, userid varchar, pageid varchar) "
      + "WITH (kafka_topic='%s', value_format='JSON');",
      pageViewStream, pageViewTopic), Collections.emptyMap());
}
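With the users table and pageviews stream registered, a test can then build and start a persistent query that joins them. A sketch of that follow-up step, assuming buildMultipleQueries returns QueryMetadata objects exposing their underlying KafkaStreams instance; the output stream name is made up for illustration:

String joinQuery = format(
    "CREATE STREAM enriched_pageviews AS "
    + "SELECT pv.userid, u.regionid, pv.pageid "
    + "FROM %s pv LEFT JOIN %s u ON pv.userid = u.userid;",
    pageViewStream, userTable);
QueryMetadata queryMetadata =
    ksqlEngine.buildMultipleQueries(joinQuery, Collections.emptyMap()).get(0);
queryMetadata.getKafkaStreams().start();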
Use of io.confluent.ksql.KsqlEngine in project ksql by confluentinc.
The class SecureIntegrationTest, method givenTestSetupWithConfig.
private void givenTestSetupWithConfig(final Map<String, Object> ksqlConfigs) throws Exception {
  final KsqlConfig ksqlConfig = new KsqlConfig(ksqlConfigs);
  ksqlEngine = new KsqlEngine(
      ksqlConfig,
      new KafkaTopicClientImpl(AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps())));
  execInitCreateStreamQueries();
}
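The interesting part of this helper is the map it receives: security settings flow into the engine through the same KsqlConfig used everywhere else. A hypothetical caller might pass SASL_SSL client settings like these, where SECURE_CLUSTER and the credentials are placeholders for the test's secured broker setup:

Map<String, Object> configs = new HashMap<>();
configs.put("bootstrap.servers", SECURE_CLUSTER.bootstrapServers()); // placeholder secured cluster
configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
configs.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
configs.put(SaslConfigs.SASL_JAAS_CONFIG,
    "org.apache.kafka.common.security.plain.PlainLoginModule required "
    + "username=\"ksql\" password=\"ksql-secret\";");
givenTestSetupWithConfig(configs);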