Use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.
From the class KsqlContextTest, the method shouldRunSimpleStatements:
@Test
public void shouldRunSimpleStatements() throws Exception {
  AdminClient adminClient = mock(AdminClient.class);
  KafkaTopicClient kafkaTopicClient = mock(KafkaTopicClientImpl.class);
  KsqlEngine ksqlEngine = mock(KsqlEngine.class);
  Map<QueryId, PersistentQueryMetadata> liveQueryMap = new HashMap<>();
  KsqlContext ksqlContext = new KsqlContext(adminClient, kafkaTopicClient, ksqlEngine);

  // Record the expected engine interactions: the first statement builds no
  // queries, the second builds a persistent CSAS query.
  expect(ksqlEngine.buildMultipleQueries(statement1, Collections.emptyMap()))
      .andReturn(Collections.emptyList());
  expect(ksqlEngine.buildMultipleQueries(statement2, Collections.emptyMap()))
      .andReturn(getQueryMetadata(new QueryId("CSAS_BIGORDERS"), DataSource.DataSourceType.KSTREAM));
  expect(ksqlEngine.getPersistentQueries()).andReturn(liveQueryMap);
  replay(ksqlEngine);

  ksqlContext.sql(statement1);
  ksqlContext.sql(statement2);
  verify(ksqlEngine);
}
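The test above follows EasyMock's record/replay/verify cycle. As a minimal, self-contained sketch of that same cycle applied directly to KafkaTopicClient (the topic name and settings here are illustrative, not taken from the KSQL sources):

import static org.easymock.EasyMock.*;

import io.confluent.ksql.util.KafkaTopicClient;
import java.util.Collections;

public class KafkaTopicClientMockSketch {
  public static void main(String[] args) {
    KafkaTopicClient topicClient = mock(KafkaTopicClient.class);
    // Record phase: declare the single call we expect.
    topicClient.createTopic("example-topic", 1, (short) 1, Collections.emptyMap());
    replay(topicClient);
    // Exercise phase: the code under test would normally make this call.
    topicClient.createTopic("example-topic", 1, (short) 1, Collections.emptyMap());
    // Verify phase: fails if any recorded call never happened.
    verify(topicClient);
  }
}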
Use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.
From the class KsqlRestApplication, the method buildApplication:
public static KsqlRestApplication buildApplication(
    KsqlRestConfig restConfig, boolean isUiEnabled, VersionCheckerAgent versionCheckerAgent)
    throws Exception {
  Map<String, Object> ksqlConfProperties = new HashMap<>();
  ksqlConfProperties.putAll(restConfig.getKsqlConfigProperties());
  KsqlConfig ksqlConfig = new KsqlConfig(ksqlConfProperties);
  AdminClient adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
  KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(adminClient));
  KafkaTopicClient topicClient = ksqlEngine.getTopicClient();

  final String kafkaClusterId;
  try {
    kafkaClusterId = adminClient.describeCluster().clusterId().get();
  } catch (final UnsupportedVersionException e) {
    throw new KsqlException(
        "The Kafka brokers are incompatible with KSQL. KSQL requires broker versions >= 0.10.1.x");
  }

  // Ensure the command topic exists, then register it as a JSON-formatted
  // KSQL topic backing the commands stream.
  String commandTopic = restConfig.getCommandTopic(ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG));
  ensureCommandTopic(restConfig, topicClient, commandTopic);
  Map<String, Expression> commandTopicProperties = new HashMap<>();
  commandTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("json"));
  commandTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(commandTopic));
  ksqlEngine.getDdlCommandExec().execute(new RegisterTopicCommand(
      new RegisterTopic(QualifiedName.of(COMMANDS_KSQL_TOPIC_NAME), false, commandTopicProperties)));
  ksqlEngine.getDdlCommandExec().execute(new CreateStreamCommand(
      "statementText",
      new CreateStream(
          QualifiedName.of(COMMANDS_STREAM_NAME),
          Collections.singletonList(new TableElement("STATEMENT", "STRING")),
          false,
          Collections.singletonMap(DdlConfig.TOPIC_NAME_PROPERTY, new StringLiteral(COMMANDS_KSQL_TOPIC_NAME))),
      Collections.emptyMap(), ksqlEngine.getTopicClient(), true));

  // Wire up the command log: a consumer/producer pair over the command topic.
  Map<String, Object> commandConsumerProperties = restConfig.getCommandConsumerProperties();
  KafkaConsumer<CommandId, Command> commandConsumer = new KafkaConsumer<>(commandConsumerProperties,
      getJsonDeserializer(CommandId.class, true), getJsonDeserializer(Command.class, false));
  KafkaProducer<CommandId, Command> commandProducer = new KafkaProducer<>(
      restConfig.getCommandProducerProperties(), getJsonSerializer(true), getJsonSerializer(false));
  CommandStore commandStore = new CommandStore(commandTopic, commandConsumer, commandProducer,
      new CommandIdAssigner(ksqlEngine.getMetaStore()));

  StatementParser statementParser = new StatementParser(ksqlEngine);
  StatementExecutor statementExecutor = new StatementExecutor(ksqlEngine, statementParser);
  CommandRunner commandRunner = new CommandRunner(statementExecutor, commandStore);

  RootDocument rootDocument = new RootDocument(isUiEnabled, restConfig.getList(RestConfig.LISTENERS_CONFIG).get(0));
  StatusResource statusResource = new StatusResource(statementExecutor);
  StreamedQueryResource streamedQueryResource = new StreamedQueryResource(ksqlEngine, statementParser,
      restConfig.getLong(KsqlRestConfig.STREAMED_QUERY_DISCONNECT_CHECK_MS_CONFIG));
  KsqlResource ksqlResource = new KsqlResource(ksqlEngine, commandStore, statementExecutor,
      restConfig.getLong(KsqlRestConfig.DISTRIBUTED_COMMAND_RESPONSE_TIMEOUT_MS_CONFIG));

  // Replay any commands already in the command topic before serving REST requests.
  commandRunner.processPriorCommands();
  return new KsqlRestApplication(ksqlEngine, restConfig, commandRunner, rootDocument, statusResource,
      streamedQueryResource, ksqlResource, isUiEnabled, versionCheckerAgent,
      new ServerInfo(Version.getVersion(), kafkaClusterId));
}
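buildApplication is the server's composition root: it builds the engine, guarantees the command topic, wires the command log into the REST resources, and replays prior commands before the server accepts requests. A minimal sketch of driving it from an entry point follows; it assumes KsqlRestConfig accepts a flat property map and that passing a null VersionCheckerAgent is tolerated (both are assumptions, not confirmed by the snippet), and the broker and listener addresses are illustrative:

import io.confluent.ksql.rest.server.KsqlRestApplication;
import io.confluent.ksql.rest.server.KsqlRestConfig;
import java.util.Properties;

public class KsqlServerSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092"); // illustrative broker address
    props.put("listeners", "http://localhost:8088");  // illustrative REST listener

    KsqlRestConfig restConfig = new KsqlRestConfig(props);
    KsqlRestApplication app = KsqlRestApplication.buildApplication(restConfig, false, null);
    app.start(); // lifecycle inherited from the underlying REST application class
    app.join();
  }
}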
Use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.
From the class KsqlStructuredDataOutputNodeTest, the method shouldCreateSinkWithCorrectCleanupPolicyNonWindowedTable:
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyNonWindowedTable() {
  KafkaTopicClient topicClientForNonWindowTable = EasyMock.mock(KafkaTopicClient.class);
  KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(false);
  StreamsBuilder streamsBuilder = new StreamsBuilder();

  // Expect the sink topic to be created with log compaction enabled.
  Map<String, String> topicConfig =
      ImmutableMap.of(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
  topicClientForNonWindowTable.createTopic("output", 4, (short) 3, topicConfig);
  EasyMock.replay(topicClientForNonWindowTable);

  SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig,
      topicClientForNonWindowTable, new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
  assertThat(schemaKStream, instanceOf(SchemaKTable.class));
  EasyMock.verify(topicClientForNonWindowTable);
}
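The expectation above pins down the contract: a non-windowed table's sink topic must be created with cleanup.policy=compact, so the topic retains only the latest value per key. Against the plain Kafka AdminClient, the equivalent topic creation would look roughly like this (the broker address is illustrative; the topic name, partition count, and replication factor mirror the test):

import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.config.TopicConfig;

public class CompactedSinkTopicSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    try (AdminClient admin = AdminClient.create(props)) {
      Map<String, String> config = Collections.singletonMap(
          TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
      NewTopic sink = new NewTopic("output", 4, (short) 3).configs(config);
      admin.createTopics(Collections.singleton(sink)).all().get(); // blocks until created
    }
  }
}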
Use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.
From the class KsqlStructuredDataOutputNodeTest, the method shouldCreateSinkWithCorrectCleanupPolicyWindowedTable:
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyWindowedTable() {
  KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
  KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(true);
  StreamsBuilder streamsBuilder = new StreamsBuilder();

  // Expect the sink topic to be created with no topic-level config overrides.
  topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
  EasyMock.replay(topicClientForWindowTable);

  SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig,
      topicClientForWindowTable, new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
  assertThat(schemaKStream, instanceOf(SchemaKTable.class));
  EasyMock.verify(topicClientForWindowTable);
}
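Note the contrast with the non-windowed test: for a windowed table the expected createTopic call passes an empty config map, so no cleanup.policy override is applied and the sink topic falls back to the broker's default cleanup policy (normally delete-based retention).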
Use of io.confluent.ksql.util.KafkaTopicClient in project ksql by confluentinc.
From the class KsqlContext, the method create:
public static KsqlContext create(KsqlConfig ksqlConfig, SchemaRegistryClient schemaRegistryClient) {
  if (ksqlConfig == null) {
    ksqlConfig = new KsqlConfig(Collections.emptyMap());
  }
  Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
  if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
    streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID_OPTION_DEFAULT);
  }
  if (!streamsProperties.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) {
    streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BOOTSTRAP_SERVER_OPTION_DEFAULT);
  }
  AdminClient adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
  KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
  if (schemaRegistryClient == null) {
    return new KsqlContext(adminClient, topicClient, new KsqlEngine(ksqlConfig, topicClient));
  } else {
    return new KsqlContext(adminClient, topicClient,
        new KsqlEngine(ksqlConfig, topicClient, schemaRegistryClient, new MetaStoreImpl()));
  }
}
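Given the defaulting above, the factory can be called with nulls for quick local experiments. A minimal sketch, assuming a broker is reachable at the default bootstrap address and that a pageviews stream already exists (both assumptions; the SQL text is illustrative):

import io.confluent.ksql.KsqlContext;

public class KsqlContextSketch {
  public static void main(String[] args) throws Exception {
    // Nulls trigger the config and client defaults shown in create() above.
    KsqlContext ksqlContext = KsqlContext.create(null, null);
    ksqlContext.sql("CREATE STREAM pageviews_copy AS SELECT * FROM pageviews;");
  }
}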