Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.
The class ListSourceExecutorTest, method shouldNotCallTopicClientForExtendedDescription:
@Test
public void shouldNotCallTopicClientForExtendedDescription() {
  // Given:
  engine.givenSource(DataSourceType.KSTREAM, "stream1");
  final KafkaTopicClient spyTopicClient = spy(engine.getServiceContext().getTopicClient());
  final ServiceContext serviceContext = TestServiceContext.create(
      engine.getServiceContext().getKafkaClientSupplier(),
      engine.getServiceContext().getAdminClient(),
      spyTopicClient,
      engine.getServiceContext().getSchemaRegistryClientFactory(),
      engine.getServiceContext().getConnectClient()
  );

  // When:
  CUSTOM_EXECUTORS.listStreams().execute(
      (ConfiguredStatement<ListStreams>) engine.configure("SHOW STREAMS;"),
      SESSION_PROPERTIES,
      engine.getEngine(),
      serviceContext
  ).getEntity().orElseThrow(IllegalStateException::new);

  // Then:
  verify(spyTopicClient, never()).describeTopic(anyString());
}
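The key detail above is that the spied KafkaTopicClient is threaded into a fresh TestServiceContext, so any topic lookup made while executing SHOW STREAMS would be observed by the spy. Below is a minimal sketch of the same spy-and-verify pattern in isolation, assuming Mockito and JUnit 4 plus the ksql service interfaces are on the classpath; the test class name and the elided "code under test" step are hypothetical.

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;

import io.confluent.ksql.services.KafkaTopicClient;
import org.junit.Test;

public class TopicClientInteractionSketchTest {

  @Test
  public void shouldNotDescribeTopicsOnNonExtendedPath() {
    // Given: a mocked topic client stands in for the client returned by
    // engine.getServiceContext().getTopicClient() in the real test.
    final KafkaTopicClient topicClient = mock(KafkaTopicClient.class);

    // When: ... run the code under test against a ServiceContext wrapping topicClient ...

    // Then: the non-extended listing never asked Kafka to describe a topic.
    verify(topicClient, never()).describeTopic(anyString());
  }
}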
Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.
The class RequestValidatorTest, method shouldExecuteWithSpecifiedServiceContext:
@Test
public void shouldExecuteWithSpecifiedServiceContext() {
  // Given:
  final List<ParsedStatement> statements = givenParsed(SOME_STREAM_SQL);
  final ServiceContext otherServiceContext =
      SandboxedServiceContext.create(TestServiceContext.create());

  // When:
  validator.validate(otherServiceContext, statements, sessionProperties, "sql");

  // Then:
  verify(distributedStatementValidator).create(
      argThat(configured(preparedStatement(instanceOf(CreateStream.class)))),
      same(otherServiceContext),
      any()
  );
}
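Here SandboxedServiceContext.create wraps an existing ServiceContext so validation runs against sandboxed clients, and the same() matcher asserts that exactly that instance, not a copy, reaches the distributed statement validator. A minimal sketch of that reference-identity check follows, assuming Mockito, JUnit 4, and the ksql test utilities; the Forwarder interface is hypothetical and stands in for the validator wiring.

import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import io.confluent.ksql.services.SandboxedServiceContext;
import io.confluent.ksql.services.ServiceContext;
import io.confluent.ksql.services.TestServiceContext;
import org.junit.Test;

public class ServiceContextForwardingSketchTest {

  // Hypothetical collaborator that receives a ServiceContext.
  interface Forwarder {
    void accept(ServiceContext serviceContext);
  }

  @Test
  public void shouldForwardTheExactServiceContextInstance() {
    // Given: a sandboxed wrapper around a test service context.
    final ServiceContext sandboxed = SandboxedServiceContext.create(TestServiceContext.create());
    final Forwarder forwarder = mock(Forwarder.class);

    // When:
    forwarder.accept(sandboxed);

    // Then: same() checks reference identity, not equals().
    verify(forwarder).accept(same(sandboxed));
  }
}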
Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.
The class DefaultSchemaInjector, method forCreateAsStatement:
private Optional<ConfiguredStatement<CreateAsSelect>> forCreateAsStatement(
    final ConfiguredStatement<CreateAsSelect> statement) {
  final CreateAsSelect csStmt = statement.getStatement();
  final CreateSourceAsProperties properties = csStmt.getProperties();

  // Don't need to inject schema if no key schema id and value schema id
  if (!properties.getKeySchemaId().isPresent() && !properties.getValueSchemaId().isPresent()) {
    return Optional.empty();
  }

  final CreateSourceCommand createSourceCommand;
  try {
    final ServiceContext sandboxServiceContext = SandboxedServiceContext.create(serviceContext);
    createSourceCommand = (CreateSourceCommand) executionContext
        .createSandbox(sandboxServiceContext)
        .plan(sandboxServiceContext, statement)
        .getDdlCommand()
        .get();
  } catch (final Exception e) {
    throw new KsqlStatementException(
        "Could not determine output schema for query due to error: " + e.getMessage(),
        statement.getStatementText(),
        e
    );
  }

  final Optional<SchemaAndId> keySchema = getCreateAsKeySchema(statement, createSourceCommand);
  final Optional<SchemaAndId> valueSchema = getCreateAsValueSchema(statement, createSourceCommand);

  final CreateAsSelect withSchema = addSchemaFieldsCas(statement, keySchema, valueSchema);
  final PreparedStatement<CreateAsSelect> prepared = buildPreparedStatement(withSchema);

  final ImmutableMap.Builder<String, Object> overrideBuilder = ImmutableMap.builder();

  // Only store raw schema if schema id is provided by user
  if (properties.getKeySchemaId().isPresent()) {
    keySchema.map(schemaAndId -> overrideBuilder.put(CommonCreateConfigs.KEY_SCHEMA_ID, schemaAndId));
  }
  if (properties.getValueSchemaId().isPresent()) {
    valueSchema.map(schemaAndId -> overrideBuilder.put(CommonCreateConfigs.VALUE_SCHEMA_ID, schemaAndId));
  }

  final ConfiguredStatement<CreateAsSelect> configured = ConfiguredStatement.of(
      prepared,
      statement.getSessionConfig().copyWith(overrideBuilder.build())
  );

  return Optional.of(configured);
}
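The sandbox step is the important ServiceContext usage here: the live context is wrapped in a SandboxedServiceContext and the statement is planned inside a sandboxed execution context, so the dry-run plan that yields the CreateSourceCommand cannot create real topics or register real schemas. Below is a minimal sketch of that pattern pulled out into a helper, assuming the surrounding class already imports the types used in the snippet and that executionContext is a KsqlExecutionContext; the helper itself is hypothetical, not part of DefaultSchemaInjector.

// Hypothetical helper: plan a statement against sandboxed services only.
private CreateSourceCommand planInSandbox(
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext,
    final ConfiguredStatement<?> statement
) {
  // Sandbox the service context so planning cannot touch Kafka or Schema Registry.
  final ServiceContext sandbox = SandboxedServiceContext.create(serviceContext);
  try {
    return (CreateSourceCommand) executionContext
        .createSandbox(sandbox)
        .plan(sandbox, statement)
        .getDdlCommand()
        .orElseThrow(() -> new IllegalStateException("plan produced no DDL command"));
  } catch (final Exception e) {
    throw new KsqlStatementException(
        "Could not determine output schema for query due to error: " + e.getMessage(),
        statement.getStatementText(),
        e
    );
  }
}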
Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.
The class SchemaRegisterInjector, method registerForCreateAs:
private void registerForCreateAs(final ConfiguredStatement<? extends CreateAsSelect> cas) {
  final CreateSourceCommand createSourceCommand;
  try {
    final ServiceContext sandboxServiceContext = SandboxedServiceContext.create(serviceContext);
    createSourceCommand = (CreateSourceCommand) executionContext
        .createSandbox(sandboxServiceContext)
        .plan(sandboxServiceContext, cas)
        .getDdlCommand()
        .get();
  } catch (final Exception e) {
    throw new KsqlStatementException(
        "Could not determine output schema for query due to error: " + e.getMessage(),
        cas.getStatementText(),
        e
    );
  }

  final SchemaAndId rawKeySchema =
      (SchemaAndId) cas.getSessionConfig().getOverrides().get(CommonCreateConfigs.KEY_SCHEMA_ID);
  final SchemaAndId rawValueSchema =
      (SchemaAndId) cas.getSessionConfig().getOverrides().get(CommonCreateConfigs.VALUE_SCHEMA_ID);

  registerSchemas(
      createSourceCommand.getSchema(),
      Pair.of(rawKeySchema, rawValueSchema),
      createSourceCommand.getTopicName(),
      createSourceCommand.getFormats().getKeyFormat(),
      createSourceCommand.getFormats().getKeyFeatures(),
      createSourceCommand.getFormats().getValueFormat(),
      createSourceCommand.getFormats().getValueFeatures(),
      cas.getSessionConfig().getConfig(false),
      cas.getStatementText(),
      true
  );
}
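This is the consuming side of the overrides written by DefaultSchemaInjector in the previous snippet: the raw SchemaAndId instances travel between injectors as session-config overrides keyed by CommonCreateConfigs.KEY_SCHEMA_ID and VALUE_SCHEMA_ID. A minimal sketch of that round trip, assuming schemaAndId, prepared, and statement are already in scope (all three are hypothetical placeholders here):

// Producer side (DefaultSchemaInjector): stash the raw schema in the overrides.
final ImmutableMap.Builder<String, Object> overrideBuilder = ImmutableMap.builder();
overrideBuilder.put(CommonCreateConfigs.KEY_SCHEMA_ID, schemaAndId); // hypothetical SchemaAndId
final ConfiguredStatement<CreateAsSelect> configured = ConfiguredStatement.of(
    prepared, statement.getSessionConfig().copyWith(overrideBuilder.build()));

// Consumer side (SchemaRegisterInjector): read the same instance back.
// The lookup yields null when the user supplied no KEY_SCHEMA_ID.
final SchemaAndId rawKeySchema = (SchemaAndId) configured
    .getSessionConfig()
    .getOverrides()
    .get(CommonCreateConfigs.KEY_SCHEMA_ID);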
Use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.
The class KsqlRestApplication, method buildApplication:
public static KsqlRestApplication buildApplication(
    final KsqlRestConfig restConfig,
    final MetricCollectors metricCollectors
) {
  final Map<String, Object> updatedRestProps = restConfig.getOriginals();
  final KsqlConfig ksqlConfig = new KsqlConfig(restConfig.getKsqlConfigProperties());
  final Vertx vertx = Vertx.vertx(new VertxOptions()
      .setMaxWorkerExecuteTimeUnit(TimeUnit.MILLISECONDS)
      .setMaxWorkerExecuteTime(Long.MAX_VALUE)
      .setMetricsOptions(setUpHttpMetrics(ksqlConfig)));
  vertx.exceptionHandler(t -> log.error("Unhandled exception in Vert.x", t));

  final KsqlClient sharedClient = InternalKsqlClientFactory.createInternalClient(
      PropertiesUtil.toMapStrings(ksqlConfig.originals()),
      SocketAddress::inetSocketAddress,
      vertx
  );
  final Supplier<SchemaRegistryClient> schemaRegistryClientFactory =
      new KsqlSchemaRegistryClientFactory(ksqlConfig, Collections.emptyMap())::get;
  final ConnectClientFactory connectClientFactory = new DefaultConnectClientFactory(ksqlConfig);

  final ServiceContext tempServiceContext = new LazyServiceContext(() ->
      RestServiceContextFactory.create(
          ksqlConfig,
          Optional.empty(),
          schemaRegistryClientFactory,
          connectClientFactory,
          sharedClient,
          Collections.emptyList(),
          Optional.empty()));
  final String kafkaClusterId = KafkaClusterUtil.getKafkaClusterId(tempServiceContext);
  final String ksqlServerId = ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG);

  updatedRestProps.putAll(metricCollectors.addConfluentMetricsContextConfigs(ksqlServerId, kafkaClusterId));
  final KsqlRestConfig updatedRestConfig = new KsqlRestConfig(updatedRestProps);

  final ServiceContext serviceContext = new LazyServiceContext(() ->
      RestServiceContextFactory.create(
          new KsqlConfig(updatedRestConfig.getKsqlConfigProperties()),
          Optional.empty(),
          schemaRegistryClientFactory,
          connectClientFactory,
          sharedClient,
          Collections.emptyList(),
          Optional.empty()));

  return buildApplication(
      "",
      updatedRestConfig,
      KsqlVersionCheckerAgent::new,
      Integer.MAX_VALUE,
      serviceContext,
      schemaRegistryClientFactory,
      connectClientFactory,
      vertx,
      sharedClient,
      RestServiceContextFactory::create,
      RestServiceContextFactory::create,
      metricCollectors
  );
}
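Both service contexts here are created through LazyServiceContext, which defers building the underlying ServiceContext until it is first needed; the supplied lambda is only invoked on first use. A minimal, self-contained sketch of the lazy-initialization idea it represents (not the actual ksql implementation, and LazySupplier is a hypothetical name):

import java.util.function.Supplier;

final class LazySupplier<T> {

  private final Supplier<T> factory;
  private volatile T value; // created on first get()

  LazySupplier(final Supplier<T> factory) {
    this.factory = factory;
  }

  T get() {
    // Double-checked locking: the factory runs at most once, and only when
    // the value is actually requested.
    if (value == null) {
      synchronized (this) {
        if (value == null) {
          value = factory.get();
        }
      }
    }
    return value;
  }
}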