Use of io.micronaut.context.annotation.Bean in project micronaut-elasticsearch by micronaut-projects.
The class DefaultElasticsearchClientFactory, method elasticsearchTransport.
/**
 * @param elasticsearchConfiguration The {@link DefaultElasticsearchConfigurationProperties} object.
 * @param objectMapper The {@link ObjectMapper} object.
 * @return The {@link ElasticsearchTransport}.
 * @since 4.2.0
 */
@Singleton
@Bean(preDestroy = "close")
ElasticsearchTransport elasticsearchTransport(DefaultElasticsearchConfigurationProperties elasticsearchConfiguration,
                                              ObjectMapper objectMapper) {
    RestClient restClient = restClientBuilder(elasticsearchConfiguration).build();
    return new RestClientTransport(restClient, new JacksonJsonpMapper(objectMapper));
}
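A minimal sketch of how this transport bean might be consumed, assuming the usual elasticsearch-java idiom of wrapping an ElasticsearchTransport in an ElasticsearchClient; the SearchService class name is illustrative, not part of the project.

import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.transport.ElasticsearchTransport;
import jakarta.inject.Singleton;

@Singleton
public class SearchService {

    private final ElasticsearchClient client;

    // The ElasticsearchTransport is the @Bean defined above; because it is
    // declared with preDestroy = "close", Micronaut closes it (and the
    // underlying RestClient) when the application context shuts down.
    public SearchService(ElasticsearchTransport transport) {
        // Wrapping the transport in a typed client is the standard
        // elasticsearch-java pattern.
        this.client = new ElasticsearchClient(transport);
    }
}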
Use of io.micronaut.context.annotation.Bean in project micronaut-kafka by micronaut-projects.
The class KafkaProducerFactory, method getProducer.
/**
 * Creates a new {@link KafkaProducer} for the given configuration.
 *
 * @param injectionPoint The injection point used to create the bean
 * @param producerConfiguration An optional producer configuration
 * @param <K> The key type
 * @param <V> The value type
 * @return The producer
 */
@Bean
@Any
public <K, V> Producer<K, V> getProducer(
        @Nullable InjectionPoint<KafkaProducer<K, V>> injectionPoint,
        @Nullable @Parameter AbstractKafkaProducerConfiguration<K, V> producerConfiguration) {
    // No injection point: the producer was requested programmatically, so an
    // explicit configuration is required.
    if (injectionPoint == null) {
        if (producerConfiguration != null) {
            Optional<Serializer<K>> keySerializer = producerConfiguration.getKeySerializer();
            Optional<Serializer<V>> valueSerializer = producerConfiguration.getValueSerializer();
            Properties config = producerConfiguration.getConfig();
            if (keySerializer.isPresent() && valueSerializer.isPresent()) {
                Serializer<K> ks = keySerializer.get();
                Serializer<V> vs = valueSerializer.get();
                return new KafkaProducer<>(config, ks, vs);
            } else if (keySerializer.isPresent() || valueSerializer.isPresent()) {
                throw new ConfigurationException("Both the [keySerializer] and [valueSerializer] must be set when setting either");
            } else {
                return new KafkaProducer<>(config);
            }
        } else {
            throw new ConfigurationException("No Kafka configuration specified when using direct instantiation");
        }
    }
    // Resolve the K/V type arguments from the injection point.
    Argument<?> argument;
    if (injectionPoint instanceof FieldInjectionPoint) {
        argument = ((FieldInjectionPoint<?, ?>) injectionPoint).asArgument();
    } else if (injectionPoint instanceof ArgumentInjectionPoint) {
        argument = ((ArgumentInjectionPoint<?, ?>) injectionPoint).getArgument();
    } else {
        throw new ConfigurationException("Cannot directly retrieve KafkaProducer instances. Use @Inject or constructor injection");
    }
    Argument<?> k = argument.getTypeVariable("K").orElse(null);
    Argument<?> v = argument.getTypeVariable("V").orElse(null);
    if (k == null || v == null) {
        throw new ConfigurationException("@KafkaClient used on type missing generic argument values for Key and Value: " + injectionPoint);
    }
    // The @KafkaClient value, if present, selects a named configuration.
    final String id = injectionPoint.getAnnotationMetadata().stringValue(KafkaClient.class).orElse(null);
    return getKafkaProducer(id, null, k, v, false);
}
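For context, a hedged sketch of the injection side this factory serves: constructor injection supplies an ArgumentInjectionPoint from which the factory resolves the K and V type variables. The EventPublisher class, the "event-producer" client id, and the "events" topic are all illustrative, not taken from the project.

import io.micronaut.configuration.kafka.annotation.KafkaClient;
import jakarta.inject.Singleton;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

@Singleton
public class EventPublisher {

    private final Producer<String, String> producer;

    // @KafkaClient injection routes through getProducer(...) above; the
    // injection point carries the generic arguments K = String, V = String.
    public EventPublisher(@KafkaClient("event-producer") Producer<String, String> producer) {
        this.producer = producer;
    }

    public void publish(String key, String value) {
        // Topic name is illustrative and assumed to exist.
        producer.send(new ProducerRecord<>("events", key, value));
    }
}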
Use of io.micronaut.context.annotation.Bean in project micronaut-camunda-bpm by camunda-community-hub.
The class ProcessEngineFactory, method processEngine.
@Singleton
@Bean(preDestroy = "close")
public ProcessEngine processEngine(ProcessEngineConfiguration processEngineConfiguration,
                                   CamundaVersion camundaVersion,
                                   SynchronousTransactionManager<Connection> transactionManager,
                                   BasicJdbcConfiguration basicJdbcConfiguration,
                                   ParallelInitializationService parallelInitializationService) {
    if (camundaVersion.getVersion().isPresent()) {
        log.info("Camunda version: {}", camundaVersion.getVersion().get());
    } else {
        log.warn("The Camunda version cannot be determined. If you created a Fat/Uber/Shadow JAR then please consider using the Micronaut Application Plugin's 'dockerBuild' task to create a Docker image.");
    }
    log.info("Building process engine connected to {}", basicJdbcConfiguration.getUrl());
    Instant start = Instant.now();
    ProcessEngine processEngine = transactionManager.executeWrite(
            transactionStatus -> processEngineConfiguration.buildProcessEngine());
    log.info("Started process engine in {}ms", ChronoUnit.MILLIS.between(start, Instant.now()));
    log.debug("Starting Camunda related services...");
    parallelInitializationService.process(processEngine);
    log.debug("Camunda related services started in {}ms", ChronoUnit.MILLIS.between(start, Instant.now()));
    return processEngine;
}
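Once built, the engine is an ordinary singleton bean and can be injected wherever process instances are started; a minimal sketch using Camunda's standard RuntimeService API. The OrderProcessStarter class name and the "order-fulfilment" process definition key are illustrative assumptions.

import jakarta.inject.Singleton;
import org.camunda.bpm.engine.ProcessEngine;

@Singleton
public class OrderProcessStarter {

    private final ProcessEngine processEngine;

    public OrderProcessStarter(ProcessEngine processEngine) {
        this.processEngine = processEngine;
    }

    public void start(String businessKey) {
        // Starts a process instance by its definition key; the key is
        // hypothetical and must match a deployed BPMN model.
        processEngine.getRuntimeService()
                .startProcessInstanceByKey("order-fulfilment", businessKey);
    }
}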
Use of io.micronaut.context.annotation.Bean in project micronaut-graphql by micronaut-projects.
The class GraphQLFactory, method graphQL.
@Bean
@Singleton
public GraphQL graphQL(ResourceResolver resourceResolver, HelloDataFetcher helloDataFetcher) {
    SchemaParser schemaParser = new SchemaParser();
    SchemaGenerator schemaGenerator = new SchemaGenerator();

    // Parse the schema.
    TypeDefinitionRegistry typeRegistry = new TypeDefinitionRegistry();
    typeRegistry.merge(schemaParser.parse(new BufferedReader(new InputStreamReader(
            resourceResolver.getResourceAsStream("classpath:schema.graphqls").get()))));

    // Create the runtime wiring.
    RuntimeWiring runtimeWiring = RuntimeWiring.newRuntimeWiring()
            .type("Query", typeWiring -> typeWiring.dataFetcher("hello", helloDataFetcher))
            .build();

    // Create the executable schema.
    GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeRegistry, runtimeWiring);

    // Return the GraphQL bean.
    return GraphQL.newGraphQL(graphQLSchema).build();
}
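A short usage sketch: executing the "hello" field against the bean built above goes through graphql-java's GraphQL.execute. The HelloRunner class name is illustrative.

import java.util.Map;

import graphql.ExecutionResult;
import graphql.GraphQL;
import jakarta.inject.Singleton;

@Singleton
public class HelloRunner {

    private final GraphQL graphQL;

    public HelloRunner(GraphQL graphQL) {
        this.graphQL = graphQL;
    }

    public String hello() {
        // Runs the "hello" query wired to HelloDataFetcher above.
        ExecutionResult result = graphQL.execute("{ hello }");
        Map<String, Object> data = result.getData();
        return (String) data.get("hello");
    }
}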
Use of io.micronaut.context.annotation.Bean in project micronaut-graphql by micronaut-projects.
The class GraphQLFactory, method graphQL (to-do example).
@Bean
@Singleton
public GraphQL graphQL(ResourceResolver resourceResolver,
                       ToDosDataFetcher toDosDataFetcher,
                       CreateToDoDataFetcher createToDoDataFetcher,
                       CompleteToDoDataFetcher completeToDoDataFetcher,
                       DeleteToDoDataFetcher deleteToDoDataFetcher,
                       AuthorDataFetcher authorDataFetcher) {
    SchemaParser schemaParser = new SchemaParser();
    SchemaGenerator schemaGenerator = new SchemaGenerator();

    // Parse the schema.
    TypeDefinitionRegistry typeRegistry = new TypeDefinitionRegistry();
    typeRegistry.merge(schemaParser.parse(new BufferedReader(new InputStreamReader(
            resourceResolver.getResourceAsStream("classpath:schema.graphqls").get()))));

    // Create the runtime wiring.
    RuntimeWiring runtimeWiring = RuntimeWiring.newRuntimeWiring()
            .type("Query", typeWiring -> typeWiring
                    .dataFetcher("toDos", toDosDataFetcher))
            .type("Mutation", typeWiring -> typeWiring
                    .dataFetcher("createToDo", createToDoDataFetcher)
                    .dataFetcher("completeToDo", completeToDoDataFetcher)
                    .dataFetcher("deleteToDo", deleteToDoDataFetcher))
            .type("ToDo", typeWiring -> typeWiring
                    .dataFetcher("author", authorDataFetcher))
            .build();

    // Create the executable schema.
    GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeRegistry, runtimeWiring);

    // Return the GraphQL bean.
    return GraphQL.newGraphQL(graphQLSchema).build();
}
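A hedged example of invoking one of the wired mutations through graphql-java's ExecutionInput. The wiring above only names the createToDo field; the title/author arguments and the returned id field are assumptions about schema.graphqls (which is not shown here), and ToDoClient is an illustrative class name.

import java.util.Map;

import graphql.ExecutionInput;
import graphql.ExecutionResult;
import graphql.GraphQL;
import jakarta.inject.Singleton;

@Singleton
public class ToDoClient {

    private final GraphQL graphQL;

    public ToDoClient(GraphQL graphQL) {
        this.graphQL = graphQL;
    }

    public ExecutionResult createToDo(String title, String author) {
        // Argument names and the selection set are assumed; adjust to the
        // actual schema definition.
        ExecutionInput input = ExecutionInput.newExecutionInput()
                .query("mutation Create($title: String!, $author: String!) { "
                        + "createToDo(title: $title, author: $author) { id } }")
                .variables(Map.of("title", title, "author", author))
                .build();
        return graphQL.execute(input);
    }
}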