Usage of com.netflix.conductor.postgres.PostgresWorkflowModule in the Netflix Conductor project:
the selectModulesToLoad method of the ModulesProvider class.
/**
 * Selects the Guice modules to install, driven entirely by {@code configuration}:
 * the backing data store, indexing backend, Jersey/Swagger HTTP layer,
 * workflow-execution locking, external payload storage, and the built-in
 * system tasks.
 *
 * @return the ordered list of modules to install
 * @throws ProvisionException if the configured db name or locking-server name
 *         does not map to a supported enum value
 */
private List<AbstractModule> selectModulesToLoad() {
    Configuration.DB database;
    List<AbstractModule> modules = new ArrayList<>();

    // Load Jackson module early to make ObjectMapper provider available across all the usages.
    modules.add(new JacksonModule());

    try {
        database = configuration.getDB();
    } catch (IllegalArgumentException ie) {
        final String message = "Invalid db name: " + configuration.getDBString()
                + ", supported values are: " + Arrays.toString(Configuration.DB.values());
        logger.error(message);
        throw new ProvisionException(message, ie);
    }

    // Pick the persistence modules for the configured data store.
    switch (database) {
        case REDIS:
        case DYNOMITE:
            modules.add(new DynomiteClusterModule());
            modules.add(new RedisWorkflowModule());
            logger.info("Starting conductor server using dynomite/redis cluster.");
            break;
        case MYSQL:
            modules.add(new MySQLWorkflowModule());
            logger.info("Starting conductor server using MySQL data store.");
            break;
        case POSTGRES:
            modules.add(new PostgresWorkflowModule());
            logger.info("Starting conductor server using Postgres data store.");
            break;
        case MEMORY:
            modules.add(new LocalRedisModule());
            modules.add(new RedisWorkflowModule());
            logger.info("Starting conductor server using in memory data store.");
            break;
        case REDIS_CLUSTER:
            modules.add(new RedisClusterModule());
            modules.add(new RedisWorkflowModule());
            logger.info("Starting conductor server using redis_cluster.");
            break;
        case CASSANDRA:
            modules.add(new CassandraModule());
            logger.info("Starting conductor server using cassandra.");
            break;
        case REDIS_SENTINEL:
            modules.add(new RedisSentinelModule());
            modules.add(new RedisWorkflowModule());
            logger.info("Starting conductor server using redis_sentinel.");
            break;
        default:
            // Fail fast instead of silently starting without a persistence module,
            // which would otherwise only surface as obscure injection errors later.
            final String unsupported = "Unhandled db: " + database
                    + ", supported values are: " + Arrays.toString(Configuration.DB.values());
            logger.error(unsupported);
            throw new ProvisionException(unsupported);
    }

    // Indexing: real Elasticsearch module, or a no-op stand-in when disabled.
    if (configuration.isIndexingPersistenceEnabled()) {
        modules.add(new ElasticSearchModule());
    } else {
        modules.add(new NoopIndexModule());
    }

    modules.add(new WorkflowExecutorModule());

    if (configuration.getJerseyEnabled()) {
        modules.add(new JerseyModule());
        modules.add(new SwaggerModule());
    }

    // Distributed workflow-execution locking; falls back to a no-op lock when disabled.
    if (configuration.enableWorkflowExecutionLock()) {
        Configuration.LOCKING_SERVER lockingServer;
        try {
            lockingServer = configuration.getLockingServer();
        } catch (IllegalArgumentException ie) {
            final String message = "Invalid locking server name: " + configuration.getLockingServerString()
                    + ", supported values are: " + Arrays.toString(Configuration.LOCKING_SERVER.values());
            logger.error(message);
            throw new ProvisionException(message, ie);
        }
        switch (lockingServer) {
            case REDIS:
                modules.add(new RedisLockModule());
                logger.info("Starting locking module using Redis cluster.");
                break;
            case ZOOKEEPER:
                modules.add(new ZookeeperModule());
                logger.info("Starting locking module using Zookeeper cluster.");
                break;
            case LOCAL_ONLY:
                modules.add(new LocalOnlyLockModule());
                logger.info("Starting locking module using local only JVM locking.");
                break;
            default:
                break;
        }
    } else {
        modules.add(new NoopLockModule());
        logger.warn("Starting locking module using Noop Lock.");
    }

    // External payload storage defaults to DUMMY; an unrecognized value is tolerated
    // (logged, then falls through to the dummy binding below) rather than fatal.
    ExternalPayloadStorageType externalPayloadStorageType = null;
    String externalPayloadStorageString = configuration.getProperty("workflow.external.payload.storage", "DUMMY");
    try {
        externalPayloadStorageType = ExternalPayloadStorageType.valueOf(externalPayloadStorageString);
    } catch (IllegalArgumentException e) {
        logger.info("External payload storage is not configured, provided: {}, supported values are: {}",
                externalPayloadStorageString, Arrays.toString(ExternalPayloadStorageType.values()), e);
    }
    if (externalPayloadStorageType == ExternalPayloadStorageType.S3) {
        modules.add(new AbstractModule() {
            @Override
            protected void configure() {
                bind(ExternalPayloadStorage.class).to(S3PayloadStorage.class);
            }
        });
    } else {
        modules.add(new AbstractModule() {
            @Override
            protected void configure() {
                bind(ExternalPayloadStorage.class).to(DummyPayloadStorage.class);
            }
        });
    }

    // NOTE(review): these instantiations look like dead code but are not — constructing
    // a system task registers it in the global system-task registry as a constructor
    // side effect; the instances themselves are intentionally discarded.
    new HttpTask(new RestClientManager(configuration), configuration, new JsonMapperProvider().get());
    new KafkaPublishTask(configuration, new KafkaProducerManager(configuration), new JsonMapperProvider().get());
    new JsonJqTransform(new JsonMapperProvider().get());

    modules.add(new ServerModule());

    return modules;
}
Aggregations