Example usage of com.netflix.conductor.common.utils.JsonMapperProvider in the Netflix Conductor project: the startServer method of the TestElasticSearchRestDAOV7 class.
// One-time test fixture: boots an embedded Elasticsearch 7 node, waits until the
// cluster answers a health request, then wires the REST-based index DAO under test.
@BeforeClass
public static void startServer() throws Exception {
// The system properties must be set BEFORE the configuration object is created,
// because SystemPropertiesElasticSearchConfiguration reads them on lookup.
System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9204");
System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "http://localhost:9204");
// Batch size of 1 forces each document to be indexed immediately, which keeps
// the integration tests deterministic.
System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_INDEX_BATCH_SIZE_PROPERTY_NAME, "1");
configuration = new SystemPropertiesElasticSearchConfiguration();
String host = configuration.getEmbeddedHost();
int port = configuration.getEmbeddedPort();
String clusterName = configuration.getEmbeddedClusterName();
// Start the in-process Elasticsearch node before any client connects to it.
embeddedElasticSearch = new EmbeddedElasticSearchV7(clusterName, host, port);
embeddedElasticSearch.start();
ElasticSearchRestClientBuilderProvider restClientProvider = new ElasticSearchRestClientBuilderProvider(configuration);
RestClientBuilder restClientBuilder = restClientProvider.get();
restClient = restClientBuilder.build();
// Block until the cluster reports at least "yellow" health (or 30s elapses),
// so later tests do not race the node's startup.
Map<String, String> params = new HashMap<>();
params.put("wait_for_status", "yellow");
params.put("timeout", "30s");
Request request = new Request("GET", "/_cluster/health");
request.addParameters(params);
restClient.performRequest(request);
objectMapper = new JsonMapperProvider().get();
// The DAO under test shares the builder (not the already-built client) so it
// manages its own low-level client instance.
indexDAO = new ElasticSearchRestDAOV7(restClientBuilder, configuration, objectMapper);
}
Example usage of com.netflix.conductor.common.utils.JsonMapperProvider in the Netflix Conductor project: the shouldAddIndexPrefixToIndexTemplate method of the TestElasticSearchRestDAOV7 class.
@Test
public void shouldAddIndexPrefixToIndexTemplate() throws Exception {
// A fresh mapper keeps the comparison independent of any shared test state.
ObjectMapper jsonMapper = new JsonMapperProvider().get();
// The template source produced by the DAO must match the expected fixture.
String expectedRaw = TestUtils.loadJsonResource("expected_template_task_log");
String actualRaw = indexDAO.loadTypeMappingSource("/template_task_log.json");
// Compare as parsed JSON trees so formatting differences are irrelevant.
assertEquals(jsonMapper.readTree(expectedRaw), jsonMapper.readTree(actualRaw));
}
Example usage of com.netflix.conductor.common.utils.JsonMapperProvider in the Netflix Conductor project: the selectModulesToLoad method of the ModulesProvider class.
// Builds the ordered list of Guice modules to install, driven entirely by the
// server configuration: persistence backend, indexing, Jersey/Swagger, workflow
// execution locking, and external payload storage. Invalid enum-valued settings
// are surfaced as ProvisionException with the supported values listed.
private List<AbstractModule> selectModulesToLoad() {
Configuration.DB database;
List<AbstractModule> modules = new ArrayList<>();
// Load Jackson module early to make ObjectMapper provider available across all the usages.
modules.add(new JacksonModule());
try {
database = configuration.getDB();
} catch (IllegalArgumentException ie) {
// getDB() throws when the configured db name does not match any DB enum constant.
final String message = "Invalid db name: " + configuration.getDBString() + ", supported values are: " + Arrays.toString(Configuration.DB.values());
logger.error(message);
throw new ProvisionException(message, ie);
}
// Select the persistence modules for the configured backend. Note that REDIS
// falls through to the DYNOMITE wiring (shared cluster module).
switch(database) {
case REDIS:
case DYNOMITE:
modules.add(new DynomiteClusterModule());
modules.add(new RedisWorkflowModule());
logger.info("Starting conductor server using dynomite/redis cluster.");
break;
case MYSQL:
modules.add(new MySQLWorkflowModule());
logger.info("Starting conductor server using MySQL data store.");
break;
case POSTGRES:
modules.add(new PostgresWorkflowModule());
logger.info("Starting conductor server using Postgres data store.");
break;
case MEMORY:
modules.add(new LocalRedisModule());
modules.add(new RedisWorkflowModule());
logger.info("Starting conductor server using in memory data store.");
break;
case REDIS_CLUSTER:
modules.add(new RedisClusterModule());
modules.add(new RedisWorkflowModule());
logger.info("Starting conductor server using redis_cluster.");
break;
case CASSANDRA:
modules.add(new CassandraModule());
logger.info("Starting conductor server using cassandra.");
break;
case REDIS_SENTINEL:
modules.add(new RedisSentinelModule());
modules.add(new RedisWorkflowModule());
logger.info("Starting conductor server using redis_sentinel.");
break;
}
// Indexing is optional; when disabled a no-op implementation is bound instead.
if (configuration.isIndexingPersistenceEnabled())
modules.add(new ElasticSearchModule());
else
modules.add(new NoopIndexModule());
modules.add(new WorkflowExecutorModule());
if (configuration.getJerseyEnabled()) {
modules.add(new JerseyModule());
modules.add(new SwaggerModule());
}
// Distributed workflow-execution locking: pick the configured backend, or fall
// back to a no-op lock when locking is disabled.
if (configuration.enableWorkflowExecutionLock()) {
Configuration.LOCKING_SERVER lockingServer;
try {
lockingServer = configuration.getLockingServer();
} catch (IllegalArgumentException ie) {
final String message = "Invalid locking server name: " + configuration.getLockingServerString() + ", supported values are: " + Arrays.toString(Configuration.LOCKING_SERVER.values());
logger.error(message);
throw new ProvisionException(message, ie);
}
switch(lockingServer) {
case REDIS:
modules.add(new RedisLockModule());
logger.info("Starting locking module using Redis cluster.");
break;
case ZOOKEEPER:
modules.add(new ZookeeperModule());
logger.info("Starting locking module using Zookeeper cluster.");
break;
case LOCAL_ONLY:
modules.add(new LocalOnlyLockModule());
logger.info("Starting locking module using local only JVM locking.");
break;
default:
break;
}
} else {
modules.add(new NoopLockModule());
logger.warn("Starting locking module using Noop Lock.");
}
// External payload storage: any unrecognized value (including the "DUMMY"
// default) leaves the type null and falls through to DummyPayloadStorage.
ExternalPayloadStorageType externalPayloadStorageType = null;
String externalPayloadStorageString = configuration.getProperty("workflow.external.payload.storage", "DUMMY");
try {
externalPayloadStorageType = ExternalPayloadStorageType.valueOf(externalPayloadStorageString);
} catch (IllegalArgumentException e) {
logger.info("External payload storage is not configured, provided: {}, supported values are: {}", externalPayloadStorageString, Arrays.toString(ExternalPayloadStorageType.values()), e);
}
if (externalPayloadStorageType == ExternalPayloadStorageType.S3) {
modules.add(new AbstractModule() {
@Override
protected void configure() {
bind(ExternalPayloadStorage.class).to(S3PayloadStorage.class);
}
});
} else {
modules.add(new AbstractModule() {
@Override
protected void configure() {
bind(ExternalPayloadStorage.class).to(DummyPayloadStorage.class);
}
});
}
// NOTE(review): these three constructors are invoked purely for their side
// effects (the instances are discarded) — presumably each task type registers
// itself globally in its constructor; confirm before refactoring.
new HttpTask(new RestClientManager(configuration), configuration, new JsonMapperProvider().get());
new KafkaPublishTask(configuration, new KafkaProducerManager(configuration), new JsonMapperProvider().get());
new JsonJqTransform(new JsonMapperProvider().get());
modules.add(new ServerModule());
return modules;
}
Example usage of com.netflix.conductor.common.utils.JsonMapperProvider in the Netflix Conductor project: the createKitchenSink method of the JettyServer class.
/**
 * Seeds a freshly started server on the given port with the "kitchen sink"
 * demo data: 41 task definitions, the stored kitchensink workflow plus its
 * sub-flow, one workflow execution, and two ephemeral workflow variants.
 *
 * Fix: the classpath resource streams were previously never closed; each is
 * now opened in a try-with-resources block so the underlying handles are
 * released even if a POST fails.
 *
 * @param port HTTP port the local conductor server is listening on
 * @throws Exception on serialization or HTTP failure
 */
private static void createKitchenSink(int port) throws Exception {
Client client = Client.create();
ObjectMapper objectMapper = new JsonMapperProvider().get();
// Register 40 generic task definitions plus the elasticsearch search task.
List<TaskDef> taskDefs = new LinkedList<>();
TaskDef taskDef;
for (int i = 0; i < 40; i++) {
taskDef = new TaskDef("task_" + i, "task_" + i, 1, 0);
taskDef.setOwnerEmail("example@email.com");
taskDefs.add(taskDef);
}
taskDef = new TaskDef("search_elasticsearch", "search_elasticsearch", 1, 0);
taskDef.setOwnerEmail("example@email.com");
taskDefs.add(taskDef);
client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(objectMapper.writeValueAsString(taskDefs));
/*
 * Kitchensink example (stored workflow with stored tasks)
 */
try (InputStream stream = Main.class.getResourceAsStream("/kitchensink.json")) {
client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream);
}
try (InputStream stream = Main.class.getResourceAsStream("/sub_flow_1.json")) {
client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream);
}
// Kick off one execution of the stored kitchensink workflow.
Map<String, Object> payload = ImmutableMap.of("task2Name", "task_5");
String payloadStr = objectMapper.writeValueAsString(payload);
client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(payloadStr);
logger.info("Kitchen sink workflow is created!");
/*
 * Kitchensink example with ephemeral workflow and stored tasks
 */
try (InputStream ephemeralInputStream = Main.class.getResourceAsStream("/kitchenSink-ephemeralWorkflowWithStoredTasks.json")) {
client.resource("http://localhost:" + port + "/api/workflow/").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream);
}
logger.info("Ephemeral Kitchen sink workflow with stored tasks is created!");
/*
 * Kitchensink example with ephemeral workflow and ephemeral tasks
 */
try (InputStream ephemeralInputStream = Main.class.getResourceAsStream("/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json")) {
client.resource("http://localhost:" + port + "/api/workflow/").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream);
}
logger.info("Ephemeral Kitchen sink workflow with ephemeral tasks is created!");
}
Example usage of com.netflix.conductor.common.utils.JsonMapperProvider in the Netflix Conductor project: the testJsonSerializing method of the TaskSummaryTest class.
@Test
public void testJsonSerializing() throws Exception {
// Round-trip a TaskSummary through Conductor's configured ObjectMapper.
ObjectMapper mapper = new JsonMapperProvider().get();
TaskSummary summary = new TaskSummary(new Task());
String serialized = mapper.writeValueAsString(summary);
// Deserializing must succeed and yield a non-null summary object.
TaskSummary roundTripped = mapper.readValue(serialized, TaskSummary.class);
assertNotNull(roundTripped);
}
Aggregations