Search in sources :

Example 66 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

In the class ExecutionService, the method requeuePendingTasks:

/**
 * Re-queues pending tasks of every running workflow, across all registered
 * workflow definitions.
 *
 * @return total number of tasks that were re-queued
 */
public int requeuePendingTasks() {
    // Tasks last updated before this timestamp are considered stale enough to requeue.
    final long threshold = System.currentTimeMillis() - taskRequeueTimeout;
    return metadataDAO.getAllWorkflowDefs().stream()
            .flatMap(def -> workflowExecutor.getRunningWorkflows(def.getName(), def.getVersion()).stream())
            .mapToInt(runningWorkflow -> requeuePendingTasks(runningWorkflow, threshold))
            .sum();
}
Also used : WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef) Workflow(com.netflix.conductor.common.run.Workflow)

Example 67 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

In the class ElasticSearchRestDAOV5, the method updateWorkflow:

/**
 * Partially updates an indexed workflow document, setting the given key/value
 * pairs on it. The update is retried up to {@code RETRY_COUNT} times on failure.
 *
 * @param workflowInstanceId id of the workflow document to update
 * @param keys               field names to set on the document
 * @param values             values for the corresponding keys, positional; a value may be null
 * @throws ApplicationException with code INVALID_INPUT when the arrays differ in length
 */
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }
    long startTime = Instant.now().toEpochMilli();
    UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
    // Build the partial document with a plain HashMap rather than Collectors.toMap:
    // toMap throws NullPointerException on null values, but a caller may
    // legitimately want to set a workflow field to null.
    Map<String, Object> source = new HashMap<>();
    for (int i = 0; i < keys.length; i++) {
        source.put(keys[i], values[i]);
    }
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    // Retry transient ES failures; the IOException is wrapped because the
    // retry supplier cannot throw checked exceptions.
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(request);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");
    long endTime = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Also used : ResponseException(org.elasticsearch.client.ResponseException) GetResponse(org.elasticsearch.action.get.GetResponse) Date(java.util.Date) LoggerFactory(org.slf4j.LoggerFactory) HttpStatus(org.apache.http.HttpStatus) Task(com.netflix.conductor.common.metadata.tasks.Task) QueryBuilders(org.elasticsearch.index.query.QueryBuilders) StringUtils(org.apache.commons.lang3.StringUtils) NByteArrayEntity(org.apache.http.nio.entity.NByteArrayEntity) EntityUtils(org.apache.http.util.EntityUtils) WorkflowSummary(com.netflix.conductor.common.run.WorkflowSummary) PreDestroy(javax.annotation.PreDestroy) DeleteRequest(org.elasticsearch.action.delete.DeleteRequest) IndexRequest(org.elasticsearch.action.index.IndexRequest) TypeFactory(com.fasterxml.jackson.databind.type.TypeFactory) UpdateResponse(org.elasticsearch.action.update.UpdateResponse) Workflow(com.netflix.conductor.common.run.Workflow) Map(java.util.Map) SearchResponse(org.elasticsearch.action.search.SearchResponse) JsonNode(com.fasterxml.jackson.databind.JsonNode) EventExecution(com.netflix.conductor.common.metadata.events.EventExecution) ZoneOffset(java.time.ZoneOffset) NStringEntity(org.apache.http.nio.entity.NStringEntity) DeleteResponse(org.elasticsearch.action.delete.DeleteResponse) MapType(com.fasterxml.jackson.databind.type.MapType) SearchHit(org.elasticsearch.search.SearchHit) Message(com.netflix.conductor.core.events.queue.Message) SearchResult(com.netflix.conductor.common.run.SearchResult) GetRequest(org.elasticsearch.action.get.GetRequest) Trace(com.netflix.conductor.annotations.Trace) TaskExecLog(com.netflix.conductor.common.metadata.tasks.TaskExecLog) TimeZone(java.util.TimeZone) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HttpEntity(org.apache.http.HttpEntity) ContentType(org.apache.http.entity.ContentType) BulkResponse(org.elasticsearch.action.bulk.BulkResponse) FieldSortBuilder(org.elasticsearch.search.sort.FieldSortBuilder) Instant(java.time.Instant) 
LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Collectors(java.util.stream.Collectors) Executors(java.util.concurrent.Executors) IOUtils(org.apache.commons.io.IOUtils) List(java.util.List) Response(org.elasticsearch.client.Response) LocalDate(java.time.LocalDate) SortOrder(org.elasticsearch.search.sort.SortOrder) BoolQueryBuilder(org.elasticsearch.index.query.BoolQueryBuilder) IntStream(java.util.stream.IntStream) RestClient(org.elasticsearch.client.RestClient) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) XContentType(org.elasticsearch.common.xcontent.XContentType) QueryStringQueryBuilder(org.elasticsearch.index.query.QueryStringQueryBuilder) SimpleDateFormat(java.text.SimpleDateFormat) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) SearchRequest(org.elasticsearch.action.search.SearchRequest) Singleton(javax.inject.Singleton) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Expression(com.netflix.conductor.dao.es5.index.query.parser.Expression) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) SearchSourceBuilder(org.elasticsearch.search.builder.SearchSourceBuilder) IndexResponse(org.elasticsearch.action.index.IndexResponse) LinkedList(java.util.LinkedList) ExecutorService(java.util.concurrent.ExecutorService) ApplicationException(com.netflix.conductor.core.execution.ApplicationException) QueryBuilder(org.elasticsearch.index.query.QueryBuilder) Logger(org.slf4j.Logger) RetryUtil(com.netflix.conductor.common.utils.RetryUtil) TaskSummary(com.netflix.conductor.common.run.TaskSummary) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DateTime(org.joda.time.DateTime) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) UpdateRequest(org.elasticsearch.action.update.UpdateRequest) DocWriteResponse(org.elasticsearch.action.DocWriteResponse) RestHighLevelClient(org.elasticsearch.client.RestHighLevelClient) 
Monitors(com.netflix.conductor.metrics.Monitors) TimeUnit(java.util.concurrent.TimeUnit) IndexDAO(com.netflix.conductor.dao.IndexDAO) ElasticSearchConfiguration(com.netflix.conductor.elasticsearch.ElasticSearchConfiguration) Collections(java.util.Collections) BulkRequest(org.elasticsearch.action.bulk.BulkRequest) InputStream(java.io.InputStream) UpdateResponse(org.elasticsearch.action.update.UpdateResponse) ApplicationException(com.netflix.conductor.core.execution.ApplicationException) UpdateRequest(org.elasticsearch.action.update.UpdateRequest) IOException(java.io.IOException) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor)

Example 68 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

In the class ElasticSearchRestDAOV6, the method updateWorkflow:

/**
 * Partially updates an indexed workflow document, setting the given key/value
 * pairs on it. The update is retried up to {@code RETRY_COUNT} times on failure.
 *
 * @param workflowInstanceId id of the workflow document to update
 * @param keys               field names to set on the document
 * @param values             values for the corresponding keys, positional; a value may be null
 * @throws ApplicationException with code INVALID_INPUT when the arrays differ in length
 */
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }
    long startTime = Instant.now().toEpochMilli();
    String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride;
    UpdateRequest request = new UpdateRequest(workflowIndexName, docType, workflowInstanceId);
    // Build the partial document with a plain HashMap rather than Collectors.toMap:
    // toMap throws NullPointerException on null values, but a caller may
    // legitimately want to set a workflow field to null.
    Map<String, Object> source = new HashMap<>();
    for (int i = 0; i < keys.length; i++) {
        source.put(keys[i], values[i]);
    }
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    // Retry transient ES failures; the IOException is wrapped because the
    // retry supplier cannot throw checked exceptions.
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(request);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");
    long endTime = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId);
    // NOTE(review): the metric records WORKFLOW_DOC_TYPE even when docTypeOverride
    // is in effect for the request above — confirm whether dashboards key on the
    // constant before changing this to docType.
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Also used : ResponseException(org.elasticsearch.client.ResponseException) GetResponse(org.elasticsearch.action.get.GetResponse) RestClientBuilder(org.elasticsearch.client.RestClientBuilder) Date(java.util.Date) LoggerFactory(org.slf4j.LoggerFactory) HttpStatus(org.apache.http.HttpStatus) Task(com.netflix.conductor.common.metadata.tasks.Task) QueryBuilders(org.elasticsearch.index.query.QueryBuilders) StringUtils(org.apache.commons.lang3.StringUtils) NByteArrayEntity(org.apache.http.nio.entity.NByteArrayEntity) EntityUtils(org.apache.http.util.EntityUtils) WorkflowSummary(com.netflix.conductor.common.run.WorkflowSummary) PreDestroy(javax.annotation.PreDestroy) DeleteRequest(org.elasticsearch.action.delete.DeleteRequest) IndexRequest(org.elasticsearch.action.index.IndexRequest) TypeFactory(com.fasterxml.jackson.databind.type.TypeFactory) UpdateResponse(org.elasticsearch.action.update.UpdateResponse) Workflow(com.netflix.conductor.common.run.Workflow) Map(java.util.Map) SearchResponse(org.elasticsearch.action.search.SearchResponse) JsonNode(com.fasterxml.jackson.databind.JsonNode) EventExecution(com.netflix.conductor.common.metadata.events.EventExecution) NStringEntity(org.apache.http.nio.entity.NStringEntity) DeleteResponse(org.elasticsearch.action.delete.DeleteResponse) MapType(com.fasterxml.jackson.databind.type.MapType) SearchHit(org.elasticsearch.search.SearchHit) Message(com.netflix.conductor.core.events.queue.Message) SearchResult(com.netflix.conductor.common.run.SearchResult) GetRequest(org.elasticsearch.action.get.GetRequest) Trace(com.netflix.conductor.annotations.Trace) TaskExecLog(com.netflix.conductor.common.metadata.tasks.TaskExecLog) TimeZone(java.util.TimeZone) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HttpEntity(org.apache.http.HttpEntity) ContentType(org.apache.http.entity.ContentType) BulkResponse(org.elasticsearch.action.bulk.BulkResponse) FieldSortBuilder(org.elasticsearch.search.sort.FieldSortBuilder) Instant(java.time.Instant) 
LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Collectors(java.util.stream.Collectors) Executors(java.util.concurrent.Executors) IOUtils(org.apache.commons.io.IOUtils) List(java.util.List) Response(org.elasticsearch.client.Response) LocalDate(java.time.LocalDate) SortOrder(org.elasticsearch.search.sort.SortOrder) ParserException(com.netflix.conductor.elasticsearch.query.parser.ParserException) BoolQueryBuilder(org.elasticsearch.index.query.BoolQueryBuilder) IntStream(java.util.stream.IntStream) RestClient(org.elasticsearch.client.RestClient) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) XContentType(org.elasticsearch.common.xcontent.XContentType) SimpleDateFormat(java.text.SimpleDateFormat) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) SearchRequest(org.elasticsearch.action.search.SearchRequest) Singleton(javax.inject.Singleton) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) SearchSourceBuilder(org.elasticsearch.search.builder.SearchSourceBuilder) IndexResponse(org.elasticsearch.action.index.IndexResponse) LinkedList(java.util.LinkedList) ExecutorService(java.util.concurrent.ExecutorService) ApplicationException(com.netflix.conductor.core.execution.ApplicationException) QueryBuilder(org.elasticsearch.index.query.QueryBuilder) Logger(org.slf4j.Logger) RetryUtil(com.netflix.conductor.common.utils.RetryUtil) TaskSummary(com.netflix.conductor.common.run.TaskSummary) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DateTime(org.joda.time.DateTime) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) UpdateRequest(org.elasticsearch.action.update.UpdateRequest) DocWriteResponse(org.elasticsearch.action.DocWriteResponse) RestHighLevelClient(org.elasticsearch.client.RestHighLevelClient) Monitors(com.netflix.conductor.metrics.Monitors) TimeUnit(java.util.concurrent.TimeUnit) 
IndexDAO(com.netflix.conductor.dao.IndexDAO) ElasticSearchConfiguration(com.netflix.conductor.elasticsearch.ElasticSearchConfiguration) Collections(java.util.Collections) BulkRequest(org.elasticsearch.action.bulk.BulkRequest) InputStream(java.io.InputStream) UpdateResponse(org.elasticsearch.action.update.UpdateResponse) ApplicationException(com.netflix.conductor.core.execution.ApplicationException) UpdateRequest(org.elasticsearch.action.update.UpdateRequest) IOException(java.io.IOException) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor)

Example 69 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

In the class TestElasticSearchRestDAOV5, the method createTestWorkflow:

/**
 * Builds the workflow fixture used by the tests: creates the ES indices, then
 * populates the shared {@code workflow} field with sample input, output, and
 * two tasks (one COMPLETED, one SCHEDULED).
 */
@Before
public void createTestWorkflow() throws Exception {
    // Create the search indices before any document is written.
    indexDAO.setup();
    workflow = new Workflow();
    Map<String, Object> input = workflow.getInput();
    input.put("requestId", "request id 001");
    input.put("hasAwards", true);
    input.put("channelMapping", 5);
    Map<String, Object> theWho = new HashMap<>();
    theWho.put("name", "The Who");
    theWho.put("year", 1970);
    Map<String, Object> theDoors = new HashMap<>();
    theDoors.put("name", "The Doors");
    theDoors.put("year", 1975);
    List<Object> bands = new LinkedList<>();
    bands.add(theWho);
    bands.add(theDoors);
    Map<String, Object> output = workflow.getOutput();
    output.put("name", theWho);
    output.put("names", bands);
    output.put("awards", 200);
    Task completedTask = new Task();
    completedTask.setReferenceTaskName("task2");
    completedTask.getOutputData().put("location", "http://location");
    completedTask.setStatus(Task.Status.COMPLETED);
    Task scheduledTask = new Task();
    scheduledTask.setReferenceTaskName("task3");
    scheduledTask.getOutputData().put("refId", "abcddef_1234_7890_aaffcc");
    scheduledTask.setStatus(Task.Status.SCHEDULED);
    workflow.getTasks().add(completedTask);
    workflow.getTasks().add(scheduledTask);
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) HashMap(java.util.HashMap) Workflow(com.netflix.conductor.common.run.Workflow) LinkedList(java.util.LinkedList) Before(org.junit.Before)

Example 70 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

In the class TestElasticSearchDAOV5, the method createTestWorkflow:

/**
 * Prepares the workflow fixture for the tests: sets up the ES indices and
 * fills the shared {@code workflow} field with sample input, output, and two
 * tasks (one COMPLETED, one SCHEDULED).
 */
@Before
public void createTestWorkflow() throws Exception {
    // Indices must exist before any document is indexed.
    indexDAO.setup();
    workflow = new Workflow();
    Map<String, Object> workflowInput = workflow.getInput();
    workflowInput.put("requestId", "request id 001");
    workflowInput.put("hasAwards", true);
    workflowInput.put("channelMapping", 5);
    Map<String, Object> firstBand = new HashMap<>();
    firstBand.put("name", "The Who");
    firstBand.put("year", 1970);
    Map<String, Object> secondBand = new HashMap<>();
    secondBand.put("name", "The Doors");
    secondBand.put("year", 1975);
    List<Object> allBands = new LinkedList<>();
    allBands.add(firstBand);
    allBands.add(secondBand);
    Map<String, Object> workflowOutput = workflow.getOutput();
    workflowOutput.put("name", firstBand);
    workflowOutput.put("names", allBands);
    workflowOutput.put("awards", 200);
    Task doneTask = new Task();
    doneTask.setReferenceTaskName("task2");
    doneTask.getOutputData().put("location", "http://location");
    doneTask.setStatus(Task.Status.COMPLETED);
    Task pendingTask = new Task();
    pendingTask.setReferenceTaskName("task3");
    pendingTask.getOutputData().put("refId", "abcddef_1234_7890_aaffcc");
    pendingTask.setStatus(Task.Status.SCHEDULED);
    workflow.getTasks().add(doneTask);
    workflow.getTasks().add(pendingTask);
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) HashMap(java.util.HashMap) Workflow(com.netflix.conductor.common.run.Workflow) LinkedList(java.util.LinkedList) Before(org.junit.Before)

Aggregations

Workflow (com.netflix.conductor.common.run.Workflow)360 Test (org.junit.Test)259 Task (com.netflix.conductor.common.metadata.tasks.Task)246 WorkflowTask (com.netflix.conductor.common.metadata.workflow.WorkflowTask)206 HashMap (java.util.HashMap)154 WorkflowDef (com.netflix.conductor.common.metadata.workflow.WorkflowDef)149 SubWorkflow (com.netflix.conductor.core.execution.tasks.SubWorkflow)130 TaskDef (com.netflix.conductor.common.metadata.tasks.TaskDef)97 UserTask (com.netflix.conductor.tests.utils.UserTask)73 LinkedList (java.util.LinkedList)57 Map (java.util.Map)55 List (java.util.List)54 ApplicationException (com.netflix.conductor.core.execution.ApplicationException)52 ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString)50 Collections (java.util.Collections)36 Collectors (java.util.stream.Collectors)35 Logger (org.slf4j.Logger)35 LoggerFactory (org.slf4j.LoggerFactory)35 TaskResult (com.netflix.conductor.common.metadata.tasks.TaskResult)34 WorkflowSystemTask (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask)34