
Example 91 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

the class SimpleTaskMapper method getMappedTasks.

/**
 * This method maps a {@link WorkflowTask} of type {@link TaskType#SIMPLE}
 * to a {@link Task}
 *
 * @param taskMapperContext a wrapper containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the task id
 * @throws TerminateWorkflowException if the task definition does not exist
 * @return a list containing a single mapped {@link Task}
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in SimpleTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    int retryCount = taskMapperContext.getRetryCount();
    String retriedTaskId = taskMapperContext.getRetryTaskId();
    TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition()).orElseThrow(() -> {
        String reason = String.format("Invalid task. Task %s does not have a definition", taskToSchedule.getName());
        return new TerminateWorkflowException(reason);
    });
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, taskDefinition, taskMapperContext.getTaskId());
    Task simpleTask = new Task();
    simpleTask.setStartDelayInSeconds(taskToSchedule.getStartDelay());
    simpleTask.setTaskId(taskMapperContext.getTaskId());
    simpleTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    simpleTask.setInputData(input);
    simpleTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    simpleTask.setWorkflowType(workflowInstance.getWorkflowName());
    simpleTask.setStatus(Task.Status.SCHEDULED);
    simpleTask.setTaskType(taskToSchedule.getName());
    simpleTask.setTaskDefName(taskToSchedule.getName());
    simpleTask.setCorrelationId(workflowInstance.getCorrelationId());
    simpleTask.setScheduledTime(System.currentTimeMillis());
    simpleTask.setRetryCount(retryCount);
    simpleTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    simpleTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds());
    simpleTask.setWorkflowTask(taskToSchedule);
    simpleTask.setRetriedTaskId(retriedTaskId);
    simpleTask.setWorkflowPriority(workflowInstance.getPriority());
    simpleTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
    simpleTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
    return Collections.singletonList(simpleTask);
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) TerminateWorkflowException(com.netflix.conductor.core.execution.TerminateWorkflowException) TaskDef(com.netflix.conductor.common.metadata.tasks.TaskDef) Workflow(com.netflix.conductor.common.run.Workflow)
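
For context, a minimal test-style sketch of driving this mapper is shown below. It assumes the TaskMapperContext builder with with*-style setters used in Conductor's own mapper tests and a ParametersUtils instance (e.g. a Mockito mock) named parametersUtils; the task name, reference name and ids are illustrative, not taken from the Conductor sources.

// Minimal sketch: map one SIMPLE WorkflowTask and check the resulting Task.
// Assumes parametersUtils (ParametersUtils) is in scope, e.g. a Mockito mock.
WorkflowTask taskToSchedule = new WorkflowTask();
taskToSchedule.setName("simple_task");                        // illustrative name
taskToSchedule.setTaskReferenceName("simple_task_ref");
taskToSchedule.setTaskDefinition(new TaskDef("simple_task")); // inline definition so the mapper does not throw

Workflow workflow = new Workflow();
workflow.setWorkflowDefinition(new WorkflowDef());

TaskMapperContext context = TaskMapperContext.newBuilder()
        .withWorkflowDefinition(workflow.getWorkflowDefinition())
        .withWorkflowInstance(workflow)
        .withTaskToSchedule(taskToSchedule)
        .withTaskInput(new HashMap<>())
        .withRetryCount(0)
        .withTaskId(UUID.randomUUID().toString())
        .build();

List<Task> mapped = new SimpleTaskMapper(parametersUtils).getMappedTasks(context);
assertEquals(1, mapped.size());
assertEquals(Task.Status.SCHEDULED, mapped.get(0).getStatus());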

Example 92 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

the class TerminateTaskMapper method getMappedTasks.

@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) {
    logger.debug("TaskMapperContext {} in TerminateTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    String taskId = taskMapperContext.getTaskId();
    Map<String, Object> taskInput = parametersUtils.getTaskInputV2(taskMapperContext.getTaskToSchedule().getInputParameters(), workflowInstance, taskId, null);
    Task task = new Task();
    task.setTaskType(Terminate.TASK_NAME);
    task.setTaskDefName(taskToSchedule.getName());
    task.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    task.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    task.setWorkflowType(workflowInstance.getWorkflowName());
    task.setCorrelationId(workflowInstance.getCorrelationId());
    task.setScheduledTime(System.currentTimeMillis());
    task.setStartTime(System.currentTimeMillis());
    task.setInputData(taskInput);
    task.setTaskId(taskId);
    task.setStatus(Task.Status.IN_PROGRESS);
    task.setWorkflowTask(taskToSchedule);
    task.setWorkflowPriority(workflowInstance.getPriority());
    return singletonList(task);
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) Workflow(com.netflix.conductor.common.run.Workflow)
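
Unlike the SIMPLE mapper above, this mapper schedules the task directly in IN_PROGRESS, because TERMINATE is executed by the engine itself rather than polled by a worker. Below is a hedged sketch of how such a task might be declared in a workflow definition; the task and reference names are illustrative, and the "terminationStatus" and "workflowOutput" input keys are assumptions about what the Terminate system task reads, not an authoritative reference.

// Hedged sketch: declaring a TERMINATE task in a workflow definition.
WorkflowTask terminate = new WorkflowTask();
terminate.setName("terminate_on_failure");                 // illustrative name
terminate.setTaskReferenceName("terminate_on_failure_ref");
terminate.setType(TaskType.TERMINATE.name());

Map<String, Object> input = new HashMap<>();
input.put("terminationStatus", "FAILED");                  // assumed key read by the Terminate task
input.put("workflowOutput", "${some_task_ref.output}");    // assumed key; value copied to the workflow output
terminate.setInputParameters(input);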

Example 93 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

the class SubWorkflow method execute.

@Override
public boolean execute(Workflow workflow, Task task, WorkflowExecutor provider) {
    String workflowId = task.getSubWorkflowId();
    if (StringUtils.isEmpty(workflowId)) {
        return false;
    }
    Workflow subWorkflow = provider.getWorkflow(workflowId, false);
    WorkflowStatus subWorkflowStatus = subWorkflow.getStatus();
    if (!subWorkflowStatus.isTerminal()) {
        return false;
    }
    updateTaskStatus(subWorkflow, task);
    return true;
}
Also used : Workflow(com.netflix.conductor.common.run.Workflow) WorkflowStatus(com.netflix.conductor.common.run.Workflow.WorkflowStatus)
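
The updateTaskStatus helper referenced above is not shown on this page. The sketch below illustrates the kind of mapping such a helper performs once the sub-workflow reaches a terminal state; it is an assumption for illustration, not the verbatim Conductor implementation.

// Illustrative only: fold a terminal sub-workflow status back into the parent
// SUB_WORKFLOW task and carry over the sub-workflow output.
private void updateTaskStatusSketch(Workflow subWorkflow, Task task) {
    switch (subWorkflow.getStatus()) {
        case COMPLETED:
            task.setStatus(Task.Status.COMPLETED);
            break;
        case TERMINATED:
            task.setStatus(Task.Status.CANCELED);
            break;
        case TIMED_OUT:
            task.setStatus(Task.Status.TIMED_OUT);
            break;
        default:
            task.setStatus(Task.Status.FAILED);
            break;
    }
    task.setOutputData(subWorkflow.getOutput());
    if (!task.getStatus().isSuccessful()) {
        task.setReasonForIncompletion(subWorkflow.getReasonForIncompletion());
    }
}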

Example 94 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

the class ElasticSearchRestDAOV7 method updateWorkflow.

@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }
    long startTime = Instant.now().toEpochMilli();
    UpdateRequest request = new UpdateRequest(workflowIndexName, workflowInstanceId);
    Map<String, Object> source = IntStream.range(0, keys.length).boxed().collect(Collectors.toMap(i -> keys[i], i -> values[i]));
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(request, RequestOptions.DEFAULT);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");
    long endTime = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Also used : GetResponse(org.elasticsearch.action.get.GetResponse) Date(java.util.Date) LoggerFactory(org.slf4j.LoggerFactory) HttpStatus(org.apache.http.HttpStatus) Task(com.netflix.conductor.common.metadata.tasks.Task) org.elasticsearch.client(org.elasticsearch.client) QueryBuilders(org.elasticsearch.index.query.QueryBuilders) StringUtils(org.apache.commons.lang3.StringUtils) NByteArrayEntity(org.apache.http.nio.entity.NByteArrayEntity) EntityUtils(org.apache.http.util.EntityUtils) WorkflowSummary(com.netflix.conductor.common.run.WorkflowSummary) PreDestroy(javax.annotation.PreDestroy) DeleteRequest(org.elasticsearch.action.delete.DeleteRequest) IndexRequest(org.elasticsearch.action.index.IndexRequest) Settings(org.elasticsearch.common.settings.Settings) TypeFactory(com.fasterxml.jackson.databind.type.TypeFactory) UpdateResponse(org.elasticsearch.action.update.UpdateResponse) Workflow(com.netflix.conductor.common.run.Workflow) Map(java.util.Map) SearchResponse(org.elasticsearch.action.search.SearchResponse) JsonNode(com.fasterxml.jackson.databind.JsonNode) EventExecution(com.netflix.conductor.common.metadata.events.EventExecution) NStringEntity(org.apache.http.nio.entity.NStringEntity) DeleteResponse(org.elasticsearch.action.delete.DeleteResponse) MapType(com.fasterxml.jackson.databind.type.MapType) SearchHit(org.elasticsearch.search.SearchHit) Message(com.netflix.conductor.core.events.queue.Message) SearchResult(com.netflix.conductor.common.run.SearchResult) GetRequest(org.elasticsearch.action.get.GetRequest) Trace(com.netflix.conductor.annotations.Trace) TaskExecLog(com.netflix.conductor.common.metadata.tasks.TaskExecLog) TimeZone(java.util.TimeZone) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HttpEntity(org.apache.http.HttpEntity) ContentType(org.apache.http.entity.ContentType) BulkResponse(org.elasticsearch.action.bulk.BulkResponse) FieldSortBuilder(org.elasticsearch.search.sort.FieldSortBuilder) Instant(java.time.Instant) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Collectors(java.util.stream.Collectors) Executors(java.util.concurrent.Executors) IOUtils(org.apache.commons.io.IOUtils) List(java.util.List) LocalDate(java.time.LocalDate) SortOrder(org.elasticsearch.search.sort.SortOrder) ParserException(com.netflix.conductor.elasticsearch.query.parser.ParserException) BoolQueryBuilder(org.elasticsearch.index.query.BoolQueryBuilder) IntStream(java.util.stream.IntStream) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) XContentType(org.elasticsearch.common.xcontent.XContentType) SimpleDateFormat(java.text.SimpleDateFormat) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) SearchRequest(org.elasticsearch.action.search.SearchRequest) Singleton(javax.inject.Singleton) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) SearchSourceBuilder(org.elasticsearch.search.builder.SearchSourceBuilder) IndexResponse(org.elasticsearch.action.index.IndexResponse) LinkedList(java.util.LinkedList) ExecutorService(java.util.concurrent.ExecutorService) ApplicationException(com.netflix.conductor.core.execution.ApplicationException) QueryBuilder(org.elasticsearch.index.query.QueryBuilder) Logger(org.slf4j.Logger) RetryUtil(com.netflix.conductor.common.utils.RetryUtil) CreateIndexRequest(org.elasticsearch.client.indices.CreateIndexRequest) TaskSummary(com.netflix.conductor.common.run.TaskSummary) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) 
DateTime(org.joda.time.DateTime) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) UpdateRequest(org.elasticsearch.action.update.UpdateRequest) DocWriteResponse(org.elasticsearch.action.DocWriteResponse) Monitors(com.netflix.conductor.metrics.Monitors) TimeUnit(java.util.concurrent.TimeUnit) IndexDAO(com.netflix.conductor.dao.IndexDAO) ElasticSearchConfiguration(com.netflix.conductor.elasticsearch.ElasticSearchConfiguration) Collections(java.util.Collections) BulkRequest(org.elasticsearch.action.bulk.BulkRequest) InputStream(java.io.InputStream)
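
A short usage sketch of this IndexDAO method follows. The field names are illustrative of values stored in the workflow document, not an exhaustive list, and indexDAO and workflowInstanceId are assumed to be in scope.

// Hedged usage sketch: patch selected fields of an indexed workflow document.
// keys.length must equal values.length, otherwise the call throws INVALID_INPUT.
String[] keys = {"status", "endTime"};                                       // illustrative fields
Object[] values = {Workflow.WorkflowStatus.COMPLETED.name(), System.currentTimeMillis()};
indexDAO.updateWorkflow(workflowInstanceId, keys, values);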

Example 95 with Workflow

use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.

the class TestElasticSearchRestDAOV6 method shouldSearchRecentRunningWorkflows.

@Test
public void shouldSearchRecentRunningWorkflows() throws Exception {
    Workflow oldWorkflow = TestUtils.loadWorkflowSnapshot("workflow");
    oldWorkflow.setStatus(Workflow.WorkflowStatus.RUNNING);
    oldWorkflow.setUpdateTime(new DateTime().minusHours(2).toDate().getTime());
    Workflow recentWorkflow = TestUtils.loadWorkflowSnapshot("workflow");
    recentWorkflow.setStatus(Workflow.WorkflowStatus.RUNNING);
    recentWorkflow.setUpdateTime(new DateTime().minusHours(1).toDate().getTime());
    Workflow tooRecentWorkflow = TestUtils.loadWorkflowSnapshot("workflow");
    tooRecentWorkflow.setStatus(Workflow.WorkflowStatus.RUNNING);
    tooRecentWorkflow.setUpdateTime(new DateTime().toDate().getTime());
    indexDAO.indexWorkflow(oldWorkflow);
    indexDAO.indexWorkflow(recentWorkflow);
    indexDAO.indexWorkflow(tooRecentWorkflow);
    Thread.sleep(1000);
    List<String> ids = indexDAO.searchRecentRunningWorkflows(2, 1);
    assertEquals(1, ids.size());
    assertEquals(recentWorkflow.getWorkflowId(), ids.get(0));
}
Also used : Workflow(com.netflix.conductor.common.run.Workflow) DateTime(org.joda.time.DateTime) Test(org.junit.Test)
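
Based on this test, the two arguments bound the last-update window in hours (here between two hours ago and one hour ago), so only the workflow updated one hour ago is returned. Below is a hedged usage sketch; the 24-hour window and the decide() follow-up are illustrative of a typical sweeper-style caller and assume a WorkflowExecutor named workflowExecutor is in scope.

// Hedged sketch: find workflows still marked RUNNING whose last update falls
// between 24 hours and 1 hour ago, then re-evaluate each of them.
List<String> staleWorkflowIds = indexDAO.searchRecentRunningWorkflows(24, 1);
for (String workflowId : staleWorkflowIds) {
    workflowExecutor.decide(workflowId);   // illustrative follow-up action
}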

Aggregations

Workflow (com.netflix.conductor.common.run.Workflow) 360
Test (org.junit.Test) 259
Task (com.netflix.conductor.common.metadata.tasks.Task) 246
WorkflowTask (com.netflix.conductor.common.metadata.workflow.WorkflowTask) 206
HashMap (java.util.HashMap) 154
WorkflowDef (com.netflix.conductor.common.metadata.workflow.WorkflowDef) 149
SubWorkflow (com.netflix.conductor.core.execution.tasks.SubWorkflow) 130
TaskDef (com.netflix.conductor.common.metadata.tasks.TaskDef) 97
UserTask (com.netflix.conductor.tests.utils.UserTask) 73
LinkedList (java.util.LinkedList) 57
Map (java.util.Map) 55
List (java.util.List) 54
ApplicationException (com.netflix.conductor.core.execution.ApplicationException) 52
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString) 50
Collections (java.util.Collections) 36
Collectors (java.util.stream.Collectors) 35
Logger (org.slf4j.Logger) 35
LoggerFactory (org.slf4j.LoggerFactory) 35
TaskResult (com.netflix.conductor.common.metadata.tasks.TaskResult) 34
WorkflowSystemTask (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask) 34