Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.
The class ExecutionService, method requeuePendingTasks:
public int requeuePendingTasks() {
    // Tasks last touched before this cutoff are eligible for requeue.
    long threshold = System.currentTimeMillis() - taskRequeueTimeout;
    List<WorkflowDef> workflowDefs = metadataDAO.getAllWorkflowDefs();
    int count = 0;
    // Sweep the running workflows of every registered workflow definition.
    for (WorkflowDef workflowDef : workflowDefs) {
        List<Workflow> workflows = workflowExecutor.getRunningWorkflows(workflowDef.getName(), workflowDef.getVersion());
        for (Workflow workflow : workflows) {
            count += requeuePendingTasks(workflow, threshold);
        }
    }
    return count;
}
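Because requeuePendingTasks() sweeps every running workflow of every registered definition, a caller would typically run it on a timer rather than per request. A minimal sketch of such a periodic sweep; the executionService and logger fields and the 60-second cadence are assumptions for illustration, not part of the Conductor source:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Hypothetical periodic requeue sweep; executionService and the cadence are assumptions.
ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
scheduler.scheduleWithFixedDelay(
    () -> {
        int requeued = executionService.requeuePendingTasks();
        logger.info("Requeued {} pending tasks", requeued);
    },
    60, 60, TimeUnit.SECONDS);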
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.
The class ElasticSearchRestDAOV5, method updateWorkflow:
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }
    long startTime = Instant.now().toEpochMilli();
    UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
    // Zip the parallel keys/values arrays into the partial-update document.
    Map<String, Object> source = IntStream.range(0, keys.length).boxed()
        .collect(Collectors.toMap(i -> keys[i], i -> values[i]));
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    // Retry transient Elasticsearch failures; IOExceptions are rethrown so the retry wrapper sees them.
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(request);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");
    long endTime = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
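The parallel keys/values arrays become the fields of the partial-update document, one field per pair. A hedged usage sketch; the DAO reference, the instance id, and the field names here are assumptions chosen for illustration:

// Hypothetical call: patch two fields of an already-indexed workflow document.
indexDAO.updateWorkflow(
    "workflow-instance-id-001",                        // assumed instance id
    new String[] {"status", "reasonForIncompletion"},  // assumed indexed fields
    new Object[] {"FAILED", "downstream timeout"});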
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.
The class ElasticSearchRestDAOV6, method updateWorkflow:
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }
    long startTime = Instant.now().toEpochMilli();
    // Honor the configured document-type override, falling back to the default workflow doc type.
    String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride;
    UpdateRequest request = new UpdateRequest(workflowIndexName, docType, workflowInstanceId);
    // Zip the parallel keys/values arrays into the partial-update document.
    Map<String, Object> source = IntStream.range(0, keys.length).boxed()
        .collect(Collectors.toMap(i -> keys[i], i -> values[i]));
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    // Retry transient Elasticsearch failures; IOExceptions are rethrown so the retry wrapper sees them.
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(request);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");
    long endTime = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, endTime - startTime);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
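The V6 variant is otherwise identical to the V5 one above; it differs only in resolving the document type from docTypeOverride when one is configured (falling back to WORKFLOW_DOC_TYPE) and in writing to workflowIndexName rather than indexName. Note that the Monitors.recordESIndexTime call still reports WORKFLOW_DOC_TYPE even when an override is in effect.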
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.
The class TestElasticSearchRestDAOV5, method createTestWorkflow:
@Before
public void createTestWorkflow() throws Exception {
    // define indices
    indexDAO.setup();
    // initialize workflow
    workflow = new Workflow();
    workflow.getInput().put("requestId", "request id 001");
    workflow.getInput().put("hasAwards", true);
    workflow.getInput().put("channelMapping", 5);
    Map<String, Object> name = new HashMap<>();
    name.put("name", "The Who");
    name.put("year", 1970);
    Map<String, Object> name2 = new HashMap<>();
    name2.put("name", "The Doors");
    name2.put("year", 1975);
    List<Object> names = new LinkedList<>();
    names.add(name);
    names.add(name2);
    workflow.getOutput().put("name", name);
    workflow.getOutput().put("names", names);
    workflow.getOutput().put("awards", 200);
    Task task = new Task();
    task.setReferenceTaskName("task2");
    task.getOutputData().put("location", "http://location");
    task.setStatus(Task.Status.COMPLETED);
    Task task2 = new Task();
    task2.setReferenceTaskName("task3");
    task2.getOutputData().put("refId", "abcddef_1234_7890_aaffcc");
    task2.setStatus(Task.Status.SCHEDULED);
    workflow.getTasks().add(task);
    workflow.getTasks().add(task2);
}
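A minimal sketch of how a test might exercise this fixture, assuming the test assigns a workflow id and that the DAO exposes an indexWorkflow method taking a Workflow; the id value and the method body are assumptions, not copied from the test class:

@Test
public void shouldIndexWorkflow() {
    // Hypothetical: the id and the indexing call are assumptions for illustration.
    workflow.setWorkflowId("workflow-id-001");
    indexDAO.indexWorkflow(workflow);
}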
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix.
The class TestElasticSearchDAOV5, method createTestWorkflow:
@Before
public void createTestWorkflow() throws Exception {
    // define indices
    indexDAO.setup();
    // initialize workflow
    workflow = new Workflow();
    workflow.getInput().put("requestId", "request id 001");
    workflow.getInput().put("hasAwards", true);
    workflow.getInput().put("channelMapping", 5);
    Map<String, Object> name = new HashMap<>();
    name.put("name", "The Who");
    name.put("year", 1970);
    Map<String, Object> name2 = new HashMap<>();
    name2.put("name", "The Doors");
    name2.put("year", 1975);
    List<Object> names = new LinkedList<>();
    names.add(name);
    names.add(name2);
    workflow.getOutput().put("name", name);
    workflow.getOutput().put("names", names);
    workflow.getOutput().put("awards", 200);
    Task task = new Task();
    task.setReferenceTaskName("task2");
    task.getOutputData().put("location", "http://location");
    task.setStatus(Task.Status.COMPLETED);
    Task task2 = new Task();
    task2.setReferenceTaskName("task3");
    task2.getOutputData().put("refId", "abcddef_1234_7890_aaffcc");
    task2.setStatus(Task.Status.SCHEDULED);
    workflow.getTasks().add(task);
    workflow.getTasks().add(task2);
}
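The same fixture could also drive the updateWorkflow methods shown earlier; a hedged sketch, assuming the workflow has been assigned an id and indexed first:

// Hypothetical follow-on: patch the indexed document's status field via the
// parallel keys/values arrays. The prior indexing step is assumed.
indexDAO.updateWorkflow(
    workflow.getWorkflowId(),
    new String[] {"status"},
    new Object[] {Workflow.WorkflowStatus.COMPLETED.name()});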