Use of com.netflix.conductor.common.utils.RetryUtil in the Netflix Conductor project:
the updateWorkflow method of class ElasticSearchRestDAOV5.
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    // A partial update needs exactly one value per key.
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }

    long start = Instant.now().toEpochMilli();

    // Pair keys[i] with values[i]; Collectors.toMap rejects duplicate keys and null values.
    Map<String, Object> doc = IntStream.range(0, keys.length)
            .boxed()
            .collect(Collectors.toMap(i -> keys[i], i -> values[i]));

    UpdateRequest updateRequest = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
    updateRequest.doc(doc);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, doc);

    // Retry the ES call up to RETRY_COUNT times. The checked IOException is wrapped
    // unchecked so it can escape the supplier lambda and be seen by the retry helper.
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(updateRequest);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");

    long end = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", end - start, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, end - start);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Use of com.netflix.conductor.common.utils.RetryUtil in the Netflix Conductor project:
the updateWorkflow method of class ElasticSearchRestDAOV6.
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    // A partial update needs exactly one value per key.
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }
    long startTime = Instant.now().toEpochMilli();
    // Honor the configured doc-type override, falling back to the default workflow doc type.
    String docType = StringUtils.isBlank(docTypeOverride) ? WORKFLOW_DOC_TYPE : docTypeOverride;
    UpdateRequest request = new UpdateRequest(workflowIndexName, docType, workflowInstanceId);
    // Pair keys[i] with values[i]; Collectors.toMap rejects duplicate keys and null values.
    Map<String, Object> source = IntStream.range(0, keys.length).boxed().collect(Collectors.toMap(i -> keys[i], i -> values[i]));
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    // Retry the ES call up to RETRY_COUNT times; wrap the checked IOException so it
    // can escape the supplier lambda and be seen by the retry helper.
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(request);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");
    long endTime = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", endTime - startTime, workflowInstanceId);
    // FIX: record the metric against the doc type actually used for the update.
    // Previously this hard-coded WORKFLOW_DOC_TYPE, mislabeling the metric whenever
    // docTypeOverride was set (inconsistent with the UpdateRequest built above).
    Monitors.recordESIndexTime("update_workflow", docType, endTime - startTime);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Use of com.netflix.conductor.common.utils.RetryUtil in the Netflix Conductor project:
the updateWorkflow method of class ElasticSearchRestDAOV7.
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    // A partial update needs exactly one value per key.
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }

    long start = Instant.now().toEpochMilli();

    // Pair keys[i] with values[i]; Collectors.toMap rejects duplicate keys and null values.
    Map<String, Object> doc = IntStream.range(0, keys.length)
            .boxed()
            .collect(Collectors.toMap(i -> keys[i], i -> values[i]));

    UpdateRequest updateRequest = new UpdateRequest(workflowIndexName, workflowInstanceId);
    updateRequest.doc(doc);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, doc);

    // Retry the ES call up to RETRY_COUNT times. The checked IOException is wrapped
    // unchecked so it can escape the supplier lambda and be seen by the retry helper.
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(updateRequest, RequestOptions.DEFAULT);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }, null, null, RETRY_COUNT, "Updating workflow document: " + workflowInstanceId, "updateWorkflow");

    long end = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", end - start, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, end - start);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Use of com.netflix.conductor.common.utils.RetryUtil in the Netflix Conductor project:
the updateWorkflow method of class ElasticSearchDAOV5.
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    // A partial update needs exactly one value per key.
    if (keys.length != values.length) {
        throw new ApplicationException(Code.INVALID_INPUT, "Number of keys and values do not match");
    }

    long start = Instant.now().toEpochMilli();

    // Pair keys[i] with values[i]; Collectors.toMap rejects duplicate keys and null values.
    Map<String, Object> doc = IntStream.range(0, keys.length)
            .boxed()
            .collect(Collectors.toMap(i -> keys[i], i -> values[i]));

    UpdateRequest updateRequest = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
    updateRequest.doc(doc);
    logger.debug("Updating workflow {} in elasticsearch index: {}", workflowInstanceId, indexName);

    // Retry the transport-client call up to RETRY_COUNT times; actionGet() blocks
    // until the update response arrives (or throws unchecked on failure).
    new RetryUtil<>().retryOnException(() -> elasticSearchClient.update(updateRequest).actionGet(), null, null, RETRY_COUNT, "Updating index for doc_type workflow", "updateWorkflow");

    long end = Instant.now().toEpochMilli();
    logger.debug("Time taken {} for updating workflow: {}", end - start, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, end - start);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Use of com.netflix.conductor.common.utils.RetryUtil in the Netflix Conductor project:
the updateWorkflow method of class ElasticSearchDAOV6.
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    // A partial update needs exactly one value per key.
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Number of keys and values do not match");
    }

    long start = Instant.now().toEpochMilli();

    // Pair keys[i] with values[i]; Collectors.toMap rejects duplicate keys and null values.
    Map<String, Object> doc = IntStream.range(0, keys.length)
            .boxed()
            .collect(Collectors.toMap(i -> keys[i], i -> values[i]));

    UpdateRequest updateRequest = new UpdateRequest(workflowIndexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
    updateRequest.doc(doc);
    LOGGER.debug("Updating workflow {} in elasticsearch index: {}", workflowInstanceId, workflowIndexName);

    // Retry the transport-client call up to RETRY_COUNT times; actionGet() blocks
    // until the update response arrives (or throws unchecked on failure).
    new RetryUtil<>().retryOnException(() -> elasticSearchClient.update(updateRequest).actionGet(), null, null, RETRY_COUNT, "Updating index for doc_type workflow", "updateWorkflow");

    long end = Instant.now().toEpochMilli();
    LOGGER.debug("Time taken {} for updating workflow: {}", end - start, workflowInstanceId);
    Monitors.recordESIndexTime("update_workflow", WORKFLOW_DOC_TYPE, end - start);
    Monitors.recordWorkerQueueSize("indexQueue", ((ThreadPoolExecutor) executorService).getQueue().size());
}
Aggregations