Use of org.apache.helix.task.Workflow in project helix by apache.
The class AbstractTestClass, method createWorkflows:
protected Map<String, Workflow> createWorkflows(String cluster, int numWorkflows) {
  Map<String, Workflow> workflows = new HashMap<>();
  for (int i = 0; i < numWorkflows; i++) {
    // Build a workflow containing three jobs.
    Workflow.Builder workflow = new Workflow.Builder(WORKFLOW_PREFIX + i);
    int j = 0;
    for (JobConfig.Builder job : createJobs(cluster, WORKFLOW_PREFIX + i, 3)) {
      workflow.addJob(JOB_PREFIX + j++, job);
    }
    workflows.put(WORKFLOW_PREFIX + i, workflow.build());

    // Seed a workflow context directly into the property store so the workflow
    // appears IN_PROGRESS with some completed jobs.
    WorkflowContext workflowContext = TaskTestUtil.buildWorkflowContext(WORKFLOW_PREFIX + i,
        TaskState.IN_PROGRESS, System.currentTimeMillis(), TaskState.COMPLETED,
        TaskState.COMPLETED, TaskState.IN_PROGRESS);
    _baseAccessor.set(String.format("/%s/%s%s/%s/%s", cluster, PropertyType.PROPERTYSTORE.name(),
        TaskConstants.REBALANCER_CONTEXT_ROOT, WORKFLOW_PREFIX + i, TaskConstants.CONTEXT_NODE),
        workflowContext.getRecord(), AccessOption.PERSISTENT);

    // Persist the workflow configuration as a resource config.
    _configAccessor.setResourceConfig(cluster, WORKFLOW_PREFIX + i, workflow.getWorkflowConfig());
  }
  return workflows;
}
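The createJobs helper referenced above is not shown in this snippet. A minimal, hypothetical sketch of such a helper follows; it only illustrates the standard JobConfig.Builder calls (setCommand, setTargetResource, setWorkflow). The command name "DummyCommand" and target resource "TestResource" are placeholders, not values taken from the Helix source.

// Hypothetical helper; requires java.util.HashSet, java.util.Set and org.apache.helix.task.JobConfig.
protected Set<JobConfig.Builder> createJobs(String cluster, String workflowName, int numJobs) {
  Set<JobConfig.Builder> jobs = new HashSet<>();
  for (int i = 0; i < numJobs; i++) {
    JobConfig.Builder job = new JobConfig.Builder()
        .setCommand("DummyCommand")         // placeholder task command
        .setTargetResource("TestResource")  // placeholder target resource
        .setWorkflow(workflowName);         // associate the job with its workflow
    jobs.add(job);
  }
  return jobs;
}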
Use of org.apache.helix.task.Workflow in project helix by apache.
The class WorkflowsResource, method post:
@Override
public Representation post(Representation entity) {
  try {
    String clusterName = (String) getRequest().getAttributes().get("clusterName");
    Form form = new Form(entity);
    // Get the workflow and submit it
    if (form.size() < 1) {
      throw new HelixException("yaml workflow is required!");
    }
    // The YAML workflow definition arrives as the name of the first form parameter.
    Parameter payload = form.get(0);
    String yamlPayload = payload.getName();
    if (yamlPayload == null) {
      throw new HelixException("yaml workflow is required!");
    }
    String zkAddr =
        (String) getContext().getAttributes().get(RestAdminApplication.ZKSERVERADDRESS);
    HelixManager manager = HelixManagerFactory.getZKHelixManager(clusterName, null,
        InstanceType.ADMINISTRATOR, zkAddr);
    manager.connect();
    try {
      // Parse the YAML into a Workflow and submit it through the TaskDriver.
      Workflow workflow = Workflow.parse(yamlPayload);
      TaskDriver driver = new TaskDriver(manager);
      driver.start(workflow);
    } finally {
      manager.disconnect();
    }
    getResponse().setEntity(getHostedEntitiesRepresentation(clusterName));
    getResponse().setStatus(Status.SUCCESS_OK);
  } catch (Exception e) {
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    getResponse().setStatus(Status.SUCCESS_OK);
    LOG.error("Error in posting " + entity, e);
  }
  return null;
}
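Because the handler reads the YAML from the name of the first form parameter, a client has to send the whole YAML document URL-encoded as the parameter key. The sketch below illustrates this with a plain HttpURLConnection; the port, the endpoint path "/clusters/{clusterName}/workflows", and the YAML payload are assumptions for illustration only and may differ from how the resource is actually routed in your deployment.

// Hypothetical client for the resource above. URL, port, and YAML content are placeholders.
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class WorkflowPostClient {
  public static void main(String[] args) throws Exception {
    // Placeholder YAML; see Workflow.parse for the schema actually accepted.
    String yaml = "name: ExampleWorkflow\n";
    // Send the YAML as the form parameter *name* (empty value), matching form.get(0).getName().
    String body = URLEncoder.encode(yaml, StandardCharsets.UTF_8.name()) + "=";

    URL url = new URL("http://localhost:8100/clusters/MyCluster/workflows");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("HTTP " + conn.getResponseCode());
  }
}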