Example usage of org.apache.helix.task.WorkflowContext in the Apache Helix project: class TestRecurringJobQueue, method testDeletingRecurrentQueueWithHistory.
/**
 * Verifies that deleting a recurrent job queue also cleans up its history: the queue's own
 * context, the last scheduled child workflow, and the config/context of every previously
 * scheduled child workflow must all be removed from ZooKeeper.
 * <p>
 * NOTE(review): this test polls with 60-second sleeps and can legitimately run for several
 * minutes while waiting for at least two recurrences to be scheduled.
 */
@Test
public void testDeletingRecurrentQueueWithHistory() throws Exception {
final String queueName = TestHelper.getTestMethodName();
// Create a recurrent queue in STOPPED target state so no workflow is scheduled yet.
LOG.info("Starting job-queue: " + queueName);
JobQueue.Builder queueBuild = TaskTestUtil.buildRecurrentJobQueue(queueName, 0, 60, TargetState.STOP);
createAndEnqueueJob(queueBuild, 2);
_driver.createQueue(queueBuild.build());
// Sanity-check that the queue was created in the STOP target state before resuming it.
WorkflowConfig workflowConfig = _driver.getWorkflowConfig(queueName);
Assert.assertEquals(workflowConfig.getTargetState(), TargetState.STOP);
_driver.resume(queueName);
WorkflowContext wCtx;
// wait until at least 2 workflows are scheduled based on template queue
// (the recurrence interval is 60s, hence the 60-second sleep between polls)
do {
Thread.sleep(60000);
wCtx = TaskTestUtil.pollForWorkflowContext(_driver, queueName);
} while (wCtx.getScheduledWorkflows().size() < 2);
// Stop recurring workflow so the set of scheduled children stops changing.
_driver.stop(queueName);
_driver.pollForWorkflowState(queueName, TaskState.STOPPED);
// Record all scheduled workflows so their cleanup can be verified after deletion.
wCtx = TaskTestUtil.pollForWorkflowContext(_driver, queueName);
List<String> scheduledWorkflows = new ArrayList<String>(wCtx.getScheduledWorkflows());
final String lastScheduledWorkflow = wCtx.getLastScheduledSingleWorkflow();
// Delete recurrent workflow
_driver.delete(queueName);
// Wait until recurrent workflow and the last scheduled workflow are cleaned up
// (deletion is asynchronous, so poll with a 5-second timeout).
boolean result = TestHelper.verify(new TestHelper.Verifier() {
@Override
public boolean verify() throws Exception {
WorkflowContext wCtx = _driver.getWorkflowContext(queueName);
WorkflowContext lastWfCtx = _driver.getWorkflowContext(lastScheduledWorkflow);
return (wCtx == null && lastWfCtx == null);
}
}, 5 * 1000);
Assert.assertTrue(result);
// Every previously scheduled child workflow must have both its context and config removed.
for (String scheduledWorkflow : scheduledWorkflows) {
WorkflowContext scheduledWorkflowCtx = _driver.getWorkflowContext(scheduledWorkflow);
WorkflowConfig scheduledWorkflowCfg = _driver.getWorkflowConfig(scheduledWorkflow);
Assert.assertNull(scheduledWorkflowCtx);
Assert.assertNull(scheduledWorkflowCfg);
}
}
Example usage of org.apache.helix.task.WorkflowContext in the Apache Helix project: class TestWorkflowJobDependency, method testWorkflowWithOutDependencies.
/**
 * Verifies that all jobs of a workflow built without inter-job dependencies run concurrently:
 * after completion, the latest job start time must not exceed the earliest job finish time,
 * i.e. the execution windows of all jobs share a non-empty overlap.
 */
@Test
public void testWorkflowWithOutDependencies() throws InterruptedException {
    String workflowName = TestHelper.getTestMethodName();
    // Workflow setup: one job per test DB, no dependency edges between them.
    LOG.info("Start setup for workflow: " + workflowName);
    Workflow.Builder builder = new Workflow.Builder(workflowName);
    for (int dbIndex = 0; dbIndex < _numDbs; dbIndex++) {
        String targetDb = _testDbs.get(dbIndex);
        // Let each job delay for 2 secs.
        JobConfig.Builder jobBuilder = new JobConfig.Builder()
            .setCommand(MockTask.TASK_COMMAND)
            .setTargetResource(targetDb)
            .setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
            .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
        builder.addJob("job" + targetDb, jobBuilder);
    }
    // Start workflow and wait until it completes.
    Workflow workflow = builder.build();
    _driver.start(workflow);
    _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
    WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
    // Narrow [latestStart, earliestFinish] across all jobs, seeded with the workflow's own window.
    long latestStart = workflowContext.getStartTime();
    long earliestFinish = workflowContext.getFinishTime();
    for (String jobName : workflow.getJobConfigs().keySet()) {
        JobContext context = _driver.getJobContext(jobName);
        LOG.info(String.format("JOB: %s starts from %s finishes at %s.", jobName, context.getStartTime(), context.getFinishTime()));
        // Find job start time range.
        latestStart = Math.max(context.getStartTime(), latestStart);
        earliestFinish = Math.min(context.getFinishTime(), earliestFinish);
    }
    // All jobs have a valid overlap time range.
    Assert.assertTrue(latestStart <= earliestFinish);
}
Example usage of org.apache.helix.task.WorkflowContext in the Apache Helix project: class WorkflowAccessor, method getWorkflow.
/**
 * REST endpoint returning a JSON representation of a workflow: its id, config, context,
 * job DAG nodes, and child-to-parent job edges.
 * <p>
 * Missing config/context nodes are tolerated and rendered as empty JSON objects; the Jobs
 * and ParentJobs sections are only emitted when the workflow config (which owns the DAG)
 * exists.
 *
 * @param clusterId cluster the workflow belongs to (from the URL path)
 * @param workflowId workflow to look up (from the URL path)
 * @return JSON response describing the workflow
 */
@GET
@Path("{workflowId}")
public Response getWorkflow(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) {
    TaskDriver taskDriver = getTaskDriver(clusterId);
    WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId);
    WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId);
    ObjectNode root = JsonNodeFactory.instance.objectNode();
    TextNode id = JsonNodeFactory.instance.textNode(workflowId);
    root.put(Properties.id.name(), id);
    ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode();
    ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode();
    if (workflowConfig != null) {
        getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord());
    }
    if (workflowContext != null) {
        getWorkflowContextNode(workflowContextNode, workflowContext.getRecord());
    }
    root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode);
    root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode);
    // BUG FIX: the original dereferenced workflowConfig unconditionally here, throwing an
    // NPE (-> HTTP 500) whenever the config node was absent even though the null-check
    // above shows that case is expected. Guard the DAG section the same way.
    if (workflowConfig != null) {
        JobDag jobDag = workflowConfig.getJobDag();
        ArrayNode jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes());
        ObjectNode parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getChildrenToParents());
        root.put(WorkflowProperties.Jobs.name(), jobs);
        root.put(WorkflowProperties.ParentJobs.name(), parentJobs);
    }
    return JSONRepresentation(root);
}
Example usage of org.apache.helix.task.WorkflowContext in the Apache Helix project: class AbstractTestClass, method createWorkflows.
/**
 * Creates {@code numWorkflows} test workflows (3 jobs each) in the given cluster, writes an
 * IN_PROGRESS WorkflowContext for each directly into the property store, and registers the
 * workflow config as a resource config.
 *
 * @param cluster cluster name to create the workflows in
 * @param numWorkflows number of workflows to create
 * @return map from workflow name to the built Workflow
 */
protected Map<String, Workflow> createWorkflows(String cluster, int numWorkflows) {
    Map<String, Workflow> result = new HashMap<>();
    for (int i = 0; i < numWorkflows; i++) {
        String workflowName = WORKFLOW_PREFIX + i;
        Workflow.Builder builder = new Workflow.Builder(workflowName);
        int jobIndex = 0;
        for (JobConfig.Builder jobBuilder : createJobs(cluster, workflowName, 3)) {
            builder.addJob(JOB_PREFIX + jobIndex, jobBuilder);
            jobIndex++;
        }
        result.put(workflowName, builder.build());
        // Seed a context so reads against the property store see an in-progress workflow.
        WorkflowContext workflowContext = TaskTestUtil.buildWorkflowContext(workflowName,
            TaskState.IN_PROGRESS, System.currentTimeMillis(),
            TaskState.COMPLETED, TaskState.COMPLETED, TaskState.IN_PROGRESS);
        String contextPath = String.format("/%s/%s%s/%s/%s", cluster,
            PropertyType.PROPERTYSTORE.name(), TaskConstants.REBALANCER_CONTEXT_ROOT,
            workflowName, TaskConstants.CONTEXT_NODE);
        _baseAccessor.set(contextPath, workflowContext.getRecord(), AccessOption.PERSISTENT);
        _configAccessor.setResourceConfig(cluster, workflowName, builder.getWorkflowConfig());
    }
    return result;
}
Example usage of org.apache.helix.task.WorkflowContext in the Apache Helix project: class TestGetWorkflowContext, method testGetWorkflowContext.
/**
 * Verifies that PropertyKey.Builder produces the correct ZooKeeper path for WorkflowContext
 * instances: a context written by hand to the property store must be readable back through
 * the key-builder path and compare equal.
 * TODO: KeyBuilder must handle the case for future versions of Task Framework with a different path structure
 */
@Test
public void testGetWorkflowContext() {
    // Write a WorkflowContext to the property store at the well-known rebalancer path.
    WorkflowContext written = new WorkflowContext(new ZNRecord(WORKFLOW_NAME));
    String storePath = Joiner.on("/").join(TaskConstants.REBALANCER_CONTEXT_ROOT, WORKFLOW_NAME, CONTEXT_NODE);
    _manager.getHelixPropertyStore().set(storePath, written.getRecord(), AccessOption.PERSISTENT);
    // Read it back via the path that PropertyKey.Builder computes for this workflow.
    String keyPath = new PropertyKey.Builder(CLUSTER_NAME).workflowContext(WORKFLOW_NAME).getPath();
    WorkflowContext readBack = new WorkflowContext(_baseAccessor.get(keyPath, null, AccessOption.PERSISTENT));
    Assert.assertEquals(written, readBack);
}
End of aggregated usage examples.