Use of org.apache.helix.task.JobDag in project helix by apache: class TestTaskRebalancer, method testNamedQueue.
@Test
public void testNamedQueue() throws Exception {
  String queueName = TestHelper.getTestMethodName();
  // Create a queue
  JobQueue queue = new JobQueue.Builder(queueName).build();
  _driver.createQueue(queue);
  // Enqueue jobs
  Set<String> master = Sets.newHashSet("MASTER");
  Set<String> slave = Sets.newHashSet("SLAVE");
  JobConfig.Builder job1 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB).setTargetPartitionStates(master);
  JobConfig.Builder job2 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB).setTargetPartitionStates(slave);
  _driver.enqueueJob(queueName, "masterJob", job1);
  _driver.enqueueJob(queueName, "slaveJob", job2);
  // Ensure successful completion
  String namespacedJob1 = queueName + "_masterJob";
  String namespacedJob2 = queueName + "_slaveJob";
  _driver.pollForJobState(queueName, namespacedJob1, TaskState.COMPLETED);
  _driver.pollForJobState(queueName, namespacedJob2, TaskState.COMPLETED);
  JobContext masterJobContext = _driver.getJobContext(namespacedJob1);
  JobContext slaveJobContext = _driver.getJobContext(namespacedJob2);
  // Ensure correct ordering
  long job1Finish = masterJobContext.getFinishTime();
  long job2Start = slaveJobContext.getStartTime();
  Assert.assertTrue(job2Start >= job1Finish);
  // Flush queue and check cleanup
  _driver.flushQueue(queueName);
  HelixDataAccessor accessor = _manager.getHelixDataAccessor();
  PropertyKey.Builder keyBuilder = accessor.keyBuilder();
  Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob1)));
  Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob1)));
  Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob2)));
  Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob2)));
  WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
  JobDag dag = workflowCfg.getJobDag();
  Assert.assertFalse(dag.getAllNodes().contains(namespacedJob1));
  Assert.assertFalse(dag.getAllNodes().contains(namespacedJob2));
  Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob1));
  Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob2));
  Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob1));
  Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob2));
}
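
The cleanup assertions above exercise JobDag's three views of a queue: the node set and the two adjacency maps. A minimal sketch of that bookkeeping, assuming JobDag's addNode/addParentToChild mutators alongside the accessors used in the test (the job names are illustrative):

JobDag dag = new JobDag();
dag.addNode("q_masterJob");
// chain the second job behind the first, as enqueueJob does for a queue
dag.addParentToChild("q_masterJob", "q_slaveJob");
// dag.getAllNodes()          -> [q_masterJob, q_slaveJob]
// dag.getParentsToChildren() -> {q_masterJob=[q_slaveJob]}
// dag.getChildrenToParents() -> {q_slaveJob=[q_masterJob]}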
Use of org.apache.helix.task.JobDag in project helix by apache: class TestTaskRebalancerFailover, method test.
@Test
public void test() throws Exception {
  String queueName = TestHelper.getTestMethodName();
  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue queue = new JobQueue.Builder(queueName).build();
  _driver.createQueue(queue);
  // Enqueue a job targeting MASTER partitions
  Set<String> master = Sets.newHashSet("MASTER");
  JobConfig.Builder job = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB).setTargetPartitionStates(master);
  String job1Name = "masterJob";
  LOG.info("Enqueuing job: " + job1Name);
  _driver.enqueueJob(queueName, job1Name, job);
  // Check that all tasks completed on a MASTER replica
  String namespacedJob1 = String.format("%s_%s", queueName, job1Name);
  _driver.pollForJobState(queueName, namespacedJob1, TaskState.COMPLETED);
  HelixDataAccessor accessor = _manager.getHelixDataAccessor();
  PropertyKey.Builder keyBuilder = accessor.keyBuilder();
  ExternalView ev = accessor.getProperty(keyBuilder.externalView(WorkflowGenerator.DEFAULT_TGT_DB));
  JobContext ctx = _driver.getJobContext(namespacedJob1);
  Set<String> failOverPartitions = Sets.newHashSet();
  for (int p = 0; p < _numParitions; p++) {
    String instanceName = ctx.getAssignedParticipant(p);
    Assert.assertNotNull(instanceName);
    String partitionName = ctx.getTargetForPartition(p);
    Assert.assertNotNull(partitionName);
    String state = ev.getStateMap(partitionName).get(instanceName);
    Assert.assertNotNull(state);
    Assert.assertEquals(state, "MASTER");
    if (instanceName.equals("localhost_12918")) {
      failOverPartitions.add(partitionName);
    }
  }
  // Enqueue another master job, then fail localhost_12918 while it is running
  String job2Name = "masterJob2";
  String namespacedJob2 = String.format("%s_%s", queueName, job2Name);
  LOG.info("Enqueuing job: " + job2Name);
  _driver.enqueueJob(queueName, job2Name, job);
  _driver.pollForJobState(queueName, namespacedJob2, TaskState.IN_PROGRESS);
  _participants[0].syncStop();
  _driver.pollForJobState(queueName, namespacedJob2, TaskState.COMPLETED);
  // tasks previously assigned to localhost_12918 should be re-scheduled on the new master
  ctx = _driver.getJobContext(namespacedJob2);
  ev = accessor.getProperty(keyBuilder.externalView(WorkflowGenerator.DEFAULT_TGT_DB));
  for (int p = 0; p < _numParitions; p++) {
    String partitionName = ctx.getTargetForPartition(p);
    Assert.assertNotNull(partitionName);
    if (failOverPartitions.contains(partitionName)) {
      String instanceName = ctx.getAssignedParticipant(p);
      Assert.assertNotNull(instanceName);
      // the task must have moved off the failed instance; compare values, not references
      Assert.assertNotEquals(instanceName, "localhost_12918");
      String state = ev.getStateMap(partitionName).get(instanceName);
      Assert.assertNotNull(state);
      Assert.assertEquals(state, "MASTER");
    }
  }
  // Flush queue and check cleanup
  _driver.flushQueue(queueName);
  Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob1)));
  Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob1)));
  Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob2)));
  Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob2)));
  WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
  JobDag dag = workflowCfg.getJobDag();
  Assert.assertFalse(dag.getAllNodes().contains(namespacedJob1));
  Assert.assertFalse(dag.getAllNodes().contains(namespacedJob2));
  Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob1));
  Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob2));
  Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob1));
  Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob2));
}
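
Both tests derive the namespaced job name by joining the queue name and the job name with an underscore. Helix exposes the same convention through a helper; a one-line sketch, assuming TaskUtil.getNamespacedJobName from org.apache.helix.task (the names are illustrative):

String namespacedJob = TaskUtil.getNamespacedJobName("myQueue", "masterJob");
// namespacedJob -> "myQueue_masterJob", matching String.format("%s_%s", ...) above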
Use of org.apache.helix.task.JobDag in project helix by apache: class TestTaskRebalancerStopResume, method verifyJobNotInQueue.
private void verifyJobNotInQueue(String queueName, String namespacedJobName) {
  WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
  JobDag dag = workflowCfg.getJobDag();
  // a flushed job must be absent from the node set and from both adjacency maps
  Assert.assertFalse(dag.getAllNodes().contains(namespacedJobName));
  Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJobName));
  Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJobName));
}
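
A hypothetical call site for this helper, mirroring the post-flush checks in the tests above (the queue and job names are illustrative):

_driver.flushQueue(queueName);
verifyJobNotInQueue(queueName, queueName + "_masterJob");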
Use of org.apache.helix.task.JobDag in project helix by apache: class WorkflowAccessor, method getWorkflow.
@GET
@Path("{workflowId}")
public Response getWorkflow(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId) {
  TaskDriver taskDriver = getTaskDriver(clusterId);
  WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId);
  WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId);
  ObjectNode root = JsonNodeFactory.instance.objectNode();
  TextNode id = JsonNodeFactory.instance.textNode(workflowId);
  root.put(Properties.id.name(), id);
  ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode();
  ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode();
  if (workflowConfig != null) {
    getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord());
  }
  if (workflowContext != null) {
    getWorkflowContextNode(workflowContextNode, workflowContext.getRecord());
  }
  root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode);
  root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode);
  // the DAG fields are only available when the workflow config exists;
  // dereferencing workflowConfig unconditionally would NPE for an unknown workflowId
  if (workflowConfig != null) {
    JobDag jobDag = workflowConfig.getJobDag();
    ArrayNode jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes());
    ObjectNode parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getChildrenToParents());
    root.put(WorkflowProperties.Jobs.name(), jobs);
    root.put(WorkflowProperties.ParentJobs.name(), parentJobs);
  }
  return JSONRepresentation(root);
}
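
For a known workflowId, the JSON assembled above takes the following shape; the field names follow the Properties and WorkflowProperties enums used in the handler, while the values here are illustrative:

{
  "id": "myQueue",
  "WorkflowConfig": { ... },
  "WorkflowContext": { ... },
  "Jobs": ["myQueue_masterJob", "myQueue_slaveJob"],
  "ParentJobs": { "myQueue_slaveJob": ["myQueue_masterJob"] }
}

Jobs lists every node in the workflow's JobDag, and ParentJobs is the child-to-parents map, so a job with no upstream dependency never appears as a ParentJobs key.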