
Example 1 with JobContext

Use of org.apache.helix.task.JobContext in project helix by apache.

From the class JobResource, method getHostedEntitiesRepresentation:

StringRepresentation getHostedEntitiesRepresentation(String clusterName, String jobQueueName, String jobName) throws Exception {
    ZkClient zkClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
    HelixDataAccessor accessor = ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName);
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    // Get job queue config
    String namespacedJobName = TaskUtil.getNamespacedJobName(jobQueueName, jobName);
    HelixProperty jobConfig = accessor.getProperty(keyBuilder.resourceConfig(namespacedJobName));
    TaskDriver taskDriver = new TaskDriver(zkClient, clusterName);
    // Get job context
    JobContext ctx = taskDriver.getJobContext(namespacedJobName);
    // Create the result
    ZNRecord hostedEntitiesRecord = new ZNRecord(namespacedJobName);
    if (jobConfig != null) {
        hostedEntitiesRecord.merge(jobConfig.getRecord());
    }
    if (ctx != null) {
        hostedEntitiesRecord.merge(ctx.getRecord());
    }
    StringRepresentation representation = new StringRepresentation(ClusterRepresentationUtil.ZNRecordToJson(hostedEntitiesRecord), MediaType.APPLICATION_JSON);
    return representation;
}
Also used: ZkClient (org.apache.helix.manager.zk.ZkClient), HelixDataAccessor (org.apache.helix.HelixDataAccessor), HelixProperty (org.apache.helix.HelixProperty), StringRepresentation (org.restlet.representation.StringRepresentation), TaskDriver (org.apache.helix.task.TaskDriver), JobContext (org.apache.helix.task.JobContext), PropertyKey (org.apache.helix.PropertyKey), ZNRecord (org.apache.helix.ZNRecord)
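
The method above merges the job's resource config and its JobContext into one ZNRecord before rendering it as JSON. As a rough standalone sketch of the same pattern outside the Restlet resource, the merged record could also be serialized with Helix's ZNRecordSerializer; the class and method names here are hypothetical, and ClusterRepresentationUtil.ZNRecordToJson remains the helper the resource itself uses.

import org.apache.helix.ZNRecord;
import org.apache.helix.manager.zk.ZNRecordSerializer;
import org.apache.helix.manager.zk.ZkClient;
import org.apache.helix.task.JobContext;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskUtil;

public class JobContextJsonSketch {

    // Returns the job's context as JSON, or null if no context has been written yet.
    static String jobContextAsJson(ZkClient zkClient, String clusterName, String jobQueueName, String jobName) {
        String namespacedJobName = TaskUtil.getNamespacedJobName(jobQueueName, jobName);
        TaskDriver taskDriver = new TaskDriver(zkClient, clusterName);
        JobContext ctx = taskDriver.getJobContext(namespacedJobName);
        if (ctx == null) {
            return null;
        }
        ZNRecord record = new ZNRecord(namespacedJobName);
        record.merge(ctx.getRecord());
        // ZNRecordSerializer is the serializer Helix uses for ZooKeeper data, so the output
        // mirrors the JSON layout stored under the job's context node.
        return new String(new ZNRecordSerializer().serialize(record));
    }
}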

Example 2 with JobContext

Use of org.apache.helix.task.JobContext in project helix by apache.

From the class JobAccessor, method getJobContext:

@GET
@Path("{jobName}/context")
public Response getJobContext(@PathParam("clusterId") String clusterId, @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName) {
    TaskDriver driver = getTaskDriver(clusterId);
    JobContext jobContext = driver.getJobContext(jobName);
    if (jobContext != null) {
        return JSONRepresentation(jobContext.getRecord());
    }
    return badRequest("Job context for " + jobName + " does not exists");
}
Also used: TaskDriver (org.apache.helix.task.TaskDriver), JobContext (org.apache.helix.task.JobContext), Path (javax.ws.rs.Path), GET (javax.ws.rs.GET)
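
For completeness, a minimal JAX-RS client sketch that consumes this endpoint. The base URL and the enclosing path segments (clusters/{clusterId}/workflows/{workflowName}/jobs) are assumptions about how the surrounding resource classes are mounted; only the {jobName}/context suffix comes from the annotations above.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

public class JobContextRestClientSketch {

    public static void main(String[] args) {
        // Assumed base URL for a locally running Helix REST service.
        String baseUrl = "http://localhost:8100/admin/v2";
        Client client = ClientBuilder.newClient();
        try {
            // Path template variables mirror the @PathParam names in getJobContext above.
            String json = client.target(baseUrl)
                .path("clusters/{clusterId}/workflows/{workflowName}/jobs/{jobName}/context")
                .resolveTemplate("clusterId", "MyCluster")
                .resolveTemplate("workflowName", "MyWorkflow")
                .resolveTemplate("jobName", "MyJob")
                .request(MediaType.APPLICATION_JSON)
                .get(String.class);
            System.out.println(json);
        } finally {
            client.close();
        }
    }
}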

Example 3 with JobContext

Use of org.apache.helix.task.JobContext in project helix by apache.

From the class TestTaskRebalancer, method basic:

private void basic(long jobCompletionTime) throws Exception {
    // We use a different resource name in each test method as a workaround for a Helix participant
    // bug where it does not clear locally cached state when a resource partition is dropped. Once
    // that is fixed, we should change these tests to use the same resource name and implement a
    // beforeMethod that deletes the task resource.
    final String jobResource = "basic" + jobCompletionTime;
    Map<String, String> commandConfig = ImmutableMap.of(TIMEOUT_CONFIG, String.valueOf(jobCompletionTime));
    JobConfig.Builder jobBuilder = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG);
    jobBuilder.setJobCommandConfigMap(commandConfig);
    Workflow flow = WorkflowGenerator.generateSingleJobWorkflowBuilder(jobResource, jobBuilder).build();
    _driver.start(flow);
    // Wait for job completion
    _driver.pollForWorkflowState(jobResource, TaskState.COMPLETED);
    // Ensure all partitions are completed individually
    JobContext ctx = _driver.getJobContext(TaskUtil.getNamespacedJobName(jobResource));
    for (int i = 0; i < _numParitions; i++) {
        Assert.assertEquals(ctx.getPartitionState(i), TaskPartitionState.COMPLETED);
        Assert.assertEquals(ctx.getPartitionNumAttempts(i), 1);
    }
}
Also used: Workflow (org.apache.helix.task.Workflow), JobContext (org.apache.helix.task.JobContext), JobConfig (org.apache.helix.task.JobConfig)
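
The same pattern from this test can be lifted into a small helper that waits for a single-job workflow to finish and then reads per-partition results from its JobContext. This is only a sketch built from the calls shown above; the helper name and the caller-supplied partition count are assumptions.

import org.apache.helix.task.JobContext;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskPartitionState;
import org.apache.helix.task.TaskState;
import org.apache.helix.task.TaskUtil;

public class JobContextInspectionSketch {

    // Blocks until the single-job workflow completes, then prints each partition's outcome.
    static void printPartitionResults(TaskDriver driver, String jobResource, int numPartitions)
            throws InterruptedException {
        driver.pollForWorkflowState(jobResource, TaskState.COMPLETED);
        JobContext ctx = driver.getJobContext(TaskUtil.getNamespacedJobName(jobResource));
        for (int p = 0; p < numPartitions; p++) {
            TaskPartitionState state = ctx.getPartitionState(p);
            int attempts = ctx.getPartitionNumAttempts(p);
            String participant = ctx.getAssignedParticipant(p);
            System.out.println("partition " + p + ": state=" + state
                + ", attempts=" + attempts + ", assignedTo=" + participant);
        }
    }
}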

Example 4 with JobContext

Use of org.apache.helix.task.JobContext in project helix by apache.

From the class TestTaskRebalancer, method testNamedQueue:

@Test
public void testNamedQueue() throws Exception {
    String queueName = TestHelper.getTestMethodName();
    // Create a queue
    JobQueue queue = new JobQueue.Builder(queueName).build();
    _driver.createQueue(queue);
    // Enqueue jobs
    Set<String> master = Sets.newHashSet("MASTER");
    Set<String> slave = Sets.newHashSet("SLAVE");
    JobConfig.Builder job1 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB).setTargetPartitionStates(master);
    JobConfig.Builder job2 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB).setTargetPartitionStates(slave);
    _driver.enqueueJob(queueName, "masterJob", job1);
    _driver.enqueueJob(queueName, "slaveJob", job2);
    // Ensure successful completion
    String namespacedJob1 = queueName + "_masterJob";
    String namespacedJob2 = queueName + "_slaveJob";
    _driver.pollForJobState(queueName, namespacedJob1, TaskState.COMPLETED);
    _driver.pollForJobState(queueName, namespacedJob2, TaskState.COMPLETED);
    JobContext masterJobContext = _driver.getJobContext(namespacedJob1);
    JobContext slaveJobContext = _driver.getJobContext(namespacedJob2);
    // Ensure correct ordering
    long job1Finish = masterJobContext.getFinishTime();
    long job2Start = slaveJobContext.getStartTime();
    Assert.assertTrue(job2Start >= job1Finish);
    // Flush queue and check cleanup
    _driver.flushQueue(queueName);
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob2)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob2)));
    WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
    JobDag dag = workflowCfg.getJobDag();
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob1));
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob2));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob2));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob2));
}
Also used: JobQueue (org.apache.helix.task.JobQueue), JobConfig (org.apache.helix.task.JobConfig), WorkflowConfig (org.apache.helix.task.WorkflowConfig), HelixDataAccessor (org.apache.helix.HelixDataAccessor), JobContext (org.apache.helix.task.JobContext), JobDag (org.apache.helix.task.JobDag), PropertyKey (org.apache.helix.PropertyKey), Test (org.testng.annotations.Test)

Example 5 with JobContext

Use of org.apache.helix.task.JobContext in project helix by apache.

From the class TestTaskRebalancerFailover, method test:

@Test
public void test() throws Exception {
    String queueName = TestHelper.getTestMethodName();
    // Create a queue
    LOG.info("Starting job-queue: " + queueName);
    JobQueue queue = new JobQueue.Builder(queueName).build();
    _driver.createQueue(queue);
    // Enqueue jobs
    Set<String> master = Sets.newHashSet("MASTER");
    JobConfig.Builder job = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB).setTargetPartitionStates(master);
    String job1Name = "masterJob";
    LOG.info("Enqueuing job: " + job1Name);
    _driver.enqueueJob(queueName, job1Name, job);
    // check all tasks completed on MASTER
    String namespacedJob1 = String.format("%s_%s", queueName, job1Name);
    _driver.pollForJobState(queueName, namespacedJob1, TaskState.COMPLETED);
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    ExternalView ev = accessor.getProperty(keyBuilder.externalView(WorkflowGenerator.DEFAULT_TGT_DB));
    JobContext ctx = _driver.getJobContext(namespacedJob1);
    Set<String> failOverPartitions = Sets.newHashSet();
    for (int p = 0; p < _numParitions; p++) {
        String instanceName = ctx.getAssignedParticipant(p);
        Assert.assertNotNull(instanceName);
        String partitionName = ctx.getTargetForPartition(p);
        Assert.assertNotNull(partitionName);
        String state = ev.getStateMap(partitionName).get(instanceName);
        Assert.assertNotNull(state);
        Assert.assertEquals(state, "MASTER");
        if (instanceName.equals("localhost_12918")) {
            failOverPartitions.add(partitionName);
        }
    }
    // enqueue another master job and fail localhost_12918
    String job2Name = "masterJob2";
    String namespacedJob2 = String.format("%s_%s", queueName, job2Name);
    LOG.info("Enqueuing job: " + job2Name);
    _driver.enqueueJob(queueName, job2Name, job);
    _driver.pollForJobState(queueName, namespacedJob2, TaskState.IN_PROGRESS);
    _participants[0].syncStop();
    _driver.pollForJobState(queueName, namespacedJob2, TaskState.COMPLETED);
    // tasks previously assigned to localhost_12918 should be re-scheduled on new master
    ctx = _driver.getJobContext(namespacedJob2);
    ev = accessor.getProperty(keyBuilder.externalView(WorkflowGenerator.DEFAULT_TGT_DB));
    for (int p = 0; p < _numParitions; p++) {
        String partitionName = ctx.getTargetForPartition(p);
        Assert.assertNotNull(partitionName);
        if (failOverPartitions.contains(partitionName)) {
            String instanceName = ctx.getAssignedParticipant(p);
            Assert.assertNotNull(instanceName);
            Assert.assertNotEquals(instanceName, "localhost_12918");
            String state = ev.getStateMap(partitionName).get(instanceName);
            Assert.assertNotNull(state);
            Assert.assertEquals(state, "MASTER");
        }
    }
    // Flush queue and check cleanup
    _driver.flushQueue(queueName);
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob2)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob2)));
    WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
    JobDag dag = workflowCfg.getJobDag();
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob1));
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob2));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob2));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob2));
}
Also used: ExternalView (org.apache.helix.model.ExternalView), JobQueue (org.apache.helix.task.JobQueue), JobConfig (org.apache.helix.task.JobConfig), WorkflowConfig (org.apache.helix.task.WorkflowConfig), HelixDataAccessor (org.apache.helix.HelixDataAccessor), JobContext (org.apache.helix.task.JobContext), JobDag (org.apache.helix.task.JobDag), PropertyKey (org.apache.helix.PropertyKey), Test (org.testng.annotations.Test)

Aggregations

JobContext (org.apache.helix.task.JobContext): 35
JobConfig (org.apache.helix.task.JobConfig): 28
Test (org.testng.annotations.Test): 25
Workflow (org.apache.helix.task.Workflow): 18
WorkflowConfig (org.apache.helix.task.WorkflowConfig): 11
WorkflowContext (org.apache.helix.task.WorkflowContext): 9
TaskPartitionState (org.apache.helix.task.TaskPartitionState): 8
JobQueue (org.apache.helix.task.JobQueue): 7
ArrayList (java.util.ArrayList): 6
HashSet (java.util.HashSet): 4
TaskConfig (org.apache.helix.task.TaskConfig): 4
HelixDataAccessor (org.apache.helix.HelixDataAccessor): 3
PropertyKey (org.apache.helix.PropertyKey): 3
ZNRecord (org.apache.helix.ZNRecord): 3
TaskDriver (org.apache.helix.task.TaskDriver): 3
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
GET (javax.ws.rs.GET): 2
Path (javax.ws.rs.Path): 2
HelixException (org.apache.helix.HelixException): 2