Search in sources :

Example 31 with JobContext

use of org.apache.helix.task.JobContext in project helix by apache.

The following example is from the class TestZkHelixAdmin, method testEnableDisablePartitions.

@Test
public void testEnableDisablePartitions() throws InterruptedException {
    HelixAdmin admin = new ZKHelixAdmin(_gZkClient);
    // Disable two of the target partitions on the first participant so no task
    // can be scheduled against them.
    admin.enablePartition(false, CLUSTER_NAME, (PARTICIPANT_PREFIX + "_" + _startPort),
        WorkflowGenerator.DEFAULT_TGT_DB, Arrays.asList("TestDB_0", "TestDB_2"));
    // Pin every partition's preference list and switch to SEMI_AUTO so the
    // replica placement is deterministic for the assertions below.
    IdealState idealState = admin.getResourceIdealState(CLUSTER_NAME, WorkflowGenerator.DEFAULT_TGT_DB);
    List<String> preferenceList = Arrays.asList("localhost_12919", "localhost_12918");
    for (String partitionName : idealState.getPartitionSet()) {
        idealState.setPreferenceList(partitionName, preferenceList);
    }
    idealState.setRebalanceMode(IdealState.RebalanceMode.SEMI_AUTO);
    admin.setResourceIdealState(CLUSTER_NAME, WorkflowGenerator.DEFAULT_TGT_DB, idealState);
    // Build a one-job workflow whose tasks target the SLAVE replicas of the DB.
    String workflowName = TestHelper.getTestMethodName();
    Workflow.Builder builder = new Workflow.Builder(workflowName);
    JobConfig.Builder jobBuilder = new JobConfig.Builder()
        .setWorkflow(workflowName)
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Collections.singleton("SLAVE"));
    builder.addJob("JOB", jobBuilder);
    _driver.start(builder.build());
    // NOTE(review): a fixed sleep makes this test timing-sensitive; polling the
    // job state via the driver would be more robust — confirm before tightening.
    Thread.sleep(2000L);
    JobContext jobContext = _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, "JOB"));
    // Tasks for the disabled partitions (TestDB_0 / TestDB_2, pIds 0 and 2)
    // were never scheduled, so no state was recorded; partition 1 completed.
    Assert.assertNull(jobContext.getPartitionState(0));
    Assert.assertEquals(jobContext.getPartitionState(1), TaskPartitionState.COMPLETED);
    Assert.assertNull(jobContext.getPartitionState(2));
}
Also used : ZKHelixAdmin(org.apache.helix.manager.zk.ZKHelixAdmin) Workflow(org.apache.helix.task.Workflow) JobContext(org.apache.helix.task.JobContext) HelixAdmin(org.apache.helix.HelixAdmin) ZKHelixAdmin(org.apache.helix.manager.zk.ZKHelixAdmin) IdealState(org.apache.helix.model.IdealState) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)

Example 32 with JobContext

use of org.apache.helix.task.JobContext in project helix by apache.

The following example is from the class TestJobTimeout, method testNoSlaveToRunTask.

@Test
public void testNoSlaveToRunTask() throws InterruptedException {
    // The first job can't be assigned to any instance and gets stuck until it
    // times out; the second job then runs and the workflow still succeeds.
    final String FIRST_JOB = "first_job";
    final String SECOND_JOB = "second_job";
    final String WORKFLOW_NAME = TestHelper.getTestMethodName();
    final String DB_NAME = WorkflowGenerator.DEFAULT_TGT_DB;
    JobConfig.Builder firstJobBuilder = new JobConfig.Builder()
        .setWorkflow(WORKFLOW_NAME)
        .setTargetResource(DB_NAME)
        .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.SLAVE.name()))
        .setCommand(MockTask.TASK_COMMAND)
        .setTimeout(1000);
    JobConfig.Builder secondJobBuilder = new JobConfig.Builder()
        .setWorkflow(WORKFLOW_NAME)
        .setTargetResource(DB_NAME)
        .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
        .setCommand(MockTask.TASK_COMMAND)
        // ignore first job's timeout
        .setIgnoreDependentJobFailure(true);
    // workflow ignores first job's timeout and schedules second job and succeeds.
    WorkflowConfig.Builder workflowConfigBuilder =
        new WorkflowConfig.Builder(WORKFLOW_NAME).setFailureThreshold(1);
    Workflow.Builder workflowBuilder = new Workflow.Builder(WORKFLOW_NAME)
        .setWorkflowConfig(workflowConfigBuilder.build())
        .addJob(FIRST_JOB, firstJobBuilder)
        .addJob(SECOND_JOB, secondJobBuilder)
        .addParentChildDependency(FIRST_JOB, SECOND_JOB);
    _driver.start(workflowBuilder.build());
    _driver.pollForJobState(WORKFLOW_NAME, TaskUtil.getNamespacedJobName(WORKFLOW_NAME, FIRST_JOB), TaskState.TIMED_OUT);
    _driver.pollForJobState(WORKFLOW_NAME, TaskUtil.getNamespacedJobName(WORKFLOW_NAME, SECOND_JOB), TaskState.COMPLETED);
    _driver.pollForWorkflowState(WORKFLOW_NAME, TaskState.COMPLETED);
    JobContext jobContext = _driver.getJobContext(TaskUtil.getNamespacedJobName(WORKFLOW_NAME, FIRST_JOB));
    for (int pId : jobContext.getPartitionSet()) {
        // No task assigned for first job, so no partition state was recorded.
        Assert.assertNull(jobContext.getPartitionState(pId));
    }
}
Also used : WorkflowConfig(org.apache.helix.task.WorkflowConfig) Workflow(org.apache.helix.task.Workflow) JobContext(org.apache.helix.task.JobContext) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)

Example 33 with JobContext

use of org.apache.helix.task.JobContext in project helix by apache.

The following example is from the class TestWorkflowJobDependency, method testWorkflowWithOutDependencies.

@Test
public void testWorkflowWithOutDependencies() throws InterruptedException {
    String workflowName = TestHelper.getTestMethodName();
    // Build a workflow containing one independent job per test DB.
    LOG.info("Start setup for workflow: " + workflowName);
    Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName);
    for (int dbIdx = 0; dbIdx < _numDbs; dbIdx++) {
        String targetDb = _testDbs.get(dbIdx);
        // Let each job delay for 2 secs.
        JobConfig.Builder jobCfg = new JobConfig.Builder()
            .setCommand(MockTask.TASK_COMMAND)
            .setTargetResource(targetDb)
            .setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
            .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
        workflowBuilder.addJob("job" + targetDb, jobCfg);
    }
    // Launch the workflow and block until it finishes.
    Workflow workflow = workflowBuilder.build();
    _driver.start(workflow);
    _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
    WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
    // Narrow [startTime, finishTime] to the intersection of all job runtimes:
    // the latest start and the earliest finish across the workflow's jobs.
    long startTime = workflowContext.getStartTime();
    long finishTime = workflowContext.getFinishTime();
    for (String jobName : workflow.getJobConfigs().keySet()) {
        JobContext context = _driver.getJobContext(jobName);
        LOG.info(String.format("JOB: %s starts from %s finishes at %s.", jobName, context.getStartTime(), context.getFinishTime()));
        if (context.getStartTime() > startTime) {
            startTime = context.getStartTime();
        }
        if (context.getFinishTime() < finishTime) {
            finishTime = context.getFinishTime();
        }
    }
    // Since the jobs ran concurrently, their runtimes must overlap: the
    // intersection interval is non-empty.
    Assert.assertTrue(startTime <= finishTime);
}
Also used : WorkflowContext(org.apache.helix.task.WorkflowContext) Workflow(org.apache.helix.task.Workflow) JobContext(org.apache.helix.task.JobContext) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)

Example 34 with JobContext

use of org.apache.helix.task.JobContext in project helix by apache.

The following example is from the class AbstractTestClass, method createJobs.

protected Set<JobConfig.Builder> createJobs(String cluster, String workflowName, int numJobs) {
    // Creates numJobs dummy job configs under the given workflow, persisting a
    // COMPLETED JobContext and a resource config for each so read paths have
    // data to return.
    Set<JobConfig.Builder> jobBuilders = new HashSet<>();
    for (int i = 0; i < numJobs; i++) {
        String namespacedJobName = workflowName + "_" + JOB_PREFIX + i;
        JobConfig.Builder jobBuilder = new JobConfig.Builder()
            .setCommand("DummyCommand")
            .setTargetResource("RESOURCE")
            .setWorkflow(workflowName);
        jobBuilders.add(jobBuilder);
        // Fabricate a context that started "now" and finished 1 ms later.
        JobContext jobContext = TaskTestUtil.buildJobContext(System.currentTimeMillis(),
            System.currentTimeMillis() + 1, TaskPartitionState.COMPLETED);
        _baseAccessor.set(
            String.format("/%s/%s%s/%s/%s", cluster, PropertyType.PROPERTYSTORE.name(),
                TaskConstants.REBALANCER_CONTEXT_ROOT, namespacedJobName, TaskConstants.CONTEXT_NODE),
            jobContext.getRecord(), AccessOption.PERSISTENT);
        _configAccessor.setResourceConfig(cluster, namespacedJobName, jobBuilder.build());
    }
    return jobBuilders;
}
Also used : JobContext(org.apache.helix.task.JobContext) JobConfig(org.apache.helix.task.JobConfig) HashSet(java.util.HashSet)

Example 35 with JobContext

use of org.apache.helix.task.JobContext in project helix by apache.

The following example is from the class JobAccessor, method getJob.

@GET
@Path("{jobName}")
public Response getJob(@PathParam("clusterId") String clusterId, @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName) {
    // Returns the job's config record and, when available, its runtime context
    // as a JSON map; 400 when the job config does not exist.
    TaskDriver driver = getTaskDriver(clusterId);
    Map<String, ZNRecord> jobMap = new HashMap<>();
    JobConfig jobConfig = driver.getJobConfig(jobName);
    if (jobConfig == null) {
        // A job without a config is unknown to the cluster.
        return badRequest(String.format("Job config for %s does not exist", jobName));
    }
    jobMap.put(JobProperties.JobConfig.name(), jobConfig.getRecord());
    // Always expose the JobContext key — mapped to null when the job has not
    // produced a context yet — so the response shape is stable for clients.
    JobContext jobContext = driver.getJobContext(jobName);
    jobMap.put(JobProperties.JobContext.name(), jobContext == null ? null : jobContext.getRecord());
    return JSONRepresentation(jobMap);
}
Also used : HashMap(java.util.HashMap) TaskDriver(org.apache.helix.task.TaskDriver) JobContext(org.apache.helix.task.JobContext) ZNRecord(org.apache.helix.ZNRecord) JobConfig(org.apache.helix.task.JobConfig) Path(javax.ws.rs.Path) GET(javax.ws.rs.GET)

Aggregations

JobContext (org.apache.helix.task.JobContext)35 JobConfig (org.apache.helix.task.JobConfig)28 Test (org.testng.annotations.Test)25 Workflow (org.apache.helix.task.Workflow)18 WorkflowConfig (org.apache.helix.task.WorkflowConfig)11 WorkflowContext (org.apache.helix.task.WorkflowContext)9 TaskPartitionState (org.apache.helix.task.TaskPartitionState)8 JobQueue (org.apache.helix.task.JobQueue)7 ArrayList (java.util.ArrayList)6 HashSet (java.util.HashSet)4 TaskConfig (org.apache.helix.task.TaskConfig)4 HelixDataAccessor (org.apache.helix.HelixDataAccessor)3 PropertyKey (org.apache.helix.PropertyKey)3 ZNRecord (org.apache.helix.ZNRecord)3 TaskDriver (org.apache.helix.task.TaskDriver)3 HashMap (java.util.HashMap)2 Map (java.util.Map)2 GET (javax.ws.rs.GET)2 Path (javax.ws.rs.Path)2 HelixException (org.apache.helix.HelixException)2