
Example 26 with Workflow

use of org.apache.helix.task.Workflow in project helix by apache.

the class TestJobTimeout method testNoSlaveToRunTask.

@Test
public void testNoSlaveToRunTask() throws InterruptedException {
    // The first job cannot be assigned to any instance, so it gets stuck and times out;
    // the second job then runs and the workflow succeeds.
    final String FIRST_JOB = "first_job";
    final String SECOND_JOB = "second_job";
    final String WORKFLOW_NAME = TestHelper.getTestMethodName();
    final String DB_NAME = WorkflowGenerator.DEFAULT_TGT_DB;
    JobConfig.Builder firstJobBuilder = new JobConfig.Builder()
        .setWorkflow(WORKFLOW_NAME)
        .setTargetResource(DB_NAME)
        .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.SLAVE.name()))
        .setCommand(MockTask.TASK_COMMAND)
        .setTimeout(1000);
    JobConfig.Builder secondJobBuilder = new JobConfig.Builder()
        .setWorkflow(WORKFLOW_NAME)
        .setTargetResource(DB_NAME)
        .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
        .setCommand(MockTask.TASK_COMMAND)
        // ignore the first job's timeout so this job can still be scheduled
        .setIgnoreDependentJobFailure(true);
    WorkflowConfig.Builder workflowConfigBuilder = new WorkflowConfig.Builder(WORKFLOW_NAME)
        // tolerate the first job's failure so the workflow can still succeed
        .setFailureThreshold(1);
    Workflow.Builder workflowBuilder = new Workflow.Builder(WORKFLOW_NAME)
        .setWorkflowConfig(workflowConfigBuilder.build())
        .addJob(FIRST_JOB, firstJobBuilder)
        .addJob(SECOND_JOB, secondJobBuilder)
        .addParentChildDependency(FIRST_JOB, SECOND_JOB);
    _driver.start(workflowBuilder.build());
    _driver.pollForJobState(WORKFLOW_NAME, TaskUtil.getNamespacedJobName(WORKFLOW_NAME, FIRST_JOB), TaskState.TIMED_OUT);
    _driver.pollForJobState(WORKFLOW_NAME, TaskUtil.getNamespacedJobName(WORKFLOW_NAME, SECOND_JOB), TaskState.COMPLETED);
    _driver.pollForWorkflowState(WORKFLOW_NAME, TaskState.COMPLETED);
    JobContext jobContext = _driver.getJobContext(TaskUtil.getNamespacedJobName(WORKFLOW_NAME, FIRST_JOB));
    for (int pId : jobContext.getPartitionSet()) {
        // No task was ever assigned for the first job, so every partition state is null.
        Assert.assertNull(jobContext.getPartitionState(pId));
    }
}
Also used : WorkflowConfig(org.apache.helix.task.WorkflowConfig) Workflow(org.apache.helix.task.Workflow) JobContext(org.apache.helix.task.JobContext) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)
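
The poll-then-inspect pattern above can be reused outside a test fixture. A minimal sketch, assuming an initialized TaskDriver, an already-started workflow, and that pollForJobState accepts several acceptable target states (it is varargs in recent Helix versions); the workflow and job names are supplied by the caller:

import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskState;
import org.apache.helix.task.TaskUtil;

public class JobTimeoutCheck {
    // Block until the job reaches a terminal state, then report whether it timed out.
    static boolean timedOut(TaskDriver driver, String workflow, String job)
            throws InterruptedException {
        // Jobs are stored under a workflow-namespaced name (typically "<workflow>_<job>").
        String namespacedJob = TaskUtil.getNamespacedJobName(workflow, job);
        driver.pollForJobState(workflow, namespacedJob, TaskState.COMPLETED, TaskState.TIMED_OUT);
        return driver.getWorkflowContext(workflow).getJobState(namespacedJob) == TaskState.TIMED_OUT;
    }
}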

Example 27 with Workflow

use of org.apache.helix.task.Workflow in project helix by apache.

the class TestRebalanceRunningTask method testFixedTargetTaskAndDisabledRebalanceAndNodeAdded.

/**
 * Task type: fixed target
 * Rebalance running task: disabled
 * Story: new node added
 */
@Test
public void testFixedTargetTaskAndDisabledRebalanceAndNodeAdded() throws InterruptedException {
    WORKFLOW = TestHelper.getTestMethodName();
    JobConfig.Builder jobBuilder = new JobConfig.Builder()
        .setWorkflow(WORKFLOW)
        .setTargetResource(DATABASE)
        .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
        .setNumConcurrentTasksPerInstance(100)
        .setFailureThreshold(2)
        .setMaxAttemptsPerTask(2)
        .setCommand(MockTask.TASK_COMMAND)
        // a huge timeout keeps each task stuck for the duration of the test
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.TIMEOUT_CONFIG, "99999999"));
    Workflow.Builder workflowBuilder = new Workflow.Builder(WORKFLOW).addJob(JOB, jobBuilder);
    _driver.start(workflowBuilder.build());
    // All tasks stuck on the same instance
    Assert.assertTrue(checkTasksOnSameInstances());
    // Add a new instance, partition is rebalanced
    startParticipant(_initialNumNodes);
    HelixClusterVerifier clusterVerifier = new BestPossibleExternalViewVerifier.Builder(CLUSTER_NAME)
        .setZkClient(_gZkClient)
        .setResources(Sets.newHashSet(DATABASE))
        .build();
    Assert.assertTrue(clusterVerifier.verify(10 * 1000));
    // Running tasks are also rebalanced, even though RebalanceRunningTask is disabled
    Assert.assertTrue(checkTasksOnDifferentInstances());
}
Also used : HelixClusterVerifier(org.apache.helix.tools.ClusterVerifiers.HelixClusterVerifier) Workflow(org.apache.helix.task.Workflow) BestPossibleExternalViewVerifier(org.apache.helix.tools.ClusterVerifiers.BestPossibleExternalViewVerifier) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)
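
For contrast with the fixed-target job above: a job can instead enumerate its own tasks, which are then placed on available instances rather than following a target resource's partitions. A minimal sketch, assuming the two-argument TaskConfig constructor; the workflow name, job name, command, and config map are all hypothetical:

import java.util.Collections;
import org.apache.helix.task.JobConfig;
import org.apache.helix.task.TaskConfig;
import org.apache.helix.task.Workflow;

public class GenericJobSketch {
    // Build a one-job workflow whose job lists its tasks explicitly.
    static Workflow build() {
        TaskConfig task = new TaskConfig("Reindex", Collections.singletonMap("dataset", "users"));
        JobConfig.Builder job = new JobConfig.Builder()
                .setWorkflow("myWorkflow")
                .addTaskConfigs(Collections.singletonList(task));
        return new Workflow.Builder("myWorkflow").addJob("genericJob", job).build();
    }
}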

Example 28 with Workflow

use of org.apache.helix.task.Workflow in project helix by apache.

the class TestTaskAssignment method testTaskAssignment.

@Test
public void testTaskAssignment() throws InterruptedException {
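    // Disable the first participant so the targeted task has no instance it can be assigned to.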
    _setupTool.getClusterManagementTool().enableInstance(CLUSTER_NAME, PARTICIPANT_PREFIX + "_" + (_startPort + 0), false);
    String jobResource = TestHelper.getTestMethodName();
    JobConfig.Builder jobBuilder = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB);
    Workflow flow = WorkflowGenerator.generateSingleJobWorkflowBuilder(jobResource, jobBuilder).build();
    _driver.start(flow);
    // Give the controller a second; the task should not complete because it cannot be assigned.
    Thread.sleep(1000L);
    // The task was never assigned, so its partition state should still be null.
    Assert.assertNull(_driver.getJobContext(TaskUtil.getNamespacedJobName(jobResource)).getPartitionState(0));
}
Also used : Workflow(org.apache.helix.task.Workflow) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)
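
A natural follow-up, sketched below: once the instance is re-enabled, the stalled task should be assigned and complete. This assumes the job of a generated single-job workflow shares the workflow's name, as the single-argument TaskUtil.getNamespacedJobName call above implies; the other calls all appear in the examples on this page:

import org.apache.helix.HelixAdmin;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskState;
import org.apache.helix.task.TaskUtil;

public class ReenableInstance {
    // Re-enable the instance, then wait for the previously unassignable job to finish.
    static void reenableAndAwait(HelixAdmin admin, TaskDriver driver, String cluster,
            String instance, String jobResource) throws InterruptedException {
        admin.enableInstance(cluster, instance, true);
        driver.pollForJobState(jobResource,
                TaskUtil.getNamespacedJobName(jobResource), TaskState.COMPLETED);
    }
}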

Example 29 with Workflow

use of org.apache.helix.task.Workflow in project helix by apache.

the class TestWorkflowJobDependency method testWorkflowWithOutDependencies.

@Test
public void testWorkflowWithOutDependencies() throws InterruptedException {
    String workflowName = TestHelper.getTestMethodName();
    // Workflow setup
    LOG.info("Start setup for workflow: " + workflowName);
    Workflow.Builder builder = new Workflow.Builder(workflowName);
    for (int i = 0; i < _numDbs; i++) {
        // Let each job delay for 2 secs.
        JobConfig.Builder jobConfig = new JobConfig.Builder()
            .setCommand(MockTask.TASK_COMMAND)
            .setTargetResource(_testDbs.get(i))
            .setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
            .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
        String jobName = "job" + _testDbs.get(i);
        builder.addJob(jobName, jobConfig);
    }
    // Start workflow
    Workflow workflow = builder.build();
    _driver.start(workflow);
    // Wait until the workflow completes
    _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
    WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
    long startTime = workflowContext.getStartTime();
    long finishTime = workflowContext.getFinishTime();
    // Narrow [startTime, finishTime] to the window shared by all jobs.
    for (String jobName : workflow.getJobConfigs().keySet()) {
        JobContext context = _driver.getJobContext(jobName);
        LOG.info(String.format("JOB: %s starts from %s finishes at %s.", jobName, context.getStartTime(), context.getFinishTime()));
        // The latest job start and the earliest job finish bound the common overlap.
        startTime = Math.max(context.getStartTime(), startTime);
        finishTime = Math.min(context.getFinishTime(), finishTime);
    }
    // All jobs ran concurrently: the shared time window is non-empty.
    Assert.assertTrue(startTime <= finishTime);
}
Also used : WorkflowContext(org.apache.helix.task.WorkflowContext) Workflow(org.apache.helix.task.Workflow) JobContext(org.apache.helix.task.JobContext) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)
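
The overlap assertion holds because nothing serializes independent jobs: without dependencies, the controller may schedule them all at once. If that concurrency needs a bound, WorkflowConfig can cap it; a minimal sketch, assuming the setParallelJobs setter on WorkflowConfig.Builder (names are hypothetical):

import org.apache.helix.task.Workflow;
import org.apache.helix.task.WorkflowConfig;

public class CappedParallelism {
    // Allow at most two of the workflow's independent jobs to run at a time.
    static Workflow.Builder capped(String workflowName) {
        WorkflowConfig.Builder cfg = new WorkflowConfig.Builder(workflowName)
                .setParallelJobs(2);
        return new Workflow.Builder(workflowName).setWorkflowConfig(cfg.build());
    }
}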

Example 30 with Workflow

use of org.apache.helix.task.Workflow in project helix by apache.

the class WorkflowAccessor method createWorkflow.

@PUT
@Path("{workflowId}")
public Response createWorkflow(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId, String content) {
    TaskDriver driver = getTaskDriver(clusterId);
    Map<String, String> cfgMap;
    try {
        JsonNode root = OBJECT_MAPPER.readTree(content);
        cfgMap = OBJECT_MAPPER.readValue(
            root.get(WorkflowProperties.WorkflowConfig.name()).toString(),
            TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, String.class));
        WorkflowConfig workflowConfig = WorkflowConfig.Builder.fromMap(cfgMap).build();
        // A JobQueue accepts jobs after creation, so any jobs included in this request are ignored.
        if (workflowConfig.isJobQueue()) {
            driver.start(new JobQueue.Builder(workflowId).setWorkflowConfig(workflowConfig).build());
            return OK();
        }
        Workflow.Builder workflow = new Workflow.Builder(workflowId);
        if (root.get(WorkflowProperties.Jobs.name()) != null) {
            Map<String, JobConfig.Builder> jobConfigs = getJobConfigs((ArrayNode) root.get(WorkflowProperties.Jobs.name()));
            for (Map.Entry<String, JobConfig.Builder> job : jobConfigs.entrySet()) {
                workflow.addJob(job.getKey(), job.getValue());
            }
        }
        if (root.get(WorkflowProperties.ParentJobs.name()) != null) {
            Map<String, List<String>> parentJobs = OBJECT_MAPPER.readValue(
                root.get(WorkflowProperties.ParentJobs.name()).toString(),
                TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, List.class));
            for (Map.Entry<String, List<String>> entry : parentJobs.entrySet()) {
                String parentJob = entry.getKey();
                for (String childJob : entry.getValue()) {
                    workflow.addParentChildDependency(parentJob, childJob);
                }
            }
        }
        driver.start(workflow.build());
    } catch (IOException e) {
        return badRequest(String.format("Invalid input of Workflow %s for reason : %s", workflowId, e.getMessage()));
    } catch (HelixException e) {
        return badRequest(String.format("Failed to create workflow %s for reason : %s", workflowId, e.getMessage()));
    }
    return OK();
}
Also used : JobQueue(org.apache.helix.task.JobQueue) HashMap(java.util.HashMap) TaskDriver(org.apache.helix.task.TaskDriver) Workflow(org.apache.helix.task.Workflow) JsonNode(org.codehaus.jackson.JsonNode) IOException(java.io.IOException) WorkflowConfig(org.apache.helix.task.WorkflowConfig) HelixException(org.apache.helix.HelixException) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) Path(javax.ws.rs.Path) PUT(javax.ws.rs.PUT)
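
The endpoint parses a JSON body with a top-level WorkflowConfig map plus optional Jobs and ParentJobs sections. A hedged sketch of such a body as a Java string constant: the config keys and job fields shown are illustrative only, since the actual job schema is defined by getJobConfigs(), which is not shown here.

public class CreateWorkflowPayload {
    // Illustrative body for PUT /clusters/{clusterId}/workflows/{workflowId}.
    // "ParentJobs" maps each parent job to the list of its child jobs,
    // mirroring the parsing loop in createWorkflow above.
    static final String BODY = "{\n"
        + "  \"WorkflowConfig\": { \"WorkflowID\": \"myWorkflow\", \"Expiry\": \"43200000\" },\n"
        + "  \"Jobs\": [ { \"JobID\": \"first_job\", \"Command\": \"Reindex\" } ],\n"
        + "  \"ParentJobs\": { \"first_job\": [ \"second_job\" ] }\n"
        + "}";
}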

Aggregations

Workflow (org.apache.helix.task.Workflow): 32
JobConfig (org.apache.helix.task.JobConfig): 26
Test (org.testng.annotations.Test): 25
JobContext (org.apache.helix.task.JobContext): 13
TaskDriver (org.apache.helix.task.TaskDriver): 6
WorkflowConfig (org.apache.helix.task.WorkflowConfig): 6
WorkflowContext (org.apache.helix.task.WorkflowContext): 6
ArrayList (java.util.ArrayList): 5
TaskConfig (org.apache.helix.task.TaskConfig): 5
HashMap (java.util.HashMap): 4
HelixException (org.apache.helix.HelixException): 4
TaskPartitionState (org.apache.helix.task.TaskPartitionState): 4
IOException (java.io.IOException): 3
Map (java.util.Map): 3
ZkClient (org.apache.helix.manager.zk.ZkClient): 3
Form (org.restlet.data.Form): 3
JobQueue (org.apache.helix.task.JobQueue): 2
BestPossibleExternalViewVerifier (org.apache.helix.tools.ClusterVerifiers.BestPossibleExternalViewVerifier): 2
HelixClusterVerifier (org.apache.helix.tools.ClusterVerifiers.HelixClusterVerifier): 2
JsonGenerationException (org.codehaus.jackson.JsonGenerationException): 2