Example 1 with WorkflowConfig

Use of org.apache.helix.task.WorkflowConfig in project incubator-gobblin by apache.

The class GobblinHelixJobLauncher defines the method submitJobToHelix.

/**
 * Submit a job to run.
 */
private void submitJobToHelix(JobConfig.Builder jobConfigBuilder) throws Exception {
    WorkflowConfig workflowConfig = this.helixTaskDriver.getWorkflowConfig(this.helixManager, this.helixQueueName);
    // If the queue is present, but in delete state then wait for cleanup before recreating the queue
    if (workflowConfig != null && workflowConfig.getTargetState() == TargetState.DELETE) {
        GobblinHelixTaskDriver gobblinHelixTaskDriver = new GobblinHelixTaskDriver(this.helixManager);
        gobblinHelixTaskDriver.deleteWorkflow(this.helixQueueName, this.jobQueueDeleteTimeoutSeconds);
        // if we get here then the workflow was successfully deleted
        workflowConfig = null;
    }
    // Create one queue for each job with the job name being the queue name
    if (workflowConfig == null) {
        JobQueue jobQueue = new JobQueue.Builder(this.helixQueueName).build();
        this.helixTaskDriver.createQueue(jobQueue);
        LOGGER.info("Created job queue {}", this.helixQueueName);
    } else {
        LOGGER.info("Job queue {} already exists", this.helixQueueName);
    }
    // Put the job into the queue
    this.helixTaskDriver.enqueueJob(this.jobContext.getJobName(), this.jobContext.getJobId(), jobConfigBuilder);
}
Also used : WorkflowConfig(org.apache.helix.task.WorkflowConfig) JobQueue(org.apache.helix.task.JobQueue)
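
For reference, the same check-and-recreate pattern can be sketched against the stock Helix TaskDriver. This is a minimal sketch, not the Gobblin implementation: GobblinHelixTaskDriver is a Gobblin-specific wrapper, so plain TaskDriver.delete is substituted here (the original waits for cleanup to finish), and the class name, queue name, and the assumption of an already-connected HelixManager are placeholders.

import org.apache.helix.HelixManager;
import org.apache.helix.task.JobConfig;
import org.apache.helix.task.JobQueue;
import org.apache.helix.task.TargetState;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.WorkflowConfig;

public class QueueSubmitSketch {

    /**
     * Recreate the queue if it is stuck in DELETE state, then enqueue the job.
     */
    public static void submit(HelixManager helixManager, String queueName, String jobName,
        JobConfig.Builder jobConfigBuilder) throws Exception {
        TaskDriver driver = new TaskDriver(helixManager);
        WorkflowConfig workflowConfig = driver.getWorkflowConfig(queueName);
        if (workflowConfig != null && workflowConfig.getTargetState() == TargetState.DELETE) {
            // Request removal of the half-deleted queue; the Gobblin code waits for cleanup here
            driver.delete(queueName);
            workflowConfig = null;
        }
        if (workflowConfig == null) {
            // Create one queue per job, with the job name as the queue name
            driver.createQueue(new JobQueue.Builder(queueName).build());
        }
        driver.enqueueJob(queueName, jobName, jobConfigBuilder);
    }
}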

Example 2 with WorkflowConfig

Use of org.apache.helix.task.WorkflowConfig in project helix by apache.

The class WorkflowAccessor defines the method getWorkflows.

@GET
public Response getWorkflows(@PathParam("clusterId") String clusterId) {
    TaskDriver taskDriver = getTaskDriver(clusterId);
    Map<String, WorkflowConfig> workflowConfigMap = taskDriver.getWorkflows();
    Map<String, List<String>> dataMap = new HashMap<>();
    dataMap.put(WorkflowProperties.Workflows.name(), new ArrayList<>(workflowConfigMap.keySet()));
    return JSONRepresentation(dataMap);
}
Also used : WorkflowConfig(org.apache.helix.task.WorkflowConfig) HashMap(java.util.HashMap) TaskDriver(org.apache.helix.task.TaskDriver) ArrayList(java.util.ArrayList) List(java.util.List) GET(javax.ws.rs.GET)
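
Outside the REST layer, the same listing can be obtained directly from a TaskDriver. A minimal sketch, assuming an already-connected HelixManager; the class name is a placeholder.

import java.util.Map;

import org.apache.helix.HelixManager;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.WorkflowConfig;

public class ListWorkflowsSketch {

    /** Print every workflow known to the cluster and whether it is a job queue. */
    public static void listWorkflows(HelixManager helixManager) {
        TaskDriver driver = new TaskDriver(helixManager);
        Map<String, WorkflowConfig> workflows = driver.getWorkflows();
        for (Map.Entry<String, WorkflowConfig> entry : workflows.entrySet()) {
            System.out.println(entry.getKey() + " isJobQueue=" + entry.getValue().isJobQueue());
        }
    }
}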

Example 3 with WorkflowConfig

Use of org.apache.helix.task.WorkflowConfig in project helix by apache.

The class TestWorkflowAccessor defines the method testCreateWorkflow.

@Test(dependsOnMethods = "testGetWorkflowContext")
public void testCreateWorkflow() throws IOException {
    System.out.println("Start test :" + TestHelper.getTestMethodName());
    TaskDriver driver = getTaskDriver(CLUSTER_NAME);
    // Create one time workflow
    Entity entity = Entity.entity(WORKFLOW_INPUT, MediaType.APPLICATION_JSON_TYPE);
    put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_WORKFLOW_NAME, null, entity, Response.Status.OK.getStatusCode());
    WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_WORKFLOW_NAME);
    Assert.assertNotNull(workflowConfig);
    Assert.assertEquals(workflowConfig.getJobDag().getAllNodes().size(), 2);
    // Create JobQueue
    JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME)
        .setWorkflowConfig(driver.getWorkflowConfig(TEST_WORKFLOW_NAME));
    entity = Entity.entity(
        OBJECT_MAPPER.writeValueAsString(Collections.singletonMap(
            WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
            jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields())),
        MediaType.APPLICATION_JSON_TYPE);
    put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity, Response.Status.OK.getStatusCode());
    workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
    Assert.assertNotNull(workflowConfig);
    Assert.assertTrue(workflowConfig.isJobQueue());
    Assert.assertEquals(workflowConfig.getJobDag().getAllNodes().size(), 0);
}
Also used : Entity(javax.ws.rs.client.Entity) WorkflowConfig(org.apache.helix.task.WorkflowConfig) JobQueue(org.apache.helix.task.JobQueue) TaskDriver(org.apache.helix.task.TaskDriver) Test(org.testng.annotations.Test)
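
The JSON body in this test is just the simple fields of a WorkflowConfig; with direct access to a TaskDriver, an equivalent queue can be created through the Java API instead of the REST endpoint. A minimal sketch using only calls shown above (the class, workflow, and queue names are placeholders).

import org.apache.helix.task.JobQueue;
import org.apache.helix.task.TaskDriver;

public class CreateQueueSketch {

    /** Create a queue that reuses the WorkflowConfig of an existing workflow. */
    public static void createQueueFrom(TaskDriver driver, String existingWorkflow, String queueName)
        throws Exception {
        JobQueue queue = new JobQueue.Builder(queueName)
            .setWorkflowConfig(driver.getWorkflowConfig(existingWorkflow))
            .build();
        driver.createQueue(queue);
    }
}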

Example 4 with WorkflowConfig

Use of org.apache.helix.task.WorkflowConfig in project helix by apache.

The class TestTaskRebalancer defines the method testNamedQueue.

@Test
public void testNamedQueue() throws Exception {
    String queueName = TestHelper.getTestMethodName();
    // Create a queue
    JobQueue queue = new JobQueue.Builder(queueName).build();
    _driver.createQueue(queue);
    // Enqueue jobs
    Set<String> master = Sets.newHashSet("MASTER");
    Set<String> slave = Sets.newHashSet("SLAVE");
    JobConfig.Builder job1 = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(master);
    JobConfig.Builder job2 = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(slave);
    _driver.enqueueJob(queueName, "masterJob", job1);
    _driver.enqueueJob(queueName, "slaveJob", job2);
    // Ensure successful completion
    String namespacedJob1 = queueName + "_masterJob";
    String namespacedJob2 = queueName + "_slaveJob";
    _driver.pollForJobState(queueName, namespacedJob1, TaskState.COMPLETED);
    _driver.pollForJobState(queueName, namespacedJob2, TaskState.COMPLETED);
    JobContext masterJobContext = _driver.getJobContext(namespacedJob1);
    JobContext slaveJobContext = _driver.getJobContext(namespacedJob2);
    // Ensure correct ordering
    long job1Finish = masterJobContext.getFinishTime();
    long job2Start = slaveJobContext.getStartTime();
    Assert.assertTrue(job2Start >= job1Finish);
    // Flush queue and check cleanup
    _driver.flushQueue(queueName);
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob2)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob2)));
    WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
    JobDag dag = workflowCfg.getJobDag();
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob1));
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob2));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob2));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob2));
}
Also used : JobQueue(org.apache.helix.task.JobQueue) JobConfig(org.apache.helix.task.JobConfig) WorkflowConfig(org.apache.helix.task.WorkflowConfig) HelixDataAccessor(org.apache.helix.HelixDataAccessor) JobContext(org.apache.helix.task.JobContext) JobDag(org.apache.helix.task.JobDag) PropertyKey(org.apache.helix.PropertyKey) Test(org.testng.annotations.Test)
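
The lifecycle this test exercises, creating a queue, enqueuing jobs, waiting for completion, and flushing, reduces to a handful of TaskDriver calls. A minimal sketch, assuming a connected TaskDriver, participants that implement a task registered under the placeholder command "Reindex", and a target resource named "myDB" (all names are placeholders).

import java.util.Collections;

import org.apache.helix.task.JobConfig;
import org.apache.helix.task.JobQueue;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskState;

public class NamedQueueSketch {

    public static void runOnce(TaskDriver driver) throws Exception {
        // Create the queue and enqueue one job targeting MASTER replicas
        driver.createQueue(new JobQueue.Builder("demoQueue").build());
        JobConfig.Builder job = new JobConfig.Builder()
            .setCommand("Reindex")  // placeholder command registered on the participants
            .setTargetResource("myDB")
            .setTargetPartitionStates(Collections.singleton("MASTER"));
        driver.enqueueJob("demoQueue", "firstJob", job);
        // Enqueued jobs are namespaced as <queueName>_<jobName>
        driver.pollForJobState("demoQueue", "demoQueue_firstJob", TaskState.COMPLETED);
        // Remove completed jobs from the queue's DAG
        driver.flushQueue("demoQueue");
    }
}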

Example 5 with WorkflowConfig

Use of org.apache.helix.task.WorkflowConfig in project helix by apache.

The class TestTaskRebalancerFailover defines the method test.

@Test
public void test() throws Exception {
    String queueName = TestHelper.getTestMethodName();
    // Create a queue
    LOG.info("Starting job-queue: " + queueName);
    JobQueue queue = new JobQueue.Builder(queueName).build();
    _driver.createQueue(queue);
    // Enqueue jobs
    Set<String> master = Sets.newHashSet("MASTER");
    JobConfig.Builder job = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(master);
    String job1Name = "masterJob";
    LOG.info("Enqueuing job: " + job1Name);
    _driver.enqueueJob(queueName, job1Name, job);
    // check all tasks completed on MASTER
    String namespacedJob1 = String.format("%s_%s", queueName, job1Name);
    _driver.pollForJobState(queueName, namespacedJob1, TaskState.COMPLETED);
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    ExternalView ev = accessor.getProperty(keyBuilder.externalView(WorkflowGenerator.DEFAULT_TGT_DB));
    JobContext ctx = _driver.getJobContext(namespacedJob1);
    Set<String> failOverPartitions = Sets.newHashSet();
    for (int p = 0; p < _numParitions; p++) {
        String instanceName = ctx.getAssignedParticipant(p);
        Assert.assertNotNull(instanceName);
        String partitionName = ctx.getTargetForPartition(p);
        Assert.assertNotNull(partitionName);
        String state = ev.getStateMap(partitionName).get(instanceName);
        Assert.assertNotNull(state);
        Assert.assertEquals(state, "MASTER");
        if (instanceName.equals("localhost_12918")) {
            failOverPartitions.add(partitionName);
        }
    }
    // enqueue another master job and fail localhost_12918
    String job2Name = "masterJob2";
    String namespacedJob2 = String.format("%s_%s", queueName, job2Name);
    LOG.info("Enqueuing job: " + job2Name);
    _driver.enqueueJob(queueName, job2Name, job);
    _driver.pollForJobState(queueName, namespacedJob2, TaskState.IN_PROGRESS);
    _participants[0].syncStop();
    _driver.pollForJobState(queueName, namespacedJob2, TaskState.COMPLETED);
    // tasks previously assigned to localhost_12918 should be re-scheduled on new master
    ctx = _driver.getJobContext(namespacedJob2);
    ev = accessor.getProperty(keyBuilder.externalView(WorkflowGenerator.DEFAULT_TGT_DB));
    for (int p = 0; p < _numParitions; p++) {
        String partitionName = ctx.getTargetForPartition(p);
        Assert.assertNotNull(partitionName);
        if (failOverPartitions.contains(partitionName)) {
            String instanceName = ctx.getAssignedParticipant(p);
            Assert.assertNotNull(instanceName);
            Assert.assertNotSame(instanceName, "localhost_12918");
            String state = ev.getStateMap(partitionName).get(instanceName);
            Assert.assertNotNull(state);
            Assert.assertEquals(state, "MASTER");
        }
    }
    // Flush queue and check cleanup
    _driver.flushQueue(queueName);
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob1)));
    Assert.assertNull(accessor.getProperty(keyBuilder.idealStates(namespacedJob2)));
    Assert.assertNull(accessor.getProperty(keyBuilder.resourceConfig(namespacedJob2)));
    WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
    JobDag dag = workflowCfg.getJobDag();
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob1));
    Assert.assertFalse(dag.getAllNodes().contains(namespacedJob2));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getChildrenToParents().containsKey(namespacedJob2));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob1));
    Assert.assertFalse(dag.getParentsToChildren().containsKey(namespacedJob2));
}
Also used : ExternalView(org.apache.helix.model.ExternalView) JobQueue(org.apache.helix.task.JobQueue) JobConfig(org.apache.helix.task.JobConfig) WorkflowConfig(org.apache.helix.task.WorkflowConfig) HelixDataAccessor(org.apache.helix.HelixDataAccessor) JobContext(org.apache.helix.task.JobContext) JobDag(org.apache.helix.task.JobDag) PropertyKey(org.apache.helix.PropertyKey) Test(org.testng.annotations.Test)
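
The per-partition assertions in this test rely on JobContext, which records where each task ran and how it finished. A minimal sketch of reading that placement for an arbitrary job; the class name is a placeholder, and driver is assumed to be a connected TaskDriver with the namespaced job name passed in by the caller.

import org.apache.helix.task.JobContext;
import org.apache.helix.task.TaskDriver;

public class JobPlacementSketch {

    /** Print the target partition, assigned participant, and final state of each task in a job. */
    public static void printPlacement(TaskDriver driver, String namespacedJobName) {
        JobContext ctx = driver.getJobContext(namespacedJobName);
        for (int p : ctx.getPartitionSet()) {
            System.out.println(ctx.getTargetForPartition(p)
                + " ran on " + ctx.getAssignedParticipant(p)
                + " and finished in state " + ctx.getPartitionState(p));
        }
    }
}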

Aggregations

WorkflowConfig (org.apache.helix.task.WorkflowConfig) 28
Test (org.testng.annotations.Test) 14
JobQueue (org.apache.helix.task.JobQueue) 13
WorkflowContext (org.apache.helix.task.WorkflowContext) 12
JobConfig (org.apache.helix.task.JobConfig) 11
TaskDriver (org.apache.helix.task.TaskDriver) 11
JobContext (org.apache.helix.task.JobContext) 7
ArrayList (java.util.ArrayList) 6
Map (java.util.Map) 4
GET (javax.ws.rs.GET) 4
Path (javax.ws.rs.Path) 4
JobDag (org.apache.helix.task.JobDag) 4
IOException (java.io.IOException) 3
HashMap (java.util.HashMap) 3
HelixDataAccessor (org.apache.helix.HelixDataAccessor) 3
HelixException (org.apache.helix.HelixException) 3
PropertyKey (org.apache.helix.PropertyKey) 3
ObjectNode (org.codehaus.jackson.node.ObjectNode) 3
Calendar (java.util.Calendar) 2
HashSet (java.util.HashSet) 2