Search in sources :

Example 21 with WorkflowConfig

use of org.apache.helix.task.WorkflowConfig in project helix by apache.

From the class WorkflowAccessor, the method createWorkflow:

@PUT
@Path("{workflowId}")
public Response createWorkflow(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId, String content) {
    TaskDriver taskDriver = getTaskDriver(clusterId);
    try {
        JsonNode root = OBJECT_MAPPER.readTree(content);
        // Deserialize the WorkflowConfig section of the payload into a simple string map.
        Map<String, String> configMap = OBJECT_MAPPER.readValue(
            root.get(WorkflowProperties.WorkflowConfig.name()).toString(),
            TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, String.class));
        WorkflowConfig config = WorkflowConfig.Builder.fromMap(configMap).build();
        if (config.isJobQueue()) {
            // Since JobQueue can keep adding jobs, Helix create JobQueue will ignore the jobs
            taskDriver.start(new JobQueue.Builder(workflowId).setWorkflowConfig(config).build());
            return OK();
        }
        Workflow.Builder builder = new Workflow.Builder(workflowId);
        // Optional Jobs section: register each job with the workflow being built.
        JsonNode jobsNode = root.get(WorkflowProperties.Jobs.name());
        if (jobsNode != null) {
            for (Map.Entry<String, JobConfig.Builder> jobEntry : getJobConfigs((ArrayNode) jobsNode).entrySet()) {
                builder.addJob(jobEntry.getKey(), jobEntry.getValue());
            }
        }
        // Optional ParentJobs section: a map of parent job -> list of its child jobs.
        JsonNode parentJobsNode = root.get(WorkflowProperties.ParentJobs.name());
        if (parentJobsNode != null) {
            Map<String, List<String>> parentToChildren = OBJECT_MAPPER.readValue(
                parentJobsNode.toString(),
                TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, List.class));
            for (Map.Entry<String, List<String>> dependency : parentToChildren.entrySet()) {
                for (String childJob : dependency.getValue()) {
                    builder.addParentChildDependency(dependency.getKey(), childJob);
                }
            }
        }
        taskDriver.start(builder.build());
    } catch (IOException e) {
        return badRequest(String.format("Invalid input of Workflow %s for reason : %s", workflowId, e.getMessage()));
    } catch (HelixException e) {
        return badRequest(String.format("Failed to create workflow %s for reason : %s", workflowId, e.getMessage()));
    }
    return OK();
}
Also used : JobQueue(org.apache.helix.task.JobQueue) HashMap(java.util.HashMap) TaskDriver(org.apache.helix.task.TaskDriver) Workflow(org.apache.helix.task.Workflow) JsonNode(org.codehaus.jackson.JsonNode) IOException(java.io.IOException) WorkflowConfig(org.apache.helix.task.WorkflowConfig) HelixException(org.apache.helix.HelixException) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) Path(javax.ws.rs.Path) PUT(javax.ws.rs.PUT)

Example 22 with WorkflowConfig

use of org.apache.helix.task.WorkflowConfig in project helix by apache.

From the class WorkflowAccessor, the method updateWorkflowConfig:

@POST
@Path("{workflowId}/configs")
public Response updateWorkflowConfig(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId, String content) {
    ZNRecord record;
    TaskDriver driver = getTaskDriver(clusterId);
    try {
        record = toZNRecord(content);
        WorkflowConfig workflowConfig = driver.getWorkflowConfig(workflowId);
        if (workflowConfig == null) {
            // Nothing to update: the workflow was never created (or has been deleted).
            // Fixed grammar of the message ("does not exists" -> "does not exist").
            return badRequest(String.format("WorkflowConfig for workflow %s does not exist!", workflowId));
        }
        // Merge the posted fields into the existing record rather than replacing it wholesale.
        workflowConfig.getRecord().update(record);
        driver.updateWorkflow(workflowId, workflowConfig);
    } catch (HelixException e) {
        // Include the exception message for debuggability, consistent with createWorkflow's
        // "for reason : %s" error format.
        return badRequest(String.format("Failed to update WorkflowConfig for workflow %s for reason : %s", workflowId, e.getMessage()));
    } catch (Exception e) {
        // Broad catch is intentional at this REST boundary: any parse/validation failure
        // maps to a 400 rather than a 500.
        return badRequest(String.format("Invalid WorkflowConfig for workflow %s for reason : %s", workflowId, e.getMessage()));
    }
    return OK();
}
Also used : WorkflowConfig(org.apache.helix.task.WorkflowConfig) HelixException(org.apache.helix.HelixException) TaskDriver(org.apache.helix.task.TaskDriver) ZNRecord(org.apache.helix.ZNRecord) HelixException(org.apache.helix.HelixException) IOException(java.io.IOException) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST)

Example 23 with WorkflowConfig

use of org.apache.helix.task.WorkflowConfig in project helix by apache.

From the class WorkflowAccessor, the method getWorkflow:

@GET
@Path("{workflowId}")
public Response getWorkflow(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) {
    TaskDriver taskDriver = getTaskDriver(clusterId);
    WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId);
    WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId);
    ObjectNode root = JsonNodeFactory.instance.objectNode();
    TextNode id = JsonNodeFactory.instance.textNode(workflowId);
    root.put(Properties.id.name(), id);
    // Config/context nodes stay empty when the workflow is unknown or has no context yet.
    ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode();
    ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode();
    if (workflowConfig != null) {
        getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord());
    }
    if (workflowContext != null) {
        getWorkflowContextNode(workflowContextNode, workflowContext.getRecord());
    }
    root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode);
    root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode);
    // BUG FIX: the original dereferenced workflowConfig unconditionally here, throwing an
    // NPE for an unknown workflowId despite the null check above. Emit empty Jobs/ParentJobs
    // nodes instead so the response shape stays stable.
    ArrayNode jobs;
    JsonNode parentJobs;
    if (workflowConfig != null) {
        JobDag jobDag = workflowConfig.getJobDag();
        jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes());
        parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getChildrenToParents());
    } else {
        jobs = JsonNodeFactory.instance.arrayNode();
        parentJobs = JsonNodeFactory.instance.objectNode();
    }
    root.put(WorkflowProperties.Jobs.name(), jobs);
    root.put(WorkflowProperties.ParentJobs.name(), parentJobs);
    return JSONRepresentation(root);
}
Also used : WorkflowConfig(org.apache.helix.task.WorkflowConfig) ObjectNode(org.codehaus.jackson.node.ObjectNode) TaskDriver(org.apache.helix.task.TaskDriver) WorkflowContext(org.apache.helix.task.WorkflowContext) TextNode(org.codehaus.jackson.node.TextNode) ArrayNode(org.codehaus.jackson.node.ArrayNode) JobDag(org.apache.helix.task.JobDag) Path(javax.ws.rs.Path) GET(javax.ws.rs.GET)

Example 24 with WorkflowConfig

use of org.apache.helix.task.WorkflowConfig in project helix by apache.

From the class WorkflowAccessor, the method getWorkflowConfig:

@GET
@Path("{workflowId}/configs")
public Response getWorkflowConfig(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) {
    // Look up the workflow's config; an unknown workflow yields an empty JSON object.
    WorkflowConfig config = getTaskDriver(clusterId).getWorkflowConfig(workflowId);
    ObjectNode configNode = JsonNodeFactory.instance.objectNode();
    if (config != null) {
        getWorkflowConfigNode(configNode, config.getRecord());
    }
    return JSONRepresentation(configNode);
}
Also used : WorkflowConfig(org.apache.helix.task.WorkflowConfig) ObjectNode(org.codehaus.jackson.node.ObjectNode) TaskDriver(org.apache.helix.task.TaskDriver) Path(javax.ws.rs.Path) GET(javax.ws.rs.GET)

Example 25 with WorkflowConfig

use of org.apache.helix.task.WorkflowConfig in project helix by apache.

From the class TestJobAccessor, the method testCreateJob:

@Test(dependsOnMethods = "testGetJobContext")
public void testCreateJob() throws IOException {
    System.out.println("Start test :" + TestHelper.getTestMethodName());
    TaskDriver taskDriver = getTaskDriver(CLUSTER_NAME);
    // Build a JobQueue config from the existing workflow's config and create it via REST.
    JobQueue.Builder queueBuilder =
        new JobQueue.Builder(TEST_QUEUE_NAME).setWorkflowConfig(taskDriver.getWorkflowConfig(WORKFLOW_NAME));
    String payload = OBJECT_MAPPER.writeValueAsString(
        Collections.singletonMap(
            WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
            queueBuilder.build().getWorkflowConfig().getRecord().getSimpleFields()));
    put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null,
        Entity.entity(payload, MediaType.APPLICATION_JSON_TYPE), Response.Status.OK.getStatusCode());
    // Enqueue a job onto the freshly created queue via REST.
    put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME, null,
        Entity.entity(JOB_INPUT, MediaType.APPLICATION_JSON_TYPE), Response.Status.OK.getStatusCode());
    // Verify the job config landed and the queue's DAG now contains the namespaced job name.
    String namespacedJob = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
    Assert.assertNotNull(taskDriver.getJobConfig(namespacedJob));
    Assert.assertTrue(taskDriver.getWorkflowConfig(TEST_QUEUE_NAME).getJobDag().getAllNodes().contains(namespacedJob));
}
Also used : Entity(javax.ws.rs.client.Entity) WorkflowConfig(org.apache.helix.task.WorkflowConfig) JobQueue(org.apache.helix.task.JobQueue) TaskDriver(org.apache.helix.task.TaskDriver) JobConfig(org.apache.helix.task.JobConfig) Test(org.testng.annotations.Test)

Aggregations

WorkflowConfig (org.apache.helix.task.WorkflowConfig)28 Test (org.testng.annotations.Test)14 JobQueue (org.apache.helix.task.JobQueue)13 WorkflowContext (org.apache.helix.task.WorkflowContext)12 JobConfig (org.apache.helix.task.JobConfig)11 TaskDriver (org.apache.helix.task.TaskDriver)11 JobContext (org.apache.helix.task.JobContext)7 ArrayList (java.util.ArrayList)6 Map (java.util.Map)4 GET (javax.ws.rs.GET)4 Path (javax.ws.rs.Path)4 JobDag (org.apache.helix.task.JobDag)4 IOException (java.io.IOException)3 HashMap (java.util.HashMap)3 HelixDataAccessor (org.apache.helix.HelixDataAccessor)3 HelixException (org.apache.helix.HelixException)3 PropertyKey (org.apache.helix.PropertyKey)3 ObjectNode (org.codehaus.jackson.node.ObjectNode)3 Calendar (java.util.Calendar)2 HashSet (java.util.HashSet)2