Use of org.apache.helix.task.JobQueue in project helix by apache.
The class WorkflowAccessor, method createWorkflow:
@PUT
@Path("{workflowId}")
public Response createWorkflow(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId, String content) {
  TaskDriver driver = getTaskDriver(clusterId);
  Map<String, String> cfgMap;
  try {
    JsonNode root = OBJECT_MAPPER.readTree(content);
    cfgMap = OBJECT_MAPPER.readValue(
        root.get(WorkflowProperties.WorkflowConfig.name()).toString(),
        TypeFactory.defaultInstance()
            .constructMapType(HashMap.class, String.class, String.class));
    WorkflowConfig workflowConfig = WorkflowConfig.Builder.fromMap(cfgMap).build();
    // Since a JobQueue keeps accepting jobs after creation, creating a JobQueue
    // here ignores any Jobs/ParentJobs sections of the payload.
    if (workflowConfig.isJobQueue()) {
      driver.start(new JobQueue.Builder(workflowId).setWorkflowConfig(workflowConfig).build());
      return OK();
    }
    Workflow.Builder workflow = new Workflow.Builder(workflowId);
    if (root.get(WorkflowProperties.Jobs.name()) != null) {
      Map<String, JobConfig.Builder> jobConfigs =
          getJobConfigs((ArrayNode) root.get(WorkflowProperties.Jobs.name()));
      for (Map.Entry<String, JobConfig.Builder> job : jobConfigs.entrySet()) {
        workflow.addJob(job.getKey(), job.getValue());
      }
    }
    if (root.get(WorkflowProperties.ParentJobs.name()) != null) {
      Map<String, List<String>> parentJobs = OBJECT_MAPPER.readValue(
          root.get(WorkflowProperties.ParentJobs.name()).toString(),
          TypeFactory.defaultInstance()
              .constructMapType(HashMap.class, String.class, List.class));
      for (Map.Entry<String, List<String>> entry : parentJobs.entrySet()) {
        String parentJob = entry.getKey();
        for (String childJob : entry.getValue()) {
          workflow.addParentChildDependency(parentJob, childJob);
        }
      }
    }
    driver.start(workflow.build());
  } catch (IOException e) {
    return badRequest(
        String.format("Invalid input of Workflow %s for reason: %s", workflowId, e.getMessage()));
  } catch (HelixException e) {
    return badRequest(
        String.format("Failed to create workflow %s for reason: %s", workflowId, e.getMessage()));
  }
  return OK();
}
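For reference, a minimal sketch of a request body this endpoint can parse. This is an assumption, not taken from the Helix source: the top-level keys mirror the WorkflowProperties enum names read above, while the workflow field names and job names are made up for illustration.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;

public class WorkflowPayloadSketch {
  public static void main(String[] args) throws Exception {
    Map<String, Object> payload = new HashMap<>();
    // Consumed by WorkflowConfig.Builder.fromMap(); "Expiry" is assumed here
    // to be one of the WorkflowConfig simple fields (value in milliseconds).
    payload.put("WorkflowConfig", Collections.singletonMap("Expiry", "43200000"));
    // Handed to getJobConfigs() as an ArrayNode; job definitions omitted here.
    payload.put("Jobs", Collections.emptyList());
    // Parent job name -> child job names, wired via addParentChildDependency().
    payload.put("ParentJobs", Collections.singletonMap("job1", Arrays.asList("job2")));
    System.out.println(new ObjectMapper().writeValueAsString(payload));
  }
}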
Use of org.apache.helix.task.JobQueue in project helix by apache.
The class TestJobAccessor, method testCreateJob:
@Test(dependsOnMethods = "testGetJobContext")
public void testCreateJob() throws IOException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver driver = getTaskDriver(CLUSTER_NAME);
  // Create JobQueue
  JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME)
      .setWorkflowConfig(driver.getWorkflowConfig(WORKFLOW_NAME));
  Entity entity = Entity.entity(
      OBJECT_MAPPER.writeValueAsString(Collections.singletonMap(
          WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
          jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields())),
      MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity,
      Response.Status.OK.getStatusCode());
  // Test enqueue job
  entity = Entity.entity(JOB_INPUT, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME,
      null, entity, Response.Status.OK.getStatusCode());
  String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
  JobConfig jobConfig = driver.getJobConfig(jobName);
  Assert.assertNotNull(jobConfig);
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
  Assert.assertTrue(workflowConfig.getJobDag().getAllNodes().contains(jobName));
}
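For comparison, the REST enqueue above can also be done directly through TaskDriver. A minimal sketch, reusing the driver from the test and assuming a hypothetical task command and target resource:

// Enqueue the same job via TaskDriver instead of the REST endpoint.
JobConfig.Builder jobBuilder = new JobConfig.Builder()
    .setCommand("Reindex")          // hypothetical task command
    .setTargetResource("MyDB");     // hypothetical target resource
driver.enqueueJob(TEST_QUEUE_NAME, TEST_JOB_NAME, jobBuilder);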