Use of org.apache.helix.task.JobConfig in the Apache Helix project: the class TestWorkflowJobDependency, method testWorkflowWithOutDependencies.
@Test
public void testWorkflowWithOutDependencies() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();

  // Build a workflow containing one independent job per test database.
  LOG.info("Start setup for workflow: " + workflowName);
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName);
  for (int dbIndex = 0; dbIndex < _numDbs; dbIndex++) {
    String targetDb = _testDbs.get(dbIndex);
    // Let each job delay for 2 secs.
    JobConfig.Builder jobBuilder = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(targetDb)
        .setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
        .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
    workflowBuilder.addJob("job" + targetDb, jobBuilder);
  }

  // Start the workflow and wait until it completes.
  Workflow workflow = workflowBuilder.build();
  _driver.start(workflow);
  _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);

  WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
  long latestStart = workflowContext.getStartTime();
  long earliestFinish = workflowContext.getFinishTime();

  // Narrow [latestStart, earliestFinish] to the time window shared by every job:
  // the latest of all start times and the earliest of all finish times.
  for (String jobName : workflow.getJobConfigs().keySet()) {
    JobContext context = _driver.getJobContext(jobName);
    LOG.info(String.format("JOB: %s starts from %s finishes at %s.", jobName,
        context.getStartTime(), context.getFinishTime()));
    latestStart = Math.max(context.getStartTime(), latestStart);
    earliestFinish = Math.min(context.getFinishTime(), earliestFinish);
  }

  // All jobs (with no dependencies between them) must overlap in time.
  Assert.assertTrue(latestStart <= earliestFinish);
}
Use of org.apache.helix.task.JobConfig in the Apache Helix project: the class TestJobAccessor, method testCreateJob.
@Test(dependsOnMethods = "testGetJobContext")
public void testCreateJob() throws IOException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver driver = getTaskDriver(CLUSTER_NAME);

  // Create a JobQueue through the REST endpoint, reusing an existing workflow config.
  JobQueue.Builder queueBuilder = new JobQueue.Builder(TEST_QUEUE_NAME)
      .setWorkflowConfig(driver.getWorkflowConfig(WORKFLOW_NAME));
  String queueConfigJson = OBJECT_MAPPER.writeValueAsString(Collections.singletonMap(
      WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
      queueBuilder.build().getWorkflowConfig().getRecord().getSimpleFields()));
  Entity entity = Entity.entity(queueConfigJson, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity,
      Response.Status.OK.getStatusCode());

  // Enqueue a job onto the freshly created queue.
  entity = Entity.entity(JOB_INPUT, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME,
      null, entity, Response.Status.OK.getStatusCode());

  // The enqueued job must show up both as a JobConfig and as a node in the queue's DAG.
  String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
  JobConfig jobConfig = driver.getJobConfig(jobName);
  Assert.assertNotNull(jobConfig);
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
  Assert.assertTrue(workflowConfig.getJobDag().getAllNodes().contains(jobName));
}
Use of org.apache.helix.task.JobConfig in the Apache Helix project: the class TestJobAccessor, method testDeleteJob.
@Test(dependsOnMethods = "testCreateJob")
public void testDeleteJob() {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver driver = getTaskDriver(CLUSTER_NAME);

  // Delete the job enqueued by testCreateJob via the REST endpoint.
  delete("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME,
      Response.Status.OK.getStatusCode());

  // The job config must be gone and the job removed from the queue's DAG.
  String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
  JobConfig jobConfig = driver.getJobConfig(jobName);
  Assert.assertNull(jobConfig);
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
  // Use assertFalse instead of assertTrue(!...) for a clearer failure message.
  Assert.assertFalse(workflowConfig.getJobDag().getAllNodes().contains(jobName));
}
Use of org.apache.helix.task.JobConfig in the Apache Helix project: the class JobAccessor, method addJob.
@PUT
@Path("{jobName}")
public Response addJob(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName,
    String content) {
  TaskDriver driver = getTaskDriver(clusterId);
  try {
    // Parse the request body into a ZNRecord and enqueue the resulting
    // job config onto the named workflow (queue).
    ZNRecord jobRecord = toZNRecord(content);
    JobConfig.Builder jobConfigBuilder = JobAccessor.getJobConfig(jobRecord);
    driver.enqueueJob(workflowName, jobName, jobConfigBuilder);
  } catch (HelixException e) {
    // The task framework rejected the enqueue (e.g. target workflow missing).
    return badRequest(String.format("Failed to enqueue job %s for reason : %s", jobName, e.getMessage()));
  } catch (IOException e) {
    // The request body could not be deserialized into a job config.
    return badRequest(String.format("Invalid input for Job Config of Job : %s", jobName));
  }
  return OK();
}
Use of org.apache.helix.task.JobConfig in the Apache Helix project: the class JobAccessor, method getJob.
@GET
@Path("{jobName}")
public Response getJob(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName) {
  TaskDriver driver = getTaskDriver(clusterId);

  // Guard clause: without a job config there is nothing to return.
  // (Also fixes the grammar of the error message: "does not exists" -> "does not exist".)
  JobConfig jobConfig = driver.getJobConfig(jobName);
  if (jobConfig == null) {
    return badRequest(String.format("Job config for %s does not exist", jobName));
  }

  Map<String, ZNRecord> jobMap = new HashMap<>();
  jobMap.put(JobProperties.JobConfig.name(), jobConfig.getRecord());

  // The JobContext key is always present in the response; its value is null when
  // no context exists yet, so clients can distinguish "no context" from "key missing".
  // (Replaces the original redundant put-null-then-overwrite sequence.)
  JobContext jobContext = driver.getJobContext(jobName);
  jobMap.put(JobProperties.JobContext.name(), jobContext != null ? jobContext.getRecord() : null);

  return JSONRepresentation(jobMap);
}
Aggregations