Use of org.apache.helix.task.JobContext in project helix by apache.
In the class TestZkHelixAdmin, the method testEnableDisablePartitions.
@Test
public void testEnableDisablePartitions() throws InterruptedException {
  HelixAdmin admin = new ZKHelixAdmin(_gZkClient);
  // Disable two of the target DB's partitions on this participant.
  admin.enablePartition(false, CLUSTER_NAME, (PARTICIPANT_PREFIX + "_" + _startPort),
      WorkflowGenerator.DEFAULT_TGT_DB, Arrays.asList("TestDB_0", "TestDB_2"));
  IdealState idealState =
      admin.getResourceIdealState(CLUSTER_NAME, WorkflowGenerator.DEFAULT_TGT_DB);
  List<String> preferenceList = Arrays.asList("localhost_12919", "localhost_12918");
  for (String partitionName : idealState.getPartitionSet()) {
    idealState.setPreferenceList(partitionName, preferenceList);
  }
  idealState.setRebalanceMode(IdealState.RebalanceMode.SEMI_AUTO);
  admin.setResourceIdealState(CLUSTER_NAME, WorkflowGenerator.DEFAULT_TGT_DB, idealState);
  String workflowName = TestHelper.getTestMethodName();
  Workflow.Builder builder = new Workflow.Builder(workflowName);
  JobConfig.Builder jobBuilder = new JobConfig.Builder()
      .setWorkflow(workflowName)
      .setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setTargetPartitionStates(Collections.singleton("SLAVE"));
  builder.addJob("JOB", jobBuilder);
  _driver.start(builder.build());
  Thread.sleep(2000L);
  JobContext jobContext =
      _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, "JOB"));
  // Tasks targeting the disabled partitions (TestDB_0, TestDB_2) are never
  // assigned, so their states stay null; the task on the enabled partition completes.
  Assert.assertEquals(jobContext.getPartitionState(0), null);
  Assert.assertEquals(jobContext.getPartitionState(1), TaskPartitionState.COMPLETED);
  Assert.assertEquals(jobContext.getPartitionState(2), null);
}
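A timing note on the snippet above: the fixed Thread.sleep(2000L) is what the test actually does, but it can race the controller. A minimal sketch of an alternative using only the driver and JobContext calls already shown; the 10-second deadline and 100 ms poll interval are assumptions, not values from the test:

  // Poll the JobContext until the task on the only enabled partition completes.
  long deadline = System.currentTimeMillis() + 10_000L; // assumed time budget
  JobContext ctx = null;
  while (System.currentTimeMillis() < deadline) {
    ctx = _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, "JOB"));
    if (ctx != null && ctx.getPartitionState(1) == TaskPartitionState.COMPLETED) {
      break; // the enabled partition's task has finished
    }
    Thread.sleep(100L);
  }
  Assert.assertNotNull(ctx);
  Assert.assertEquals(ctx.getPartitionState(1), TaskPartitionState.COMPLETED);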
Use of org.apache.helix.task.JobContext in project helix by apache.
In the class TestJobTimeout, the method testNoSlaveToRunTask.
@Test
public void testNoSlaveToRunTask() throws InterruptedException {
  // The first job cannot be assigned to any instance, so it gets stuck and
  // times out; the second job then runs and the workflow succeeds.
  final String FIRST_JOB = "first_job";
  final String SECOND_JOB = "second_job";
  final String WORKFLOW_NAME = TestHelper.getTestMethodName();
  final String DB_NAME = WorkflowGenerator.DEFAULT_TGT_DB;
  JobConfig.Builder firstJobBuilder = new JobConfig.Builder()
      .setWorkflow(WORKFLOW_NAME)
      .setTargetResource(DB_NAME)
      .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.SLAVE.name()))
      .setCommand(MockTask.TASK_COMMAND)
      .setTimeout(1000);
  JobConfig.Builder secondJobBuilder = new JobConfig.Builder()
      .setWorkflow(WORKFLOW_NAME)
      .setTargetResource(DB_NAME)
      .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
      .setCommand(MockTask.TASK_COMMAND)
      .setIgnoreDependentJobFailure(true); // ignore the first job's timeout
  WorkflowConfig.Builder workflowConfigBuilder = new WorkflowConfig.Builder(WORKFLOW_NAME)
      // The workflow tolerates the first job's timeout, schedules the second
      // job, and still succeeds.
      .setFailureThreshold(1);
  Workflow.Builder workflowBuilder = new Workflow.Builder(WORKFLOW_NAME)
      .setWorkflowConfig(workflowConfigBuilder.build())
      .addJob(FIRST_JOB, firstJobBuilder)
      .addJob(SECOND_JOB, secondJobBuilder)
      .addParentChildDependency(FIRST_JOB, SECOND_JOB);
  _driver.start(workflowBuilder.build());
  _driver.pollForJobState(WORKFLOW_NAME,
      TaskUtil.getNamespacedJobName(WORKFLOW_NAME, FIRST_JOB), TaskState.TIMED_OUT);
  _driver.pollForJobState(WORKFLOW_NAME,
      TaskUtil.getNamespacedJobName(WORKFLOW_NAME, SECOND_JOB), TaskState.COMPLETED);
  _driver.pollForWorkflowState(WORKFLOW_NAME, TaskState.COMPLETED);
  JobContext jobContext =
      _driver.getJobContext(TaskUtil.getNamespacedJobName(WORKFLOW_NAME, FIRST_JOB));
  for (int pId : jobContext.getPartitionSet()) {
    // No task was ever assigned for the first job.
    Assert.assertEquals(jobContext.getPartitionState(pId), null);
  }
}
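Because the workflow reaches COMPLETED even though its first job timed out, the WorkflowContext is a convenient single place to confirm both terminal job states. A minimal companion sketch, not part of the original test, assuming WorkflowContext.getJobState keyed by the namespaced job name:

  WorkflowContext wCtx = _driver.getWorkflowContext(WORKFLOW_NAME);
  // The workflow tolerated the first job's timeout...
  Assert.assertEquals(
      wCtx.getJobState(TaskUtil.getNamespacedJobName(WORKFLOW_NAME, FIRST_JOB)),
      TaskState.TIMED_OUT);
  // ...and still drove the second job to completion.
  Assert.assertEquals(
      wCtx.getJobState(TaskUtil.getNamespacedJobName(WORKFLOW_NAME, SECOND_JOB)),
      TaskState.COMPLETED);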
Use of org.apache.helix.task.JobContext in project helix by apache.
In the class TestWorkflowJobDependency, the method testWorkflowWithOutDependencies.
@Test
public void testWorkflowWithOutDependencies() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  // Workflow setup
  LOG.info("Start setup for workflow: " + workflowName);
  Workflow.Builder builder = new Workflow.Builder(workflowName);
  for (int i = 0; i < _numDbs; i++) {
    // Let each job delay for 2 secs.
    JobConfig.Builder jobConfig = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(_testDbs.get(i))
        .setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
        .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
    String jobName = "job" + _testDbs.get(i);
    builder.addJob(jobName, jobConfig);
  }
  // Start the workflow
  Workflow workflow = builder.build();
  _driver.start(workflow);
  // Wait until the workflow completes
  _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
  WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
  long startTime = workflowContext.getStartTime();
  long finishTime = workflowContext.getFinishTime();
  // Narrow [startTime, finishTime] down to the window in which every job was
  // running: the latest job start and the earliest job finish.
  for (String jobName : workflow.getJobConfigs().keySet()) {
    JobContext context = _driver.getJobContext(jobName);
    LOG.info(String.format("JOB: %s starts at %s and finishes at %s.", jobName,
        context.getStartTime(), context.getFinishTime()));
    startTime = Math.max(context.getStartTime(), startTime);
    finishTime = Math.min(context.getFinishTime(), finishTime);
  }
  // Since the jobs have no dependencies, they all ran in parallel and share a
  // valid overlapping time range.
  Assert.assertTrue(startTime <= finishTime);
}
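The overlap check above works at job granularity; JobContext also records per-task timing. A minimal sketch of drilling one level down, an assumption layered on the same context API (it presumes getPartitionStartTime/getPartitionFinishTime are available on JobContext):

  for (String jobName : workflow.getJobConfigs().keySet()) {
    JobContext context = _driver.getJobContext(jobName);
    for (int pId : context.getPartitionSet()) {
      // Every task should finish no earlier than it started.
      Assert.assertTrue(
          context.getPartitionStartTime(pId) <= context.getPartitionFinishTime(pId));
    }
  }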
Use of org.apache.helix.task.JobContext in project helix by apache.
In the class AbstractTestClass, the method createJobs.
protected Set<JobConfig.Builder> createJobs(String cluster, String workflowName, int numJobs) {
  Set<JobConfig.Builder> jobCfgs = new HashSet<>();
  for (int i = 0; i < numJobs; i++) {
    JobConfig.Builder job = new JobConfig.Builder()
        .setCommand("DummyCommand")
        .setTargetResource("RESOURCE")
        .setWorkflow(workflowName);
    jobCfgs.add(job);
    // Write a pre-built, already-COMPLETED JobContext straight into the
    // property store so tests can read it back without ever running the job.
    JobContext jobContext = TaskTestUtil.buildJobContext(System.currentTimeMillis(),
        System.currentTimeMillis() + 1, TaskPartitionState.COMPLETED);
    _baseAccessor.set(String.format("/%s/%s%s/%s/%s", cluster,
        PropertyType.PROPERTYSTORE.name(), TaskConstants.REBALANCER_CONTEXT_ROOT,
        workflowName + "_" + JOB_PREFIX + i, TaskConstants.CONTEXT_NODE),
        jobContext.getRecord(), AccessOption.PERSISTENT);
    _configAccessor.setResourceConfig(cluster, workflowName + "_" + JOB_PREFIX + i, job.build());
  }
  return jobCfgs;
}
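The path assembled in createJobs mirrors where the task framework persists job contexts under the property store. A minimal read-back sketch, an assumption rather than part of the original helper (it presumes _baseAccessor is a BaseDataAccessor<ZNRecord>, as elsewhere in the class):

  // Read the seeded context for job 0 back from the same property-store path.
  String contextPath = String.format("/%s/%s%s/%s/%s", cluster,
      PropertyType.PROPERTYSTORE.name(), TaskConstants.REBALANCER_CONTEXT_ROOT,
      workflowName + "_" + JOB_PREFIX + 0, TaskConstants.CONTEXT_NODE);
  ZNRecord record = _baseAccessor.get(contextPath, null, AccessOption.PERSISTENT);
  Assert.assertNotNull(record);
  JobContext readBack = new JobContext(record); // JobContext wraps a ZNRecord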
Use of org.apache.helix.task.JobContext in project helix by apache.
In the class JobAccessor, the method getJob.
@GET
@Path("{jobName}")
public Response getJob(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName) {
  TaskDriver driver = getTaskDriver(clusterId);
  Map<String, ZNRecord> jobMap = new HashMap<>();
  JobConfig jobConfig = driver.getJobConfig(jobName);
  if (jobConfig != null) {
    jobMap.put(JobProperties.JobConfig.name(), jobConfig.getRecord());
  } else {
    return badRequest(String.format("Job config for %s does not exist", jobName));
  }
  // Always expose the JobContext key; it stays null until the job has run.
  JobContext jobContext = driver.getJobContext(jobName);
  jobMap.put(JobProperties.JobContext.name(), null);
  if (jobContext != null) {
    jobMap.put(JobProperties.JobContext.name(), jobContext.getRecord());
  }
  return JSONRepresentation(jobMap);
}
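On the client side, this endpoint returns the JobConfig and JobContext records as one JSON document. A minimal sketch of calling it with the JAX-RS client API; the base URL and the cluster, workflow, and job names are placeholders, and the /admin/v2 routing is an assumption about a standard Helix REST deployment (requires javax.ws.rs.client.Client/ClientBuilder and javax.ws.rs.core.MediaType):

  Client client = ClientBuilder.newClient();
  try {
    String json = client
        .target("http://localhost:8100/admin/v2") // placeholder host and port
        .path("clusters/MyCluster/workflows/MyWorkflow/jobs/MyWorkflow_myJob")
        .request(MediaType.APPLICATION_JSON)
        .get(String.class);
    System.out.println(json); // {"JobConfig": {...}, "JobContext": {...} or null}
  } finally {
    client.close();
  }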