Use of org.apache.helix.task.JobContext in project helix by apache.
In class TestTaskRebalancerRetryLimit, method test:
@Test
public void test() throws Exception {
  String jobResource = TestHelper.getTestMethodName();
  JobConfig.Builder jobBuilder = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG);
  // Note: the second setJobCommandConfigMap call replaces the first, so the
  // job effectively runs with only the THROW_EXCEPTION flag set.
  jobBuilder.setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG)
      .setMaxAttemptsPerTask(2)
      .setCommand(MockTask.TASK_COMMAND)
      .setFailureThreshold(Integer.MAX_VALUE)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.THROW_EXCEPTION, "true"));
  Workflow flow = WorkflowGenerator.generateSingleJobWorkflowBuilder(jobResource, jobBuilder).build();
  _driver.start(flow);
  // Wait until the workflow completes.
  _driver.pollForWorkflowState(jobResource, TaskState.COMPLETED);
  JobContext ctx = _driver.getJobContext(TaskUtil.getNamespacedJobName(jobResource));
  // Every scheduled partition should have failed and been retried exactly up
  // to the two-attempt limit. (_numParitions is the field name as spelled in
  // the test base class.)
  for (int i = 0; i < _numParitions; i++) {
    TaskPartitionState state = ctx.getPartitionState(i);
    if (state != null) {
      Assert.assertEquals(state, TaskPartitionState.TASK_ERROR);
      Assert.assertEquals(ctx.getPartitionNumAttempts(i), 2);
    }
  }
}
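The retry limit is exercised here through MockTask's THROW_EXCEPTION flag. For readers without the Helix test sources at hand, a minimal sketch of an always-failing Task is shown below; AlwaysFailingTask is a hypothetical stand-in for MockTask's failure mode, not its actual implementation.

import org.apache.helix.task.Task;
import org.apache.helix.task.TaskResult;

// Hypothetical stand-in for MockTask with THROW_EXCEPTION=true: every run()
// throws, which the task framework records as one failed attempt, so with
// setMaxAttemptsPerTask(2) a partition ends in TASK_ERROR after two tries.
public class AlwaysFailingTask implements Task {
  @Override
  public TaskResult run() {
    throw new RuntimeException("Simulated task failure");
  }

  @Override
  public void cancel() {
    // Nothing to clean up in this sketch.
  }
}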
Use of org.apache.helix.task.JobContext in project helix by apache.
In class TestTaskRebalancerStopResume, method stopAndResumeNamedQueue:
@Test
public void stopAndResumeNamedQueue() throws Exception {
  String queueName = TestHelper.getTestMethodName();
  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue queue = new JobQueue.Builder(queueName).build();
  _driver.createQueue(queue);
  // Enqueue a MASTER-targeted job followed by a SLAVE-targeted job
  Set<String> master = Sets.newHashSet("MASTER");
  JobConfig.Builder job1 = new JobConfig.Builder()
      .setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setTargetPartitionStates(master);
  String job1Name = "masterJob";
  LOG.info("Enqueuing job: " + job1Name);
  _driver.enqueueJob(queueName, job1Name, job1);
  Set<String> slave = Sets.newHashSet("SLAVE");
  JobConfig.Builder job2 = new JobConfig.Builder()
      .setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setTargetPartitionStates(slave);
  String job2Name = "slaveJob";
  LOG.info("Enqueuing job: " + job2Name);
  _driver.enqueueJob(queueName, job2Name, job2);
  String namespacedJob1 = String.format("%s_%s", queueName, job1Name);
  _driver.pollForJobState(queueName, namespacedJob1, TaskState.IN_PROGRESS);
  // Stop the queue while job1 is in progress
  LOG.info("Pausing job-queue: " + queueName);
  _driver.stop(queueName);
  _driver.pollForJobState(queueName, namespacedJob1, TaskState.STOPPED);
  _driver.pollForWorkflowState(queueName, TaskState.STOPPED);
  // Ensure job2 is not started
  TimeUnit.MILLISECONDS.sleep(200);
  String namespacedJob2 = String.format("%s_%s", queueName, job2Name);
  TaskTestUtil.pollForEmptyJobState(_driver, queueName, job2Name);
  LOG.info("Resuming job-queue: " + queueName);
  _driver.resume(queueName);
  // Ensure successful completion of both jobs
  _driver.pollForJobState(queueName, namespacedJob1, TaskState.COMPLETED);
  _driver.pollForJobState(queueName, namespacedJob2, TaskState.COMPLETED);
  JobContext masterJobContext = _driver.getJobContext(namespacedJob1);
  JobContext slaveJobContext = _driver.getJobContext(namespacedJob2);
  // Ensure correct ordering: job2 must not start before job1 finishes
  long job1Finish = masterJobContext.getFinishTime();
  long job2Start = slaveJobContext.getStartTime();
  Assert.assertTrue(job2Start >= job1Finish);
  // Flush the queue and check that both jobs were cleaned up
  LOG.info("Flushing job-queue: " + queueName);
  _driver.flushQueue(queueName);
  verifyJobDeleted(queueName, namespacedJob1);
  verifyJobDeleted(queueName, namespacedJob2);
  verifyJobNotInQueue(queueName, namespacedJob1);
  verifyJobNotInQueue(queueName, namespacedJob2);
}
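The helpers verifyJobDeleted and verifyJobNotInQueue live in the surrounding test class and are not reproduced on this page. A minimal sketch of the first one, assuming TaskDriver returns null for the config and context of a removed job (which is how flushQueue cleanup becomes observable), could look like this:

private void verifyJobDeleted(String queueName, String namespacedJobName) {
  // After flushQueue, both the job's config and its runtime context
  // should be gone from the cluster metadata.
  Assert.assertNull(_driver.getJobConfig(namespacedJobName));
  Assert.assertNull(_driver.getJobContext(namespacedJobName));
}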
Use of org.apache.helix.task.JobContext in project helix by apache.
In class TestTaskRebalancerStopResume, method stopDeleteJobAndResumeRecurrentQueue:
@Test
public void stopDeleteJobAndResumeRecurrentQueue() throws Exception {
  String queueName = TestHelper.getTestMethodName();
  // Create a recurrent queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildRecurrentJobQueue(queueName);
  // Create and enqueue five jobs: one MASTER-targeted, four SLAVE-targeted
  List<String> currentJobNames = new ArrayList<String>();
  Map<String, String> commandConfig = ImmutableMap.of(TIMEOUT_CONFIG, String.valueOf(500));
  for (int i = 0; i <= 4; i++) {
    String targetPartition = (i == 0) ? "MASTER" : "SLAVE";
    JobConfig.Builder job = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(commandConfig)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Sets.newHashSet(targetPartition));
    String jobName = targetPartition.toLowerCase() + "Job" + i;
    LOG.info("Enqueuing job: " + jobName);
    queueBuilder.enqueueJob(jobName, job);
    currentJobNames.add(i, jobName);
  }
  _driver.createQueue(queueBuilder.build());
  WorkflowContext wCtx = TaskTestUtil.pollForWorkflowContext(_driver, queueName);
  String scheduledQueue = wCtx.getLastScheduledSingleWorkflow();
  // Ensure job 1 is started before deleting it
  String deletedJob1 = currentJobNames.get(0);
  String namedSpaceDeletedJob1 = String.format("%s_%s", scheduledQueue, deletedJob1);
  _driver.pollForJobState(scheduledQueue, namedSpaceDeletedJob1, TaskState.IN_PROGRESS);
  // Stop the queue
  LOG.info("Pausing job-queue: " + queueName);
  _driver.stop(queueName);
  _driver.pollForJobState(scheduledQueue, namedSpaceDeletedJob1, TaskState.STOPPED);
  _driver.pollForWorkflowState(scheduledQueue, TaskState.STOPPED);
  // Delete the in-progress job (job 1) and verify that it was deleted
  _driver.deleteJob(queueName, deletedJob1);
  verifyJobDeleted(queueName, namedSpaceDeletedJob1);
  verifyJobDeleted(scheduledQueue, namedSpaceDeletedJob1);
  LOG.info("Resuming job-queue: " + queueName);
  _driver.resume(queueName);
  // Ensure job 2 is started
  _driver.pollForJobState(scheduledQueue,
      String.format("%s_%s", scheduledQueue, currentJobNames.get(1)), TaskState.IN_PROGRESS);
  // Stop the queue again
  LOG.info("Pausing job-queue: " + queueName);
  _driver.stop(queueName);
  _driver.pollForJobState(scheduledQueue,
      String.format("%s_%s", scheduledQueue, currentJobNames.get(1)), TaskState.STOPPED);
  _driver.pollForWorkflowState(scheduledQueue, TaskState.STOPPED);
  // Ensure job 3 is not started before deleting it
  String deletedJob2 = currentJobNames.get(2);
  String namedSpaceDeletedJob2 = String.format("%s_%s", scheduledQueue, deletedJob2);
  TaskTestUtil.pollForEmptyJobState(_driver, scheduledQueue, namedSpaceDeletedJob2);
  // Delete the not-yet-started job (job 3) and verify that it was deleted
  _driver.deleteJob(queueName, deletedJob2);
  verifyJobDeleted(queueName, namedSpaceDeletedJob2);
  verifyJobDeleted(scheduledQueue, namedSpaceDeletedJob2);
  LOG.info("Resuming job-queue: " + queueName);
  _driver.resume(queueName);
  // Ensure the remaining jobs complete successfully and in the correct order
  currentJobNames.remove(deletedJob1);
  currentJobNames.remove(deletedJob2);
  long preJobFinish = 0;
  for (int i = 0; i < currentJobNames.size(); i++) {
    String namedSpaceJobName = String.format("%s_%s", scheduledQueue, currentJobNames.get(i));
    _driver.pollForJobState(scheduledQueue, namedSpaceJobName, TaskState.COMPLETED);
    JobContext jobContext = _driver.getJobContext(namedSpaceJobName);
    long jobStart = jobContext.getStartTime();
    Assert.assertTrue(jobStart >= preJobFinish);
    preJobFinish = jobContext.getFinishTime();
  }
  // Deleted jobs should also be absent from the next recurrence of the queue schedule
}
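TaskTestUtil.buildRecurrentJobQueue is also external to this page. The essential idea is attaching a recurring ScheduleConfig to the queue's WorkflowConfig, which makes the controller clone a scheduled workflow each interval; that clone is what getLastScheduledSingleWorkflow() returns above. A hedged sketch, with the start time and the 60-second interval chosen purely for illustration:

private JobQueue.Builder buildRecurrentQueue(String queueName) {
  // Recur every 60 seconds starting now; each recurrence materializes as a
  // separate scheduled workflow whose name is read back via WorkflowContext.
  WorkflowConfig.Builder workflowCfgBuilder = new WorkflowConfig.Builder()
      .setScheduleConfig(ScheduleConfig.recurringFromDate(new Date(), TimeUnit.SECONDS, 60));
  JobQueue.Builder queueBuilder = new JobQueue.Builder(queueName);
  queueBuilder.setWorkflowConfig(workflowCfgBuilder.build());
  return queueBuilder;
}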
Use of org.apache.helix.task.JobContext in project helix by apache.
In class TestTaskWithInstanceDisabled, method testTaskWithInstanceDisabled:
@Test
public void testTaskWithInstanceDisabled() throws InterruptedException {
  // Disable the first participant so the rebalancer cannot assign tasks to it
  _setupTool.getClusterManagementTool()
      .enableInstance(CLUSTER_NAME, PARTICIPANT_PREFIX + "_" + (_startPort + 0), false);
  String jobResource = TestHelper.getTestMethodName();
  JobConfig.Builder jobBuilder = new JobConfig.Builder()
      .setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB);
  Workflow flow = WorkflowGenerator.generateSingleJobWorkflowBuilder(jobResource, jobBuilder).build();
  _driver.start(flow);
  _driver.pollForWorkflowState(jobResource, TaskState.COMPLETED);
  // The task must have been assigned to the remaining enabled participant
  JobContext ctx = _driver.getJobContext(TaskUtil.getNamespacedJobName(jobResource));
  Assert.assertEquals(ctx.getAssignedParticipant(0), PARTICIPANT_PREFIX + "_" + (_startPort + 1));
}
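JobContext also exposes the full partition set, so the single-partition assertion above generalizes to checking that no task landed on the disabled participant. A short sketch, deriving the instance name from the same PARTICIPANT_PREFIX and _startPort fields:

String disabledInstance = PARTICIPANT_PREFIX + "_" + (_startPort + 0);
for (int pId : ctx.getPartitionSet()) {
  // No partition should have been assigned to the disabled instance.
  Assert.assertNotEquals(ctx.getAssignedParticipant(pId), disabledInstance);
}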
Use of org.apache.helix.task.JobContext in project helix by apache.
In class TestJobFailure, method testNormalJobFailure:
@Test(dataProvider = "testJobFailureInput")
public void testNormalJobFailure(String comment, List<String> taskStates,
    List<String> expectedTaskEndingStates, String expectedJobEndingStates,
    String expectedWorkflowEndingStates) throws InterruptedException {
  final String JOB_NAME = "test_job";
  final String WORKFLOW_NAME = TestHelper.getTestMethodName() + testNum++;
  System.out.println("Test case comment: " + comment);
  Map<String, Map<String, String>> targetPartitionConfigs =
      createPartitionConfig(taskStates, expectedTaskEndingStates);
  JobConfig.Builder firstJobBuilder = new JobConfig.Builder()
      .setWorkflow(WORKFLOW_NAME)
      .setTargetResource(DB_NAME)
      .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
      .setCommand(MockTask.TASK_COMMAND)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.TARGET_PARTITION_CONFIG,
          MockTask.serializeTargetPartitionConfig(targetPartitionConfigs)));
  Workflow.Builder workflowBuilder =
      new Workflow.Builder(WORKFLOW_NAME).addJob(JOB_NAME, firstJobBuilder);
  _driver.start(workflowBuilder.build());
  _driver.pollForJobState(WORKFLOW_NAME, TaskUtil.getNamespacedJobName(WORKFLOW_NAME, JOB_NAME),
      TaskState.valueOf(expectedJobEndingStates));
  _driver.pollForWorkflowState(WORKFLOW_NAME, TaskState.valueOf(expectedWorkflowEndingStates));
  // Each partition must end in the state the data provider expects
  JobContext jobContext = _driver.getJobContext(TaskUtil.getNamespacedJobName(WORKFLOW_NAME, JOB_NAME));
  for (int pId : jobContext.getPartitionSet()) {
    Map<String, String> targetPartitionConfig =
        targetPartitionConfigs.get(jobContext.getTargetForPartition(pId));
    Assert.assertEquals(jobContext.getPartitionState(pId).name(),
        targetPartitionConfig.get(EXPECTED_ENDING_STATE));
  }
}
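createPartitionConfig is a helper in TestJobFailure that is not reproduced on this page. A hypothetical reconstruction consistent with how the test consumes its output follows; the "&lt;db&gt;_&lt;index&gt;" partition-naming convention and the "TaskResultStatus" key are assumptions for illustration, not confirmed API:

private Map<String, Map<String, String>> createPartitionConfig(List<String> taskStates,
    List<String> expectedTaskEndingStates) {
  // Pair each target partition with the state MockTask should simulate and
  // the ending state the assertions above compare against.
  Map<String, Map<String, String>> configs = new HashMap<>();
  for (int i = 0; i < taskStates.size(); i++) {
    Map<String, String> cfg = new HashMap<>();
    cfg.put("TaskResultStatus", taskStates.get(i)); // assumed key, see lead-in
    cfg.put(EXPECTED_ENDING_STATE, expectedTaskEndingStates.get(i));
    configs.put(DB_NAME + "_" + i, cfg); // assumed partition naming
  }
  return configs;
}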