Use of org.apache.helix.task.JobConfig in project helix by apache: class TestRecurringJobQueue, method testGetNoExistWorkflowConfig.
@Test
public void testGetNoExistWorkflowConfig() {
  String randomName = "randomJob";
  // Nothing with this name was ever started, so every lookup returns null.
  WorkflowConfig workflowConfig = _driver.getWorkflowConfig(randomName);
  Assert.assertNull(workflowConfig);
  JobConfig jobConfig = _driver.getJobConfig(randomName);
  Assert.assertNull(jobConfig);
  WorkflowContext workflowContext = _driver.getWorkflowContext(randomName);
  Assert.assertNull(workflowContext);
  JobContext jobContext = _driver.getJobContext(randomName);
  Assert.assertNull(jobContext);
}
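These tests rely on a _driver field of type org.apache.helix.task.TaskDriver that the test harness has already connected to the cluster. As a minimal standalone sketch of the same null-lookup behavior, assuming a ZooKeeper at localhost:2181 and a cluster named MyCluster (both placeholders):

import org.apache.helix.HelixManager;
import org.apache.helix.HelixManagerFactory;
import org.apache.helix.InstanceType;
import org.apache.helix.task.TaskDriver;

class TaskDriverSketch {
  public static void main(String[] args) throws Exception {
    // Placeholders: cluster name, instance name, and ZooKeeper address.
    HelixManager manager = HelixManagerFactory.getZKHelixManager(
        "MyCluster", "admin", InstanceType.ADMINISTRATOR, "localhost:2181");
    manager.connect();
    try {
      TaskDriver driver = new TaskDriver(manager);
      // A workflow that was never started has no config, so this prints: null
      System.out.println(driver.getWorkflowConfig("randomJob"));
    } finally {
      manager.disconnect();
    }
  }
}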
Use of org.apache.helix.task.JobConfig in project helix by apache: class TestRetrieveWorkflows, method testGetAllWorkflows.
@Test
public void testGetAllWorkflows() throws Exception {
  // Start two identical workflows and remember them.
  List<Workflow> workflowList = new ArrayList<Workflow>();
  for (int i = 0; i < 2; i++) {
    Workflow workflow = WorkflowGenerator
        .generateDefaultRepeatedJobWorkflowBuilder(TestHelper.getTestMethodName() + i)
        .build();
    _driver.start(workflow);
    workflowList.add(workflow);
  }
  // Wait until both workflows have completed.
  for (Workflow workflow : workflowList) {
    _driver.pollForWorkflowState(workflow.getName(), TaskState.COMPLETED);
  }
  // getWorkflows() should return exactly the workflows started above.
  Map<String, WorkflowConfig> workflowConfigMap = _driver.getWorkflows();
  Assert.assertEquals(workflowConfigMap.size(), workflowList.size());
  // Every workflow should have a context, and every job in its DAG should
  // have both a config and a context.
  for (Map.Entry<String, WorkflowConfig> workflow : workflowConfigMap.entrySet()) {
    WorkflowConfig workflowConfig = workflow.getValue();
    WorkflowContext workflowContext = _driver.getWorkflowContext(workflow.getKey());
    Assert.assertNotNull(workflowContext);
    for (String job : workflowConfig.getJobDag().getAllNodes()) {
      JobConfig jobConfig = _driver.getJobConfig(job);
      JobContext jobContext = _driver.getJobContext(job);
      Assert.assertNotNull(jobConfig);
      Assert.assertNotNull(jobContext);
    }
  }
}
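WorkflowGenerator is a Helix test helper; a comparable workflow can be assembled directly with the public Workflow.Builder API. A minimal sketch, where the command "Reindex" and the resource "TestDB" are placeholder names rather than values from the test above:

import org.apache.helix.task.JobConfig;
import org.apache.helix.task.Workflow;

class RepeatedJobWorkflowSketch {
  static Workflow build() {
    // One JobConfig.Builder reused for every node of the DAG, mirroring the
    // "repeated job" shape that WorkflowGenerator produces for these tests.
    JobConfig.Builder job = new JobConfig.Builder();
    job.setCommand("Reindex");        // placeholder task command
    job.setTargetResource("TestDB");  // placeholder target resource
    Workflow.Builder wf = new Workflow.Builder("demoWorkflow");
    wf.addJob("SomeJob1", job);
    wf.addJob("SomeJob2", job);
    return wf.build();                // ready to pass to TaskDriver.start(...)
  }
}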
Use of org.apache.helix.task.JobConfig in project helix by apache: class TestJobFailureDependence, method testWorkflowFailureJobThreshold.
@Test
public void testWorkflowFailureJobThreshold() throws Exception {
  String queueName = TestHelper.getTestMethodName();
  // Create a queue (failure threshold 3, per TaskTestUtil.buildJobQueue).
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName, 0, 3);
  // Create and enqueue one job per target database.
  List<String> currentJobNames = new ArrayList<String>();
  for (int i = 0; i < _numDbs; i++) {
    JobConfig.Builder jobConfig = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(_testDbs.get(i))
        .setTargetPartitionStates(Sets.newHashSet("SLAVE"))
        .setIgnoreDependentJobFailure(true);
    String jobName = "job" + _testDbs.get(i);
    queueBuilder.enqueueJob(jobName, jobConfig);
    currentJobNames.add(jobName);
  }
  _driver.start(queueBuilder.build());
  // Drop the second target database so its job fails; with a threshold of 3
  // the queue keeps running and the last job still completes.
  _setupTool.dropResourceFromCluster(CLUSTER_NAME, _testDbs.get(1));
  String namedSpaceJob1 = String.format("%s_%s", queueName, currentJobNames.get(1));
  _driver.pollForJobState(queueName, namedSpaceJob1, TaskState.FAILED);
  String lastJob =
      String.format("%s_%s", queueName, currentJobNames.get(currentJobNames.size() - 1));
  _driver.pollForJobState(queueName, lastJob, TaskState.COMPLETED);
  _driver.flushQueue(queueName);
  // Lower the failure threshold to 0, then re-enqueue the same jobs.
  WorkflowConfig currentWorkflowConfig = _driver.getWorkflowConfig(queueName);
  WorkflowConfig.Builder configBuilder = new WorkflowConfig.Builder(currentWorkflowConfig);
  configBuilder.setFailureThreshold(0);
  _driver.updateWorkflow(queueName, configBuilder.build());
  _driver.stop(queueName);
  for (int i = 0; i < _numDbs; i++) {
    JobConfig.Builder jobConfig = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(_testDbs.get(i))
        .setTargetPartitionStates(Sets.newHashSet("SLAVE"))
        .setIgnoreDependentJobFailure(true);
    String jobName = "job" + _testDbs.get(i);
    // enqueueJob on the builder updates the local DAG; enqueueJob on the
    // driver pushes the job to the already-running queue.
    queueBuilder.enqueueJob(jobName, jobConfig);
    _driver.enqueueJob(queueName, jobName, jobConfig);
  }
  _driver.resume(queueName);
  // The job targeting the dropped database fails again.
  namedSpaceJob1 = String.format("%s_%s", queueName, currentJobNames.get(1));
  _driver.pollForJobState(queueName, namedSpaceJob1, TaskState.FAILED);
}
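The knob under test is the workflow-level failure threshold. In isolation, and assuming a connected TaskDriver (driver and queueName are placeholders), the reconfiguration step from the middle of the test looks like this:

import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.WorkflowConfig;

class FailureThresholdSketch {
  // With a threshold of 3 the queue tolerates up to three failed jobs; at 0,
  // a single failed job marks the whole queue FAILED.
  static void tightenThreshold(TaskDriver driver, String queueName) {
    WorkflowConfig current = driver.getWorkflowConfig(queueName);
    WorkflowConfig.Builder builder = new WorkflowConfig.Builder(current);
    builder.setFailureThreshold(0);
    driver.updateWorkflow(queueName, builder.build());
  }
}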
Use of org.apache.helix.task.JobConfig in project helix by apache: class TestJobQueueCleanUp, method testJobQueueAutoCleanUp.
@Test
public void testJobQueueAutoCleanUp() throws InterruptedException {
  int capacity = 10;
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName, capacity);
  // Purge terminal jobs every second.
  WorkflowConfig.Builder cfgBuilder = new WorkflowConfig.Builder(builder.getWorkflowConfig());
  cfgBuilder.setJobPurgeInterval(1000);
  builder.setWorkflowConfig(cfgBuilder.build());
  // The mock task succeeds for the first capacity/2 jobs and fails afterwards;
  // each finished job expires 200 ms after it reaches a terminal state.
  JobConfig.Builder jobBuilder = new JobConfig.Builder()
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setCommand(MockTask.TASK_COMMAND)
      .setMaxAttemptsPerTask(2)
      .setJobCommandConfigMap(
          ImmutableMap.of(MockTask.SUCCESS_COUNT_BEFORE_FAIL, String.valueOf(capacity / 2)))
      .setExpiry(200L);
  Set<String> deletedJobs = new HashSet<String>();
  Set<String> remainJobs = new HashSet<String>();
  for (int i = 0; i < capacity; i++) {
    builder.enqueueJob("JOB" + i, jobBuilder);
    if (i < capacity / 2) {
      // The first half completes and should be purged.
      deletedJobs.add("JOB" + i);
    } else {
      // The second half fails and should remain in the queue.
      remainJobs.add(TaskUtil.getNamespacedJobName(queueName, "JOB" + i));
    }
  }
  _driver.start(builder.build());
  _driver.pollForJobState(queueName,
      TaskUtil.getNamespacedJobName(queueName, "JOB" + (capacity - 1)), TaskState.FAILED);
  // Give the purge task (1 s interval) time to run.
  Thread.sleep(2000);
  // Only the failed jobs should still be in the DAG and the context.
  WorkflowConfig config = _driver.getWorkflowConfig(queueName);
  Assert.assertEquals(config.getJobDag().getAllNodes(), remainJobs);
  WorkflowContext context = _driver.getWorkflowContext(queueName);
  Assert.assertEquals(context.getJobStates().keySet(), remainJobs);
  Assert.assertTrue(remainJobs.containsAll(context.getJobStartTimes().keySet()));
  // Configs and contexts of the purged jobs should be gone.
  for (String job : deletedJobs) {
    JobConfig cfg = _driver.getJobConfig(job);
    JobContext ctx = _driver.getJobContext(job);
    Assert.assertNull(cfg);
    Assert.assertNull(ctx);
  }
}
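Two settings drive the clean-up: the queue's job purge interval and each job's expiry. A sketch of just those two knobs, reusing the test's values; the "Reindex" command, the "TestDB" resource, and the config passed in are placeholders:

import org.apache.helix.task.JobConfig;
import org.apache.helix.task.WorkflowConfig;

class PurgeSettingsSketch {
  // Rebuild a queue's WorkflowConfig so a purge pass runs every 1000 ms.
  static WorkflowConfig withPurge(WorkflowConfig existing) {
    WorkflowConfig.Builder cfg = new WorkflowConfig.Builder(existing);
    cfg.setJobPurgeInterval(1000);
    return cfg.build();
  }

  // A job that becomes eligible for purging 200 ms after it reaches a
  // terminal state.
  static JobConfig.Builder shortLivedJob() {
    return new JobConfig.Builder()
        .setCommand("Reindex")        // placeholder task command
        .setTargetResource("TestDB")  // placeholder target resource
        .setExpiry(200L);
  }
}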
Use of org.apache.helix.task.JobConfig in project incubator-gobblin by apache: class GobblinHelixJobLauncher, method createJob.
/**
 * Create a job from a given batch of {@link WorkUnit}s.
 */
private JobConfig.Builder createJob(List<WorkUnit> workUnits) throws IOException {
  Map<String, TaskConfig> taskConfigMap = Maps.newHashMap();
  try (ParallelRunner stateSerDeRunner = new ParallelRunner(this.stateSerDeRunnerThreads, this.fs)) {
    // Serialize each work unit and register it as a Helix task config.
    int multiTaskIdSequence = 0;
    for (WorkUnit workUnit : workUnits) {
      if (workUnit instanceof MultiWorkUnit) {
        workUnit.setId(JobLauncherUtils.newMultiTaskId(this.jobContext.getJobId(), multiTaskIdSequence++));
      }
      addWorkUnit(workUnit, stateSerDeRunner, taskConfigMap);
    }
    Path jobStateFilePath;
    // Write job.state via the state store if one is configured; otherwise
    // serialize directly to the file.
    if (this.stateStores.haveJobStateStore()) {
      jobStateFilePath = GobblinClusterUtils.getJobStateFilePath(true, this.appWorkDir, this.jobContext.getJobId());
      this.stateStores.getJobStateStore().put(jobStateFilePath.getParent().getName(),
          jobStateFilePath.getName(), this.jobContext.getJobState());
    } else {
      jobStateFilePath = GobblinClusterUtils.getJobStateFilePath(false, this.appWorkDir, this.jobContext.getJobId());
      SerializationUtils.serializeState(this.fs, jobStateFilePath, this.jobContext.getJobState());
    }
    LOGGER.debug("GobblinHelixJobLauncher.createJob: jobStateFilePath {}, jobState {} jobProperties {}",
        jobStateFilePath, this.jobContext.getJobState().toString(), this.jobContext.getJobState().getProperties());
  }
  // Assemble the Helix JobConfig: retry policy, failure threshold, task map,
  // and per-instance concurrency.
  JobConfig.Builder jobConfigBuilder = new JobConfig.Builder();
  jobConfigBuilder.setMaxAttemptsPerTask(this.jobContext.getJobState().getPropAsInt(
      ConfigurationKeys.MAX_TASK_RETRIES_KEY, ConfigurationKeys.DEFAULT_MAX_TASK_RETRIES));
  // Tolerate up to workUnits.size() failed tasks at the Helix level.
  jobConfigBuilder.setFailureThreshold(workUnits.size());
  jobConfigBuilder.addTaskConfigMap(taskConfigMap).setCommand(GobblinTaskRunner.GOBBLIN_TASK_FACTORY_NAME);
  jobConfigBuilder.setNumConcurrentTasksPerInstance(ConfigUtils.getInt(jobConfig,
      GobblinClusterConfigurationKeys.HELIX_CLUSTER_TASK_CONCURRENCY,
      GobblinClusterConfigurationKeys.HELIX_CLUSTER_TASK_CONCURRENCY_DEFAULT));
  // Streaming jobs keep running, so let Helix rebalance their tasks while running.
  if (Task.getExecutionModel(ConfigUtils.configToState(jobConfig)).equals(ExecutionModel.STREAMING)) {
    jobConfigBuilder.setRebalanceRunningTask(true);
  }
  return jobConfigBuilder;
}
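The excerpt stops at the builder. A hedged sketch of one possible submission step (not shown in the excerpt; all names are placeholders) wraps the result of createJob(...) in a single-job workflow and hands it to a Helix TaskDriver:

import org.apache.helix.task.JobConfig;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.Workflow;

class SubmitSketch {
  // Hypothetical submission: one workflow containing exactly one job.
  static void submit(TaskDriver driver, String jobName, JobConfig.Builder jobConfigBuilder) {
    Workflow.Builder wf = new Workflow.Builder(jobName);
    wf.addJob(jobName, jobConfigBuilder);
    driver.start(wf.build());
  }
}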