Use of org.apache.helix.task.TaskConfig in project incubator-gobblin by apache.
In class GobblinHelixTaskTest, method testPrepareTask:
@Test
public void testPrepareTask() throws IOException {
  // Serialize the JobState that will be read later in GobblinHelixTask
  Path jobStateFilePath =
      new Path(appWorkDir, TestHelper.TEST_JOB_ID + "." + AbstractJobLauncher.JOB_STATE_FILE_NAME);
  JobState jobState = new JobState();
  jobState.setJobName(TestHelper.TEST_JOB_NAME);
  jobState.setJobId(TestHelper.TEST_JOB_ID);
  SerializationUtils.serializeState(this.localFs, jobStateFilePath, jobState);

  // Prepare the WorkUnit
  WorkUnit workUnit = WorkUnit.createEmpty();
  prepareWorkUnit(workUnit);

  // Prepare the source JSON file
  File sourceJsonFile = new File(this.appWorkDir.toString(), TestHelper.TEST_JOB_NAME + ".json");
  TestHelper.createSourceJsonFile(sourceJsonFile);
  workUnit.setProp(SimpleJsonSource.SOURCE_FILE_KEY, sourceJsonFile.getAbsolutePath());

  // Serialize the WorkUnit into a file;
  // the expected path is appWorkDir/_workunits/<job_id>/<job_name>.wu
  Path workUnitDirPath = new Path(this.appWorkDir, GobblinClusterConfigurationKeys.INPUT_WORK_UNIT_DIR_NAME);
  FsStateStore<WorkUnit> wuStateStore = new FsStateStore<>(this.localFs, workUnitDirPath.toString(), WorkUnit.class);
  Path workUnitFilePath =
      new Path(new Path(workUnitDirPath, TestHelper.TEST_JOB_ID), TestHelper.TEST_JOB_NAME + ".wu");
  wuStateStore.put(TestHelper.TEST_JOB_ID, TestHelper.TEST_JOB_NAME + ".wu", workUnit);
  Assert.assertTrue(this.localFs.exists(workUnitFilePath));

  // Prepare the GobblinHelixTask
  Map<String, String> taskConfigMap = Maps.newHashMap();
  taskConfigMap.put(GobblinClusterConfigurationKeys.WORK_UNIT_FILE_PATH, workUnitFilePath.toString());
  taskConfigMap.put(ConfigurationKeys.JOB_NAME_KEY, TestHelper.TEST_JOB_NAME);
  taskConfigMap.put(ConfigurationKeys.JOB_ID_KEY, TestHelper.TEST_JOB_ID);
  taskConfigMap.put(ConfigurationKeys.TASK_KEY_KEY,
      Long.toString(Id.parse(TestHelper.TEST_JOB_ID).getSequence()));
  TaskConfig taskConfig = new TaskConfig("", taskConfigMap, true);
  TaskCallbackContext taskCallbackContext = Mockito.mock(TaskCallbackContext.class);
  Mockito.when(taskCallbackContext.getTaskConfig()).thenReturn(taskConfig);
  Mockito.when(taskCallbackContext.getManager()).thenReturn(this.helixManager);
  GobblinHelixTaskFactory gobblinHelixTaskFactory =
      new GobblinHelixTaskFactory(Optional.<ContainerMetrics>absent(), this.taskExecutor, this.taskStateTracker,
          this.localFs, this.appWorkDir, ConfigFactory.empty(), this.helixManager);
  this.gobblinHelixTask = (GobblinHelixTask) gobblinHelixTaskFactory.createNewTask(taskCallbackContext);
}
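The taskConfigMap is the handoff point: the Gobblin task created by the factory can recover the work-unit path and job identity from the TaskConfig delivered in the TaskCallbackContext. A minimal sketch of that read-back, assuming only TaskConfig.getConfigMap() (the same accessor used in the TaskAdmin example further down); the variable names are illustrative, not Gobblin's actual internals:

  // Hedged sketch: reading back the values the test stored above.
  Map<String, String> cfg = taskCallbackContext.getTaskConfig().getConfigMap();
  Path workUnitPath = new Path(cfg.get(GobblinClusterConfigurationKeys.WORK_UNIT_FILE_PATH));
  String jobId = cfg.get(ConfigurationKeys.JOB_ID_KEY);
  String jobName = cfg.get(ConfigurationKeys.JOB_NAME_KEY);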
Use of org.apache.helix.task.TaskConfig in project helix by apache.
In class TestTaskRebalancerStopResume, method testStopWorkflowInStoppingState:
@Test
public void testStopWorkflowInStoppingState() throws InterruptedException {
  final String workflowName = TestHelper.getTestMethodName();

  // Create a workflow with 2 jobs
  Workflow.Builder builder = new Workflow.Builder(workflowName);
  Map<String, String> jobCommandConfigMap = new HashMap<String, String>();
  jobCommandConfigMap.put(MockTask.TIMEOUT_CONFIG, "1000000");
  jobCommandConfigMap.put(MockTask.NOT_ALLOW_TO_CANCEL, String.valueOf(true));
  List<TaskConfig> taskConfigs = ImmutableList.of(
      new TaskConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTaskId("testTask").build());
  JobConfig.Builder job1 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .addTaskConfigs(taskConfigs).setJobCommandConfigMap(jobCommandConfigMap);
  String job1Name = "Job1";
  JobConfig.Builder job2 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).addTaskConfigs(taskConfigs);
  String job2Name = "Job2";
  builder.addJob(job1Name, job1);
  builder.addJob(job2Name, job2);
  _driver.start(builder.build());

  Thread.sleep(2000);
  _driver.stop(workflowName);
  _driver.pollForWorkflowState(workflowName, TaskState.STOPPING);

  // Expect the job and the workflow to be stuck in the STOPPING state.
  WorkflowContext workflowContext = _driver.getWorkflowContext(workflowName);
  Assert.assertEquals(workflowContext.getJobState(TaskUtil.getNamespacedJobName(workflowName, job1Name)),
      TaskState.STOPPING);
}
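The workflow is stuck in STOPPING precisely because MockTask.NOT_ALLOW_TO_CANCEL keeps the running task from honoring cancellation. For contrast, a sketch of the normal stop/resume round trip on the same TaskDriver, assuming the tasks are cancelable; the polled states are standard TaskState values:

  // Hedged sketch: the normal lifecycle when tasks do honor cancellation.
  _driver.stop(workflowName);
  _driver.pollForWorkflowState(workflowName, TaskState.STOPPED);
  _driver.resume(workflowName);
  _driver.pollForWorkflowState(workflowName, TaskState.IN_PROGRESS);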
Use of org.apache.helix.task.TaskConfig in project helix by apache.
In class TestUserContentStore, method testJobContentPutAndGetWithDependency:
@Test
public void testJobContentPutAndGetWithDependency() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName, 0, 100);

  List<TaskConfig> taskConfigs1 = Lists.newArrayListWithCapacity(1);
  List<TaskConfig> taskConfigs2 = Lists.newArrayListWithCapacity(1);
  Map<String, String> taskConfigMap1 = Maps.newHashMap();
  Map<String, String> taskConfigMap2 = Maps.newHashMap();
  TaskConfig taskConfig1 = new TaskConfig("TaskOne", taskConfigMap1);
  TaskConfig taskConfig2 = new TaskConfig("TaskTwo", taskConfigMap2);
  taskConfigs1.add(taskConfig1);
  taskConfigs2.add(taskConfig2);

  Map<String, String> jobCommandMap = Maps.newHashMap();
  jobCommandMap.put("Timeout", "1000");
  JobConfig.Builder jobBuilder1 = new JobConfig.Builder().setCommand("DummyCommand")
      .addTaskConfigs(taskConfigs1).setJobCommandConfigMap(jobCommandMap).setWorkflow(queueName);
  JobConfig.Builder jobBuilder2 = new JobConfig.Builder().setCommand("DummyCommand")
      .addTaskConfigs(taskConfigs2).setJobCommandConfigMap(jobCommandMap).setWorkflow(queueName);
  queueBuilder.enqueueJob(queueName + 0, jobBuilder1);
  queueBuilder.enqueueJob(queueName + 1, jobBuilder2);

  _driver.start(queueBuilder.build());
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, queueName + 1), TaskState.COMPLETED);
  Assert.assertEquals(
      _driver.getWorkflowContext(queueName).getJobState(TaskUtil.getNamespacedJobName(queueName, queueName + 1)),
      TaskState.COMPLETED);
}
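The dependency between the two enqueued jobs matters because the second job presumably reads content that a task in the first job wrote. A minimal sketch of such a task, assuming Helix's UserContentStore base class with putUserContent/getUserContent and its Scope enum; the key and value are illustrative:

  import org.apache.helix.task.Task;
  import org.apache.helix.task.TaskResult;
  import org.apache.helix.task.UserContentStore;

  public class ContentTask extends UserContentStore implements Task {
    @Override
    public TaskResult run() {
      // WORKFLOW-scoped content is visible across jobs in the same queue.
      putUserContent("sharedKey", "sharedValue", Scope.WORKFLOW); // illustrative key/value
      String value = getUserContent("sharedKey", Scope.WORKFLOW);
      return new TaskResult(TaskResult.Status.COMPLETED, value);
    }

    @Override
    public void cancel() {
    }
  }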
Use of org.apache.helix.task.TaskConfig in project helix by apache.
In class TaskAdmin, method list:
private static void list(TaskDriver taskDriver, String workflow) {
  WorkflowConfig wCfg = taskDriver.getWorkflowConfig(workflow);
  if (wCfg == null) {
    LOG.error("Workflow " + workflow + " does not exist!");
    return;
  }
  WorkflowContext wCtx = taskDriver.getWorkflowContext(workflow);
  LOG.info("Workflow " + workflow + " consists of the following tasks: " + wCfg.getJobDag().getAllNodes());
  String workflowState = (wCtx != null) ? wCtx.getWorkflowState().name() : TaskState.NOT_STARTED.name();
  LOG.info("Current state of workflow is " + workflowState);
  LOG.info("Job states are: ");
  LOG.info("-------");
  for (String job : wCfg.getJobDag().getAllNodes()) {
    TaskState jobState = (wCtx != null) ? wCtx.getJobState(job) : TaskState.NOT_STARTED;
    LOG.info("Job " + job + " is " + jobState);

    // Fetch job information
    JobConfig jCfg = taskDriver.getJobConfig(job);
    JobContext jCtx = taskDriver.getJobContext(job);
    if (jCfg == null || jCtx == null) {
      LOG.info("-------");
      continue;
    }

    // Collect and sort the task partitions
    List<Integer> partitions = Lists.newArrayList(jCtx.getPartitionSet());
    Collections.sort(partitions);

    // Report per-partition status
    for (Integer partition : partitions) {
      String taskId = jCtx.getTaskIdForPartition(partition);
      taskId = (taskId != null) ? taskId : jCtx.getTargetForPartition(partition);
      LOG.info("Task: " + taskId);
      TaskConfig taskConfig = jCfg.getTaskConfig(taskId);
      if (taskConfig != null) {
        LOG.info("Configuration: " + taskConfig.getConfigMap());
      }
      TaskPartitionState state = jCtx.getPartitionState(partition);
      state = (state != null) ? state : TaskPartitionState.INIT;
      LOG.info("State: " + state);
      String assignedParticipant = jCtx.getAssignedParticipant(partition);
      if (assignedParticipant != null) {
        LOG.info("Assigned participant: " + assignedParticipant);
      }
      LOG.info("-------");
    }
    LOG.info("-------");
  }
}
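For completeness, a sketch of how a caller might obtain the connected TaskDriver that list(...) expects, assuming a ZooKeeper-backed cluster; zkAddr, clusterName, and the workflow name are placeholders:

  // Hedged sketch: building a TaskDriver from a connected HelixManager.
  HelixManager manager = HelixManagerFactory.getZKHelixManager(clusterName, "task-admin",
      InstanceType.ADMINISTRATOR, zkAddr); // clusterName/zkAddr are placeholders
  manager.connect();
  TaskDriver taskDriver = new TaskDriver(manager);
  list(taskDriver, "myWorkflow"); // hypothetical workflow name
  manager.disconnect();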
Use of org.apache.helix.task.TaskConfig in project helix by apache.
In class TestTaskConditionalRetry, method test:
@Test
public void test() throws Exception {
  int taskRetryCount = 5;
  int num_tasks = 5;
  String jobResource = TestHelper.getTestMethodName();
  JobConfig.Builder jobBuilder = new JobConfig.Builder();
  jobBuilder.setCommand(MockTask.TASK_COMMAND).setTimeoutPerTask(10000)
      .setMaxAttemptsPerTask(taskRetryCount).setFailureThreshold(Integer.MAX_VALUE);

  // Create the individual task configs: one aborted, one failed, one throwing, the rest normal.
  final int abortedTask = 1;
  final int failedTask = 2;
  final int exceptionTask = 3;
  List<TaskConfig> taskConfigs = new ArrayList<TaskConfig>();
  for (int j = 0; j < num_tasks; j++) {
    TaskConfig.Builder configBuilder = new TaskConfig.Builder().setTaskId("task_" + j);
    switch (j) {
      case abortedTask:
        configBuilder.addConfig(MockTask.TASK_RESULT_STATUS, TaskResult.Status.FATAL_FAILED.name());
        break;
      case failedTask:
        configBuilder.addConfig(MockTask.TASK_RESULT_STATUS, TaskResult.Status.FAILED.name());
        break;
      case exceptionTask:
        configBuilder.addConfig(MockTask.THROW_EXCEPTION, Boolean.TRUE.toString());
        break;
      default:
        break;
    }
    configBuilder.setTargetPartition(String.valueOf(j));
    taskConfigs.add(configBuilder.build());
  }
  jobBuilder.addTaskConfigs(taskConfigs);

  Workflow flow = WorkflowGenerator.generateSingleJobWorkflowBuilder(jobResource, jobBuilder).build();
  _driver.start(flow);

  // Wait until the job completes.
  _driver.pollForWorkflowState(jobResource, TaskState.COMPLETED);

  JobContext ctx = _driver.getJobContext(TaskUtil.getNamespacedJobName(jobResource));
  for (int i = 0; i < num_tasks; i++) {
    TaskPartitionState state = ctx.getPartitionState(i);
    int retriedCount = ctx.getPartitionNumAttempts(i);
    String taskId = ctx.getTaskIdForPartition(i);
    if (taskId.equals("task_" + abortedTask)) {
      Assert.assertEquals(state, TaskPartitionState.TASK_ABORTED);
      Assert.assertEquals(retriedCount, 1);
    } else if (taskId.equals("task_" + failedTask) || taskId.equals("task_" + exceptionTask)) {
      Assert.assertEquals(state, TaskPartitionState.TASK_ERROR);
      Assert.assertEquals(retriedCount, taskRetryCount);
    } else {
      Assert.assertEquals(state, TaskPartitionState.COMPLETED);
      Assert.assertEquals(retriedCount, 1);
    }
  }
}
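The assertions hinge on how the task framework treats TaskResult.Status: FATAL_FAILED aborts the task after a single attempt, while FAILED (or a thrown exception) is retried up to maxAttemptsPerTask. A minimal sketch of a real Task mapping its own errors onto those statuses, standing in for MockTask; doWork() and the exception types are illustrative:

  import org.apache.helix.task.Task;
  import org.apache.helix.task.TaskResult;

  public class RetryAwareTask implements Task {
    @Override
    public TaskResult run() {
      try {
        doWork(); // hypothetical unit of work
        return new TaskResult(TaskResult.Status.COMPLETED, "done");
      } catch (IllegalStateException e) {
        // Unrecoverable: abort immediately, no further attempts.
        return new TaskResult(TaskResult.Status.FATAL_FAILED, e.getMessage());
      } catch (Exception e) {
        // Transient: retried up to maxAttemptsPerTask.
        return new TaskResult(TaskResult.Status.FAILED, e.getMessage());
      }
    }

    @Override
    public void cancel() {
    }

    private void doWork() throws Exception {
      // Illustrative work.
    }
  }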