Use of org.apache.helix.task.TaskDriver in project helix by apache.
The class TestIndependentTaskRebalancer, method beforeClass.
@BeforeClass
public void beforeClass() throws Exception {
  _participants = new MockParticipantManager[_numNodes];
  String namespace = "/" + CLUSTER_NAME;
  if (_gZkClient.exists(namespace)) {
    _gZkClient.deleteRecursively(namespace);
  }
  // Setup cluster and instances
  ClusterSetup setupTool = new ClusterSetup(ZK_ADDR);
  setupTool.addCluster(CLUSTER_NAME, true);
  for (int i = 0; i < _numNodes; i++) {
    String storageNodeName = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    setupTool.addInstanceToCluster(CLUSTER_NAME, storageNodeName);
  }
  // Start dummy participants
  for (int i = 0; i < _numNodes; i++) {
    final String instanceName = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    // Set task callbacks
    Map<String, TaskFactory> taskFactoryReg = new HashMap<String, TaskFactory>();
    taskFactoryReg.put("TaskOne", new TaskFactory() {
      @Override
      public Task createNewTask(TaskCallbackContext context) {
        return new TaskOne(context, instanceName);
      }
    });
    taskFactoryReg.put("TaskTwo", new TaskFactory() {
      @Override
      public Task createNewTask(TaskCallbackContext context) {
        return new TaskTwo(context, instanceName);
      }
    });
    taskFactoryReg.put("SingleFailTask", new TaskFactory() {
      @Override
      public Task createNewTask(TaskCallbackContext context) {
        return new SingleFailTask();
      }
    });
    _participants[i] = new MockParticipantManager(ZK_ADDR, CLUSTER_NAME, instanceName);
    // Register a Task state model factory
    StateMachineEngine stateMachine = _participants[i].getStateMachineEngine();
    stateMachine.registerStateModelFactory("Task",
        new TaskStateModelFactory(_participants[i], taskFactoryReg));
    _participants[i].syncStart();
  }
  // Start controller
  String controllerName = CONTROLLER_PREFIX + "_0";
  _controller = new ClusterControllerManager(ZK_ADDR, CLUSTER_NAME, controllerName);
  _controller.syncStart();
  // Start an admin connection
  _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
      InstanceType.ADMINISTRATOR, ZK_ADDR);
  _manager.connect();
  _driver = new TaskDriver(_manager);
}
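Once beforeClass completes, test methods exercise the registered tasks through _driver. A minimal sketch of that pattern, assuming a generic (non-targeted) job; the workflow and job names are illustrative, not from the actual test:

@Test
public void testBasicWorkflow() throws InterruptedException {
  String workflowName = "testWorkflow"; // illustrative name
  // Build a one-job workflow whose task command maps to the registered "TaskOne" factory
  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setCommand("TaskOne").setNumberOfTasks(1);
  Workflow.Builder workflowBuilder =
      new Workflow.Builder(workflowName).addJob("job1", jobBuilder);
  _driver.start(workflowBuilder.build());
  // Block until the controller drives the workflow to the expected terminal state
  _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);
}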
Use of org.apache.helix.task.TaskDriver in project helix by apache.
The class TestUserContentStore, method beforeClass.
@BeforeClass
public void beforeClass() throws Exception {
  _participants = new MockParticipantManager[_numNodes];
  String namespace = "/" + CLUSTER_NAME;
  if (_gZkClient.exists(namespace)) {
    _gZkClient.deleteRecursively(namespace);
  }
  // Setup cluster and instances
  ClusterSetup setupTool = new ClusterSetup(ZK_ADDR);
  setupTool.addCluster(CLUSTER_NAME, true);
  for (int i = 0; i < _numNodes; i++) {
    String storageNodeName = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    setupTool.addInstanceToCluster(CLUSTER_NAME, storageNodeName);
  }
  // Start dummy participants
  for (int i = 0; i < _numNodes; i++) {
    final String instanceName = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    // Set task callbacks
    Map<String, TaskFactory> taskFactoryReg = new HashMap<String, TaskFactory>();
    taskFactoryReg.put("ContentStoreTask", new TaskFactory() {
      @Override
      public Task createNewTask(TaskCallbackContext context) {
        return new ContentStoreTask();
      }
    });
    taskFactoryReg.put("TaskOne", new TaskFactory() {
      @Override
      public Task createNewTask(TaskCallbackContext context) {
        return new TaskOne();
      }
    });
    taskFactoryReg.put("TaskTwo", new TaskFactory() {
      @Override
      public Task createNewTask(TaskCallbackContext context) {
        return new TaskTwo();
      }
    });
    _participants[i] = new MockParticipantManager(ZK_ADDR, CLUSTER_NAME, instanceName);
    // Register a Task state model factory
    StateMachineEngine stateMachine = _participants[i].getStateMachineEngine();
    stateMachine.registerStateModelFactory("Task",
        new TaskStateModelFactory(_participants[i], taskFactoryReg));
    _participants[i].syncStart();
  }
  // Start controller
  String controllerName = CONTROLLER_PREFIX + "_0";
  _controller = new ClusterControllerManager(ZK_ADDR, CLUSTER_NAME, controllerName);
  _controller.syncStart();
  // Start an admin connection
  _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
      InstanceType.ADMINISTRATOR, ZK_ADDR);
  _manager.connect();
  _driver = new TaskDriver(_manager);
}
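The point of this setup is that the registered tasks can share state through Helix's UserContentStore. A minimal sketch of a task that writes workflow-scoped content for a downstream job to read; this is an illustrative class, not the test's actual TaskOne/TaskTwo implementation:

// Illustrative task: extends UserContentStore to get put/get access to shared content
private static class WriterTask extends UserContentStore implements Task {
  @Override
  public TaskResult run() {
    // Visible to any task in the same workflow; Scope.JOB and Scope.TASK narrow visibility
    putUserContent("sharedKey", "sharedValue", Scope.WORKFLOW);
    return new TaskResult(TaskResult.Status.COMPLETED, null);
  }

  @Override
  public void cancel() {
  }
}

A task in a downstream job of the same workflow would then call getUserContent("sharedKey", Scope.WORKFLOW) to read the value back.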
Use of org.apache.helix.task.TaskDriver in project helix by apache.
The class WorkflowAccessor, method createWorkflow.
@PUT
@Path("{workflowId}")
public Response createWorkflow(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId, String content) {
  TaskDriver driver = getTaskDriver(clusterId);
  Map<String, String> cfgMap;
  try {
    JsonNode root = OBJECT_MAPPER.readTree(content);
    cfgMap = OBJECT_MAPPER.readValue(
        root.get(WorkflowProperties.WorkflowConfig.name()).toString(),
        TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, String.class));
    WorkflowConfig workflowConfig = WorkflowConfig.Builder.fromMap(cfgMap).build();
    // A JobQueue keeps accepting jobs after creation, so creating one here
    // ignores any jobs included in the payload
    if (workflowConfig.isJobQueue()) {
      driver.start(new JobQueue.Builder(workflowId).setWorkflowConfig(workflowConfig).build());
      return OK();
    }
    Workflow.Builder workflow = new Workflow.Builder(workflowId);
    if (root.get(WorkflowProperties.Jobs.name()) != null) {
      Map<String, JobConfig.Builder> jobConfigs =
          getJobConfigs((ArrayNode) root.get(WorkflowProperties.Jobs.name()));
      for (Map.Entry<String, JobConfig.Builder> job : jobConfigs.entrySet()) {
        workflow.addJob(job.getKey(), job.getValue());
      }
    }
    if (root.get(WorkflowProperties.ParentJobs.name()) != null) {
      Map<String, List<String>> parentJobs = OBJECT_MAPPER.readValue(
          root.get(WorkflowProperties.ParentJobs.name()).toString(),
          TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, List.class));
      for (Map.Entry<String, List<String>> entry : parentJobs.entrySet()) {
        String parentJob = entry.getKey();
        for (String childJob : entry.getValue()) {
          workflow.addParentChildDependency(parentJob, childJob);
        }
      }
    }
    driver.start(workflow.build());
  } catch (IOException e) {
    return badRequest(String.format("Invalid input for workflow %s: %s", workflowId, e.getMessage()));
  } catch (HelixException e) {
    return badRequest(String.format("Failed to create workflow %s: %s", workflowId, e.getMessage()));
  }
  return OK();
}
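For reference, an illustrative PUT body for this endpoint. The top-level field names (WorkflowConfig, ParentJobs) come from the WorkflowProperties enum used above; the config keys and job names are placeholders, and the Jobs array is omitted because its per-job schema is parsed by getJobConfigs, which is not shown in this snippet. ParentJobs maps each parent job to the list of its children, matching addParentChildDependency:

{
  "WorkflowConfig": {
    "WorkflowID": "myWorkflow",
    "Expiry": "43200000"
  },
  "ParentJobs": {
    "job1": ["job2"]
  }
}

If the supplied WorkflowConfig marks the workflow as a job queue, the endpoint creates an empty JobQueue instead and jobs must be added through subsequent requests.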
Use of org.apache.helix.task.TaskDriver in project helix by apache.
The class WorkflowAccessor, method updateWorkflowConfig.
@POST
@Path("{workflowId}/configs")
public Response updateWorkflowConfig(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId, String content) {
  ZNRecord record;
  TaskDriver driver = getTaskDriver(clusterId);
  try {
    record = toZNRecord(content);
    WorkflowConfig workflowConfig = driver.getWorkflowConfig(workflowId);
    if (workflowConfig == null) {
      return badRequest(String.format("WorkflowConfig for workflow %s does not exist!", workflowId));
    }
    workflowConfig.getRecord().update(record);
    driver.updateWorkflow(workflowId, workflowConfig);
  } catch (HelixException e) {
    return badRequest(String.format("Failed to update WorkflowConfig for workflow %s", workflowId));
  } catch (Exception e) {
    return badRequest(String.format("Invalid WorkflowConfig for workflow %s", workflowId));
  }
  return OK();
}
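toZNRecord expects the standard ZNRecord JSON layout (id, simpleFields, listFields, mapFields), and record.update merges the supplied fields into the existing config rather than replacing it. An illustrative POST body that only bumps the expiry; the id and values are placeholders:

{
  "id": "myWorkflow",
  "simpleFields": {
    "Expiry": "86400000"
  },
  "listFields": {},
  "mapFields": {}
}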
Use of org.apache.helix.task.TaskDriver in project helix by apache.
The class WorkflowAccessor, method getWorkflow.
@GET
@Path("{workflowId}")
public Response getWorkflow(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId) {
  TaskDriver taskDriver = getTaskDriver(clusterId);
  WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId);
  WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId);
  ObjectNode root = JsonNodeFactory.instance.objectNode();
  TextNode id = JsonNodeFactory.instance.textNode(workflowId);
  root.put(Properties.id.name(), id);
  ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode();
  ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode();
  if (workflowConfig != null) {
    getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord());
  }
  if (workflowContext != null) {
    getWorkflowContextNode(workflowContextNode, workflowContext.getRecord());
  }
  root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode);
  root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode);
  // Guard the DAG lookup as well: workflowConfig is null for an unknown workflow,
  // and dereferencing it unconditionally would throw a NullPointerException
  if (workflowConfig != null) {
    JobDag jobDag = workflowConfig.getJobDag();
    ArrayNode jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes());
    ObjectNode parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getChildrenToParents());
    root.put(WorkflowProperties.Jobs.name(), jobs);
    root.put(WorkflowProperties.ParentJobs.name(), parentJobs);
  }
  return JSONRepresentation(root);
}
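Outside the REST layer, the same data this endpoint serializes is available directly from TaskDriver. A minimal sketch, assuming a connected HelixManager named manager and an illustrative workflow name:

TaskDriver driver = new TaskDriver(manager);
WorkflowConfig config = driver.getWorkflowConfig("myWorkflow");
WorkflowContext context = driver.getWorkflowContext("myWorkflow");
if (context != null) {
  // Overall workflow state: IN_PROGRESS, COMPLETED, FAILED, etc.
  TaskState state = context.getWorkflowState();
}
if (config != null) {
  // The job names in the workflow's DAG, as returned under "Jobs" above
  Set<String> jobs = config.getJobDag().getAllNodes();
}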