use of org.apache.helix.task.TaskDriver in project helix by apache.
the class JobQueuesResource method post.
/**
 * Add a new job queue
 * <p>
 * Usage:
 * <code>curl -d @'{jobQueueConfig.yaml}'
 * -H 'Content-Type: application/json' http://{host:port}/clusters/{clusterName}/jobQueues</code>
 * <p>
 * For jobQueueConfig.yaml, see {@link Workflow#parse(String)}
 */
@Override
public Representation post(Representation entity) {
  try {
    String clusterName =
        ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
    ZkClient zkClient =
        ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
    Form form = new Form(entity);
    // The YAML queue config arrives as the name of the sole form parameter
    if (form.size() < 1) {
      throw new HelixException("Yaml job queue config is required!");
    }
    Parameter payload = form.get(0);
    String yamlPayload = payload.getName();
    if (yamlPayload == null) {
      throw new HelixException("Yaml job queue config is required!");
    }
    // Parse the definition as a workflow, rebuild it as a job queue, and submit it
    Workflow workflow = Workflow.parse(yamlPayload);
    JobQueue.Builder jobQueueCfgBuilder = new JobQueue.Builder(workflow.getName());
    jobQueueCfgBuilder.fromMap(workflow.getWorkflowConfig().getResourceConfigMap());
    TaskDriver driver = new TaskDriver(zkClient, clusterName);
    driver.createQueue(jobQueueCfgBuilder.build());
    getResponse().setEntity(getHostedEntitiesRepresentation(clusterName));
    getResponse().setStatus(Status.SUCCESS_OK);
  } catch (Exception e) {
    // The error is reported in the response body; note the HTTP status stays 200 OK
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    getResponse().setStatus(Status.SUCCESS_OK);
    LOG.error("Exception in posting job queue: " + entity, e);
  }
  return null;
}
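The handler above is a thin REST wrapper around TaskDriver. For comparison, here is a minimal sketch of creating the same kind of queue directly through the Java API; the ZooKeeper address, cluster, instance, and queue names are placeholders, and it builds the driver from a connected HelixManager rather than a raw ZkClient:

import org.apache.helix.HelixManager;
import org.apache.helix.HelixManagerFactory;
import org.apache.helix.InstanceType;
import org.apache.helix.task.JobQueue;
import org.apache.helix.task.TaskDriver;

public class CreateQueueSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder connection details; substitute your own cluster and ZK address
    HelixManager manager = HelixManagerFactory.getZKHelixManager(
        "MyCluster", "admin", InstanceType.ADMINISTRATOR, "localhost:2181");
    manager.connect();
    try {
      TaskDriver driver = new TaskDriver(manager);
      // An empty queue; jobs are appended later with enqueueJob()
      JobQueue queue = new JobQueue.Builder("MyQueue").build();
      driver.createQueue(queue);
    } finally {
      manager.disconnect();
    }
  }
}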
use of org.apache.helix.task.TaskDriver in project helix by apache.
the class JobResource method getHostedEntitiesRepresentation.
StringRepresentation getHostedEntitiesRepresentation(String clusterName, String jobQueueName,
    String jobName) throws Exception {
  ZkClient zkClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
  HelixDataAccessor accessor = ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName);
  PropertyKey.Builder keyBuilder = accessor.keyBuilder();
  // Get the job config, stored under the namespaced name "<queue>_<job>"
  String namespacedJobName = TaskUtil.getNamespacedJobName(jobQueueName, jobName);
  HelixProperty jobConfig = accessor.getProperty(keyBuilder.resourceConfig(namespacedJobName));
  TaskDriver taskDriver = new TaskDriver(zkClient, clusterName);
  // Get the job's runtime context
  JobContext ctx = taskDriver.getJobContext(namespacedJobName);
  // Merge config and context into a single result record
  ZNRecord hostedEntitiesRecord = new ZNRecord(namespacedJobName);
  if (jobConfig != null) {
    hostedEntitiesRecord.merge(jobConfig.getRecord());
  }
  if (ctx != null) {
    hostedEntitiesRecord.merge(ctx.getRecord());
  }
  return new StringRepresentation(ClusterRepresentationUtil.ZNRecordToJson(hostedEntitiesRecord),
      MediaType.APPLICATION_JSON);
}
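The key detail here is the namespaced job name: jobs are stored as "<queue>_<job>", and TaskUtil.getNamespacedJobName does the joining. A minimal sketch of reading a job's runtime context directly, assuming a TaskDriver built as in the earlier sketch and placeholder queue/job names:

import org.apache.helix.task.JobContext;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskUtil;

// Reads the runtime context of a job in a queue; names are placeholders
static void printJobStart(TaskDriver driver) {
  // Namespaced form, e.g. "MyQueue_MyJob" for queue "MyQueue" and job "MyJob"
  String namespacedJob = TaskUtil.getNamespacedJobName("MyQueue", "MyJob");
  JobContext ctx = driver.getJobContext(namespacedJob);
  if (ctx != null) {
    // Runtime state exists only once the job has been scheduled
    System.out.println("Job start time: " + ctx.getStartTime());
  }
}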
use of org.apache.helix.task.TaskDriver in project helix by apache.
the class JobResource method delete.
@Override
public Representation delete() {
  StringRepresentation representation = null;
  String clusterName = ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
  String jobQueueName = ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.JOB_QUEUE);
  String jobName = ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.JOB);
  ZkClient zkClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
  TaskDriver driver = new TaskDriver(zkClient, clusterName);
  try {
    driver.deleteJob(jobQueueName, jobName);
    getResponse().setStatus(Status.SUCCESS_NO_CONTENT);
  } catch (Exception e) {
    String error = ClusterRepresentationUtil.getErrorAsJsonStringFromException(e);
    representation = new StringRepresentation(error, MediaType.APPLICATION_JSON);
    LOG.error("Failed to delete job: " + jobName, e);
  }
  return representation;
}
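One operational detail worth knowing: in most Helix versions, deleteJob rejects the change unless the queue has been stopped first. A hedged sketch of the typical stop/delete/resume sequence, again with placeholder names and a TaskDriver from the earlier sketch:

import org.apache.helix.task.TaskDriver;

// Typical sequence for removing a job from a live queue; names are placeholders
static void removeJob(TaskDriver driver) throws Exception {
  driver.stop("MyQueue");               // deleteJob generally expects a stopped queue
  driver.deleteJob("MyQueue", "MyJob"); // takes the de-namespaced job name
  driver.resume("MyQueue");             // let the remaining jobs continue
}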
use of org.apache.helix.task.TaskDriver in project helix by apache.
the class JobQueueResource method post.
/**
 * Start a new job in a job queue, or stop/resume/flush/delete a job queue
 * <p>
 * Usage:
 * <p>
 * <li>Start a new job in a job queue:
 * <code>curl -d @'./{input.txt}' -H 'Content-Type: application/json'
 * http://{host:port}/clusters/{clusterName}/jobQueues/{jobQueue}</code>
 * <p>
 * input.txt: <code>jsonParameters={"command":"start"}&newJob={newJobConfig.yaml}</code>
 * <p>
 * For newJobConfig.yaml, see {@link Workflow#parse(String)}
 * <li>Stop/resume/flush/delete a job queue:
 * <code>curl -d 'jsonParameters={"command":"{stop/resume/flush/delete}"}'
 * -H "Content-Type: application/json" http://{host:port}/clusters/{clusterName}/jobQueues/{jobQueue}</code>
 */
@Override
public Representation post(Representation entity) {
  String clusterName =
      ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
  String jobQueueName =
      ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.JOB_QUEUE);
  ZkClient zkClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
  try {
    TaskDriver driver = new TaskDriver(zkClient, clusterName);
    Form form = new Form(entity);
    JsonParameters jsonParameters = new JsonParameters(form);
    TaskDriver.DriverCommand cmd = TaskDriver.DriverCommand.valueOf(jsonParameters.getCommand());
    switch (cmd) {
    case start: {
      // Parse the new job config and enqueue each job it defines
      String yamlPayload = ResourceUtil.getYamlParameters(form, ResourceUtil.YamlParamKey.NEW_JOB);
      if (yamlPayload == null) {
        throw new HelixException("Yaml job config is required!");
      }
      Workflow workflow = Workflow.parse(yamlPayload);
      for (String jobName : workflow.getJobConfigs().keySet()) {
        Map<String, String> jobCfgMap = workflow.getJobConfigs().get(jobName);
        JobConfig.Builder jobCfgBuilder = JobConfig.Builder.fromMap(jobCfgMap);
        if (workflow.getTaskConfigs() != null && workflow.getTaskConfigs().containsKey(jobName)) {
          jobCfgBuilder.addTaskConfigs(workflow.getTaskConfigs().get(jobName));
        }
        driver.enqueueJob(jobQueueName, TaskUtil.getDenamespacedJobName(jobQueueName, jobName),
            jobCfgBuilder);
      }
      break;
    }
    case stop: {
      driver.stop(jobQueueName);
      break;
    }
    case resume: {
      driver.resume(jobQueueName);
      break;
    }
    case flush: {
      driver.flushQueue(jobQueueName);
      break;
    }
    case delete: {
      driver.delete(jobQueueName);
      break;
    }
    case clean: {
      driver.cleanupQueue(jobQueueName);
      break;
    }
    default:
      throw new HelixException("Unsupported job queue command: " + cmd);
    }
    getResponse().setEntity(getHostedEntitiesRepresentation(clusterName, jobQueueName));
    getResponse().setStatus(Status.SUCCESS_OK);
  } catch (Exception e) {
    // The error is reported in the response body; the HTTP status stays 200 OK
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    getResponse().setStatus(Status.SUCCESS_OK);
    LOG.error("Error in posting job queue: " + entity, e);
  }
  return null;
}
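The start branch above boils down to TaskDriver.enqueueJob with one JobConfig.Builder per job. A minimal sketch of enqueueing a single job programmatically; the task command and target resource are placeholders that must match task code registered on the participants:

import org.apache.helix.task.JobConfig;
import org.apache.helix.task.TaskDriver;

// Enqueue one job onto an existing queue; names are placeholders
static void enqueueOneJob(TaskDriver driver) {
  JobConfig.Builder jobBuilder = new JobConfig.Builder()
      .setCommand("ReindexTask")        // placeholder task command
      .setTargetResource("MyResource"); // one task per partition of this resource
  driver.enqueueJob("MyQueue", "MyJob", jobBuilder);
}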
use of org.apache.helix.task.TaskDriver in project helix by apache.
the class WorkflowAccessor method getWorkflows.
@GET
public Response getWorkflows(@PathParam("clusterId") String clusterId) {
  TaskDriver taskDriver = getTaskDriver(clusterId);
  Map<String, WorkflowConfig> workflowConfigMap = taskDriver.getWorkflows();
  Map<String, List<String>> dataMap = new HashMap<>();
  dataMap.put(WorkflowProperties.Workflows.name(), new ArrayList<>(workflowConfigMap.keySet()));
  return JSONRepresentation(dataMap);
}
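getWorkflows returns a config map keyed by workflow name; job queues appear here too, since a queue is a workflow whose job DAG is a chain. A minimal sketch of inspecting the result, assuming a TaskDriver as in the first sketch:

import java.util.Map;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.WorkflowConfig;

// List every workflow (job queues included) and the jobs in its DAG
static void listWorkflows(TaskDriver driver) {
  Map<String, WorkflowConfig> workflows = driver.getWorkflows();
  for (Map.Entry<String, WorkflowConfig> entry : workflows.entrySet()) {
    System.out.println(entry.getKey() + " -> " + entry.getValue().getJobDag().getAllNodes());
  }
}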