Use of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
Class JobQueuesResource, method post.
/**
 * Add a new job queue.
 * <p>
 * Usage:
 * <code>curl -d @'{jobQueueConfig.yaml}'
 * -H 'Content-Type: application/json' http://{host:port}/clusters/{clusterName}/jobQueues</code>
 * <p>
 * For the jobQueueConfig.yaml format, see {@link Workflow#parse(String)}.
 *
 * @param entity request body carrying the YAML job queue config
 * @return always {@code null}; the response entity is set directly on the Restlet response
 */
@Override
public Representation post(Representation entity) {
  try {
    String clusterName =
        ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
    ZkClient zkClient =
        ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
    // Restlet parses the raw body as a form; a YAML payload contains no '=' so the
    // entire document ends up as the NAME of the first (and only) form parameter.
    Form form = new Form(entity);
    if (form.isEmpty()) {
      throw new HelixException("Yaml job queue config is required!");
    }
    Parameter payload = form.get(0);
    String yamlPayload = payload.getName();
    if (yamlPayload == null) {
      throw new HelixException("Yaml job queue config is required!");
    }
    // Build the queue config from the parsed workflow and submit it.
    Workflow workflow = Workflow.parse(yamlPayload);
    JobQueue.Builder jobQueueCfgBuilder = new JobQueue.Builder(workflow.getName());
    jobQueueCfgBuilder.fromMap(workflow.getWorkflowConfig().getResourceConfigMap());
    TaskDriver driver = new TaskDriver(zkClient, clusterName);
    driver.createQueue(jobQueueCfgBuilder.build());
    getResponse().setEntity(getHostedEntitiesRepresentation(clusterName));
    getResponse().setStatus(Status.SUCCESS_OK);
  } catch (Exception e) {
    // NOTE(review): errors are reported as a JSON body with HTTP 200, which matches
    // the convention used by the other resources in this webapp — kept for consistency.
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    getResponse().setStatus(Status.SUCCESS_OK);
    LOG.error("Exception in posting job queue: " + entity, e);
  }
  return null;
}
Use of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
Class JobResource, method getHostedEntitiesRepresentation.
/**
 * Build a JSON representation of a single job: its static resource config merged
 * with its runtime {@link JobContext}, either of which may be absent.
 *
 * @param clusterName  cluster the job queue lives in
 * @param jobQueueName queue the job belongs to
 * @param jobName      un-namespaced job name
 * @return JSON string representation of the merged ZNRecord
 * @throws Exception on ZK access or serialization failure
 */
StringRepresentation getHostedEntitiesRepresentation(String clusterName, String jobQueueName,
    String jobName) throws Exception {
  ZkClient zkClient =
      ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
  HelixDataAccessor accessor =
      ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName);
  PropertyKey.Builder keyBuilder = accessor.keyBuilder();

  String namespacedJobName = TaskUtil.getNamespacedJobName(jobQueueName, jobName);

  // Static job configuration (may be null if the job was never configured).
  HelixProperty jobConfig = accessor.getProperty(keyBuilder.resourceConfig(namespacedJobName));
  // Runtime job context (may be null if the job has not started).
  JobContext jobContext = new TaskDriver(zkClient, clusterName).getJobContext(namespacedJobName);

  // Merge whichever pieces exist into a single record.
  ZNRecord merged = new ZNRecord(namespacedJobName);
  if (jobConfig != null) {
    merged.merge(jobConfig.getRecord());
  }
  if (jobContext != null) {
    merged.merge(jobContext.getRecord());
  }
  return new StringRepresentation(ClusterRepresentationUtil.ZNRecordToJson(merged),
      MediaType.APPLICATION_JSON);
}
Use of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
Class JobResource, method delete.
/**
 * Delete a job from its job queue. On success the response status is set to
 * 204 No Content and {@code null} is returned; on failure the error is returned
 * as a JSON representation.
 */
@Override
public Representation delete() {
  String clusterName =
      ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
  String jobQueueName =
      ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.JOB_QUEUE);
  String jobName =
      ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.JOB);
  ZkClient zkClient =
      ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);

  StringRepresentation errorRepresentation = null;
  try {
    new TaskDriver(zkClient, clusterName).deleteJob(jobQueueName, jobName);
    getResponse().setStatus(Status.SUCCESS_NO_CONTENT);
  } catch (Exception e) {
    errorRepresentation = new StringRepresentation(
        ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    LOG.error("Fail to delete job: " + jobName, e);
  }
  return errorRepresentation;
}
Use of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
Class ResourceGroupResource, method post.
/**
 * Handle POST commands against a single resource group. Supported commands are
 * {@code resetResource} and {@code enableResource}; any other command is rejected.
 *
 * @param entity request body carrying the JSON command parameters
 * @return always {@code null}; errors are written to the response as JSON
 */
@Override
public Representation post(Representation entity) {
  try {
    String clusterName =
        ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
    String resourceName =
        ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.RESOURCE_NAME);
    ZkClient zkclient =
        ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
    ClusterSetup setupTool = new ClusterSetup(zkclient);
    JsonParameters jsonParameters = new JsonParameters(entity);
    String command = jsonParameters.getCommand();
    if (command.equalsIgnoreCase(ClusterSetup.resetResource)) {
      setupTool.getClusterManagementTool().resetResource(clusterName,
          Arrays.asList(resourceName));
    } else if (command.equalsIgnoreCase(ClusterSetup.enableResource)) {
      jsonParameters.verifyCommand(ClusterSetup.enableResource);
      boolean enabled = Boolean.parseBoolean(jsonParameters.getParameter(JsonParameters.ENABLED));
      setupTool.getClusterManagementTool().enableResource(clusterName, resourceName, enabled);
    } else {
      // Fix: the message previously listed only resetResource even though
      // enableResource is also a supported command.
      throw new HelixException("Unsupported command: " + command + ". Should be one of ["
          + ClusterSetup.resetResource + ", " + ClusterSetup.enableResource + "]");
    }
  } catch (Exception e) {
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    getResponse().setStatus(Status.SUCCESS_OK);
    // Fix: previously logged with an empty message, losing the request context.
    LOG.error("Error in posting resource group command: " + entity, e);
  }
  return null;
}
Use of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
Class ResourceGroupsResource, method post.
/**
 * Handle POST commands for the resource group collection. Supports only
 * {@code addResource} (and its registered aliases): adds a resource to the
 * cluster with the given partition count, state model, rebalance mode, and
 * optional bucket size / max-partitions-per-node settings.
 *
 * @param entity request body carrying the JSON command parameters
 * @return always {@code null}; the outcome is written to the Restlet response
 */
@Override
public Representation post(Representation entity) {
  try {
    String clusterName =
        ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
    JsonParameters jsonParameters = new JsonParameters(entity);
    String command = jsonParameters.getCommand();
    if (command.equalsIgnoreCase(ClusterSetup.addResource)
        || JsonParameters.CLUSTERSETUP_COMMAND_ALIASES.get(ClusterSetup.addResource)
            .contains(command)) {
      jsonParameters.verifyCommand(ClusterSetup.addResource);
      String entityName = jsonParameters.getParameter(JsonParameters.RESOURCE_GROUP_NAME);
      String stateModelDefRef = jsonParameters.getParameter(JsonParameters.STATE_MODEL_DEF_REF);
      int partitions = Integer.parseInt(jsonParameters.getParameter(JsonParameters.PARTITIONS));
      // Rebalance mode defaults to SEMI_AUTO unless explicitly provided.
      String mode = RebalanceMode.SEMI_AUTO.toString();
      if (jsonParameters.getParameter(JsonParameters.IDEAL_STATE_MODE) != null) {
        mode = jsonParameters.getParameter(JsonParameters.IDEAL_STATE_MODE);
      }
      // Optional numeric parameters: keep defaults (0 / -1) on absent or malformed values.
      int bucketSize =
          parseIntOrDefault(jsonParameters.getParameter(JsonParameters.BUCKET_SIZE), 0);
      int maxPartitionsPerNode =
          parseIntOrDefault(jsonParameters.getParameter(JsonParameters.MAX_PARTITIONS_PER_NODE),
              -1);
      ZkClient zkClient =
          ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
      ClusterSetup setupTool = new ClusterSetup(zkClient);
      setupTool.addResourceToCluster(clusterName, entityName, partitions, stateModelDefRef, mode,
          bucketSize, maxPartitionsPerNode);
    } else {
      throw new HelixException("Unsupported command: " + command + ". Should be one of ["
          + ClusterSetup.addResource + "]");
    }
    getResponse().setEntity(getHostedEntitiesRepresentation(clusterName));
    getResponse().setStatus(Status.SUCCESS_OK);
  } catch (Exception e) {
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    getResponse().setStatus(Status.SUCCESS_OK);
    LOG.error("Error in posting " + entity, e);
  }
  return null;
}

/**
 * Parse {@code value} as an int, falling back to {@code defaultValue} when the
 * value is {@code null} or not a valid integer. Replaces two empty
 * {@code catch (Exception e) {}} blocks: the swallow is intentional (the
 * parameters are optional, best-effort), but is now narrowed to
 * {@link NumberFormatException} and documented.
 */
private static int parseIntOrDefault(String value, int defaultValue) {
  if (value == null) {
    return defaultValue;
  }
  try {
    return Integer.parseInt(value);
  } catch (NumberFormatException ignored) {
    // Optional parameter was malformed — keep the documented default.
    return defaultValue;
  }
}
Aggregations