Example usage of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
From the class ErrorsResource, method getInstanceErrorsRepresentation:
/**
 * Builds a JSON representation of the property names recorded for the given
 * instance's current ZK session in the given cluster.
 *
 * @param clusterName  name of the Helix cluster to query
 * @param instanceName name of the instance whose properties are listed
 * @return a JSON {@link StringRepresentation} of the property-name list
 * @throws JsonGenerationException if JSON serialization fails
 * @throws JsonMappingException if JSON mapping fails
 * @throws IOException on underlying I/O errors
 */
StringRepresentation getInstanceErrorsRepresentation(String clusterName, String instanceName) throws JsonGenerationException, JsonMappingException, IOException {
    // Shared ZkClient stashed in the Restlet context by RestAdminApplication.
    ZkClient zkClient = (ZkClient) getContext().getAttributes().get(RestAdminApplication.ZKCLIENT);
    // The property list is scoped to the instance's live session id.
    String instanceSessionId = ClusterRepresentationUtil.getInstanceSessionId(zkClient, clusterName, instanceName);
    // NOTE(review): this "errors" resource queries PropertyType.CURRENTSTATES —
    // presumably intentional in Helix's layout, but verify it should not be an
    // ERRORS property type.
    String message = ClusterRepresentationUtil.getInstancePropertyNameListAsString(zkClient, clusterName, instanceName, PropertyType.CURRENTSTATES, instanceSessionId, MediaType.APPLICATION_JSON);
    StringRepresentation representation = new StringRepresentation(message, MediaType.APPLICATION_JSON);
    return representation;
}
Example usage of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
From the class ExternalViewResource, method getExternalViewRepresentation:
/**
 * Reads the external view of a resource directly from ZooKeeper and returns
 * it as a JSON string representation.
 *
 * @param clusterName  name of the Helix cluster
 * @param resourceName name of the resource whose external view is fetched
 * @return the raw external-view znode content wrapped as JSON
 * @throws JsonGenerationException if JSON serialization fails
 * @throws JsonMappingException if JSON mapping fails
 * @throws IOException on underlying I/O errors
 */
StringRepresentation getExternalViewRepresentation(String clusterName, String resourceName) throws JsonGenerationException, JsonMappingException, IOException {
    // Raw (non-serializing) ZkClient shared via the Restlet context.
    ZkClient rawClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.RAW_ZKCLIENT);
    // Resolve the external-view znode path for this resource.
    Builder propertyKeyBuilder = new PropertyKey.Builder(clusterName);
    String externalView = ResourceUtil.readZkAsBytes(rawClient, propertyKeyBuilder.externalView(resourceName));
    return new StringRepresentation(externalView, MediaType.APPLICATION_JSON);
}
Example usage of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
From the class IdealStateResource, method getIdealStateRepresentation:
/**
 * Reads the ideal state of a resource directly from ZooKeeper and returns it
 * as a JSON string representation.
 *
 * @param clusterName  name of the Helix cluster
 * @param resourceName name of the resource whose ideal state is fetched
 * @return the raw ideal-state znode content wrapped as JSON
 * @throws JsonGenerationException if JSON serialization fails
 * @throws JsonMappingException if JSON mapping fails
 * @throws IOException on underlying I/O errors
 */
StringRepresentation getIdealStateRepresentation(String clusterName, String resourceName) throws JsonGenerationException, JsonMappingException, IOException {
    // Raw (non-serializing) ZkClient shared via the Restlet context.
    ZkClient rawClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.RAW_ZKCLIENT);
    // Resolve the ideal-state znode path for this resource.
    Builder propertyKeyBuilder = new PropertyKey.Builder(clusterName);
    String idealState = ResourceUtil.readZkAsBytes(rawClient, propertyKeyBuilder.idealStates(resourceName));
    return new StringRepresentation(idealState, MediaType.APPLICATION_JSON);
}
Example usage of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
From the class InstancesResource, method getInstancesRepresentation:
/**
 * Lists all instances configured in the cluster, annotating each instance
 * record with an "Alive" flag (whether a live-instance znode exists) and
 * grouping instance names by their configured tags.
 *
 * @param clusterName name of the Helix cluster to list
 * @return a JSON {@link StringRepresentation} of a {@code ListInstancesWrapper}
 *         holding the instance records and the tag-to-instances index
 * @throws JsonGenerationException if JSON serialization fails
 * @throws JsonMappingException if JSON mapping fails
 * @throws IOException on underlying I/O errors
 */
StringRepresentation getInstancesRepresentation(String clusterName) throws JsonGenerationException, JsonMappingException, IOException {
    ZkClient zkClient = (ZkClient) getContext().getAttributes().get(RestAdminApplication.ZKCLIENT);
    HelixDataAccessor accessor = ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName);
    Map<String, LiveInstance> liveInstancesMap = accessor.getChildValuesMap(accessor.keyBuilder().liveInstances());
    Map<String, InstanceConfig> instanceConfigsMap = accessor.getChildValuesMap(accessor.keyBuilder().instanceConfigs());
    // TreeMap keeps the tag index sorted by tag name in the JSON output.
    Map<String, List<String>> tagInstanceLists = new TreeMap<String, List<String>>();
    for (Map.Entry<String, InstanceConfig> entry : instanceConfigsMap.entrySet()) {
        String instanceName = entry.getKey();
        InstanceConfig config = entry.getValue();
        // Mark liveness directly on the record so it is serialized with the config.
        boolean isAlive = liveInstancesMap.containsKey(instanceName);
        config.getRecord().setSimpleField("Alive", isAlive + "");
        // Each instance appears once in the outer loop, so a plain add suffices —
        // no duplicate check needed per tag.
        for (String tag : config.getTags()) {
            if (!tagInstanceLists.containsKey(tag)) {
                tagInstanceLists.put(tag, new LinkedList<String>());
            }
            tagInstanceLists.get(tag).add(instanceName);
        }
    }
    // Wrap raw data into an object, then serialize it
    List<ZNRecord> recordList = Lists.newArrayList();
    for (InstanceConfig instanceConfig : instanceConfigsMap.values()) {
        recordList.add(instanceConfig.getRecord());
    }
    ListInstancesWrapper wrapper = new ListInstancesWrapper();
    wrapper.instanceInfo = recordList;
    wrapper.tagInfo = tagInstanceLists;
    StringRepresentation representation = new StringRepresentation(ClusterRepresentationUtil.ObjectToJson(wrapper), MediaType.APPLICATION_JSON);
    return representation;
}
Example usage of org.apache.helix.manager.zk.ZkClient in the Apache Helix project.
From the class JobQueueResource, method post:
/**
 * Start a new job in a job queue, or stop/resume/flush/delete a job queue
 * <p>
 * Usage:
 * <p>
 * <li>Start a new job in a job queue:
 * <code>curl -d @'./{input.txt}' -H 'Content-Type: application/json'
 * http://{host:port}/clusters/{clusterName}/jobQueues/{jobQueue}</code>
 * <p>
 * input.txt: <code>jsonParameters={"command":"start"}&newJob={newJobConfig.yaml}</code>
 * <p>
 * For newJobConfig.yaml, see {@link Workflow#parse(String)}
 * <li>Stop/resume/flush/delete a job queue:
 * <code>curl -d 'jsonParameters={"command":"{stop/resume/flush/delete}"}'
 * -H "Content-Type: application/json" http://{host:port}/clusters/{clusterName}/jobQueues/{jobQueue}</code>
 */
@Override
public Representation post(Representation entity) {
    // Request-scoped attributes: which cluster and which queue to operate on.
    String clusterName = ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
    String jobQueueName = ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.JOB_QUEUE);
    ZkClient zkClient = ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
    try {
        TaskDriver taskDriver = new TaskDriver(zkClient, clusterName);
        Form requestForm = new Form(entity);
        JsonParameters jsonParameters = new JsonParameters(requestForm);
        // The "command" json parameter selects the queue operation.
        TaskDriver.DriverCommand command = TaskDriver.DriverCommand.valueOf(jsonParameters.getCommand());
        switch(command) {
            case start:
                {
                    // A new job requires its config as a YAML payload.
                    String yamlPayload = ResourceUtil.getYamlParameters(requestForm, ResourceUtil.YamlParamKey.NEW_JOB);
                    if (yamlPayload == null) {
                        throw new HelixException("Yaml job config is required!");
                    }
                    Workflow workflow = Workflow.parse(yamlPayload);
                    // Enqueue every job declared in the parsed workflow.
                    for (Map.Entry<String, Map<String, String>> jobEntry : workflow.getJobConfigs().entrySet()) {
                        String jobName = jobEntry.getKey();
                        JobConfig.Builder jobCfgBuilder = JobConfig.Builder.fromMap(jobEntry.getValue());
                        // Attach per-task configs when the workflow defines any for this job.
                        if (workflow.getTaskConfigs() != null && workflow.getTaskConfigs().containsKey(jobName)) {
                            jobCfgBuilder.addTaskConfigs(workflow.getTaskConfigs().get(jobName));
                        }
                        taskDriver.enqueueJob(jobQueueName, TaskUtil.getDenamespacedJobName(jobQueueName, jobName), jobCfgBuilder);
                    }
                    break;
                }
            case stop:
                {
                    taskDriver.stop(jobQueueName);
                    break;
                }
            case resume:
                {
                    taskDriver.resume(jobQueueName);
                    break;
                }
            case flush:
                {
                    taskDriver.flushQueue(jobQueueName);
                    break;
                }
            case delete:
                {
                    taskDriver.delete(jobQueueName);
                    break;
                }
            case clean:
                {
                    taskDriver.cleanupQueue(jobQueueName);
                    break;
                }
            default:
                throw new HelixException("Unsupported job queue command: " + command);
        }
        // On success, respond with the current state of the queue.
        getResponse().setEntity(getHostedEntitiesRepresentation(clusterName, jobQueueName));
        getResponse().setStatus(Status.SUCCESS_OK);
    } catch (Exception e) {
        // Errors are reported as a JSON body with a 200 status (existing API contract).
        getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e), MediaType.APPLICATION_JSON);
        getResponse().setStatus(Status.SUCCESS_OK);
        LOG.error("Error in posting job queue: " + entity, e);
    }
    return null;
}
Aggregations