Use of org.apache.helix.HelixDataAccessor in project helix by apache.
The class ClusterAccessor, method getClusterInfo.
@GET
@Path("{clusterId}")
public Response getClusterInfo(@PathParam("clusterId") String clusterId) {
  if (!isClusterExist(clusterId)) {
    return notFound();
  }
  HelixDataAccessor dataAccessor = getDataAccssor(clusterId);
  PropertyKey.Builder keyBuilder = dataAccessor.keyBuilder();
  Map<String, Object> clusterInfo = new HashMap<>();
  clusterInfo.put(Properties.id.name(), clusterId);
  // Report the current lead controller, if any instance holds the leader node
  LiveInstance controller = dataAccessor.getProperty(keyBuilder.controllerLeader());
  if (controller != null) {
    clusterInfo.put(ClusterProperties.controller.name(), controller.getInstanceName());
  } else {
    clusterInfo.put(ClusterProperties.controller.name(), "No Lead Controller!");
  }
  // The pause and maintenance znodes act as flags: their presence means enabled
  boolean paused = dataAccessor.getProperty(keyBuilder.pause()) != null;
  clusterInfo.put(ClusterProperties.paused.name(), paused);
  boolean maintenance = dataAccessor.getProperty(keyBuilder.maintenance()) != null;
  clusterInfo.put(ClusterProperties.maintenance.name(), maintenance);
  List<String> idealStates = dataAccessor.getChildNames(keyBuilder.idealStates());
  clusterInfo.put(ClusterProperties.resources.name(), idealStates);
  List<String> instances = dataAccessor.getChildNames(keyBuilder.instanceConfigs());
  clusterInfo.put(ClusterProperties.instances.name(), instances);
  List<String> liveInstances = dataAccessor.getChildNames(keyBuilder.liveInstances());
  clusterInfo.put(ClusterProperties.liveInstances.name(), liveInstances);
  return JSONRepresentation(clusterInfo);
}
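For comparison, a standalone client can build the same HelixDataAccessor outside the REST layer and issue identical reads against ZooKeeper. A minimal sketch, assuming a ZooKeeper at localhost:2181 and a cluster named MyCluster (both placeholders):

import java.util.List;
import org.apache.helix.BaseDataAccessor;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.PropertyKey;
import org.apache.helix.ZNRecord;
import org.apache.helix.manager.zk.ZKHelixDataAccessor;
import org.apache.helix.manager.zk.ZkBaseDataAccessor;
import org.apache.helix.model.LiveInstance;

public class ClusterInfoExample {
  public static void main(String[] args) {
    // Placeholder connection details -- replace with real values
    String zkAddress = "localhost:2181";
    String clusterName = "MyCluster";
    BaseDataAccessor<ZNRecord> baseAccessor = new ZkBaseDataAccessor<>(zkAddress);
    HelixDataAccessor accessor = new ZKHelixDataAccessor(clusterName, baseAccessor);
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    // The same reads the REST endpoint performs: leader, pause flag, child lists
    LiveInstance leader = accessor.getProperty(keyBuilder.controllerLeader());
    System.out.println("controller: " + (leader != null ? leader.getInstanceName() : "none"));
    System.out.println("paused: " + (accessor.getProperty(keyBuilder.pause()) != null));
    List<String> resources = accessor.getChildNames(keyBuilder.idealStates());
    System.out.println("resources: " + resources);
  }
}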
Use of org.apache.helix.HelixDataAccessor in project helix by apache.
The class InstanceAccessor, method getHealthReportsOnInstance.
@GET
@Path("{instanceName}/healthreports")
public Response getHealthReportsOnInstance(@PathParam("clusterId") String clusterId,
    @PathParam("instanceName") String instanceName) throws IOException {
  HelixDataAccessor accessor = getDataAccssor(clusterId);
  ObjectNode root = JsonNodeFactory.instance.objectNode();
  root.put(Properties.id.name(), instanceName);
  ArrayNode healthReportsNode = root.putArray(InstanceProperties.healthreports.name());
  // List the names of all health reports this instance has published
  List<String> healthReports =
      accessor.getChildNames(accessor.keyBuilder().healthReports(instanceName));
  if (healthReports != null && healthReports.size() > 0) {
    healthReportsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(healthReports));
  }
  return JSONRepresentation(root);
}
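The endpoint returns only report names. A caller that also needs the report contents could follow each name with a per-report read. A hedged sketch, assuming a HelixDataAccessor is already in hand (the helper name is hypothetical):

import java.util.List;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.PropertyKey;
import org.apache.helix.model.HealthStat;

public class HealthReportExample {
  // Print each health report's map fields for one instance
  static void printHealthReports(HelixDataAccessor accessor, String instanceName) {
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    List<String> reportNames = accessor.getChildNames(keyBuilder.healthReports(instanceName));
    if (reportNames == null) {
      return;
    }
    for (String reportName : reportNames) {
      HealthStat stat = accessor.getProperty(keyBuilder.healthReport(instanceName, reportName));
      if (stat != null) {
        System.out.println(reportName + ": " + stat.getRecord().getMapFields());
      }
    }
  }
}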
Use of org.apache.helix.HelixDataAccessor in project helix by apache.
The class InstanceAccessor, method getResourcesOnInstance.
@GET
@Path("{instanceName}/resources")
public Response getResourcesOnInstance(@PathParam("clusterId") String clusterId,
    @PathParam("instanceName") String instanceName) throws IOException {
  HelixDataAccessor accessor = getDataAccssor(clusterId);
  ObjectNode root = JsonNodeFactory.instance.objectNode();
  root.put(Properties.id.name(), instanceName);
  ArrayNode resourcesNode = root.putArray(InstanceProperties.resources.name());
  List<String> sessionIds = accessor.getChildNames(accessor.keyBuilder().sessions(instanceName));
  if (sessionIds == null || sessionIds.size() == 0) {
    // No live session for this instance; returning null yields an empty response
    return null;
  }
  // Only get the resource list from the current (first) session id
  String currentSessionId = sessionIds.get(0);
  List<String> resources =
      accessor.getChildNames(accessor.keyBuilder().currentStates(instanceName, currentSessionId));
  if (resources != null && resources.size() > 0) {
    resourcesNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(resources));
  }
  return JSONRepresentation(root);
}
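To go from resource names to actual partition states, a caller can read the CurrentState of a resource under the same session. A sketch under that assumption; the helper name is hypothetical and the instance and resource names come from the caller:

import java.util.List;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.PropertyKey;
import org.apache.helix.model.CurrentState;

public class CurrentStateExample {
  // Print partition -> state for one resource under the instance's first session
  static void printPartitionStates(HelixDataAccessor accessor, String instanceName,
      String resourceName) {
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    List<String> sessions = accessor.getChildNames(keyBuilder.sessions(instanceName));
    if (sessions == null || sessions.isEmpty()) {
      return; // no live session, so no current states
    }
    CurrentState currentState =
        accessor.getProperty(keyBuilder.currentState(instanceName, sessions.get(0), resourceName));
    if (currentState != null) {
      // Map of partition name -> state (e.g. MASTER, SLAVE)
      System.out.println(currentState.getPartitionStateMap());
    }
  }
}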
Use of org.apache.helix.HelixDataAccessor in project helix by apache.
The class InstanceAccessor, method getInstanceConfig.
@GET
@Path("{instanceName}/configs")
public Response getInstanceConfig(@PathParam("clusterId") String clusterId,
    @PathParam("instanceName") String instanceName) throws IOException {
  HelixDataAccessor accessor = getDataAccssor(clusterId);
  InstanceConfig instanceConfig =
      accessor.getProperty(accessor.keyBuilder().instanceConfig(instanceName));
  if (instanceConfig != null) {
    return JSONRepresentation(instanceConfig.getRecord());
  }
  return notFound();
}
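HelixDataAccessor supports writes through the same keys, so an updated InstanceConfig can be written back with setProperty. A minimal sketch, assuming the goal is to disable an instance (the helper name is hypothetical):

import org.apache.helix.HelixDataAccessor;
import org.apache.helix.PropertyKey;
import org.apache.helix.model.InstanceConfig;

public class InstanceConfigExample {
  // Disable an instance by rewriting its InstanceConfig record
  static void disableInstance(HelixDataAccessor accessor, String instanceName) {
    PropertyKey key = accessor.keyBuilder().instanceConfig(instanceName);
    InstanceConfig config = accessor.getProperty(key);
    if (config != null) {
      config.setInstanceEnabled(false); // flip the enabled flag in the record
      accessor.setProperty(key, config); // persist the updated record to ZooKeeper
    }
  }
}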
Use of org.apache.helix.HelixDataAccessor in project helix by apache.
The class DeprecatedTaskRebalancer, method cloneWorkflow.
/**
 * Create a new workflow based on an existing one
 * @param manager connection to Helix
 * @param origWorkflowName the name of the existing workflow
 * @param newWorkflowName the name of the new workflow
 * @param newStartTime a provided start time that deviates from the desired start time
 * @return the cloned workflow, or null if there was a problem cloning the existing one
 */
private Workflow cloneWorkflow(HelixManager manager, String origWorkflowName,
    String newWorkflowName, Date newStartTime) {
  // Read all resources, including the workflow and jobs of interest
  HelixDataAccessor accessor = manager.getHelixDataAccessor();
  PropertyKey.Builder keyBuilder = accessor.keyBuilder();
  Map<String, HelixProperty> resourceConfigMap =
      accessor.getChildValuesMap(keyBuilder.resourceConfigs());
  if (!resourceConfigMap.containsKey(origWorkflowName)) {
    LOG.error("No such workflow named " + origWorkflowName);
    return null;
  }
  if (resourceConfigMap.containsKey(newWorkflowName)) {
    LOG.error("Workflow with name " + newWorkflowName + " already exists!");
    return null;
  }
  // Create a new workflow with a new name
  HelixProperty workflowConfig = resourceConfigMap.get(origWorkflowName);
  Map<String, String> wfSimpleFields = workflowConfig.getRecord().getSimpleFields();
  JobDag jobDag =
      JobDag.fromJson(wfSimpleFields.get(WorkflowConfig.WorkflowConfigProperty.Dag.name()));
  Map<String, Set<String>> parentsToChildren = jobDag.getParentsToChildren();
  Workflow.Builder builder = new Workflow.Builder(newWorkflowName);
  // Set the workflow expiry
  builder.setExpiry(
      Long.parseLong(wfSimpleFields.get(WorkflowConfig.WorkflowConfigProperty.Expiry.name())));
  // Set the schedule, if applicable
  ScheduleConfig scheduleConfig;
  if (newStartTime != null) {
    scheduleConfig = ScheduleConfig.oneTimeDelayedStart(newStartTime);
  } else {
    scheduleConfig = WorkflowConfig.parseScheduleFromConfigMap(wfSimpleFields);
  }
  if (scheduleConfig != null) {
    builder.setScheduleConfig(scheduleConfig);
  }
  // Add each job back as long as the original exists
  Set<String> namespacedJobs = jobDag.getAllNodes();
  for (String namespacedJob : namespacedJobs) {
    if (resourceConfigMap.containsKey(namespacedJob)) {
      // Copy over job-level and task-level configs
      String job = TaskUtil.getDenamespacedJobName(origWorkflowName, namespacedJob);
      HelixProperty jobConfig = resourceConfigMap.get(namespacedJob);
      Map<String, String> jobSimpleFields = jobConfig.getRecord().getSimpleFields();
      // overwrite workflow name
      jobSimpleFields.put(JobConfig.JobConfigProperty.WorkflowID.name(), newWorkflowName);
      for (Map.Entry<String, String> e : jobSimpleFields.entrySet()) {
        builder.addConfig(job, e.getKey(), e.getValue());
      }
      Map<String, Map<String, String>> rawTaskConfigMap = jobConfig.getRecord().getMapFields();
      List<TaskConfig> taskConfigs = Lists.newLinkedList();
      for (Map<String, String> rawTaskConfig : rawTaskConfigMap.values()) {
        TaskConfig taskConfig = TaskConfig.Builder.from(rawTaskConfig);
        taskConfigs.add(taskConfig);
      }
      builder.addTaskConfigs(job, taskConfigs);
      // Add dag dependencies
      Set<String> children = parentsToChildren.get(namespacedJob);
      if (children != null) {
        for (String namespacedChild : children) {
          String child = TaskUtil.getDenamespacedJobName(origWorkflowName, namespacedChild);
          builder.addParentChildDependency(job, child);
        }
      }
    }
  }
  return builder.build();
}
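Within the rebalancer this clone drives the re-scheduling of recurring workflows. A standalone caller would typically submit the result through TaskDriver; a minimal sketch, assuming the clone was produced by a method like the one above (the helper name is hypothetical):

import org.apache.helix.HelixManager;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.Workflow;

public class WorkflowCloneExample {
  // Submit a cloned workflow for execution (hypothetical caller)
  static void submitClone(HelixManager manager, Workflow cloned) throws Exception {
    if (cloned != null) {
      new TaskDriver(manager).start(cloned);
    }
  }
}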