Example 66 with ZNRecord

Use of org.apache.helix.ZNRecord in project helix by apache.

From the class TestJobQueuesResource, method test:

@Test
public void test() throws Exception {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String clusterName = className + "_" + methodName;
    final int n = 5;
    final int p = 20;
    final int r = 3;
    System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
    _gSetupTool.addCluster(clusterName, true);
    for (int i = 0; i < n; i++) {
        String instanceName = "localhost_" + (12918 + i);
        _gSetupTool.addInstanceToCluster(clusterName, instanceName);
    }
    // Set up target db
    _gSetupTool.addResourceToCluster(clusterName, WorkflowGenerator.DEFAULT_TGT_DB, p, "MasterSlave");
    _gSetupTool.rebalanceStorageCluster(clusterName, WorkflowGenerator.DEFAULT_TGT_DB, r);
    Map<String, TaskFactory> taskFactoryReg = new HashMap<String, TaskFactory>();
    taskFactoryReg.put("DummyTask", new TaskFactory() {

        @Override
        public Task createNewTask(TaskCallbackContext context) {
            return new MockTask(context);
        }
    });
    // Start dummy participants
    MockParticipantManager[] participants = new MockParticipantManager[n];
    for (int i = 0; i < n; i++) {
        String instanceName = "localhost_" + (12918 + i);
        participants[i] = new MockParticipantManager(ZK_ADDR, clusterName, instanceName);
        // Register a Task state model factory.
        StateMachineEngine stateMachine = participants[i].getStateMachineEngine();
        stateMachine.registerStateModelFactory("Task", new TaskStateModelFactory(participants[i], taskFactoryReg));
        participants[i].syncStart();
    }
    // start controller
    String controllerName = "controller";
    ClusterControllerManager controller = new ClusterControllerManager(ZK_ADDR, clusterName, controllerName);
    controller.syncStart();
    boolean result = ClusterStateVerifier.verifyByZkCallback(new ClusterStateVerifier.BestPossAndExtViewZkVerifier(ZK_ADDR, clusterName));
    Assert.assertTrue(result);
    // Start a queue
    String queueName = "myQueue1";
    LOG.info("Starting job-queue: " + queueName);
    String jobQueueYamlConfig = "name: " + queueName;
    String resourceUrl = "http://localhost:" + ADMIN_PORT + "/clusters/" + clusterName + "/jobQueues";
    ZNRecord postRet = AdminTestHelper.post(_gClient, resourceUrl, jobQueueYamlConfig);
    LOG.info("Started job-queue: " + queueName + ", ret: " + postRet);
    LOG.info("Getting all job-queues");
    ZNRecord getRet = AdminTestHelper.get(_gClient, resourceUrl);
    LOG.info("Got job-queues: " + getRet);
    // Enqueue job
    resourceUrl = "http://localhost:" + ADMIN_PORT + "/clusters/" + clusterName + "/jobQueues/" + queueName;
    WorkflowBean wfBean = new WorkflowBean();
    wfBean.name = queueName;
    JobBean jBean1 = new JobBean();
    jBean1.name = "myJob1";
    jBean1.command = "DummyTask";
    jBean1.targetResource = WorkflowGenerator.DEFAULT_TGT_DB;
    jBean1.targetPartitionStates = Lists.newArrayList("MASTER");
    JobBean jBean2 = new JobBean();
    jBean2.name = "myJob2";
    jBean2.command = "DummyTask";
    jBean2.targetResource = WorkflowGenerator.DEFAULT_TGT_DB;
    jBean2.targetPartitionStates = Lists.newArrayList("SLAVE");
    wfBean.jobs = Lists.newArrayList(jBean1, jBean2);
    String jobYamlConfig = new Yaml().dump(wfBean);
    LOG.info("Enqueuing jobs: " + jobQueueYamlConfig);
    Map<String, String> paraMap = new HashMap<String, String>();
    paraMap.put(JsonParameters.MANAGEMENT_COMMAND, TaskDriver.DriverCommand.start.toString());
    String postBody = String.format("%s=%s&%s=%s", JsonParameters.JSON_PARAMETERS, ClusterRepresentationUtil.ObjectToJson(paraMap), ResourceUtil.YamlParamKey.NEW_JOB.toString(), jobYamlConfig);
    postRet = AdminTestHelper.post(_gClient, resourceUrl, postBody);
    LOG.info("Enqueued job, ret: " + postRet);
    // Get job
    resourceUrl = "http://localhost:" + ADMIN_PORT + "/clusters/" + clusterName + "/jobQueues/" + queueName + "/" + jBean1.name;
    getRet = AdminTestHelper.get(_gClient, resourceUrl);
    LOG.info("Got job: " + getRet);
    // Stop job queue
    resourceUrl = "http://localhost:" + ADMIN_PORT + "/clusters/" + clusterName + "/jobQueues/" + queueName;
    paraMap.put(JsonParameters.MANAGEMENT_COMMAND, TaskDriver.DriverCommand.stop.toString());
    postBody = String.format("%s=%s", JsonParameters.JSON_PARAMETERS, ClusterRepresentationUtil.ObjectToJson(paraMap));
    postRet = AdminTestHelper.post(_gClient, resourceUrl, postBody);
    LOG.info("Stopped job-queue, ret: " + postRet);
    // Delete a job
    resourceUrl = "http://localhost:" + ADMIN_PORT + "/clusters/" + clusterName + "/jobQueues/" + queueName + "/" + jBean2.name;
    AdminTestHelper.delete(_gClient, resourceUrl);
    LOG.info("Delete a job: ");
    // Resume job queue
    resourceUrl = "http://localhost:" + ADMIN_PORT + "/clusters/" + clusterName + "/jobQueues/" + queueName;
    paraMap.put(JsonParameters.MANAGEMENT_COMMAND, TaskDriver.DriverCommand.resume.toString());
    postBody = String.format("%s=%s", JsonParameters.JSON_PARAMETERS, ClusterRepresentationUtil.ObjectToJson(paraMap));
    postRet = AdminTestHelper.post(_gClient, resourceUrl, postBody);
    LOG.info("Resumed job-queue, ret: " + postRet);
    // Flush job queue
    paraMap.put(JsonParameters.MANAGEMENT_COMMAND, "flush");
    postBody = JsonParameters.JSON_PARAMETERS + "=" + ClusterRepresentationUtil.ObjectToJson(paraMap);
    postRet = AdminTestHelper.post(_gClient, resourceUrl, postBody);
    LOG.info("Flushed job-queue, ret: " + postRet);
    // clean up
    controller.syncStop();
    for (int i = 0; i < n; i++) {
        if (participants[i] != null && participants[i].isConnected()) {
            participants[i].syncStop();
        }
    }
    System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
}
Also used: Task (org.apache.helix.task.Task), MockTask (org.apache.helix.integration.task.MockTask), StateMachineEngine (org.apache.helix.participant.StateMachineEngine), MockParticipantManager (org.apache.helix.integration.manager.MockParticipantManager), HashMap (java.util.HashMap), WorkflowBean (org.apache.helix.task.beans.WorkflowBean), ClusterStateVerifier (org.apache.helix.tools.ClusterStateVerifier), TaskCallbackContext (org.apache.helix.task.TaskCallbackContext), Date (java.util.Date), Yaml (org.yaml.snakeyaml.Yaml), ClusterControllerManager (org.apache.helix.integration.manager.ClusterControllerManager), JobBean (org.apache.helix.task.beans.JobBean), TaskFactory (org.apache.helix.task.TaskFactory), TaskStateModelFactory (org.apache.helix.task.TaskStateModelFactory), ZNRecord (org.apache.helix.ZNRecord), Test (org.testng.annotations.Test)
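
The job-queue endpoints above hand their responses back as plain ZNRecord objects. Below is a minimal sketch of inspecting such a record through the core ZNRecord accessors; the dump helper and its output format are illustrative, only the ZNRecord methods themselves come from the Helix API.

import java.util.List;
import java.util.Map;
import org.apache.helix.ZNRecord;

public class ZNRecordInspection {
    // Walk the three field sections of a ZNRecord returned by an admin call.
    static void dump(ZNRecord record) {
        System.out.println("id: " + record.getId());
        for (Map.Entry<String, String> e : record.getSimpleFields().entrySet()) {
            System.out.println("simple " + e.getKey() + " = " + e.getValue());
        }
        for (Map.Entry<String, List<String>> e : record.getListFields().entrySet()) {
            System.out.println("list " + e.getKey() + " = " + e.getValue());
        }
        for (Map.Entry<String, Map<String, String>> e : record.getMapFields().entrySet()) {
            System.out.println("map " + e.getKey() + " = " + e.getValue());
        }
    }
}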

Example 67 with ZNRecord

Use of org.apache.helix.ZNRecord in project helix by apache.

From the class InstancesResource, method getInstancesRepresentation:

StringRepresentation getInstancesRepresentation(String clusterName) throws JsonGenerationException, JsonMappingException, IOException {
    ZkClient zkClient = (ZkClient) getContext().getAttributes().get(RestAdminApplication.ZKCLIENT);
    HelixDataAccessor accessor = ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName);
    Map<String, LiveInstance> liveInstancesMap = accessor.getChildValuesMap(accessor.keyBuilder().liveInstances());
    Map<String, InstanceConfig> instanceConfigsMap = accessor.getChildValuesMap(accessor.keyBuilder().instanceConfigs());
    Map<String, List<String>> tagInstanceLists = new TreeMap<String, List<String>>();
    for (String instanceName : instanceConfigsMap.keySet()) {
        InstanceConfig config = instanceConfigsMap.get(instanceName);
        boolean isAlive = liveInstancesMap.containsKey(instanceName);
        config.getRecord().setSimpleField("Alive", Boolean.toString(isAlive));
        for (String tag : config.getTags()) {
            if (!tagInstanceLists.containsKey(tag)) {
                tagInstanceLists.put(tag, new LinkedList<String>());
            }
            if (!tagInstanceLists.get(tag).contains(instanceName)) {
                tagInstanceLists.get(tag).add(instanceName);
            }
        }
    }
    // Wrap raw data into an object, then serialize it
    List<ZNRecord> recordList = Lists.newArrayList();
    for (InstanceConfig instanceConfig : instanceConfigsMap.values()) {
        recordList.add(instanceConfig.getRecord());
    }
    ListInstancesWrapper wrapper = new ListInstancesWrapper();
    wrapper.instanceInfo = recordList;
    wrapper.tagInfo = tagInstanceLists;
    StringRepresentation representation = new StringRepresentation(ClusterRepresentationUtil.ObjectToJson(wrapper), MediaType.APPLICATION_JSON);
    return representation;
}
Also used: ZkClient (org.apache.helix.manager.zk.ZkClient), TreeMap (java.util.TreeMap), HelixDataAccessor (org.apache.helix.HelixDataAccessor), LiveInstance (org.apache.helix.model.LiveInstance), InstanceConfig (org.apache.helix.model.InstanceConfig), StringRepresentation (org.restlet.representation.StringRepresentation), List (java.util.List), LinkedList (java.util.LinkedList), ZNRecord (org.apache.helix.ZNRecord)
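
The resource above tags each InstanceConfig's backing ZNRecord with a derived "Alive" flag before wrapping it for serialization. A minimal sketch of that pattern on a standalone record; the class and instance names are illustrative, only the ZNRecord constructor and field accessors come from the Helix API.

import org.apache.helix.ZNRecord;

public class AliveFlagExample {
    // Mark a record with a derived boolean flag, the way getInstancesRepresentation does.
    static ZNRecord markAlive(ZNRecord instanceRecord, boolean isAlive) {
        // ZNRecord simple fields are string-valued, so the boolean is stored as "true"/"false".
        instanceRecord.setSimpleField("Alive", Boolean.toString(isAlive));
        return instanceRecord;
    }

    public static void main(String[] args) {
        ZNRecord record = new ZNRecord("localhost_12918");
        // Expected output: true
        System.out.println(markAlive(record, true).getSimpleField("Alive"));
    }
}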

Example 68 with ZNRecord

Use of org.apache.helix.ZNRecord in project helix by apache.

From the class TestClusterAccessor, method testDeleteConfigFields:

@Test(dependsOnMethods = "testUpdateConfigFields")
public void testDeleteConfigFields() throws IOException {
    System.out.println("Start test :" + TestHelper.getTestMethodName());
    String cluster = _clusters.iterator().next();
    ClusterConfig config = getClusterConfigFromRest(cluster);
    ZNRecord record = config.getRecord();
    String simpleKey = record.getSimpleFields().keySet().iterator().next();
    String value = record.getSimpleField(simpleKey);
    record.getSimpleFields().clear();
    record.setSimpleField(simpleKey, value);
    String listKey = record.getListFields().keySet().iterator().next();
    List<String> list = record.getListField(listKey);
    record.getListFields().clear();
    record.setListField(listKey, list);
    String mapKey = record.getMapFields().keySet().iterator().next();
    Map<String, String> map = record.getMapField(mapKey);
    record.getMapFields().clear();
    record.setMapField(mapKey, map);
    ClusterConfig prevConfig = getClusterConfigFromRest(cluster);
    updateClusterConfigFromRest(cluster, config, Command.delete);
    ClusterConfig newConfig = getClusterConfigFromRest(cluster);
    Assert.assertFalse(newConfig.getRecord().getSimpleFields().containsKey(simpleKey), "Failed to delete key " + simpleKey + " from cluster config");
    Assert.assertFalse(newConfig.getRecord().getListFields().containsKey(listKey), "Failed to delete key " + listKey + " from cluster config");
    Assert.assertFalse(newConfig.getRecord().getMapFields().containsKey(mapKey), "Failed to delete key " + mapKey + " from cluster config");
    prevConfig.getRecord().subtract(config.getRecord());
    Assert.assertEquals(newConfig, prevConfig, "cluster config from response: " + newConfig + " vs cluster config actually: " + prevConfig);
}
Also used: ZNRecord (org.apache.helix.ZNRecord), ClusterConfig (org.apache.helix.model.ClusterConfig), Test (org.testng.annotations.Test)
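
The assertion at the end relies on ZNRecord.subtract to predict what the cluster config should look like after the delete command. A minimal sketch of that behavior, assuming subtract removes from the receiver the fields whose keys appear in the argument, which is what the equality check above implies; the field names are made up for illustration.

import org.apache.helix.ZNRecord;

public class SubtractExample {
    public static void main(String[] args) {
        ZNRecord base = new ZNRecord("clusterConfig");
        base.setSimpleField("keepMe", "1");
        base.setSimpleField("dropMe", "2");

        // The delta carries only the fields we want removed.
        ZNRecord delta = new ZNRecord("clusterConfig");
        delta.setSimpleField("dropMe", "2");

        // subtract mutates the receiver in place, mirroring the prevConfig usage above.
        base.subtract(delta);
        System.out.println(base.getSimpleFields().containsKey("dropMe")); // expected: false
        System.out.println(base.getSimpleFields().containsKey("keepMe")); // expected: true
    }
}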

Example 69 with ZNRecord

Use of org.apache.helix.ZNRecord in project helix by apache.

From the class TestInstanceAccessor, method updateInstanceConfig:

@Test(dependsOnMethods = "updateInstance")
public void updateInstanceConfig() throws IOException {
    System.out.println("Start test :" + TestHelper.getTestMethodName());
    String instanceName = CLUSTER_NAME + "localhost_12918";
    InstanceConfig instanceConfig = _configAccessor.getInstanceConfig(CLUSTER_NAME, instanceName);
    ZNRecord record = instanceConfig.getRecord();
    record.getSimpleFields().put("TestSimple", "value");
    record.getMapFields().put("TestMap", ImmutableMap.of("key", "value"));
    record.getListFields().put("TestList", Arrays.asList("e1", "e2", "e3"));
    Entity entity = Entity.entity(OBJECT_MAPPER.writeValueAsString(record), MediaType.APPLICATION_JSON_TYPE);
    put("clusters/" + CLUSTER_NAME + "/instances/" + instanceName + "/configs", null, entity, Response.Status.OK.getStatusCode());
    Assert.assertEquals(record.getSimpleFields(), _configAccessor.getInstanceConfig(CLUSTER_NAME, instanceName).getRecord().getSimpleFields());
    Assert.assertEquals(record.getListFields(), _configAccessor.getInstanceConfig(CLUSTER_NAME, instanceName).getRecord().getListFields());
    Assert.assertEquals(record.getMapFields(), _configAccessor.getInstanceConfig(CLUSTER_NAME, instanceName).getRecord().getMapFields());
}
Also used: Entity (javax.ws.rs.client.Entity), InstanceConfig (org.apache.helix.model.InstanceConfig), ZNRecord (org.apache.helix.ZNRecord), Test (org.testng.annotations.Test)
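
The PUT body in this test is just the ZNRecord rendered as JSON. A minimal sketch of building that payload, assuming a Jackson ObjectMapper comparable to the OBJECT_MAPPER constant used above and that ZNRecord serializes cleanly through its public getters; the instance name and field values are illustrative.

import java.util.Arrays;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.helix.ZNRecord;

public class InstanceConfigPayload {
    public static void main(String[] args) throws Exception {
        // Populate the three field sections that the instance config PUT carries.
        ZNRecord record = new ZNRecord("localhost_12918");
        record.setSimpleField("TestSimple", "value");
        record.setMapField("TestMap", ImmutableMap.of("key", "value"));
        record.setListField("TestList", Arrays.asList("e1", "e2", "e3"));

        // Serialize to the JSON body sent to .../instances/{instance}/configs.
        String json = new ObjectMapper().writeValueAsString(record);
        System.out.println(json);
    }
}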

Example 70 with ZNRecord

Use of org.apache.helix.ZNRecord in project helix by apache.

From the class TestHelixAdminCli, method testDropAddResource:

@Test
public void testDropAddResource() throws Exception {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String clusterName = className + "_" + methodName;
    String grandClusterName = clusterName + "_grand";
    final int n = 6;
    System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
    MockParticipantManager[] participants = new MockParticipantManager[n];
    ClusterDistributedController[] controllers = new ClusterDistributedController[2];
    setupCluster(clusterName, grandClusterName, n, participants, controllers);
    String command = "-zkSvr " + ZK_ADDR + " -activateCluster " + clusterName + " " + grandClusterName + " true";
    ClusterSetup.processCommandLineArgs(command.split("\\s+"));
    Thread.sleep(500);
    // save ideal state
    BaseDataAccessor<ZNRecord> baseAccessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);
    HelixDataAccessor accessor = new ZKHelixDataAccessor(clusterName, baseAccessor);
    IdealState idealState = accessor.getProperty(accessor.keyBuilder().idealStates("db_11"));
    ZNRecordJsonSerializer serializer = new ZNRecordJsonSerializer();
    String tmpDir = System.getProperty("java.io.tmpdir");
    if (tmpDir == null) {
        tmpDir = "/tmp";
    }
    final String tmpIdealStateFile = tmpDir + "/" + clusterName + "_idealState.log";
    FileWriter fos = new FileWriter(tmpIdealStateFile);
    PrintWriter pw = new PrintWriter(fos);
    pw.write(new String(serializer.serialize(idealState.getRecord())));
    pw.close();
    command = "-zkSvr " + ZK_ADDR + " -dropResource " + clusterName + " db_11 ";
    ClusterSetup.processCommandLineArgs(command.split("\\s+"));
    boolean verifyResult = ClusterStateVerifier.verifyByZkCallback(new BestPossAndExtViewZkVerifier(ZK_ADDR, clusterName));
    Assert.assertTrue(verifyResult);
    command = "-zkSvr " + ZK_ADDR + " -addIdealState " + clusterName + " db_11 " + tmpIdealStateFile;
    ClusterSetup.processCommandLineArgs(command.split("\\s+"));
    verifyResult = ClusterStateVerifier.verifyByZkCallback(new BestPossAndExtViewZkVerifier(ZK_ADDR, clusterName));
    Assert.assertTrue(verifyResult);
    IdealState idealState2 = accessor.getProperty(accessor.keyBuilder().idealStates("db_11"));
    Assert.assertTrue(idealState2.getRecord().equals(idealState.getRecord()));
    // clean up
    for (int i = 0; i < controllers.length; i++) {
        controllers[i].syncStop();
    }
    for (int i = 0; i < participants.length; i++) {
        participants[i].syncStop();
    }
    System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
}
Also used: ZkBaseDataAccessor (org.apache.helix.manager.zk.ZkBaseDataAccessor), MockParticipantManager (org.apache.helix.integration.manager.MockParticipantManager), ZNRecordJsonSerializer (org.apache.helix.store.ZNRecordJsonSerializer), FileWriter (java.io.FileWriter), BestPossAndExtViewZkVerifier (org.apache.helix.tools.ClusterStateVerifier.BestPossAndExtViewZkVerifier), Date (java.util.Date), IdealState (org.apache.helix.model.IdealState), ClusterDistributedController (org.apache.helix.integration.manager.ClusterDistributedController), ZKHelixDataAccessor (org.apache.helix.manager.zk.ZKHelixDataAccessor), HelixDataAccessor (org.apache.helix.HelixDataAccessor), ZNRecord (org.apache.helix.ZNRecord), PrintWriter (java.io.PrintWriter), Test (org.testng.annotations.Test)
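
The command-line round trip above saves the ideal state's ZNRecord to a file and later re-adds it, then checks record equality. A minimal sketch of the same round trip in code, using org.apache.helix.manager.zk.ZNRecordSerializer rather than the ZNRecordJsonSerializer used above; the record id and field names are illustrative.

import org.apache.helix.ZNRecord;
import org.apache.helix.manager.zk.ZNRecordSerializer;

public class IdealStateRoundTrip {
    public static void main(String[] args) {
        ZNRecord original = new ZNRecord("db_11");
        original.setSimpleField("REPLICAS", "3");
        original.setSimpleField("STATE_MODEL_DEF_REF", "MasterSlave");

        // Serialize to bytes (the test writes the equivalent JSON to a temp file).
        ZNRecordSerializer serializer = new ZNRecordSerializer();
        byte[] bytes = serializer.serialize(original);

        // Deserialize and verify the round trip preserves the record, as the test asserts.
        ZNRecord restored = (ZNRecord) serializer.deserialize(bytes);
        System.out.println(restored.equals(original)); // expected: true
    }
}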

Aggregations

ZNRecord (org.apache.helix.ZNRecord): 448
Test (org.testng.annotations.Test): 186
ArrayList (java.util.ArrayList): 117
Date (java.util.Date): 111
HelixDataAccessor (org.apache.helix.HelixDataAccessor): 91
ZKHelixDataAccessor (org.apache.helix.manager.zk.ZKHelixDataAccessor): 78
Builder (org.apache.helix.PropertyKey.Builder): 75
HashMap (java.util.HashMap): 72
IdealState (org.apache.helix.model.IdealState): 69
PropertyKey (org.apache.helix.PropertyKey): 61
HelixException (org.apache.helix.HelixException): 47
Map (java.util.Map): 41
MockParticipantManager (org.apache.helix.integration.manager.MockParticipantManager): 40
ZkBaseDataAccessor (org.apache.helix.manager.zk.ZkBaseDataAccessor): 40
ClusterControllerManager (org.apache.helix.integration.manager.ClusterControllerManager): 33
PropertyPathBuilder (org.apache.helix.PropertyPathBuilder): 30
List (java.util.List): 29
ZkClient (org.apache.helix.manager.zk.ZkClient): 29
HelixAdmin (org.apache.helix.HelixAdmin): 28
LiveInstance (org.apache.helix.model.LiveInstance): 28