use of org.cerberus.util.answer.AnswerList in project cerberus-source by cerberustesting.
the class DataLibService method filterWithNatureRANDOMNEW.
@Override
public AnswerList<HashMap<String, String>> filterWithNatureRANDOMNEW(AnswerList<HashMap<String, String>> dataObjectList, TestCaseExecution tCExecution, TestCaseCountryProperties testCaseProperties, int outputRequestedDimention) {
AnswerList<HashMap<String, String>> result = new AnswerList();
// Temporary list used to process the input list
List<HashMap<String, String>> list;
List<HashMap<String, String>> resultObject;
resultObject = new ArrayList<HashMap<String, String>>();
int initNB = dataObjectList.getDataList().size();
// We get the list of values that were already used.
List<String> pastValues = this.testCaseExecutionDataService.getPastValuesOfProperty(tCExecution.getId(), testCaseProperties.getProperty(), tCExecution.getTest(), tCExecution.getTestCase(), tCExecution.getCountryEnvParam().getBuild(), tCExecution.getEnvironmentData(), tCExecution.getCountry());
int removedNB = 0;
// We save all rows that need to be removed into listToremove.
List<Map<String, String>> listToremove = new ArrayList<Map<String, String>>();
list = dataObjectList.getDataList();
for (String valueToRemove : pastValues) {
for (Map<String, String> curentRow : list) {
// The empty subdata name corresponds to the key column of the data library entry.
if (curentRow.get("").equals(valueToRemove)) {
listToremove.add(curentRow);
removedNB++;
}
}
}
// We remove all listToremove entries from list.
list.removeAll(listToremove);
if (list != null && !list.isEmpty()) {
if (list.size() < outputRequestedDimention) {
// Still some results available but not enough compared to what was requested.
result.setResultMessage(new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_RANDOMNEW_NOTENOUGTHRECORDS).resolveDescription("REMNB", Integer.toString(listToremove.size())).resolveDescription("TOTNB", Integer.toString(initNB)).resolveDescription("NBREQUEST", Integer.toString(outputRequestedDimention)));
} else {
// Get a random list.
List<Integer> listTempRandom = getRandomListOfInteger(dataObjectList.getDataList().size(), outputRequestedDimention);
String selectedList = "";
// Pick the result from list.
for (int i : listTempRandom) {
int j = i + 1;
selectedList += Integer.toString(j) + ",";
resultObject.add(dataObjectList.getDataList().get(i));
}
selectedList = StringUtil.removeLastChar(selectedList, 1);
result.setDataList(resultObject);
result.setResultMessage(new MessageEvent(MessageEventEnum.PROPERTY_SUCCESS_GETFROMDATALIB_NATURERANDOMNEW).resolveDescription("TOTNB", Integer.toString(initNB)).resolveDescription("REMNB", Integer.toString(removedNB)).resolveDescription("POS", selectedList).resolveDescription("TOTALPOS", Integer.toString(list.size())));
}
} else {
// No more entries available.
result.setResultMessage(new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_RANDOMNEW_NOMORERECORD).resolveDescription("TOTNB", Integer.toString(initNB)));
}
return result;
}
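filterWithNatureRANDOMNEW reports both its payload and its status through the AnswerList it returns, so a caller checks the MessageEvent code before reading the data list. A minimal, hypothetical caller-side sketch (the variable names and the LOG logger are illustrative; only AnswerList and MessageEvent calls already visible in the snippet above are assumed):
// Hypothetical caller-side sketch: consume the AnswerList returned by filterWithNatureRANDOMNEW.
AnswerList<HashMap<String, String>> picked = filterWithNatureRANDOMNEW(dataObjectList, tCExecution, testCaseProperties, 2);
if (picked.getResultMessage().getCode() == MessageEventEnum.PROPERTY_SUCCESS_GETFROMDATALIB_NATURERANDOMNEW.getCode()) {
    for (HashMap<String, String> row : picked.getDataList()) {
        // The empty subdata name holds the key value of each selected row.
        LOG.debug("Selected row key : " + row.get(""));
    }
} else {
    // Not enough (or no more) unused records: surface the resolved message description.
    LOG.debug("RANDOMNEW selection failed : " + picked.getResultMessage().getDescription());
}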
use of org.cerberus.util.answer.AnswerList in project cerberus-source by cerberustesting.
the class DataLibService method getSubDataFromType.
/**
* Get the list of subData definitions of the given data library, depending on its type.
*
* @param lib the TestDataLib entry to read the subdata definitions from
* @return an AnswerItem holding a map of subdata name to column, position or parsing definition
*/
private AnswerItem<HashMap<String, String>> getSubDataFromType(TestDataLib lib) {
AnswerList answerData = new AnswerList();
AnswerItem<HashMap<String, String>> result = new AnswerItem();
MessageEvent msg = new MessageEvent(MessageEventEnum.PROPERTY_SUCCESS);
List<TestDataLibData> objectDataList = new ArrayList<TestDataLibData>();
HashMap<String, String> row = new HashMap<String, String>();
switch(lib.getType()) {
case TestDataLib.TYPE_CSV:
answerData = testDataLibDataService.readByVarious(lib.getTestDataLibID(), null, null, "N");
if ((answerData.getResultMessage().getCode() == MessageEventEnum.DATA_OPERATION_OK.getCode()) && !answerData.getDataList().isEmpty()) {
objectDataList = answerData.getDataList();
// The subdata with an empty name is the key column; consider it missing until found.
boolean missingKey = true;
for (TestDataLibData tdld : objectDataList) {
row.put(tdld.getSubData(), tdld.getColumnPosition());
if (tdld.getSubData().equalsIgnoreCase("")) {
missingKey = false;
}
}
result.setItem(row);
if (missingKey) {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_SUBDATACSVNOKEY);
result.setResultMessage(msg);
} else {
msg = new MessageEvent(MessageEventEnum.PROPERTY_SUCCESS_GETFROMDATALIB_SUBDATA);
msg.setDescription(msg.getDescription().replace("%NBROW%", String.valueOf(answerData.getDataList().size())));
result.setResultMessage(msg);
}
} else if ((answerData.getResultMessage().getCode() == MessageEventEnum.DATA_OPERATION_OK.getCode()) && answerData.getDataList().isEmpty()) {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_NOSUBDATACSV);
result.setResultMessage(msg);
} else {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_SUBDATACSV);
result.setResultMessage(msg);
}
break;
case TestDataLib.TYPE_SQL:
answerData = testDataLibDataService.readByVarious(lib.getTestDataLibID(), "N", null, null);
if ((answerData.getResultMessage().getCode() == MessageEventEnum.DATA_OPERATION_OK.getCode()) && !answerData.getDataList().isEmpty()) {
objectDataList = answerData.getDataList();
boolean missingKey = true;
for (TestDataLibData tdld : objectDataList) {
row.put(tdld.getSubData(), tdld.getColumn());
if (tdld.getSubData().equalsIgnoreCase("")) {
missingKey = false;
}
}
result.setItem(row);
if (missingKey) {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_SUBDATASQLNOKEY);
result.setResultMessage(msg);
} else {
msg = new MessageEvent(MessageEventEnum.PROPERTY_SUCCESS_GETFROMDATALIB_SUBDATA);
msg.setDescription(msg.getDescription().replace("%NBROW%", String.valueOf(answerData.getDataList().size())));
result.setResultMessage(msg);
}
} else if ((answerData.getResultMessage().getCode() == MessageEventEnum.DATA_OPERATION_OK.getCode()) && answerData.getDataList().isEmpty()) {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_NOSUBDATASQL);
result.setResultMessage(msg);
} else {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_SUBDATASQL);
result.setResultMessage(msg);
}
break;
case TestDataLib.TYPE_SERVICE:
answerData = testDataLibDataService.readByVarious(lib.getTestDataLibID(), null, "N", null);
if ((answerData.getResultMessage().getCode() == MessageEventEnum.DATA_OPERATION_OK.getCode()) && !answerData.getDataList().isEmpty()) {
objectDataList = answerData.getDataList();
boolean missingKey = true;
for (TestDataLibData tdld : objectDataList) {
row.put(tdld.getSubData(), tdld.getParsingAnswer());
if (tdld.getSubData().equalsIgnoreCase("")) {
missingKey = false;
}
}
result.setItem(row);
if (missingKey) {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_SUBDATASOAPNOKEY);
result.setResultMessage(msg);
} else {
msg = new MessageEvent(MessageEventEnum.PROPERTY_SUCCESS_GETFROMDATALIB_SUBDATA);
msg.setDescription(msg.getDescription().replace("%NBROW%", String.valueOf(answerData.getDataList().size())));
result.setResultMessage(msg);
}
} else if ((answerData.getResultMessage().getCode() == MessageEventEnum.DATA_OPERATION_OK.getCode()) && answerData.getDataList().isEmpty()) {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_NOSUBDATASOAP);
result.setResultMessage(msg);
} else {
msg = new MessageEvent(MessageEventEnum.PROPERTY_FAILED_GETFROMDATALIB_SUBDATASOAP);
result.setResultMessage(msg);
}
break;
case TestDataLib.TYPE_INTERNAL:
// For the static (INTERNAL) type, there is no need to fetch the subdata as they are loaded at the same time as the data.
msg = new MessageEvent(MessageEventEnum.PROPERTY_SUCCESS_GETFROMDATALIB_SUBDATA);
result.setResultMessage(msg);
result.setItem(null);
break;
}
return result;
}
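getSubDataFromType pairs an internal AnswerList read (testDataLibDataService.readByVarious) with an AnswerItem result wrapping a single HashMap. A rough, hypothetical sketch of how a caller could unwrap it, assuming getItem() is the read counterpart of the setItem() call shown above:
// Hypothetical caller-side sketch; getItem() is assumed as the counterpart of setItem().
AnswerItem<HashMap<String, String>> subDataAnswer = getSubDataFromType(lib);
if (subDataAnswer.getResultMessage().getCode() == MessageEventEnum.PROPERTY_SUCCESS_GETFROMDATALIB_SUBDATA.getCode()) {
    HashMap<String, String> subDataMap = subDataAnswer.getItem();
    if (subDataMap != null) { // null for TYPE_INTERNAL, where subdata is loaded together with the data.
        for (Map.Entry<String, String> entry : subDataMap.entrySet()) {
            LOG.debug("SubData '" + entry.getKey() + "' -> " + entry.getValue());
        }
    }
}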
use of org.cerberus.util.answer.AnswerList in project cerberus-source by cerberustesting.
the class ExecutionThreadPoolService method executeNextInQueue.
/**
* {@inheritDoc}
*/
@Override
public void executeNextInQueue(boolean forceExecution) throws CerberusException {
// Job can be deactivated by parameter.
if (!(parameterService.getParameterBooleanByKey("cerberus_queueexecution_enable", "", true))) {
LOG.debug("Queue_Processing_Job disabled by parameter : 'cerberus_queueexecution_enable'.");
return;
}
// We first check whether another Cerberus thread has already triggered the job. Only one instance of the job is necessary.
if (!(myVersionService.getMyVersionStringByKey("queueprocessingjobrunning", "N").equals("Y")) || forceExecution) {
if (forceExecution) {
LOG.debug("Forcing Start of Queue_Processing_Job.");
}
int nbqueuedexe = 0;
do {
if (!(parameterService.getParameterBooleanByKey("cerberus_queueexecution_enable", "", true))) {
LOG.debug("Queue_Processing_Job disabled by parameter : 'cerberus_queueexecution_enable'.");
return;
}
nbqueuedexe = 0;
// The job is not already running, so we can trigger it.
LOG.debug("Starting Queue_Processing_Job.");
// Flag in database that job is already running.
myVersionService.UpdateMyVersionString("queueprocessingjobrunning", "Y");
myVersionService.UpdateMyVersionString("queueprocessingjobstart", String.valueOf(new Date()));
String cerberus_url = parameterService.getParameterStringByKey("cerberus_url", "", "");
// Getting all executions to be treated.
AnswerList answer = new AnswerList();
answer = tceiqService.readQueueToTreat();
List<TestCaseExecutionQueueToTreat> executionsInQueue = (List<TestCaseExecutionQueueToTreat>) answer.getDataList();
int poolSizeGeneral = parameterService.getParameterIntegerByKey("cerberus_queueexecution_global_threadpoolsize", "", 12);
int poolSizeRobot = parameterService.getParameterIntegerByKey("cerberus_queueexecution_defaultrobothost_threadpoolsize", "", 10);
int queueTimeout = parameterService.getParameterIntegerByKey("cerberus_queueexecution_timeout", "", 600000);
// Init constraint counters.
int const01_current = 0;
int const02_current = 0;
int const03_current = 0;
HashMap<String, Integer> constrains_current = new HashMap<String, Integer>();
constrains_current = getCurrentlyRunning();
// Getting RobotHost PoolSize
HashMap<String, Integer> robot_poolsize = new HashMap<String, Integer>();
robot_poolsize = invariantService.readToHashMapGp1IntegerByIdname("ROBOTHOST", poolSizeRobot);
// Analysing each execution in the database queue.
for (TestCaseExecutionQueueToTreat exe : executionsInQueue) {
// Robot PoolSize is retrieved from the hashmap.
int robot_poolsize_final = 0;
if (robot_poolsize.containsKey(exe.getRobotHost())) {
robot_poolsize_final = ParameterParserUtil.parseIntegerParam(robot_poolsize.get(exe.getRobotHost()), poolSizeRobot);
} else {
robot_poolsize_final = 0;
}
LOG.debug("Analysing Queue : " + exe.getId() + " poolGen " + poolSizeGeneral + " poolApp " + exe.getPoolSizeApplication() + " poolRobot " + robot_poolsize_final);
String const01_key = TestCaseExecutionQueueToTreat.CONSTRAIN1_GLOBAL;
String const02_key = TestCaseExecutionQueueToTreat.CONSTRAIN2_APPLICATION + CONST_SEPARATOR + exe.getSystem() + CONST_SEPARATOR + exe.getEnvironment() + CONST_SEPARATOR + exe.getCountry() + CONST_SEPARATOR + exe.getApplication();
String const03_key = TestCaseExecutionQueueToTreat.CONSTRAIN3_ROBOT + CONST_SEPARATOR + exe.getRobotHost();
// Get current counter for Constraint 1
if (constrains_current.containsKey(const01_key)) {
const01_current = constrains_current.get(const01_key);
} else {
const01_current = 0;
}
// Eval Constraint 1
boolean constMatch01;
if (poolSizeGeneral == 0) {
// if poolsize == 0, this means no constraint is specified.
constMatch01 = false;
} else {
constMatch01 = (const01_current >= poolSizeGeneral);
}
// Get current counter for Constraint 2
if (constrains_current.containsKey(const02_key)) {
const02_current = constrains_current.get(const02_key);
} else {
const02_current = 0;
}
// Eval Constraint 2
boolean constMatch02;
if (exe.getPoolSizeApplication() == 0) {
// if poolsize == 0, this means no constraint is specified.
constMatch02 = false;
} else {
constMatch02 = (const02_current >= exe.getPoolSizeApplication());
}
// Get current counter for Constraint 3
if (constrains_current.containsKey(const03_key)) {
const03_current = constrains_current.get(const03_key);
} else {
const03_current = 0;
}
// Eval Constraint 3
boolean constMatch03;
if (robot_poolsize_final == 0) {
// if poolsize == 0, this means no constraint is specified.
constMatch03 = false;
} else {
constMatch03 = (const03_current >= robot_poolsize_final);
}
String notTriggeredExeMessage = "";
boolean triggerExe = false;
if ((!constMatch01 && !constMatch02 && !constMatch03) || (!constMatch01 && exe.getManualExecution().equals("Y"))) {
// Flag the queue entry as WAITING and submit it to the execution thread pool.
if (queueService.updateToWaiting(exe.getId())) {
try {
ExecutionQueueWorkerThread task = new ExecutionQueueWorkerThread();
task.setCerberusExecutionUrl(cerberus_url);
task.setQueueId(exe.getId());
task.setToExecuteTimeout(queueTimeout);
task.setQueueService(queueService);
task.setExecThreadPool(threadQueuePool);
Future<?> future = threadQueuePool.getExecutor().submit(task);
task.setFuture(future);
triggerExe = true;
nbqueuedexe++;
// Debug messages.
LOG.debug("result : " + triggerExe + " Const1 " + constMatch01 + " Const2 " + constMatch01 + " Const3 " + constMatch01 + " Manual " + exe.getManualExecution());
LOG.debug(" CurConst1 " + const01_current + " CurConst2 " + const02_current + " CurConst3 " + const03_current);
// Counter increase
constrains_current.put(const01_key, const01_current + 1);
if (!exe.getManualExecution().equals("Y")) {
// Specific increment only if automatic execution.
constrains_current.put(const02_key, const02_current + 1);
constrains_current.put(const03_key, const03_current + 1);
}
} catch (Exception e) {
LOG.error("Failed to add Queueid : " + exe.getId() + " into the queue : " + e.getMessage());
}
}
} else {
if (constMatch03) {
notTriggeredExeMessage = "Robot contrain on '" + const03_key + "' reached. " + robot_poolsize_final + " Execution(s) already in pool.";
}
if (constMatch02) {
notTriggeredExeMessage = "Application Environment contrain on '" + const02_key + "' reached . " + exe.getPoolSizeApplication() + " Execution(s) already in pool.";
}
if (constMatch01) {
notTriggeredExeMessage = "Global contrain reached. " + poolSizeGeneral + " Execution(s) already in pool.";
}
if ((exe.getDebugFlag() != null) && (exe.getDebugFlag().equalsIgnoreCase("Y"))) {
queueService.updateComment(exe.getId(), notTriggeredExeMessage);
}
LOG.debug("result : " + triggerExe + " Const1 " + constMatch01 + " Const2 " + constMatch01 + " Const3 " + constMatch01 + " Manual " + exe.getManualExecution());
LOG.debug(" CurConst1 " + const01_current + " CurConst2 " + const02_current + " CurConst3 " + const03_current);
LOG.debug(" " + notTriggeredExeMessage);
}
}
// Flag in database that job is finished.
myVersionService.UpdateMyVersionString("queueprocessingjobrunning", "N");
LOG.debug("Stoping Queue_Processing_Job - TOTAL Released execution(s) : " + nbqueuedexe);
} while (nbqueuedexe > 0);
} else {
LOG.debug("Queue_Processing_Job not triggered (already running.)");
}
}
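The raw AnswerList returned by readQueueToTreat() forces the unchecked cast of getDataList() above. As an illustrative alternative only (the current service signature is not changed here), a parameterized AnswerList would make the read self-describing:
// Hypothetical typed variant; assumes readQueueToTreat() were declared to return AnswerList<TestCaseExecutionQueueToTreat>.
AnswerList<TestCaseExecutionQueueToTreat> queueAnswer = tceiqService.readQueueToTreat();
List<TestCaseExecutionQueueToTreat> executionsInQueue = queueAnswer.getDataList();
LOG.debug("Queue entries to analyse : " + executionsInQueue.size());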
use of org.cerberus.util.answer.AnswerList in project cerberus-source by cerberustesting.
the class ExecutionThreadPoolService method getCurrentlyToTreat.
@Override
public HashMap<String, Integer> getCurrentlyToTreat() throws CerberusException {
AnswerList answer = new AnswerList();
HashMap<String, Integer> constrains_current = new HashMap<String, Integer>();
// Getting all executions to be treated.
answer = tceiqService.readQueueToTreat();
List<TestCaseExecutionQueueToTreat> executionsToTreat = (List<TestCaseExecutionQueueToTreat>) answer.getDataList();
// Calculate constraint counter values.
for (TestCaseExecutionQueueToTreat exe : executionsToTreat) {
String const01_key = TestCaseExecutionQueueToTreat.CONSTRAIN1_GLOBAL;
String const02_key = TestCaseExecutionQueueToTreat.CONSTRAIN2_APPLICATION + CONST_SEPARATOR + exe.getSystem() + CONST_SEPARATOR + exe.getEnvironment() + CONST_SEPARATOR + exe.getCountry() + CONST_SEPARATOR + exe.getApplication();
String const03_key = TestCaseExecutionQueueToTreat.CONSTRAIN3_ROBOT + CONST_SEPARATOR + exe.getRobotHost();
if (constrains_current.containsKey(const01_key)) {
constrains_current.put(const01_key, constrains_current.get(const01_key) + 1);
} else {
constrains_current.put(const01_key, 1);
}
if (constrains_current.containsKey(const02_key)) {
constrains_current.put(const02_key, constrains_current.get(const02_key) + 1);
} else {
constrains_current.put(const02_key, 1);
}
if (constrains_current.containsKey(const03_key)) {
constrains_current.put(const03_key, constrains_current.get(const03_key) + 1);
} else {
constrains_current.put(const03_key, 1);
}
}
return constrains_current;
}
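The three if/else blocks above only increment a per-key counter in constrains_current. On Java 8+, an equivalent way to write the loop body (reusing the key variables computed above) is Map.merge, shown here as an illustrative alternative rather than the project's code:
// Equivalent counter increments using Map.merge (behavior-identical to the if/else blocks above).
constrains_current.merge(const01_key, 1, Integer::sum);
constrains_current.merge(const02_key, 1, Integer::sum);
constrains_current.merge(const03_key, 1, Integer::sum);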
use of org.cerberus.util.answer.AnswerList in project cerberus-source by cerberustesting.
the class GetReportData method processRequest.
/**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code>
* methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, CerberusException, ParseException, JSONException {
response.setContentType("text/html;charset=UTF-8");
ApplicationContext appContext = WebApplicationContextUtils.getWebApplicationContext(this.getServletContext());
testCaseExecutionService = appContext.getBean(ITestCaseExecutionService.class);
testCaseExecutionInQueueService = appContext.getBean(ITestCaseExecutionQueueService.class);
response.setContentType("application/json");
response.setCharacterEncoding("utf8");
JSONObject jsonResult = new JSONObject();
String tag = request.getParameter("Tag");
boolean split = ParameterParserUtil.parseBooleanParam(request.getParameter("split"), false);
/**
* Get the list of executions by tag, env, country, browser
*/
AnswerList<TestCaseExecution> listOfExecution = testCaseExecutionService.readByTagByCriteria(tag, 0, 0, null, null, null);
List<TestCaseExecution> testCaseExecutions = listOfExecution.getDataList();
/**
* Get list of Execution in Queue by Tag
*/
List<TestCaseExecutionQueue> testCaseExecutionsInQueue = testCaseExecutionInQueueService.findTestCaseExecutionInQueuebyTag(tag);
/**
* Feed the hash map with executions from the two lists (to keep only one per
* test, testcase, country, env, browser)
*/
testCaseExecutions = hashExecution(testCaseExecutions, testCaseExecutionsInQueue);
/**
* Getting the global start and end of the execution tag.
*/
long startMin = 0;
long endMax = 0;
for (TestCaseExecution testCaseExecution : testCaseExecutions) {
if ((startMin == 0) || (testCaseExecution.getStart() < startMin)) {
startMin = testCaseExecution.getStart();
}
if ((endMax == 0) || (testCaseExecution.getEnd() > endMax)) {
endMax = testCaseExecution.getEnd();
}
}
if (!split) {
Map<String, JSONObject> axisMap = new HashMap<String, JSONObject>();
for (TestCaseExecution testCaseWithExecution : testCaseExecutions) {
String key;
String controlStatus;
JSONObject control = new JSONObject();
JSONObject function = new JSONObject();
if (testCaseWithExecution.getTestCaseObj().getFunction() != null && !"".equals(testCaseWithExecution.getTestCaseObj().getFunction())) {
key = testCaseWithExecution.getTestCaseObj().getFunction();
} else {
key = testCaseWithExecution.getTest();
}
controlStatus = testCaseWithExecution.getControlStatus();
control.put("value", 1);
control.put("color", getColor(controlStatus));
control.put("label", controlStatus);
function.put("name", key);
if (axisMap.containsKey(key)) {
function = axisMap.get(key);
if (function.has(controlStatus)) {
int prec = function.getJSONObject(controlStatus).getInt("value");
control.put("value", prec + 1);
}
}
function.put(controlStatus, control);
axisMap.put(key, function);
}
jsonResult.put("axis", axisMap.values());
jsonResult.put("tag", tag);
jsonResult.put("start", new Date(startMin));
jsonResult.put("end", new Date(endMax));
} else {
boolean env = ParameterParserUtil.parseBooleanParam(request.getParameter("env"), false);
boolean country = ParameterParserUtil.parseBooleanParam(request.getParameter("country"), false);
boolean browser = ParameterParserUtil.parseBooleanParam(request.getParameter("browser"), false);
boolean app = ParameterParserUtil.parseBooleanParam(request.getParameter("app"), false);
AnswerList columnExec = testCaseExecutionService.readDistinctColumnByTag(tag, env, country, browser, app);
List<TestCaseExecution> columnTcExec = columnExec.getDataList();
AnswerList columnQueue = testCaseExecutionInQueueService.readDistinctColumnByTag(tag, env, country, browser, app);
List<TestCaseExecutionQueue> columnInQueue = columnQueue.getDataList();
Map<String, TestCaseExecution> testCaseExecutionsList = new LinkedHashMap();
for (TestCaseExecution column : columnTcExec) {
String key = column.getBrowser() + column.getCountry() + column.getEnvironment() + column.getApplication();
testCaseExecutionsList.put(key, column);
}
for (TestCaseExecutionQueue column : columnInQueue) {
TestCaseExecution testCaseExecution = testCaseExecutionInQueueService.convertToTestCaseExecution(column);
String key = testCaseExecution.getBrowser() + testCaseExecution.getCountry() + testCaseExecution.getEnvironment() + testCaseExecution.getApplicationObj().getApplication();
testCaseExecutionsList.put(key, testCaseExecution);
}
List<TestCaseExecution> res = new ArrayList<TestCaseExecution>(testCaseExecutionsList.values());
HashMap<String, SummaryStatisticsDTO> statMap = new HashMap<String, SummaryStatisticsDTO>();
for (TestCaseExecution column : res) {
SummaryStatisticsDTO stat = new SummaryStatisticsDTO();
stat.setEnvironment(column.getEnvironment());
stat.setCountry(column.getCountry());
stat.setRobotDecli(column.getBrowser());
stat.setApplication(column.getApplication());
statMap.put(column.getEnvironment() + "_" + column.getCountry() + "_" + column.getBrowser() + "_" + column.getApplication(), stat);
}
jsonResult.put("contentTable", getStatByEnvCountryBrowser(testCaseExecutions, statMap, env, country, browser, app));
}
response.getWriter().print(jsonResult);
}
Aggregations