Use of net.sourceforge.processdash.api.PDashQuery in project processdash by dtuma.
Class TimeLogReport, method queryDatabaseForTimeLogEntries:
private List queryDatabaseForTimeLogEntries(UserFilter privacyFilter) {
    // retrieve a mapping of dataset IDs, if we need it for testing privacy
    PDashQuery query = getPdash().getQuery();
    Map<Object, String> datasetIDs = null;
    if (!UserGroup.isEveryone(privacyFilter))
        datasetIDs = QueryUtils.mapColumns(query.query(DATASET_ID_QUERY));

    // retrieve the ID of the process whose phases we should map to
    String processID = getParameter("processID");
    if (processID == null)
        processID = new ProcessUtil(getDataContext()).getProcessID();

    // retrieve the raw data for the time log entries themselves
    List<Object[]> rawData = query.query(TIME_LOG_HQL, processID);
    rawData.addAll(query.query(TIME_LOG_UNCAT_HQL));

    // build a list of time log entries
    List<TimeLogEntry> result = new ArrayList<TimeLogEntry>();
    for (Object[] row : rawData) {
        String path = row[0] + "/" + row[1];
        if (path.startsWith("//"))
            path = path.substring(1);
        Date start = (Date) row[2];
        long delta = ((Number) row[3]).longValue();
        long interrupt = ((Number) row[4]).longValue();
        String comment = (String) row[5];

        // if a privacy filter is in effect, see if it excludes this entry
        if (datasetIDs != null) {
            String datasetID = datasetIDs.get(row[6]);
            if (!privacyFilter.getDatasetIDs().contains(datasetID)) {
                someEntriesBlocked = true;
                continue;
            }
        }

        // create a time log entry and add it to the list
        result.add(new TimeLogEntryVO(0, path, start, delta, interrupt,
                comment));
    }
    return result;
}
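The DATASET_ID_QUERY, TIME_LOG_HQL, and TIME_LOG_UNCAT_HQL constants are defined elsewhere in TimeLogReport and are not shown here. The QueryUtils.mapColumns call above is assumed to pair the first two columns of each result row, which is what makes the later lookup via row[6] work. A minimal sketch of that assumed behavior (an illustration, not the processdash implementation):

    // Sketch only: assumes each Object[] row holds a key in row[0] and a
    // String dataset ID in row[1]. Requires java.util.List, Map, HashMap.
    static Map<Object, String> mapColumnsSketch(List<Object[]> rows) {
        Map<Object, String> result = new HashMap<Object, String>();
        for (Object[] row : rows)
            result.put(row[0], (String) row[1]);
        return result;
    }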
Use of net.sourceforge.processdash.api.PDashQuery in project processdash by dtuma.
Class CumulativeDefectChartSnippet, method buildData:
private void buildData() throws IOException {
    // query data from the database
    String hql = getTinyWebServer().getRequestAsString(
            "/dash/snippets/cumDefects.hql");
    PDashQuery query = getPdash().getQuery();
    List<Object[]> defectCounts = query.query(hql);

    // calc the number of rows we expect in our result set: one per day
    int numRows;
    if (defectCounts.isEmpty()) {
        numRows = 0;
    } else if (defectCounts.size() == 1) {
        numRows = 2;
    } else {
        Date start = (Date) defectCounts.get(0)[0];
        Date end = (Date) defectCounts.get(defectCounts.size() - 1)[0];
        long len = end.getTime() - start.getTime() + DAY_DELTA;
        numRows = (int) (len / DateUtils.DAYS) + 1;
    }

    // create a result set to hold the data
    ResultSet data = new ResultSet(numRows, 3);
    data.setColName(0, resources.getString("Date"));
    data.setColName(1, resources.getString("Date"));
    data.setColName(2, resources.getString("Count"));
    data.setColName(3, resources.getString("Cumulative"));

    // load defect data into the result set
    int row = 0;
    int cum = 0;
    Calendar lastDate = Calendar.getInstance();
    for (int i = 0; i < defectCounts.size(); i++) {
        Object[] oneCount = defectCounts.get(i);
        Date d = (Date) oneCount[0];
        int num = ((Number) oneCount[1]).intValue();

        if (i == 0) {
            // add a "zero" point preceding the first row
            lastDate.setTime(d);
            lastDate.add(Calendar.DATE, -1);
            addRow(data, ++row, lastDate.getTime(), 0, 0);
        } else {
            // add extra rows for days when no defects were removed
            long datePadCutoff = d.getTime() - DAY_DELTA;
            while (datePadCutoff > lastDate.getTimeInMillis()) {
                lastDate.add(Calendar.DATE, 1);
                addRow(data, ++row, lastDate.getTime(), 0, cum);
            }
        }

        // now add a row for the current data point
        cum += num;
        lastDate.setTime(d);
        addRow(data, ++row, d, num, cum);
    }

    // store the result set into the repository
    ListData l = new ListData();
    l.add(data);
    getDataContext().putValue(DATA_NAME, l);
}
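The addRow helper is private to CumulativeDefectChartSnippet and is not shown above. Assuming each chart row stores the date in column 1 and the daily and cumulative counts in columns 2 and 3, it might look roughly like the following sketch; DateData is assumed here to accept a (Date, editable) constructor, and the real helper may also set a row name or format the date differently.

    // Hypothetical sketch of addRow; not the actual processdash code.
    private void addRow(ResultSet data, int row, Date when, int count, int cum) {
        data.setData(row, 1, new DateData(when, true));   // the "Date" column
        data.setData(row, 2, new DoubleData(count));      // defects found that day
        data.setData(row, 3, new DoubleData(cum));        // running total
    }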
Use of net.sourceforge.processdash.api.PDashQuery in project processdash by dtuma.
Class EstErrorScatterChartSnippet, method buildData:
private void buildData(Resources res) throws IOException {
    // query data from the database
    String[] hql = getTinyWebServer().getRequestAsString(
            "/dash/snippets/estErrorScatter.hql").split(";\\s*");
    PDashQuery query = getPdash().getQuery();
    List enactmentKeys = query.query(hql[0], getProjectKeys(), CURRENT);
    List<Object[]> taskStatus = query.query(hql[1], enactmentKeys, CURRENT);
    List<Object[]> sizeData = query.query(hql[2], enactmentKeys, CURRENT);

    // create a result set to hold the data
    ResultSet data = new ResultSet(taskStatus.size(), 7);
    data.setColName(0, res.getString("Component"));
    data.setColName(1, res.getString("Size_Units"));
    data.setColName(2, res.getString("Plan_Size"));
    data.setColName(3, res.getString("Actual_Size"));
    data.setColName(4, res.getString("Size_Est_Error"));
    data.setColName(5, res.getString("Plan_Time"));
    data.setColName(6, res.getString("Actual_Time"));
    data.setColName(7, res.getString("Time_Est_Error"));
    data.setFormat(4, "100%");
    data.setFormat(7, "100%");

    // load time data into the result set
    for (int i = 0; i < taskStatus.size(); i++) {
        Object[] oneTask = taskStatus.get(i);
        int row = i + 1;
        data.setRowName(row, (String) oneTask[1]);
        double planTime = ((Number) oneTask[2]).doubleValue();
        double actualTime = ((Number) oneTask[3]).doubleValue();
        data.setData(row, 5, StringData.create(formatTime(planTime)));
        data.setData(row, 6, StringData.create(formatTime(actualTime)));
        if (planTime > 0) {
            double timeErr = (actualTime - planTime) / planTime;
            data.setData(row, 7, new DoubleData(timeErr));
        }
    }

    // load size data into the result set
    for (Object[] oneSize : sizeData) {
        int row = getRow(taskStatus, oneSize[0]);
        if (row != -1) {
            String units = (String) oneSize[1];
            StringData currentUnits = (StringData) data.getData(row, 1);
            if (currentUnits == null)
                data.setData(row, 1, StringData.create(units));
            else if (!units.equals(currentUnits.format()))
                continue;
            int col = "Plan".equals(oneSize[2]) ? 2 : 3;
            double size = ((Number) oneSize[3]).doubleValue();
            data.setData(row, col, new DoubleData(size));
        }
    }

    // go back and calculate size estimating errors
    boolean requireSize = parameters.containsKey("RequireSize");
    for (int i = data.numRows(); i > 0; i--) {
        DoubleData plan = (DoubleData) data.getData(i, 2);
        DoubleData actual = (DoubleData) data.getData(i, 3);
        if (hasValue(plan) && hasValue(actual)) {
            double sizeError = (actual.getDouble() - plan.getDouble())
                    / plan.getDouble();
            data.setData(i, 4, new DoubleData(sizeError));
        } else if (requireSize) {
            data.removeRow(i);
        }
    }

    // store the result set into the repository
    ListData l = new ListData();
    l.add(data);
    getDataContext().putValue(DATA_NAME, l);
}
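The getRow and hasValue helpers are not shown above. Based on how they are called, plausible sketches (assumptions, not the actual EstErrorScatterChartSnippet code) are:

    // Find the 1-based result set row whose key (column 0 of taskStatus)
    // matches the key from a size data row; mirrors the row = i + 1 mapping above.
    private int getRow(List<Object[]> taskStatus, Object key) {
        for (int i = 0; i < taskStatus.size(); i++)
            if (key.equals(taskStatus.get(i)[0]))
                return i + 1;
        return -1;
    }

    // Treat a size as present only if it is non-null and positive.
    private boolean hasValue(DoubleData d) {
        return d != null && d.getDouble() > 0;
    }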
Use of net.sourceforge.processdash.api.PDashQuery in project processdash by dtuma.
Class RecentReviews, method loadReviewData:
private void loadReviewData(HttpServletRequest req) throws IOException {
    // retrieve metadata about the team process
    PDashContext pdash = (PDashContext) req
            .getAttribute(PDashContext.REQUEST_ATTR);
    PDashData data = pdash.getData();
    String processID = data.getString(TeamDataConstants.PROCESS_ID);
    List<String> reviewPhases = data.getList("/" + processID
            + "/Review_Phase_List");

    // ensure the user has permission to view recent reviews
    PDashQuery query = pdash.getQuery();
    PersonFilter privacyFilter = new PersonFilter(PERMISSION, query);
    if (privacyFilter.isBlock()) {
        req.setAttribute("blocked", Boolean.TRUE);
        return;
    }

    // query the database for data about recently completed reviews
    String[] hql = getHql(req);
    List<Object[]> taskData = query.query(hql[0], processID, reviewPhases);
    List<Integer> planItemKeys = QueryUtils.pluckColumn(taskData, 0);
    List<Object[]> defectCounts = query.query(hql[1], planItemKeys);

    // build objects to hold the resulting data
    List<ReviewRow> reviews = new ArrayList<RecentReviews.ReviewRow>();
    for (Object[] oneRow : taskData) {
        if (privacyFilter.include(oneRow[8]))
            reviews.add(new ReviewRow(oneRow));
    }
    for (Object[] oneRow : defectCounts)
        storeDefectCounts(reviews, oneRow);
    req.setAttribute("reviews", reviews);

    // flag older reviews if necessary
    if (!reviews.isEmpty()) {
        Date newestDate = reviews.get(reviews.size() - 1).completionDate;
        long cutoff = newestDate.getTime() - 2 * DateUtils.WEEKS;
        boolean oneHidden = false;
        for (ReviewRow review : reviews) {
            if (review.setCutoff(cutoff))
                oneHidden = true;
        }
        req.setAttribute("hasHiddenRows", oneHidden);
    }
}
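QueryUtils.pluckColumn, used above to extract the plan item keys, is assumed to collect a single column from a list of Object[] rows. A minimal sketch of that assumed behavior (illustrative only, not the processdash implementation):

    // Sketch only: gather column `col` of each row into a new list; the
    // unchecked cast mirrors the List<Integer> usage above.
    // Requires java.util.List and ArrayList.
    @SuppressWarnings("unchecked")
    static <T> List<T> pluckColumnSketch(List<Object[]> rows, int col) {
        List<T> result = new ArrayList<T>();
        for (Object[] row : rows)
            result.add((T) row[col]);
        return result;
    }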