Use of net.sourceforge.processdash.data.ListData in project processdash by dtuma.
The class HierarchySynchronizer, method saveWorkflowUrlData.
private void saveWorkflowUrlData() {
    // Load the workflow XML dump; if unavailable, URL support is off.
    // NOTE(review): URL data appears to be omitted from both dump files
    // when changes are saved to the WBS — confirm against the WBS editor.
    workflowXml = openWorkflowXml();
    if (workflowXml == null)
        return;

    workflowURLsSupported = true;

    // Workflow ID data is scrubbed starting in version 4.2.0, and is
    // the best source of reliable mapping information. In earlier
    // versions, we map URLs based on workflow step names instead.
    boolean useWorkflowIds = DashPackage.compareVersions(dumpFileVersion,
            "4.2.0") >= 0;

    // Gather the workflow URLs and publish them into the data repository
    // under the project's workflow-URLs data element.
    ListData urlList = new ListData();
    collectWorkflowUrls(urlList, workflowXml, null, null, useWorkflowIds);
    String urlDataName = processID + " /"
            + TeamDataConstants.PROJECT_WORKFLOW_URLS_DATA_NAME;
    forceData(projectPath, urlDataName, urlList);

    collectWorkflowIDs(workflowXml);
}
Use of net.sourceforge.processdash.data.ListData in project processdash by dtuma.
The class ProbeItemSizeHistogram, method buildData.
@Override
protected void buildData() {
    // Builds a one-column result set tallying PROBE parts into buckets
    // by relative size category.
    int[] histogram = new int[SIZE_NAMES.size()];
    DataContext data = getDataContext();
    ListData partsAdditions = ListData.asListData(data.getSimpleValue(PARTS_LIST));
    if (partsAdditions != null) {
        for (int i = 0; i < partsAdditions.size(); i++) {
            String path = asString(partsAdditions.get(i));
            double itemCount = asDoubleData(data.getSimpleValue(path + METHODS));
            String relSize = asString(data.getSimpleValue(path + REL_SIZE));

            // skip rows that have neither a relative size nor any items
            if (!StringUtils.hasValue(relSize) && itemCount == 0)
                continue;

            // unrecognized size names fall into the "uncategorized" bucket
            int relSizePos = SIZE_NAMES.indexOf(relSize);
            if (relSizePos == -1)
                relSizePos = UNCATEGORIZED_POS;

            // a part with a relative size but no item count still counts once
            if (itemCount == 0)
                itemCount = 1;

            // BUG FIX: accumulate the part's item count instead of
            // incrementing by one. The previous "++" ignored itemCount,
            // which made the "itemCount = 1" adjustment above dead code
            // and undercounted parts containing more than one item.
            histogram[relSizePos] += (int) itemCount;
        }
    }

    // if no items were present in the final "[BLANK]" category, don't show it.
    int len = histogram.length;
    if (histogram[len - 1] == 0)
        len--;

    ResultSet result = new ResultSet(len, 1);
    result.setColName(0, "");
    result.setColName(1, "Total # Items");
    for (int i = 0; i < len; i++) {
        result.setRowName(i + 1, SIZE_NAMES.get(i));
        result.setData(i + 1, 1, new DoubleData(histogram[i]));
    }
    this.data = result;
}
Use of net.sourceforge.processdash.data.ListData in project processdash by dtuma.
The class McfSizeMetricApiHandler, method getSizeUnits.
/**
* Retrieve the size units from the input params.
*
* @param request
* the request we are processing
* @return the size metric
*/
private StringData getSizeUnits(SizeMetricApiRequestData request) {
    String paramVal = (String) request.params.get("sizeUnits");

    // a missing parameter, or any LOC alias, maps to new & changed LOC
    if (!StringUtils.hasValue(paramVal)
            || LOC.equalsIgnoreCase(paramVal)
            || NC_LOC.equalsIgnoreCase(paramVal))
        return StringData.create(NC_LOC);

    if (DLD_LINES.equalsIgnoreCase(paramVal))
        return StringData.create(DLD_LINES);

    // otherwise, look for a case-insensitive match among the custom size
    // metrics defined by this project's process
    String processID = getStringData(request, "Process_ID");
    ListData allowedUnits = ListData.asListData(getData(request,
            "/" + processID + "/Custom_Size_Metric_List"));
    if (allowedUnits == null)
        throw badRequest(UNSUPPORTED_TARGET_PATH, "Cannot store size ",
                "data; could not identify the process for task '",
                request.targetPath, "'");

    for (int pos = 0; pos < allowedUnits.size(); pos++) {
        String candidate = allowedUnits.get(pos).toString();
        if (candidate.equalsIgnoreCase(paramVal))
            return StringData.create(candidate);
    }

    throw badRequest(BAD_PARAM, "The sizeUnits parameter '", paramVal,
            "' does not name a valid size metric in the '", processID,
            "' process.").putAttr("param", "sizeUnits");
}
Use of net.sourceforge.processdash.data.ListData in project processdash by dtuma.
The class OpenDocument, method findFile.
/** Find a file in the document list.
* @param name the name of the file to find
* @return the XML element corresponding to the named document.
*/
protected Element findFile(String name) throws IOException {
// Look for an inheritable value for the FILE_XML element in the
// data repository.
DataRepository data = getDataRepository();
String pfx = getPrefix();
if (pfx == null)
pfx = "/";
StringBuffer prefix = new StringBuffer(pfx);
ListData list;
Element result = null;
SaveableData val;
// Walk up the hierarchy: the first lookup uses the full prefix; each
// subsequent iteration calls chop(prefix), which (presumably) shortens
// the shared StringBuffer by one path component as a side effect before
// re-querying -- TODO confirm chop()'s exact contract. Statement order
// here is load-bearing: chop() runs in the loop-update expression.
for (val = data.getInheritableValue(prefix, FILE_XML_DATANAME); val != null; val = data.getInheritableValue(chop(prefix), FILE_XML_DATANAME)) {
// Unwrap a non-simple value to its simple form. (The val != null
// test is redundant here; the loop condition already guarantees it.)
if (val != null && !(val instanceof SimpleData))
val = val.getSimpleValue();
// Normalize the value to a ListData: a StringData converts to a
// list; anything else is unusable at this level of the hierarchy.
if (val instanceof StringData)
list = ((StringData) val).asList();
else if (val instanceof ListData)
list = (ListData) val;
else
list = null;
// Each list element is a URL of a document-tree XML file; search
// each tree for a document with the requested name.
if (list != null)
for (int i = 0; i < list.size(); i++) {
String url = (String) list.get(i);
Document docList = getDocumentTree(url);
if (docList != null) {
result = (new FileFinder(name, docList)).file;
if (result != null)
return result;
}
}
// Stop once the prefix has been chopped down to the empty string.
if (prefix.length() == 0)
break;
}
// No matching document was found anywhere up the hierarchy.
return null;
}
Use of net.sourceforge.processdash.data.ListData in project processdash by dtuma.
The class RollupDatasetSelectElem, method writeContents.
protected void writeContents() throws IOException {
    // Writes an HTML table with one fragment per applicable rollup ID.
    DataRepository data = getDataRepository();
    if (data == null)
        return;
    init(data);

    // Get the [Use_Rollup] data element for the current project.
    // If it is null, return immediately.
    String prefix = getPrefix();
    if (prefix == null)
        return;
    String useRollupName = DataRepository.createDataName(prefix, "Use_Rollup");
    ListData rollupIDs = getList(data, useRollupName);
    if (rollupIDs == null)
        return;

    // Emit the table start tag just before the first nonempty fragment,
    // and the end tag only if at least one fragment was written.
    boolean wroteAnyRow = false;
    for (int pos = 0; pos < rollupIDs.size(); pos++) {
        String fragment = getFragment(data, rollupIDs.get(pos).toString());
        if (fragment != null && fragment.length() > 0) {
            if (!wroteAnyRow)
                out.print(TABLE_START);
            out.print(fragment);
            wroteAnyRow = true;
        }
    }
    if (wroteAnyRow)
        out.print(TABLE_END);
}
Aggregations