
Example 21 with DataRepository

use of net.sourceforge.processdash.data.repository.DataRepository in project processdash by dtuma.

the class MigrationToolTeam method convert.

public void convert() throws Exception {
    checkConversionPreconditions();
    File dataFile = getRootDataFile();
    makeExtraBackup();
    System.out.println("Starting conversion of team project '" + projectPath + "'");
    updateSettingsXmlFile();
    convertHierarchy();
    DataRepository data = ctx.getData();
    data.closeDatafile(projectPath);
    convertDatafileContents(dataFile);
    data.openDatafile(projectPath, dataFile.getPath());
    System.out.println("Finished converting team project '" + projectPath + "'");
}
Also used : DataRepository(net.sourceforge.processdash.data.repository.DataRepository) File(java.io.File)
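
The key detail in the conversion above is the close-then-reopen sequence: the project's datafile is released before it is rewritten on disk, then reopened so the repository picks up the converted contents. A minimal sketch of that pattern, assuming a DataRepository is already in hand; the rewriteOnDisk helper is a hypothetical stand-in for convertDatafileContents:

import java.io.File;
import net.sourceforge.processdash.data.repository.DataRepository;

public class DatafileRewriteSketch {

    /** Release a project's datafile, rewrite it on disk, then reopen it. */
    public static void rewrite(DataRepository data, String projectPath,
            File dataFile) throws Exception {
        // detach the repository from the file so the edits below are not lost
        data.closeDatafile(projectPath);
        // rewriteOnDisk is a hypothetical placeholder for the actual file edit
        rewriteOnDisk(dataFile);
        // reattach, so the repository reloads the converted contents
        data.openDatafile(projectPath, dataFile.getPath());
    }

    private static void rewriteOnDisk(File dataFile) {
        // edit the file contents here
    }
}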

Example 22 with DataRepository

use of net.sourceforge.processdash.data.repository.DataRepository in project processdash by dtuma.

the class WorkflowScriptSource method getScripts.

public List<ScriptID> getScripts(String path) {
    // Get the list of workflow URL specs for the enclosing team project.
    DataRepository data = context.getData();
    StringBuffer projectPathBuf = new StringBuffer(path);
    ListData urlSpecList = ListData.asListData(data.getInheritableValue(projectPathBuf, dataName));
    // if that list is missing or empty, this team project doesn't have any
    // associated workflow URLs.
    if (urlSpecList == null || urlSpecList.test() == false)
        return null;
    // construct a list of path segments we should examine.  The first
    // segment is the path to the team project itself. Then we include
    // the name of each nested component or subtask within the project
    // on the path to the currently active task.
    String projectPath = projectPathBuf.toString();
    List<String> pathSegments = new ArrayList<String>();
    pathSegments.add(projectPath);
    if (path.length() > projectPath.length() + 1) {
        String relSubpath = path.substring(projectPath.length() + 1);
        pathSegments.addAll(Arrays.asList(relSubpath.split("/")));
    }
    // find the list of workflow scripts that are associated with the
    // currently active task and its ancestors.
    LinkedHashSet result = collectScripts(data, urlSpecList, pathSegments);
    if (result.isEmpty())
        return null;
    // for efficiency purposes, we built the list in backwards order.
    // reverse it so the URLs appear in the order the user wrote them.
    ArrayList<ScriptID> listResult = new ArrayList<ScriptID>(result);
    Collections.reverse(listResult);
    return listResult;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) ArrayList(java.util.ArrayList) ScriptID(net.sourceforge.processdash.process.ScriptID) DataRepository(net.sourceforge.processdash.data.repository.DataRepository) ListData(net.sourceforge.processdash.data.ListData)
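
A subtle point above is that getInheritableValue mutates the StringBuffer it receives: it starts out as the active task path and, as the surrounding code relies on, comes back trimmed to the prefix where the inherited element was defined, which is how the method recovers the team project root. A minimal sketch of that lookup pattern, assuming a DataRepository is already available; the element name "Project_Workflow_URLs" is illustrative, not taken from any processdash template:

import net.sourceforge.processdash.data.ListData;
import net.sourceforge.processdash.data.repository.DataRepository;

public class InheritableLookupSketch {

    /** Find an inherited list element, plus the prefix that defines it. */
    public static ListData findInherited(DataRepository data, String activeTaskPath) {
        StringBuffer prefix = new StringBuffer(activeTaskPath);
        // "Project_Workflow_URLs" is an illustrative element name
        ListData urls = ListData.asListData(
                data.getInheritableValue(prefix, "Project_Workflow_URLs"));
        // on return, prefix has been trimmed to the path where the element
        // was actually defined (the enclosing project root, for example)
        System.out.println("Defined at: " + prefix);
        return (urls != null && urls.test()) ? urls : null;
    }
}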

Example 23 with DataRepository

use of net.sourceforge.processdash.data.repository.DataRepository in project processdash by dtuma.

the class SizeInventoryForm method findAndAppendObjectNumbers.

protected boolean findAndAppendObjectNumbers(StringBuffer url, String fullPath, boolean addExtraRows) {
    boolean addedNumber = false;
    DataRepository data = getDataRepository();
    int rowNum, lastPopulatedRow, i;
    rowNum = lastPopulatedRow = -1;
    ROW: while (true) {
        rowNum++;
        for (i = dataElems.length; i-- > 0; ) {
            String dataName = StringUtils.findAndReplace(dataElems[i], "NUM", String.valueOf(rowNum));
            dataName = DataRepository.createDataName(fullPath, dataName);
            if (data.getValue(dataName) != null) {
                lastPopulatedRow = rowNum;
                url.append("&n=").append(String.valueOf(rowNum));
                addedNumber = true;
                if (highlightTimestamp > 0) {
                    String rowPrefix = DataRepository.chopPath(dataName);
                    if (hasValidHighlight(rowPrefix))
                        url.append("&highlight_").append(String.valueOf(rowNum));
                }
                continue ROW;
            }
        }
        // after more than 20 consecutive empty rows, we can safely conclude
        // that there is no more data.
        if (rowNum - lastPopulatedRow > 20)
            break ROW;
    }
    if (addExtraRows) {
        addedNumber = true;
        for (i = NUM_EXTRA_ROWS; i-- > 0; ) url.append("&n=").append(String.valueOf(++lastPopulatedRow));
    }
    return addedNumber;
}
Also used : DataRepository(net.sourceforge.processdash.data.repository.DataRepository)
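
The scan above works row by row: each element name template contains the token NUM, which is replaced with the candidate row number before the full data name is built, and the scan stops once more than 20 consecutive rows come back empty. A minimal sketch of the per-row check, assuming an element name template such as "Sized_Objects/NUM/Description" (illustrative, not taken from any template file):

import net.sourceforge.processdash.data.repository.DataRepository;

public class RowScanSketch {

    /** @return the row prefix if any element of the row is populated, else null. */
    public static String populatedRowPrefix(DataRepository data, String fullPath,
            String[] dataElems, int rowNum) {
        for (String elem : dataElems) {
            // substitute the row number into the element name template
            String name = elem.replace("NUM", String.valueOf(rowNum));
            String dataName = DataRepository.createDataName(fullPath, name);
            if (data.getValue(dataName) != null)
                // chopPath trims the data name back to the enclosing prefix,
                // which is the per-row prefix in this usage
                return DataRepository.chopPath(dataName);
        }
        return null;
    }
}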

Example 24 with DataRepository

use of net.sourceforge.processdash.data.repository.DataRepository in project processdash by dtuma.

the class SizeEstimatingTemplate method hasValue.

/** @return true if the data element named prefix/name is non-null and tests true. */
protected boolean hasValue(String name) {
    String prefix = (String) env.get("PATH_TRANSLATED");
    DataRepository data = getDataRepository();
    String dataName = DataRepository.createDataName(prefix, name);
    SimpleData value = data.getSimpleValue(dataName);
    return (value != null && value.test());
}
Also used : DataRepository(net.sourceforge.processdash.data.repository.DataRepository) SimpleData(net.sourceforge.processdash.data.SimpleData)
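
Outside a servlet, the same check is just a prefix-plus-name lookup against the repository. A usage sketch, assuming the caller supplies the prefix explicitly rather than reading PATH_TRANSLATED; the element name "Estimated Size" is illustrative:

import net.sourceforge.processdash.data.SimpleData;
import net.sourceforge.processdash.data.repository.DataRepository;

public class HasValueSketch {

    /** @return true if prefix/name holds a value that is non-null and tests true. */
    public static boolean hasValue(DataRepository data, String prefix, String name) {
        String dataName = DataRepository.createDataName(prefix, name);
        SimpleData value = data.getSimpleValue(dataName);
        return (value != null && value.test());
    }

    public static void demo(DataRepository data, String prefix) {
        // "Estimated Size" is an illustrative element name
        if (hasValue(data, prefix, "Estimated Size"))
            System.out.println("size estimate present for " + prefix);
    }
}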

Example 25 with DataRepository

use of net.sourceforge.processdash.data.repository.DataRepository in project processdash by dtuma.

the class DataExtractionScaffold method init.

public void init() throws Exception {
    DashController.setDataDirectory(dataDirectory);
    String dataDirPath = dataDirectory.getAbsolutePath() + System.getProperty("file.separator");
    // load and initialize settings
    String settingsFilename = dataDirPath + InternalSettings.getSettingsFilename();
    InternalSettings.initialize(settingsFilename);
    InternalSettings.setReadOnly(true);
    InternalSettings.set(SCAFFOLD_MODE_SETTING, "true");
    InternalSettings.set("templates.disableSearchPath", "true");
    InternalSettings.set("export.disableAutoExport", "true");
    InternalSettings.set("slowNetwork", "true");
    for (Map.Entry<String, String> e : extraSettings.entrySet()) {
        InternalSettings.set(e.getKey(), e.getValue());
    }
    extraSettings = null;
    // reset the template loader search path
    TemplateLoader.resetTemplateURLs();
    // setup the defect analyzer
    DefectAnalyzer.setDataDirectory(dataDirPath);
    // possibly initialize external resource mappings
    if (useExternalResourceMappingFile)
        ExternalResourceManager.getInstance().initializeMappings(dataDirectory, ExternalResourceManager.INITIALIZATION_MODE_ARCHIVE);
    // create the data repository.
    data = new DataRepository();
    DashHierarchy templates = TemplateLoader.loadTemplates(data);
    data.setDatafileSearchURLs(TemplateLoader.getTemplateURLs());
    // open and load the user's work breakdown structure
    hierarchy = new DashHierarchy(null);
    String hierFilename = dataDirPath + Settings.getFile("stateFile");
    hierarchy.loadXML(hierFilename, templates);
    data.setNodeComparator(hierarchy);
    // create the time log
    timeLog = new WorkingTimeLog(dataDirectory);
    DashboardTimeLog.setDefault(timeLog);
    // open all the datafiles that were specified in the properties file.
    data.startInconsistency();
    openDataFiles(dataDirPath, PropertyKey.ROOT);
    data.openDatafile("", dataDirPath + "global.dat");
    // import data files
    DataImporter.setDynamic(false);
    ImportManager.init(data);
    data.finishInconsistency();
    // configure the task dependency resolver
    EVTaskDependencyResolver.init(this);
    EVTaskDependencyResolver.getInstance().setDynamic(false);
    if (createWebServer) {
        DashboardURLStreamHandlerFactory.disable();
        try {
            webServer = new WebServer();
            webServer.setDashboardContext(this);
            webServer.setData(data);
            webServer.setProps(hierarchy);
            webServer.setRoots(TemplateLoader.getTemplateURLs());
            WebServer.setOutputCharset(getWebCharset());
        } catch (IOException ioe) {
            // the embedded web server could not be started; ignore and continue
        }
    }
}
Also used : WebServer(net.sourceforge.processdash.net.http.WebServer) DashHierarchy(net.sourceforge.processdash.hier.DashHierarchy) DataRepository(net.sourceforge.processdash.data.repository.DataRepository) IOException(java.io.IOException) HashMap(java.util.HashMap) Map(java.util.Map) WorkingTimeLog(net.sourceforge.processdash.log.time.WorkingTimeLog)
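
The init method above also shows the minimal bootstrap order for a standalone repository: create it, register the process templates, point it at the template URLs, then open datafiles inside a startInconsistency/finishInconsistency window so recalculation is deferred until loading completes. A condensed sketch of just that core, assuming the data directory already contains a global.dat; the TemplateLoader import path follows the project's usual package layout and is assumed here:

import net.sourceforge.processdash.data.repository.DataRepository;
import net.sourceforge.processdash.templates.TemplateLoader;

public class RepositoryBootstrapSketch {

    /** Stand up a bare DataRepository for read-only data extraction. */
    public static DataRepository bootstrap(String dataDirPath) throws Exception {
        DataRepository data = new DataRepository();
        // register process templates, then tell the repository where to find them
        TemplateLoader.loadTemplates(data);
        data.setDatafileSearchURLs(TemplateLoader.getTemplateURLs());
        // defer recalculation while datafiles are opened
        data.startInconsistency();
        data.openDatafile("", dataDirPath + "global.dat");
        data.finishInconsistency();
        return data;
    }
}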

Aggregations

DataRepository (net.sourceforge.processdash.data.repository.DataRepository): 37 usages
SimpleData (net.sourceforge.processdash.data.SimpleData): 11 usages
DashHierarchy (net.sourceforge.processdash.hier.DashHierarchy): 4 usages
IOException (java.io.IOException): 3 usages
ListData (net.sourceforge.processdash.data.ListData): 3 usages
File (java.io.File): 2 usages
ArrayList (java.util.ArrayList): 2 usages
HashMap (java.util.HashMap): 2 usages
Map (java.util.Map): 2 usages
StringData (net.sourceforge.processdash.data.StringData): 2 usages
PropertyKey (net.sourceforge.processdash.hier.PropertyKey): 2 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1 usage
InputStream (java.io.InputStream): 1 usage
StringReader (java.io.StringReader): 1 usage
StringWriter (java.io.StringWriter): 1 usage
URL (java.net.URL): 1 usage
URLConnection (java.net.URLConnection): 1 usage
Collection (java.util.Collection): 1 usage
HashSet (java.util.HashSet): 1 usage
Iterator (java.util.Iterator): 1 usage