Use of net.sourceforge.processdash.data.repository.InvalidDatafileFormat in project processdash by dtuma.
From class ProcessDashboard, method registerEnvironmentalData:
/**
 * Publish read-only data elements describing the host operating system
 * under the phantom data prefix "//Env".
 */
private void registerEnvironmentalData() {
    try {
        String osPrefix = InternalSettings.getOSPrefix();
        Map osData = new HashMap();
        osData.put("OS.TYPE", new ImmutableStringData(osPrefix, false, true));
        osData.put("OS.IS_" + osPrefix.toUpperCase(), ImmutableDoubleData.TRUE);
        // anything other than the "windows" prefix is flagged as unix-like
        if (!"windows".equals(osPrefix))
            osData.put("OS.IS_UNIX", ImmutableDoubleData.TRUE);
        data.mountPhantomData("//Env", osData);
    } catch (InvalidDatafileFormat e) {
        logger.log(Level.WARNING, "Unexpected error", e);
    }
}
Use of net.sourceforge.processdash.data.repository.InvalidDatafileFormat in project processdash by dtuma.
From class TextMetricsFileImporter, method doImport:
/**
 * Imports metrics data from a plain-text dump file into the data repository
 * under this importer's prefix.
 *
 * The dump file holds one "name,value" definition per line (the first
 * character of each line is skipped), optionally followed by a section
 * introduced by "!" or "&lt;!--" containing XML defect data.
 *
 * @throws IOException if the file cannot be opened or read
 */
public void doImport() throws IOException {
InputStream inputStream = new FileInputStream(file);
try {
// the dump format is UTF-8 text
BufferedReader in = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
Map defns = new HashMap();
Map<String, String> taskLists = new HashMap();
String line, name, value;
int commaPos;
// Phase 1: read "name,value" definition lines until the trailing defect
// section (a line starting with "!" or "<!--") or end of file.
while ((line = in.readLine()) != null) {
if (line.startsWith("!") || line.startsWith("<!--"))
break;
commaPos = line.indexOf(',');
if (commaPos == -1) {
in.close();
// this isn't a valid dump file.
return;
}
// the first character of each line is skipped — presumably a format
// marker written by the exporter; TODO confirm against the export code.
name = line.substring(1, commaPos);
name = EscapeString.unescape(name, '\\', ",", "c");
value = line.substring(commaPos + 1);
// skip elements that would perpetuate an import/export loop.
if (name.indexOf(ExportManager.EXPORT_DATANAME) != -1)
continue;
// special handling for earned value schedules: collect them here and
// register them after the old task lists are closed below.
if (name.endsWith(XML_DATA_NAME_SUFFIX)) {
taskLists.put(name, value);
continue;
}
// (1) won't import "To Date" data, and
if (name.endsWith(" To Date"))
continue;
// (2) won't import data values that are zero or invalid.
if (value.equals("0.0") || value.equals("NaN") || value.equals("Infinity"))
continue;
defns.put(name, parseValue(value));
}
// discard any previously imported data registered under this prefix
// before mounting the freshly read data.
ImportedDefectManager.closeDefects(prefix);
ImportedTimeLogManager.getInstance().closeTimeLogs(prefix);
ImportedEVManager.getInstance().closeTaskLists(prefix);
// Phase 2: "line" still holds the line that ended phase 1. If we stopped
// on a "!" line, scan forward to the "<!--" marker; everything after it
// is XML defect data for the defect importer.
while (line != null && !line.startsWith("<!--")) line = in.readLine();
if (line != null) {
DefectImporterXMLv1 defImp = new DefectImporterXMLv1();
defImp.importDefectsFromStream(in, prefix);
}
// register the earned value task lists collected during phase 1.
for (Entry<String, String> e : taskLists.entrySet()) {
importEvTaskList(e.getKey(), e.getValue());
}
// Protect this data from being viewed via external http requests.
defns.put("_Password_", ImmutableDoubleData.READ_ONLY_ZERO);
try {
data.mountImportedData(prefix, defns);
} catch (InvalidDatafileFormat idf) {
// NOTE(review): exception silently swallowed — sibling code logs this
// exception type at WARNING level; consider doing the same here.
}
} finally {
inputStream.close();
}
}
Use of net.sourceforge.processdash.data.repository.InvalidDatafileFormat in project processdash by dtuma.
From class ExportManager, method storeCapabilityData:
/**
 * Advertise this dashboard's export capabilities by mounting a single
 * flag element under the phantom data prefix "//Export_Manager".
 *
 * @param data the repository to mount the capability data into
 */
private static void storeCapabilityData(DataRepository data) {
    Map supportFlags = Collections.singletonMap(
            "Supports_pdash_format", ImmutableDoubleData.TRUE);
    try {
        data.mountPhantomData("//Export_Manager", supportFlags);
    } catch (InvalidDatafileFormat e) {
        logger.log(Level.WARNING, "Unexpected error", e);
    }
}
Use of net.sourceforge.processdash.data.repository.InvalidDatafileFormat in project processdash by dtuma.
From class ArchiveMetricsFileImporter, method readAndProcessArchive:
/**
 * Opens the archive zip file, parses its XML manifest, and imports each
 * file listed there into the data repository under this importer's prefix.
 *
 * @throws IOException if the zip file or an entry cannot be read
 * @throws XmlPullParserException if the manifest is not well-formed, or is
 *         not an archive of the expected type
 */
private void readAndProcessArchive() throws IOException, XmlPullParserException {
shouldDeleteArchiveFileOnCompletion = false;
// NOTE(review): zipFile is a field and is not closed in this method —
// presumably closed by a caller or a later step; verify.
zipFile = new ZipFile(file);
XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
XmlPullParser parser = factory.newPullParser();
// NOTE(review): manifestIn is never explicitly closed; ZipFile.close()
// closes entry streams, so this relies on zipFile being closed elsewhere.
InputStream manifestIn = openEntry(zipFile, MANIFEST_FILE_NAME);
parser.setInput(manifestIn, ENCODING);
// the manifest's root element must declare the expected archive type
parser.nextTag();
parser.require(XmlPullParser.START_TAG, null, ARCHIVE_ELEM);
if (!FILE_TYPE_ARCHIVE.equals(parser.getAttributeValue(null, TYPE_ATTR)))
error("expected archive of type " + FILE_TYPE_ARCHIVE);
// read the export metadata: who exported the data, and when
parser.nextTag();
parser.require(XmlPullParser.START_TAG, null, EXPORTED_TAG);
owner = parser.getAttributeValue(null, OWNER_ATTR);
exportTimestamp = XMLUtils.parseDate(parser.getAttributeValue(null, WHEN_ATTR));
// discard any previously imported data registered under this prefix
ImportedDefectManager.closeDefects(prefix);
ImportedTimeLogManager.getInstance().closeTimeLogs(prefix);
ImportedEVManager.getInstance().closeTaskLists(prefix);
defns = new HashMap();
Map<String, String> packageIDs = new HashMap<String, String>();
// walk the rest of the manifest: collect package versions and the source
// dataset ID, and import each <file> entry as it is encountered.
while (parser.next() != XmlPullParser.END_DOCUMENT) {
if (parser.getEventType() == XmlPullParser.START_TAG) {
if (PACKAGE_ELEM.equals(parser.getName())) {
String id = parser.getAttributeValue(null, PACKAGE_ID_ATTR);
String version = parser.getAttributeValue(null, VERSION_ATTR);
packageIDs.put(id, version);
} else if (FROM_DATASET_TAG.equals(parser.getName())) {
srcDatasetID = parser.getAttributeValue(null, FROM_DATASET_ID_ATTR);
} else if (FILE_ELEM.equals(parser.getName())) {
String name = parser.getAttributeValue(null, FILE_NAME_ATTR);
String type = parser.getAttributeValue(null, TYPE_ATTR);
String version = parser.getAttributeValue(null, VERSION_ATTR);
readFile(zipFile, name, type, version);
}
}
}
// make a note of the individual who exported this data
recordKnownPerson();
// Protect this data from being viewed via external http requests.
defns.put("_Password_", ImmutableDoubleData.READ_ONLY_ZERO);
// Save metadata about the import
addImportMetadata(packageIDs);
try {
data.mountImportedData(prefix, defns);
} catch (InvalidDatafileFormat idf) {
// NOTE(review): exception silently swallowed — sibling code logs this
// exception type at WARNING level; consider doing the same here.
}
}
Aggregations