Use of net.sourceforge.processdash.util.RobustFileOutputStream in project processdash by dtuma.
Class DataRepository, method saveDatafile:
/** Saves a set of data to the appropriate data file.
 *
 * @param datafile the datafile to save
 */
private void saveDatafile(DataFile datafile) {
    if (datafile == null || datafile.file == null || datafile.isRemoved || saveDisabled)
        return;

    // this flag should stay false until we are absolutely certain
    // that we have successfully saved the datafile.
    boolean saveSuccessful = false;

    // synchronize on the datafile so two different threads cannot save
    // the same datafile concurrently.
    synchronized (datafile) {
        try {
            // debug("saveDatafile");
            Set valuesToSave = new TreeSet();

            // if the data file has an include statement, look up the associated
            // default values defined by the included file.
            Map defaultValues = datafile.inheritedDefinitions;
            if (defaultValues == null)
                defaultValues = Collections.EMPTY_MAP;

            // optimistically mark the datafile as "clean" at the beginning of
            // the save operation. This way, if the datafile is modified
            // during the save operation, the dirty changes will take effect,
            // and the datafile will be saved again in the future.
            datafile.dirtyCount = 0;

            int prefixLength = datafile.prefix.length() + 1;

            for (Iterator i = getInternalKeys(); i.hasNext(); ) {
                String name = (String) i.next();
                DataElement element = (DataElement) data.get(name);

                // if the element no longer exists, if it belongs to a different
                // DataFile, or if it has a default value, skip it.
                if (element == null || element.datafile != datafile
                        || element.isDefaultValue())
                    continue;

                SaveableData value = element.getValue();
                String valStr = null;
                boolean editable = true;
                if (value != null) {
                    valStr = value.saveString();
                    editable = value.isEditable();
                } else if (element.isDefaultName()) {
                    // store the fact that the default is overwritten with null
                    valStr = "null";
                }
                if (valStr == null || valStr.length() == 0)
                    continue;

                name = name.substring(prefixLength).replace('=', EQUALS_SIGN_REPL);
                valuesToSave.add(name + (editable ? "=" : "==") + valStr);
            }

            // Write the saved values
            RobustFileOutputStream rfos;
            BufferedWriter out;

            try {
                rfos = new RobustFileOutputStream(datafile.file);
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Encountered exception while opening "
                        + datafile.file.getPath() + "; save aborted", e);
                return;
            }

            try {
                String encoding = getDatasetEncoding();
                out = new BufferedWriter(new OutputStreamWriter(
                        new NullByteWatcher(rfos), encoding));

                // if the data file has an include statement, write it to the file
                if (datafile.inheritsFrom != null) {
                    out.write(includeTag + datafile.inheritsFrom);
                    out.newLine();
                }

                // If the data file has a prefix, write it as a comment to the file
                if (datafile.prefix != null && datafile.prefix.length() > 0) {
                    out.write("= Data for " + datafile.prefix);
                    out.newLine();
                }

                for (Iterator i = valuesToSave.iterator(); i.hasNext(); ) {
                    out.write((String) i.next());
                    out.newLine();
                }

            } catch (IOException e) {
                logger.log(Level.SEVERE, "Encountered exception while writing to "
                        + datafile.file.getPath() + "; save aborted", e);
                try {
                    rfos.abort();
                } catch (Exception ex) {
                }
                return;
            }

            try {
                // Close output file
                out.flush();
                out.close();
                saveSuccessful = true;
                System.err.println("Saved " + datafile.file.getPath());
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Encountered exception while closing "
                        + datafile.file.getPath() + "; save aborted", e);
                try {
                    rfos.abort();
                } catch (Exception ex) {
                }
            }

        } finally {
            // if we couldn't successfully save the datafile, mark it as dirty.
            if (!saveSuccessful)
                datafile.dirtyCount = 1000;
        }
    }
}
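Stripped of the DataRepository specifics, the method above follows a simple contract: write everything through the RobustFileOutputStream, and call abort() on any failure so the destination file is never left half-written. The helper below is a minimal, hypothetical sketch of that pattern (the method name and arguments are invented for illustration); it assumes, as the abort() calls above imply, that RobustFileOutputStream only replaces the destination file once the stream is closed successfully.

private static void writeLinesRobustly(File dest, List<String> lines) throws IOException {
    RobustFileOutputStream rfos = new RobustFileOutputStream(dest);
    BufferedWriter out = new BufferedWriter(new OutputStreamWriter(rfos, "UTF-8"));
    try {
        for (String line : lines) {
            out.write(line);
            out.newLine();
        }
        // closing the writer closes the robust stream, which is the point
        // at which the destination file is actually replaced
        out.flush();
        out.close();
    } catch (IOException e) {
        // discard the partial output and leave the original file untouched
        try {
            rfos.abort();
        } catch (Exception ex) {
        }
        throw e;
    }
}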
Use of net.sourceforge.processdash.util.RobustFileOutputStream in project processdash by dtuma.
Class WBSReplaceAction, method maybeAddSnapshotToSrcZip:
/**
 * Our WBS ZIP files contain historical snapshots that can be used as the
 * parent in a 3-way merge operation. However, these snapshots are not
 * created on every save. Since we just restored data from one WBS to
 * another, we've created a new branch point where these two WBSes looked
 * identical. Make sure our ZIP file contains a snapshot of what the
 * data looked like at this branch point.
 */
private void maybeAddSnapshotToSrcZip(File srcZip, File replacementDir) throws IOException {
    // get the unique ID of the change we just used in the replacement
    ChangeHistory changeHistory = new ChangeHistory(replacementDir);
    Entry lastChange = changeHistory.getLastEntry();
    if (lastChange == null)
        return;

    // check to see if the source ZIP already included a snapshot
    // corresponding to that change
    String changeFileName = HISTORY_SUBDIR + "/" + lastChange.getUid() + ".zip";
    File snapshotFile = new File(replacementDir, changeFileName);
    if (snapshotFile.isFile())
        return;

    // Make a list of the files that should be included in the snapshot
    List<String> newSnapshotFiles = FileUtils.listRecursively(replacementDir,
            DashboardBackupFactory.WBS_FILE_FILTER);
    if (newSnapshotFiles == null || newSnapshotFiles.isEmpty())
        return;

    // If we renamed the settings.xml file, change the name back before
    // we rebuild the ZIP file.
    maybeRename(replacementDir, "X" + SETTINGS_FILENAME, SETTINGS_FILENAME);

    // rebuild the source ZIP file and add a new snapshot
    RobustFileOutputStream rOut = new RobustFileOutputStream(srcZip);
    try {
        ZipOutputStream zipOut = new ZipOutputStream(new BufferedOutputStream(rOut));

        // add the existing file contents back to the ZIP
        List<String> existingFiles = FileUtils.listRecursively(replacementDir, null);
        addToZip(replacementDir, existingFiles, zipOut);

        // create a new snapshot and add it to the ZIP
        zipOut.putNextEntry(new ZipEntry(changeFileName));
        ZipOutputStream historyZip = new ZipOutputStream(zipOut);
        addToZip(replacementDir, newSnapshotFiles, historyZip);
        historyZip.finish();
        zipOut.closeEntry();

        zipOut.finish();
        zipOut.flush();
    } catch (IOException ioe) {
        rOut.abort();
        throw ioe;
    }
    rOut.close();
}
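One detail worth calling out is the nested ZipOutputStream: the history snapshot is itself a ZIP file, and it is written straight into an entry of the outer archive rather than into a temporary file. Calling finish() instead of close() on the inner stream flushes the embedded archive while leaving the shared outer stream open. The sketch below isolates that nesting; the helper name and the byte-copy loop are invented for illustration and are not part of WBSReplaceAction.

private static void writeNestedZipEntry(ZipOutputStream outerZip, String entryName,
        File baseDir, List<String> fileNames) throws IOException {
    outerZip.putNextEntry(new ZipEntry(entryName));

    // the inner ZipOutputStream writes its archive into the current entry
    ZipOutputStream innerZip = new ZipOutputStream(outerZip);
    byte[] buf = new byte[8192];
    for (String name : fileNames) {
        innerZip.putNextEntry(new ZipEntry(name));
        InputStream in = new FileInputStream(new File(baseDir, name));
        try {
            int bytesRead;
            while ((bytesRead = in.read(buf)) != -1)
                innerZip.write(buf, 0, bytesRead);
        } finally {
            in.close();
        }
        innerZip.closeEntry();
    }

    // finish() flushes the inner archive without closing the shared stream
    innerZip.finish();
    outerZip.closeEntry();
}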
Use of net.sourceforge.processdash.util.RobustFileOutputStream in project processdash by dtuma.
Class TeamServerPointerFile, method writeFile:
private void writeFile() throws IOException {
    try {
        Writer out = new OutputStreamWriter(new BufferedOutputStream(
                new RobustFileOutputStream(file, false)), "UTF-8");
        out.write("<?xml version='1.0' encoding='UTF-8'?>\r\n\r\n");
        out.write("<" + SERVER_DOCUMENT_TAG + ">\r\n");
        for (ServerEntry se : serverEntries.values()) {
            se.getAsXML(out);
        }
        out.write("</" + SERVER_DOCUMENT_TAG + ">\r\n");
        out.close();
        entriesAsOf = file.lastModified();
    } catch (IOException ioe) {
        IOException ioee = new IOException("Could not write to file " + file.getPath());
        ioee.initCause(ioe);
        throw ioee;
    }
}
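Note that, unlike the saveDatafile example above, the catch block here does not abort the robust stream; the IOException is simply wrapped with the destination path, so the failure is easier to diagnose, and rethrown to the caller.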
Use of net.sourceforge.processdash.util.RobustFileOutputStream in project processdash by dtuma.
Class TeamProjectSetupWizard, method closeOldProjectWbs:
private void closeOldProjectWbs(File oldProjectWbsDir) throws IOException {
    // read the current settings from the user settings file
    File f = new File(oldProjectWbsDir, "user-settings.ini");
    Properties p = new Properties();
    if (f.isFile()) {
        InputStream in = new FileInputStream(f);
        p.load(in);
        in.close();
    }

    // add a "project closed" setting, and resave
    p.put("projectClosed", "true");
    RobustFileOutputStream out = new RobustFileOutputStream(f);
    p.store(out, null);
    out.close();

    // add the "projectClosed" attr to the root project tag of projDump.xml
    f = new File(oldProjectWbsDir, "projDump.xml");
    if (f.isFile()) {
        InputStream in = new BufferedInputStream(new FileInputStream(f));
        out = new RobustFileOutputStream(f);

        // copy bytes up through the opening of the initial "project" tag
        copyBytesThrough(in, out, "<project", -1);

        // copy bytes until we find an existing projectClosed attribute or reach
        // the ">" char. If the latter, write the projectClosed attribute.
        if (copyBytesThrough(in, out, "projectClosed=", '>') == false)
            out.write(" projectClosed='true'>".getBytes("utf-8"));

        // copy the rest of the file verbatim
        FileUtils.copyFile(in, out);

        in.close();
        out.close();
    }
}
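The projDump.xml rewrite reads from a FileInputStream on the same file that the RobustFileOutputStream is replacing. This in-place rewrite works on the assumption, consistent with the abort() semantics seen in the earlier examples, that RobustFileOutputStream stages its output separately and only replaces the destination when it is closed, so the input stream keeps reading the original content throughout the copy.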
Use of net.sourceforge.processdash.util.RobustFileOutputStream in project processdash by dtuma.
Class TeamProjectSetupWizard, method writeMergedUserDump:
private void writeMergedUserDump(File srcDir, File destDir) throws IOException {
    File destFile = new File(destDir, "relaunchDump.xml");
    OutputStream out = new BufferedOutputStream(new RobustFileOutputStream(destFile));
    out.write(MERGED_DUMP_HEADER.getBytes("utf-8"));
    for (File f : srcDir.listFiles()) {
        if (f.getName().toLowerCase().endsWith("-data.pdash"))
            copyUserDumpDataFromPdash(f, out);
    }
    out.write(MERGED_DUMP_FOOTER.getBytes("utf-8"));
    out.close();
}