use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 by mzmine.
the class ManualPickerTask method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.finest("Starting manual peak picker, RT: " + rtRange + ", m/z: " + mzRange);
// Calculate total number of scans to process
for (RawDataFile dataFile : dataFiles) {
int[] scanNumbers = dataFile.getScanNumbers(1, rtRange);
totalScans += scanNumbers.length;
}
// Find peak in each data file
for (RawDataFile dataFile : dataFiles) {
ManualPeak newPeak = new ManualPeak(dataFile);
boolean dataPointFound = false;
int[] scanNumbers = dataFile.getScanNumbers(1, rtRange);
for (int scanNumber : scanNumbers) {
if (isCanceled())
return;
// Get next scan
Scan scan = dataFile.getScan(scanNumber);
// Find most intense m/z peak
DataPoint basePeak = ScanUtils.findBasePeak(scan, mzRange);
if (basePeak != null) {
if (basePeak.getIntensity() > 0)
dataPointFound = true;
newPeak.addDatapoint(scan.getScanNumber(), basePeak);
} else {
final double mzCenter = (mzRange.lowerEndpoint() + mzRange.upperEndpoint()) / 2.0;
DataPoint fakeDataPoint = new SimpleDataPoint(mzCenter, 0);
newPeak.addDatapoint(scan.getScanNumber(), fakeDataPoint);
}
processedScans++;
}
if (dataPointFound) {
newPeak.finalizePeak();
if (newPeak.getArea() > 0)
peakListRow.addPeak(dataFile, newPeak);
} else {
peakListRow.removePeak(dataFile);
}
}
// Notify the GUI that peaklist contents have changed
if (peakList != null) {
// Check if the feature list row has been added to the feature list, and
// if it has not, add it
List<PeakListRow> rows = Arrays.asList(peakList.getRows());
if (!rows.contains(peakListRow)) {
peakList.addRow(peakListRow);
}
// Add quality parameters to peaks
QualityParameters.calculateQualityParameters(peakList);
project.notifyObjectChanged(peakList, true);
}
if (table != null) {
((AbstractTableModel) table.getModel()).fireTableDataChanged();
}
logger.finest("Finished manual peak picker, " + processedScans + " scans processed");
setStatus(TaskStatus.FINISHED);
}
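The loop above delegates the actual lookup to ScanUtils.findBasePeak and falls back to a zero-intensity placeholder when nothing is found in the m/z window. Below is a minimal, self-contained sketch of that kind of "most intense data point inside an m/z window" search, using plain arrays and a hypothetical method name instead of the mzmine2 classes:

// Sketch only: hypothetical helper, not mzmine2 code. It mirrors the idea behind
// ScanUtils.findBasePeak as used in the loop above.
final class BasePeakSketch {
    /** Returns the index of the highest-intensity point with m/z in [mzMin, mzMax], or -1 if none. */
    static int findBasePeakIndex(double[] mzValues, double[] intensities, double mzMin, double mzMax) {
        int best = -1;
        for (int i = 0; i < mzValues.length; i++) {
            if (mzValues[i] < mzMin || mzValues[i] > mzMax)
                continue; // outside the m/z window
            if (best == -1 || intensities[i] > intensities[best])
                best = i; // new most intense point so far
        }
        return best;
    }

    public static void main(String[] args) {
        double[] mzValues = { 100.01, 100.05, 100.09, 200.00 };
        double[] intensities = { 1e3, 5e4, 2e4, 9e5 };
        int idx = findBasePeakIndex(mzValues, intensities, 100.0, 100.1);
        System.out.println(idx >= 0 ? "base peak m/z = " + mzValues[idx] : "no data point in window");
    }
}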
use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 by mzmine.
the class ADAP3AlignerTask method findPeakListRow.
/**
* Find the existing {@link PeakListRow} for a given feature list ID and row ID.
*
* @param peakListID index of a feature list in the array of {@link PeakList}; numbering starts at 0.
* @param rowID the ID returned by the getID() method of {@link PeakListRow}.
* @return the matching {@link PeakListRow} if found, otherwise null.
*/
@Nullable
private PeakListRow findPeakListRow(final int peakListID, final int rowID) {
// Find feature list
PeakList peakList = findPeakList(peakListID);
if (peakList == null)
return null;
// Find row
PeakListRow row = null;
for (final PeakListRow r : peakList.getRows()) {
if (rowID == r.getID()) {
row = r;
break;
}
}
return row;
}
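For comparison, the same linear search by row ID can be expressed with Java streams. This is only an illustrative sketch written against the same surrounding class (it reuses findPeakList, PeakList and PeakListRow as above); it is not code from mzmine2:

// Hypothetical alternative formulation of the lookup above using streams.
@Nullable
private PeakListRow findPeakListRowByStream(final int peakListID, final int rowID) {
    PeakList peakList = findPeakList(peakListID);
    if (peakList == null)
        return null;
    // getRows() returns an array, so stream it and pick the first row with a matching ID
    return java.util.Arrays.stream(peakList.getRows())
        .filter(r -> r.getID() == rowID)
        .findFirst()
        .orElse(null);
}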
use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 by mzmine.
the class PeakListSaveHandler method savePeakList.
/**
* Create an XML document with the feature list information and save it into the project zip file
*
* @param peakList the feature list to save
* @throws java.io.IOException
*/
public void savePeakList(PeakList peakList) throws IOException, TransformerConfigurationException, SAXException {
numberOfRows = peakList.getNumberOfRows();
finishedRows = 0;
StreamResult streamResult = new StreamResult(finalStream);
SAXTransformerFactory tf = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
TransformerHandler hd = tf.newTransformerHandler();
Transformer serializer = hd.getTransformer();
serializer.setOutputProperty(OutputKeys.INDENT, "yes");
serializer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
hd.setResult(streamResult);
hd.startDocument();
AttributesImpl atts = new AttributesImpl();
hd.startElement("", "", PeakListElementName.PEAKLIST.getElementName(), atts);
atts.clear();
// <NAME>
hd.startElement("", "", PeakListElementName.PEAKLIST_NAME.getElementName(), atts);
hd.characters(peakList.getName().toCharArray(), 0, peakList.getName().length());
hd.endElement("", "", PeakListElementName.PEAKLIST_NAME.getElementName());
// <PEAKLIST_DATE>
String dateText = "";
if (((SimplePeakList) peakList).getDateCreated() != null) {
dateText = ((SimplePeakList) peakList).getDateCreated();
} else {
Date date = new Date();
dateText = dateFormat.format(date);
}
hd.startElement("", "", PeakListElementName.PEAKLIST_DATE.getElementName(), atts);
hd.characters(dateText.toCharArray(), 0, dateText.length());
hd.endElement("", "", PeakListElementName.PEAKLIST_DATE.getElementName());
// <QUANTITY>
hd.startElement("", "", PeakListElementName.QUANTITY.getElementName(), atts);
hd.characters(String.valueOf(numberOfRows).toCharArray(), 0, String.valueOf(numberOfRows).length());
hd.endElement("", "", PeakListElementName.QUANTITY.getElementName());
// <PROCESS>
PeakListAppliedMethod[] processes = peakList.getAppliedMethods();
for (PeakListAppliedMethod proc : processes) {
hd.startElement("", "", PeakListElementName.METHOD.getElementName(), atts);
hd.startElement("", "", PeakListElementName.METHOD_NAME.getElementName(), atts);
String methodName = proc.getDescription();
hd.characters(methodName.toCharArray(), 0, methodName.length());
hd.endElement("", "", PeakListElementName.METHOD_NAME.getElementName());
hd.startElement("", "", PeakListElementName.METHOD_PARAMETERS.getElementName(), atts);
String methodParameters = proc.getParameters();
hd.characters(methodParameters.toCharArray(), 0, methodParameters.length());
hd.endElement("", "", PeakListElementName.METHOD_PARAMETERS.getElementName());
hd.endElement("", "", PeakListElementName.METHOD.getElementName());
}
atts.clear();
// <RAWFILE>
RawDataFile[] dataFiles = peakList.getRawDataFiles();
for (int i = 0; i < dataFiles.length; i++) {
String ID = dataFilesIDMap.get(dataFiles[i]);
hd.startElement("", "", PeakListElementName.RAWFILE.getElementName(), atts);
char[] idChars = ID.toCharArray();
hd.characters(idChars, 0, idChars.length);
hd.endElement("", "", PeakListElementName.RAWFILE.getElementName());
}
// <ROW>
PeakListRow row;
for (int i = 0; i < numberOfRows; i++) {
if (canceled)
return;
atts.clear();
row = peakList.getRow(i);
atts.addAttribute("", "", PeakListElementName.ID.getElementName(), "CDATA", String.valueOf(row.getID()));
if (row.getComment() != null) {
atts.addAttribute("", "", PeakListElementName.COMMENT.getElementName(), "CDATA", row.getComment());
}
hd.startElement("", "", PeakListElementName.ROW.getElementName(), atts);
fillRowElement(row, hd);
hd.endElement("", "", PeakListElementName.ROW.getElementName());
finishedRows++;
}
hd.endElement("", "", PeakListElementName.PEAKLIST.getElementName());
hd.endDocument();
}
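The method above is built entirely on the SAX TransformerHandler streaming pattern: start the document, open an element, write character data, close the element. The stand-alone sketch below shows that pattern in isolation using only standard JAXP/SAX classes; the element and attribute names are made up for the demo:

import java.io.ByteArrayOutputStream;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamResult;
import org.xml.sax.helpers.AttributesImpl;

public class SaxWriteSketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        SAXTransformerFactory tf = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
        TransformerHandler hd = tf.newTransformerHandler();
        hd.getTransformer().setOutputProperty(OutputKeys.INDENT, "yes");
        hd.getTransformer().setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        hd.setResult(new StreamResult(out));

        AttributesImpl atts = new AttributesImpl();
        hd.startDocument();
        atts.addAttribute("", "", "id", "CDATA", "42");
        hd.startElement("", "", "row", atts); // <row id="42">
        char[] text = "example".toCharArray();
        hd.characters(text, 0, text.length); // element text content
        hd.endElement("", "", "row"); // </row>
        hd.endDocument();

        System.out.println(out.toString("UTF-8"));
    }
}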
use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 by mzmine.
the class ShapeModelerTask method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
Class<?> shapeModelClass = shapeModelerType.getModelClass();
Constructor<?> shapeModelConstruct;
shapeModelConstruct = shapeModelClass.getConstructors()[0];
// Get data file information
RawDataFile dataFile = originalPeakList.getRawDataFile(0);
// Create new feature list
newPeakList = new SimplePeakList(originalPeakList + " " + suffix, dataFile);
totalRows = originalPeakList.getNumberOfRows();
int[] scanNumbers;
double[] retentionTimes, intensities;
SimplePeakListRow newRow;
for (PeakListRow row : originalPeakList.getRows()) {
if (isCanceled())
return;
newRow = new SimplePeakListRow(newPeakID);
try {
for (Feature peak : row.getPeaks()) {
// Load the intensities into array
dataFile = peak.getDataFile();
scanNumbers = peak.getScanNumbers();
retentionTimes = new double[scanNumbers.length];
for (int i = 0; i < scanNumbers.length; i++) retentionTimes[i] = dataFile.getScan(scanNumbers[i]).getRetentionTime();
intensities = new double[scanNumbers.length];
for (int i = 0; i < scanNumbers.length; i++) {
DataPoint dp = peak.getDataPoint(scanNumbers[i]);
if (dp != null)
intensities[i] = dp.getIntensity();
else
intensities[i] = 0;
}
Feature shapePeak = (Feature) shapeModelConstruct.newInstance(peak, scanNumbers, intensities, retentionTimes, resolution);
newRow.addPeak(shapePeak.getDataFile(), shapePeak);
}
} catch (Exception e) {
String message = "Error trying to make an instance of shape model class " + shapeModelClass;
MZmineCore.getDesktop().displayErrorMessage(MZmineCore.getDesktop().getMainWindow(), message);
logger.severe(message);
return;
}
newPeakList.addRow(newRow);
newPeakID++;
processedRows++;
}
// Add new peaklist to the project
project.addPeakList(newPeakList);
// Add quality parameters to peaks
QualityParameters.calculateQualityParameters(newPeakList);
// Remove the original peaklist if requested
if (removeOriginal)
project.removePeakList(originalPeakList);
// Load previous applied methods
for (PeakListAppliedMethod proc : originalPeakList.getAppliedMethods()) {
newPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
newPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Peaks shaped by " + shapeModelerType + " function", parameters));
logger.finest("Finished peak shape modeler " + processedRows + " rows processed");
setStatus(TaskStatus.FINISHED);
}
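The task above instantiates the selected shape model reflectively through its first public constructor (shapeModelClass.getConstructors()[0] followed by newInstance). The self-contained sketch below shows the same pattern with a hypothetical GaussianModel stand-in class; it is not an mzmine2 model class:

import java.lang.reflect.Constructor;

public class ReflectiveModelSketch {
    // Hypothetical stand-in for a shape model class with a single public constructor.
    public static class GaussianModel {
        private final double[] intensities;
        private final double resolution;
        public GaussianModel(double[] intensities, double resolution) {
            this.intensities = intensities;
            this.resolution = resolution;
        }
        @Override
        public String toString() {
            return "GaussianModel(" + intensities.length + " points, resolution=" + resolution + ")";
        }
    }

    public static void main(String[] args) throws Exception {
        Class<?> modelClass = GaussianModel.class;
        // Assumes the class declares exactly one public constructor, as the task above does.
        Constructor<?> ctor = modelClass.getConstructors()[0];
        Object model = ctor.newInstance(new double[] { 1.0, 5.0, 2.0 }, 1000.0);
        System.out.println(model);
    }
}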
use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 by mzmine.
the class PeakExtenderTask method run.
/**
* @see Runnable#run()
*/
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Running peak extender on " + peakList);
// We assume source peakList contains one datafile
RawDataFile dataFile = peakList.getRawDataFile(0);
// Create a new, extended peakList
extendedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());
// Sort peaks by descending height
Feature[] sortedPeaks = peakList.getPeaks(dataFile);
Arrays.sort(sortedPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));
// Loop through all peaks
totalPeaks = sortedPeaks.length;
Feature oldPeak;
for (int ind = 0; ind < totalPeaks; ind++) {
if (isCanceled())
return;
oldPeak = sortedPeaks[ind];
if (oldPeak.getHeight() >= minimumHeight) {
Feature newPeak = this.getExtendedPeak(oldPeak);
// Get the previous peakListRow
PeakListRow oldRow = peakList.getPeakRow(oldPeak);
// keep old ID
int oldID = oldRow.getID();
SimplePeakListRow newRow = new SimplePeakListRow(oldID);
PeakUtils.copyPeakListRowProperties(oldRow, newRow);
newRow.addPeak(dataFile, newPeak);
extendedPeakList.addRow(newRow);
}
// Update completion rate
processedPeaks++;
}
// Add new peakList to the project
project.addPeakList(extendedPeakList);
// Add quality parameters to peaks
QualityParameters.calculateQualityParameters(extendedPeakList);
// Load previous applied methods
for (PeakListAppliedMethod proc : peakList.getAppliedMethods()) {
extendedPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
extendedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Peak extender", parameters));
// Remove the original peakList if requested
if (removeOriginal)
project.removePeakList(peakList);
logger.info("Finished peak extender on " + peakList);
setStatus(TaskStatus.FINISHED);
}
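The sort above uses mzmine2's PeakSorter with SortingProperty.Height and SortingDirection.Descending. As a rough equivalent, the hypothetical sketch below sorts a stand-in peak type by descending height with a plain java.util.Comparator:

import java.util.Arrays;
import java.util.Comparator;

public class HeightSortSketch {
    // Minimal stand-in for a feature/peak with a height; not an mzmine2 type.
    static final class Peak {
        final String id;
        final double height;
        Peak(String id, double height) { this.id = id; this.height = height; }
    }

    public static void main(String[] args) {
        Peak[] peaks = { new Peak("a", 1e4), new Peak("b", 9e5), new Peak("c", 3e2) };
        // Highest peaks first, mirroring SortingProperty.Height + SortingDirection.Descending
        Arrays.sort(peaks, Comparator.comparingDouble((Peak p) -> p.height).reversed());
        for (Peak p : peaks)
            System.out.println(p.id + " height=" + p.height);
    }
}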