Use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
Class StreamPeakListRowLearnerTask, method copyPeakRow.
/**
* Create a copy of a feature list row.
*
* @param row the row to copy.
* @return the newly created copy.
*/
private static PeakListRow copyPeakRow(final PeakListRow row) {

    // Copy the feature list row.
    final PeakListRow newRow = new SimplePeakListRow(row.getID());
    PeakUtils.copyPeakListRowProperties(row, newRow);

    // Copy the peaks.
    for (final Feature peak : row.getPeaks()) {
        final Feature newPeak = new SimpleFeature(peak);
        PeakUtils.copyPeakProperties(peak, newPeak);
        newRow.addPeak(peak.getDataFile(), newPeak);
    }

    return newRow;
}
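Note: as a rough usage sketch only (not part of the mzmine2 source), the helper above can deep-copy an entire feature list; sourceList below is an assumed, already-populated PeakList.

// Sketch: clone every row of an assumed existing feature list "sourceList".
SimplePeakList clonedList =
    new SimplePeakList(sourceList + " cloned", sourceList.getRawDataFiles());
for (PeakListRow row : sourceList.getRows()) {
    // copyPeakRow copies the row ID, row properties and every peak
    clonedList.addRow(copyPeakRow(row));
}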
Use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
Class ChromatogramBuilderTask, method run.
/**
* @see Runnable#run()
*/
public void run() {

    setStatus(TaskStatus.PROCESSING);
    logger.info("Started chromatogram builder on " + dataFile);

    scans = scanSelection.getMatchingScans(dataFile);
    int[] allScanNumbers = scanSelection.getMatchingScanNumbers(dataFile);
    totalScans = scans.length;

    // Check if the scans are properly ordered by RT
    double prevRT = Double.NEGATIVE_INFINITY;
    for (Scan s : scans) {
        if (s.getRetentionTime() < prevRT) {
            setStatus(TaskStatus.ERROR);
            final String msg = "Retention time of scan #" + s.getScanNumber()
                + " is smaller than the retention time of the previous scan."
                + " Please make sure you only use scans with increasing retention times."
                + " You can restrict the scan numbers in the parameters, or you can use the Crop filter module";
            setErrorMessage(msg);
            return;
        }
        prevRT = s.getRetentionTime();
    }

    // Create new feature list
    newPeakList = new SimplePeakList(dataFile + " " + suffix, dataFile);

    Chromatogram[] chromatograms;
    HighestDataPointConnector massConnector = new HighestDataPointConnector(dataFile,
        allScanNumbers, minimumTimeSpan, minimumHeight, mzTolerance);

    for (Scan scan : scans) {

        if (isCanceled())
            return;

        MassList massList = scan.getMassList(massListName);
        if (massList == null) {
            setStatus(TaskStatus.ERROR);
            setErrorMessage("Scan " + dataFile + " #" + scan.getScanNumber()
                + " does not have a mass list " + massListName);
            return;
        }

        DataPoint[] mzValues = massList.getDataPoints();
        if (mzValues == null) {
            setStatus(TaskStatus.ERROR);
            setErrorMessage("Mass list " + massListName + " does not contain m/z values for scan #"
                + scan.getScanNumber() + " of file " + dataFile);
            return;
        }

        massConnector.addScan(scan.getScanNumber(), mzValues);
        processedScans++;
    }

    chromatograms = massConnector.finishChromatograms();

    // Sort the final chromatograms by m/z
    Arrays.sort(chromatograms, new PeakSorter(SortingProperty.MZ, SortingDirection.Ascending));

    // Add the chromatograms to the new feature list
    for (Feature finishedPeak : chromatograms) {
        SimplePeakListRow newRow = new SimplePeakListRow(newPeakID);
        newPeakID++;
        newRow.addPeak(dataFile, finishedPeak);
        newPeakList.addRow(newRow);
    }

    // Add new peaklist to the project
    project.addPeakList(newPeakList);

    // Add quality parameters to peaks
    QualityParameters.calculateQualityParameters(newPeakList);

    setStatus(TaskStatus.FINISHED);
    logger.info("Finished chromatogram builder on " + dataFile);
}
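The final loop wraps each finished chromatogram in its own SimplePeakListRow with a fresh, incrementing ID. A hypothetical helper (addAsNewRow is illustrative, not an mzmine2 method) that isolates this recurring step:

// Sketch only: wrap one detected feature in a new row and append it to the list.
// Returns the next free row ID so the caller can keep counting.
private static int addAsNewRow(Feature finishedPeak, RawDataFile dataFile,
        SimplePeakList targetList, int nextId) {
    SimplePeakListRow newRow = new SimplePeakListRow(nextId);
    newRow.addPeak(dataFile, finishedPeak);
    targetList.addRow(newRow);
    return nextId + 1;
}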
Use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
Class ShapeModelerTask, method run.
public void run() {

    setStatus(TaskStatus.PROCESSING);

    Class<?> shapeModelClass = shapeModelerType.getModelClass();
    Constructor<?> shapeModelConstruct;
    shapeModelConstruct = shapeModelClass.getConstructors()[0];

    // Get data file information
    RawDataFile dataFile = originalPeakList.getRawDataFile(0);

    // Create new feature list
    newPeakList = new SimplePeakList(originalPeakList + " " + suffix, dataFile);

    totalRows = originalPeakList.getNumberOfRows();

    int[] scanNumbers;
    double[] retentionTimes, intensities;
    SimplePeakListRow newRow;

    for (PeakListRow row : originalPeakList.getRows()) {

        if (isCanceled())
            return;

        newRow = new SimplePeakListRow(newPeakID);

        try {
            for (Feature peak : row.getPeaks()) {

                // Load the intensities into array
                dataFile = peak.getDataFile();
                scanNumbers = peak.getScanNumbers();
                retentionTimes = new double[scanNumbers.length];
                for (int i = 0; i < scanNumbers.length; i++)
                    retentionTimes[i] = dataFile.getScan(scanNumbers[i]).getRetentionTime();
                intensities = new double[scanNumbers.length];
                for (int i = 0; i < scanNumbers.length; i++) {
                    DataPoint dp = peak.getDataPoint(scanNumbers[i]);
                    if (dp != null)
                        intensities[i] = dp.getIntensity();
                    else
                        intensities[i] = 0;
                }

                Feature shapePeak = (Feature) shapeModelConstruct.newInstance(peak, scanNumbers,
                    intensities, retentionTimes, resolution);

                newRow.addPeak(shapePeak.getDataFile(), shapePeak);
            }
        } catch (Exception e) {
            String message =
                "Error trying to make an instance of shape model class " + shapeModelClass;
            MZmineCore.getDesktop().displayErrorMessage(MZmineCore.getDesktop().getMainWindow(), message);
            logger.severe(message);
            return;
        }

        newPeakList.addRow(newRow);
        newPeakID++;
        processedRows++;
    }

    // Add new peaklist to the project
    project.addPeakList(newPeakList);

    // Add quality parameters to peaks
    QualityParameters.calculateQualityParameters(newPeakList);

    // Remove the original peaklist if requested
    if (removeOriginal)
        project.removePeakList(originalPeakList);

    // Load previous applied methods
    for (PeakListAppliedMethod proc : originalPeakList.getAppliedMethods()) {
        newPeakList.addDescriptionOfAppliedTask(proc);
    }

    // Add task description to peakList
    newPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod(
        "Peaks shaped by " + shapeModelerType + " function", parameters));

    logger.finest("Finished peak shape modeler " + processedRows + " rows processed");
    setStatus(TaskStatus.FINISHED);
}
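The inner loop builds two aligned arrays, retention time and intensity per scan, before handing them to the reflectively constructed shape model. A self-contained sketch of just that extraction (extractProfile is a hypothetical helper; the Feature, RawDataFile and DataPoint calls are the same ones used above):

// Sketch: build aligned retention-time and intensity arrays for one peak.
// Scans without a data point are treated as zero intensity, as in the task above.
static double[][] extractProfile(Feature peak) {
    RawDataFile file = peak.getDataFile();
    int[] scanNumbers = peak.getScanNumbers();
    double[] rts = new double[scanNumbers.length];
    double[] heights = new double[scanNumbers.length];
    for (int i = 0; i < scanNumbers.length; i++) {
        rts[i] = file.getScan(scanNumbers[i]).getRetentionTime();
        DataPoint dp = peak.getDataPoint(scanNumbers[i]);
        heights[i] = (dp != null) ? dp.getIntensity() : 0.0;
    }
    return new double[][] { rts, heights };
}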
Use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
Class PeakExtenderTask, method run.
/**
* @see Runnable#run()
*/
public void run() {

    setStatus(TaskStatus.PROCESSING);
    logger.info("Running peak extender on " + peakList);

    // We assume source peakList contains one datafile
    RawDataFile dataFile = peakList.getRawDataFile(0);
    // Create a new extended peakList
    extendedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());
    // Sort peaks by descending height
    Feature[] sortedPeaks = peakList.getPeaks(dataFile);
    Arrays.sort(sortedPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));

    // Loop through all peaks
    totalPeaks = sortedPeaks.length;
    Feature oldPeak;

    for (int ind = 0; ind < totalPeaks; ind++) {

        if (isCanceled())
            return;

        oldPeak = sortedPeaks[ind];

        if (oldPeak.getHeight() >= minimumHeight) {
            Feature newPeak = this.getExtendedPeak(oldPeak);
            // Get previous peakListRow
            PeakListRow oldRow = peakList.getPeakRow(oldPeak);
            // Keep old ID
            int oldID = oldRow.getID();
            SimplePeakListRow newRow = new SimplePeakListRow(oldID);
            PeakUtils.copyPeakListRowProperties(oldRow, newRow);
            newRow.addPeak(dataFile, newPeak);
            extendedPeakList.addRow(newRow);
        }

        // Update completion rate
        processedPeaks++;
    }

    // Add new peakList to the project
    project.addPeakList(extendedPeakList);

    // Add quality parameters to peaks
    QualityParameters.calculateQualityParameters(extendedPeakList);

    // Load previous applied methods
    for (PeakListAppliedMethod proc : peakList.getAppliedMethods()) {
        extendedPeakList.addDescriptionOfAppliedTask(proc);
    }

    // Add task description to peakList
    extendedPeakList.addDescriptionOfAppliedTask(
        new SimplePeakListAppliedMethod("Peak extender", parameters));

    // Remove the original peakList if requested
    if (removeOriginal)
        project.removePeakList(peakList);

    logger.info("Finished peak extender on " + peakList);
    setStatus(TaskStatus.FINISHED);
}
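The middle of the loop shows the ID-preserving row copy idiom: a new SimplePeakListRow takes the old row's ID, PeakUtils copies the remaining row properties, and only the peak itself is swapped. A hypothetical helper (replacePeakInNewRow is illustrative only) that captures it:

// Sketch: keep the original row's ID and properties but attach a replacement peak.
static void replacePeakInNewRow(PeakListRow oldRow, Feature replacementPeak,
        RawDataFile dataFile, SimplePeakList targetList) {
    SimplePeakListRow newRow = new SimplePeakListRow(oldRow.getID());
    PeakUtils.copyPeakListRowProperties(oldRow, newRow);
    newRow.addPeak(dataFile, replacementPeak);
    targetList.addRow(newRow);
}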
Use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
Class SameRangeTask, method run.
public void run() {

    logger.info("Started gap-filling " + peakList);
    setStatus(TaskStatus.PROCESSING);

    // Get total number of rows
    totalRows = peakList.getNumberOfRows();

    // Get feature list columns
    RawDataFile[] columns = peakList.getRawDataFiles();

    // Create new feature list
    processedPeakList = new SimplePeakList(peakList + " " + suffix, columns);

    // Create a stream to process the data in parallel
    processedRowsAtomic = new AtomicInteger(0);
    List<PeakListRow> outputList = Collections.synchronizedList(new ArrayList<>());

    peakList.parallelStream().forEach(sourceRow -> {

        // Canceled?
        if (isCanceled())
            return;

        PeakListRow newRow = new SimplePeakListRow(sourceRow.getID());

        // Copy comment
        newRow.setComment(sourceRow.getComment());

        // Copy identities
        for (PeakIdentity ident : sourceRow.getPeakIdentities())
            newRow.addPeakIdentity(ident, false);
        if (sourceRow.getPreferredPeakIdentity() != null)
            newRow.setPreferredPeakIdentity(sourceRow.getPreferredPeakIdentity());
        // Copy each peak and fill gaps
        for (RawDataFile column : columns) {

            // Canceled?
            if (isCanceled())
                return;

            // Get current peak
            Feature currentPeak = sourceRow.getPeak(column);

            // If there is a gap, try to fill it
            if (currentPeak == null)
                currentPeak = fillGap(sourceRow, column);

            // If a peak was found or created, add it
            if (currentPeak != null)
                newRow.addPeak(column, currentPeak);
        }

        outputList.add(newRow);
        processedRowsAtomic.getAndAdd(1);
    });

    outputList.stream().forEach(newRow -> {
        processedPeakList.addRow((PeakListRow) newRow);
    });

    // Canceled?
    if (isCanceled())
        return;

    // Append processed feature list to the project
    project.addPeakList(processedPeakList);

    // Add quality parameters to peaks
    QualityParameters.calculateQualityParameters(processedPeakList);

    // Add task description to peakList
    processedPeakList.addDescriptionOfAppliedTask(
        new SimplePeakListAppliedMethod("Gap filling using RT and m/z range", parameters));

    // Remove the original peaklist if requested
    if (removeOriginal)
        project.removePeakList(peakList);

    setStatus(TaskStatus.FINISHED);
    logger.info("Finished gap-filling " + peakList);
}
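Note the concurrency pattern: rows are built inside a parallel stream, accumulated in a synchronized list, and only added to the SimplePeakList in a single-threaded pass afterwards, presumably because the feature list is not designed for concurrent mutation. A condensed sketch of that pattern (processRow stands in for the per-row gap-filling work and is not an mzmine2 API):

// Sketch of the collect-then-add pattern used above.
List<PeakListRow> collected = Collections.synchronizedList(new ArrayList<>());
peakList.parallelStream().forEach(sourceRow -> {
    if (isCanceled())
        return;
    collected.add(processRow(sourceRow)); // hypothetical per-row work; thread-safe accumulation
});
// Single-threaded phase: mutate the new feature list safely.
collected.forEach(processedPeakList::addRow);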