Usage example of net.sf.mzmine.datamodel.Feature in project mzmine2 (by mzmine).
Source: class PeakFinderTask, method fillList.
/**
 * Fills gaps in every raw data file of the peak list except the master sample.
 *
 * <p>For each non-master file, an RT regression ({@link RegressionInfo}) is built from rows
 * detected in both the current file and the master sample. Rows missing a peak in the current
 * file become {@link Gap}s whose expected RT is predicted via the regression; all scans of the
 * file are then offered to the gaps so they can pick up the missing signal.
 *
 * @param masterList if {@code true}, the master sample is the regression target (datafile1) and
 *        the reference RT is read from the original peak list; if {@code false}, the roles are
 *        reversed and the reference RT comes from the already-processed peak list.
 */
public void fillList(boolean masterList) {
    for (int i = 0; i < peakList.getNumberOfRawDataFiles(); i++) {
        if (i == masterSample) {
            continue; // the master sample itself is never gap-filled here
        }
        // Which file is being filled (datafile1) and which supplies the reference RT (datafile2)
        // depends on the direction of the pass.
        RawDataFile datafile1;
        RawDataFile datafile2;
        if (masterList) {
            datafile1 = peakList.getRawDataFile(masterSample);
            datafile2 = peakList.getRawDataFile(i);
        } else {
            datafile1 = peakList.getRawDataFile(i);
            datafile2 = peakList.getRawDataFile(masterSample);
        }
        // Build the RT regression from rows that have a peak in BOTH files.
        RegressionInfo info = new RegressionInfo();
        for (PeakListRow row : peakList.getRows()) {
            Feature peaki = row.getPeak(datafile1);
            Feature peake = row.getPeak(datafile2);
            if (peaki != null && peake != null) {
                info.addData(peake.getRT(), peaki.getRT());
            }
        }
        info.setFunction();
        // Canceled?
        if (isCanceled()) {
            return;
        }
        Vector<Gap> gaps = new Vector<Gap>();
        // Collect gaps (rows without a peak in datafile1), copying existing peaks through.
        for (int row = 0; row < peakList.getNumberOfRows(); row++) {
            PeakListRow sourceRow = peakList.getRow(row);
            PeakListRow newRow = processedPeakList.getRow(row);
            Feature sourcePeak = sourceRow.getPeak(datafile1);
            if (sourcePeak != null) {
                // Peak already present: carry it over unchanged.
                newRow.addPeak(datafile1, sourcePeak);
                continue;
            }
            // Create a new gap for this row.
            double mz = sourceRow.getAverageMZ();
            // Reuse the rows already fetched above instead of re-querying the peak lists twice
            // (the original code called getRow(row).getPeak(datafile2) once for the null check
            // and again for the value).
            Feature referencePeak =
                masterList ? sourceRow.getPeak(datafile2) : newRow.getPeak(datafile2);
            double rt2 = (referencePeak != null) ? referencePeak.getRT() : -1;
            if (rt2 > -1) {
                // Predict where the peak should elute in datafile1.
                double rt = info.predict(rt2);
                if (rt != -1) {
                    Range<Double> mzRange = mzTolerance.getToleranceRange(mz);
                    Range<Double> rtRange = rtTolerance.getToleranceRange(rt);
                    gaps.add(new Gap(newRow, datafile1, mzRange, rtRange, intTolerance));
                }
            }
        }
        // Stop processing this file if there are no gaps.
        if (gaps.size() == 0) {
            processedScans.addAndGet(datafile1.getNumOfScans());
            continue;
        }
        // Get all MS level 1 scans of this data file.
        int[] scanNumbers = datafile1.getScanNumbers(1);
        // Feed every scan to every gap so each gap can accumulate its best candidate.
        for (int scanNumber : scanNumbers) {
            // Canceled?
            if (isCanceled()) {
                return;
            }
            Scan scan = datafile1.getScan(scanNumber);
            for (Gap gap : gaps) {
                gap.offerNextScan(scan);
            }
            processedScans.incrementAndGet();
        }
        // Finalize gaps (commit whatever each gap found).
        for (Gap gap : gaps) {
            gap.noMoreOffers();
        }
    }
}
Usage example of net.sf.mzmine.datamodel.Feature in project mzmine2 (by mzmine).
Source: class SameRangeTask, method fillGap.
/**
 * Attempts to fill the gap for {@code row} in data file {@code column}.
 *
 * <p>The search window is the union (span) of the m/z and RT ranges of the row's existing peaks
 * in the other files, widened by the m/z tolerance. For each MS level 1 scan in that RT range,
 * the most intense data point inside the m/z window is added to the new peak; scans with no
 * signal get a zero-intensity placeholder so the peak shape stays contiguous.
 *
 * @param row the feature list row containing the gap
 * @param column the raw data file in which to search
 * @return the reconstructed peak, or {@code null} if the task was canceled, no real data point
 *         was found, the resulting area is zero, or the row has no peaks to derive a search
 *         window from
 */
private Feature fillGap(PeakListRow row, RawDataFile column) {
    SameRangePeak newPeak = new SameRangePeak(column);
    Range<Double> mzRange = null, rtRange = null;
    // Span the m/z and RT ranges of the row's peaks across all data files.
    for (RawDataFile dataFile : row.getRawDataFiles()) {
        Feature peak = row.getPeak(dataFile);
        if (peak == null)
            continue;
        if ((mzRange == null) || (rtRange == null)) {
            mzRange = peak.getRawDataPointsMZRange();
            rtRange = peak.getRawDataPointsRTRange();
        } else {
            mzRange = mzRange.span(peak.getRawDataPointsMZRange());
            rtRange = rtRange.span(peak.getRawDataPointsRTRange());
        }
    }
    // No peaks in any file: there is no range to search, so the gap cannot be filled.
    // (The original code only asserted non-null here, which is a no-op in production and
    // would have led to an NPE below; the caller already handles a null return.)
    if (mzRange == null || rtRange == null) {
        return null;
    }
    Range<Double> mzRangeWithTol = mzTolerance.getToleranceRange(mzRange);
    // MS level 1 scan numbers within the RT window.
    int[] scanNumbers = column.getScanNumbers(1, rtRange);
    boolean dataPointFound = false;
    for (int scanNumber : scanNumbers) {
        if (isCanceled())
            return null;
        Scan scan = column.getScan(scanNumber);
        // Find the most intense m/z peak inside the tolerance window.
        DataPoint basePeak = ScanUtils.findBasePeak(scan, mzRangeWithTol);
        if (basePeak != null) {
            if (basePeak.getIntensity() > 0)
                dataPointFound = true;
            newPeak.addDatapoint(scan.getScanNumber(), basePeak);
        } else {
            // No signal in this scan: insert a zero-intensity point at the window center.
            DataPoint fakeDataPoint = new SimpleDataPoint(RangeUtils.rangeCenter(mzRangeWithTol), 0);
            newPeak.addDatapoint(scan.getScanNumber(), fakeDataPoint);
        }
    }
    if (dataPointFound) {
        newPeak.finalizePeak();
        // A peak made only of placeholder points has zero area and is discarded.
        if (newPeak.getArea() == 0)
            return null;
        return newPeak;
    }
    return null;
}
Usage example of net.sf.mzmine.datamodel.Feature in project mzmine2 (by mzmine).
Source: class SameRangeTask, method run.
/**
 * Runs the same-range gap-filling task: copies every row of the source peak list into a new
 * list, filling each missing peak via {@code fillGap}, then registers the result with the
 * project and optionally removes the original list.
 */
public void run() {
logger.info("Started gap-filling " + peakList);
setStatus(TaskStatus.PROCESSING);
// Get total number of rows (used for progress reporting)
totalRows = peakList.getNumberOfRows();
// Get feature list columns
RawDataFile[] columns = peakList.getRawDataFiles();
// Create new feature list
processedPeakList = new SimplePeakList(peakList + " " + suffix, columns);
/**
***********************************************************
* Creating a stream to process the data in parallel
*/
processedRowsAtomic = new AtomicInteger(0);
// Synchronized list: rows are appended concurrently by the parallel stream below.
List<PeakListRow> outputList = Collections.synchronizedList(new ArrayList<>());
// NOTE(review): parallelStream + synchronizedList means the order of rows in outputList
// (and thus in processedPeakList) may differ from the source list's row order — confirm
// whether downstream code relies on row ordering.
peakList.parallelStream().forEach(sourceRow -> {
// Canceled?
if (isCanceled())
return;
// Copy the row shell: same ID, then comment and identities.
PeakListRow newRow = new SimplePeakListRow(sourceRow.getID());
// Copy comment
newRow.setComment(sourceRow.getComment());
// Copy identities
for (PeakIdentity ident : sourceRow.getPeakIdentities()) newRow.addPeakIdentity(ident, false);
if (sourceRow.getPreferredPeakIdentity() != null)
newRow.setPreferredPeakIdentity(sourceRow.getPreferredPeakIdentity());
// Copy each peaks and fill gaps
for (RawDataFile column : columns) {
// Canceled?
if (isCanceled())
return;
// Get current peak
Feature currentPeak = sourceRow.getPeak(column);
// If there is a gap, try to fill it (fillGap may return null if nothing is found)
if (currentPeak == null)
currentPeak = fillGap(sourceRow, column);
// If a peak was found or created, add it
if (currentPeak != null)
newRow.addPeak(column, currentPeak);
}
outputList.add(newRow);
processedRowsAtomic.getAndAdd(1);
});
// Transfer the collected rows into the new peak list (single-threaded at this point).
outputList.stream().forEach(newRow -> {
processedPeakList.addRow((PeakListRow) newRow);
});
// Canceled? (Note: rows processed before cancellation are discarded along with the list.)
if (isCanceled())
return;
// Append processed feature list to the project
project.addPeakList(processedPeakList);
// Add quality parameters to peaks
QualityParameters.calculateQualityParameters(processedPeakList);
// Add task description to peakList
processedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Gap filling using RT and m/z range", parameters));
// Remove the original peaklist if requested
if (removeOriginal)
project.removePeakList(peakList);
setStatus(TaskStatus.FINISHED);
logger.info("Finished gap-filling " + peakList);
}
Usage example of net.sf.mzmine.datamodel.Feature in project mzmine2 (by mzmine).
Source: class PeakFilterTask, method filterPeakList.
/**
 * Filter the feature list.
 *
 * <p>Each peak of each row is tested against the active filters (duration, area, height, number
 * of data points, FWHM, tailing factor, asymmetry factor, presence of an MS/MS scan). Peaks
 * failing any active filter are dropped; rows whose peaks are all dropped are omitted entirely.
 *
 * @param peakList feature list to filter.
 * @return a new feature list with entries of the original feature list that pass the filtering.
 */
private PeakList filterPeakList(final PeakList peakList) {
    // Make a copy of the peakList.
    // NOTE(review): the suffix is read from RowsFilterParameters.SUFFIX although this is the
    // peak filter task — verify it should not be PeakFilterParameters.SUFFIX.
    final PeakList newPeakList = new SimplePeakList(peakList.getName() + ' '
        + parameters.getParameter(RowsFilterParameters.SUFFIX).getValue(), peakList.getRawDataFiles());
    // Get parameters - which filters are active.
    final boolean filterByDuration = parameters.getParameter(PeakFilterParameters.PEAK_DURATION).getValue();
    final boolean filterByArea = parameters.getParameter(PeakFilterParameters.PEAK_AREA).getValue();
    final boolean filterByHeight = parameters.getParameter(PeakFilterParameters.PEAK_HEIGHT).getValue();
    final boolean filterByDatapoints = parameters.getParameter(PeakFilterParameters.PEAK_DATAPOINTS).getValue();
    final boolean filterByFWHM = parameters.getParameter(PeakFilterParameters.PEAK_FWHM).getValue();
    final boolean filterByTailingFactor = parameters.getParameter(PeakFilterParameters.PEAK_TAILINGFACTOR).getValue();
    final boolean filterByAsymmetryFactor = parameters.getParameter(PeakFilterParameters.PEAK_ASYMMETRYFACTOR).getValue();
    final boolean filterByMS2 = parameters.getParameter(PeakFilterParameters.MS2_Filter).getValue();
    // Hoist the filter ranges out of the per-peak loop: they are loop-invariant, and the
    // original fetched them from the parameter set for every peak of every row.
    final Range<Double> durationRange = filterByDuration
        ? parameters.getParameter(PeakFilterParameters.PEAK_DURATION).getEmbeddedParameter().getValue() : null;
    final Range<Double> areaRange = filterByArea
        ? parameters.getParameter(PeakFilterParameters.PEAK_AREA).getEmbeddedParameter().getValue() : null;
    final Range<Double> heightRange = filterByHeight
        ? parameters.getParameter(PeakFilterParameters.PEAK_HEIGHT).getEmbeddedParameter().getValue() : null;
    final Range<Integer> datapointsRange = filterByDatapoints
        ? parameters.getParameter(PeakFilterParameters.PEAK_DATAPOINTS).getEmbeddedParameter().getValue() : null;
    final Range<Double> fwhmRange = filterByFWHM
        ? parameters.getParameter(PeakFilterParameters.PEAK_FWHM).getEmbeddedParameter().getValue() : null;
    final Range<Double> tailingRange = filterByTailingFactor
        ? parameters.getParameter(PeakFilterParameters.PEAK_TAILINGFACTOR).getEmbeddedParameter().getValue() : null;
    final Range<Double> asymmetryRange = filterByAsymmetryFactor
        ? parameters.getParameter(PeakFilterParameters.PEAK_ASYMMETRYFACTOR).getEmbeddedParameter().getValue() : null;
    // Loop through all rows in feature list.
    final PeakListRow[] rows = peakList.getRows();
    totalRows = rows.length;
    for (processedRows = 0; !isCanceled() && processedRows < totalRows; processedRows++) {
        final PeakListRow row = rows[processedRows];
        final RawDataFile[] rawdatafiles = row.getRawDataFiles();
        int totalRawDataFiles = rawdatafiles.length;
        boolean[] keepPeak = new boolean[totalRawDataFiles];
        for (int i = 0; i < totalRawDataFiles; i++) {
            // Peak values.
            keepPeak[i] = true;
            final Feature peak = row.getPeak(rawdatafiles[i]);
            final double peakDuration = peak.getRawDataPointsRTRange().upperEndpoint()
                - peak.getRawDataPointsRTRange().lowerEndpoint();
            final double peakArea = peak.getArea();
            final double peakHeight = peak.getHeight();
            final int peakDatapoints = peak.getScanNumbers().length;
            final int msmsScanNumber = peak.getMostIntenseFragmentScanNumber();
            // Shape metrics may be absent; -1 stands in for "undefined" (and thus fails
            // any positive range filter, as in the original code).
            Double peakFWHM = peak.getFWHM();
            Double peakTailingFactor = peak.getTailingFactor();
            Double peakAsymmetryFactor = peak.getAsymmetryFactor();
            if (peakFWHM == null) {
                peakFWHM = -1.0;
            }
            if (peakTailingFactor == null) {
                peakTailingFactor = -1.0;
            }
            if (peakAsymmetryFactor == null) {
                peakAsymmetryFactor = -1.0;
            }
            // Check duration.
            if (filterByDuration && !durationRange.contains(peakDuration)) {
                keepPeak[i] = false;
            }
            // Check area.
            if (filterByArea && !areaRange.contains(peakArea)) {
                keepPeak[i] = false;
            }
            // Check height.
            if (filterByHeight && !heightRange.contains(peakHeight)) {
                keepPeak[i] = false;
            }
            // Check number of data points.
            if (filterByDatapoints && !datapointsRange.contains(peakDatapoints)) {
                keepPeak[i] = false;
            }
            // Check FWHM.
            if (filterByFWHM && !fwhmRange.contains(peakFWHM)) {
                keepPeak[i] = false;
            }
            // Check tailing factor.
            if (filterByTailingFactor && !tailingRange.contains(peakTailingFactor)) {
                keepPeak[i] = false;
            }
            // Check asymmetry factor (the original comment wrongly said "height").
            if (filterByAsymmetryFactor && !asymmetryRange.contains(peakAsymmetryFactor)) {
                keepPeak[i] = false;
            }
            // Check MS/MS filter: require at least one fragment scan.
            if (filterByMS2 && msmsScanNumber < 1) {
                keepPeak[i] = false;
            }
        }
        // Keep the row only if at least one peak survived.
        boolean isEmpty = Booleans.asList(keepPeak).stream().noneMatch(keep -> keep);
        if (!isEmpty)
            newPeakList.addRow(copyPeakRow(row, keepPeak));
    }
    return newPeakList;
}
Usage example of net.sf.mzmine.datamodel.Feature in project mzmine2 (by mzmine).
Source: class PeakComparisonRowFilterTask, method copyPeakRow.
/**
 * Create a copy of a feature list row.
 *
 * <p>The copy shares the original row's ID and properties, and receives a deep copy of every
 * peak (each wrapped in a new {@code SimpleFeature} keyed by its source data file).
 *
 * @param row the row to copy.
 * @return the newly created copy.
 */
private static PeakListRow copyPeakRow(final PeakListRow row) {
    // Duplicate the row shell and its properties.
    final PeakListRow copy = new SimplePeakListRow(row.getID());
    PeakUtils.copyPeakListRowProperties(row, copy);
    // Deep-copy each peak into the new row, keyed by its originating data file.
    for (final Feature original : row.getPeaks()) {
        final Feature duplicate = new SimpleFeature(original);
        PeakUtils.copyPeakProperties(original, duplicate);
        copy.addPeak(original.getDataFile(), duplicate);
    }
    return copy;
}
Aggregations