Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine):
class MsMsSpectraMergeModule, method mergeConsecutiveScans.
/**
 * Internal method for merging a list of consecutive MS/MS scans.
 *
 * @param parameters parameter set supplying cosine threshold, m/z tolerance and merge modes
 * @param scans MS/MS scans with their precursor information
 * @param massList name of the mass list to use when extracting peaks
 * @param scoreModel scoring model to use when removing low quality MS/MS and selecting the best
 *        quality MS/MS
 * @return merged spectrum; empty if no scan has a positive quality score or the best scan's
 *         mass list has at most one peak
 * @throws RuntimeException if the best scan does not carry the requested mass list
 */
protected MergedSpectrum mergeConsecutiveScans(ParameterSet parameters, FragmentScan scans, String massList, Ms2QualityScoreModel scoreModel) {
    int totalNumberOfScans = scans.ms2ScanNumbers.length;
    /*
     * find scan with best quality
     */
    final double[] scores = scoreModel.calculateQualityScore(scans);
    int best = 0;
    for (int k = 1; k < scores.length; ++k) {
        if (scores[k] > scores[best]) {
            best = k;
        }
    }
    // no usable scan at all
    if (scores[best] <= 0)
        return MergedSpectrum.empty(totalNumberOfScans);
    final List<Scan> scansToMerge = new ArrayList<>();
    scansToMerge.add(scans.origin.getScan(scans.ms2ScanNumbers[best]));
    final Scan firstScan = scansToMerge.get(0);
    final MassList firstML = firstScan.getMassList(massList);
    if (firstML == null)
        throw new RuntimeException("Scan " + firstScan.getDataFile().getName() + "#" + firstScan.getScanNumber() + " does not have a mass list " + massList);
    if (firstML.getDataPoints().length <= 1)
        return MergedSpectrum.empty(totalNumberOfScans);
    /*
     * remove scans which are considerably worse than the best scan.
     * BUGFIX: the threshold must be derived from the best *score*, not from the array index of
     * the best scan ("best / 5d" compared an index against quality scores).
     */
    final double scoreThreshold = scores[best] / 5d;
    // expand outwards from the best scan, keeping neighbors above the threshold
    for (int i = 1; i < scores.length; ++i) {
        int k = best - i;
        if (k >= 0 && scores[k] > scoreThreshold) {
            scansToMerge.add(scans.origin.getScan(scans.ms2ScanNumbers[k]));
        }
        k = best + i;
        if (k < scores.length && scores[k] > scoreThreshold) {
            scansToMerge.add(scans.origin.getScan(scans.ms2ScanNumbers[k]));
        }
    }
    // shortcut: only one scan survived quality filtering
    if (scansToMerge.size() == 1) {
        final MergedSpectrum single = new MergedSpectrum(scansToMerge.get(0), massList);
        single.bestFragmentScanScore = scores[best];
        single.removedScansByLowQuality = scores.length - 1;
        return single;
    }
    /*
     * merge every scan if its cosine is above the cosine threshold
     */
    final double cosineThreshold = parameters.getParameter(MsMsSpectraMergeParameters.COSINE_PARAMETER).getValue();
    final MZTolerance mzTolerance = parameters.getParameter(MsMsSpectraMergeParameters.MASS_ACCURACY).getValue();
    final MzMergeMode mzMergeMode = parameters.getParameter(MsMsSpectraMergeParameters.MZ_MERGE_MODE).getValue();
    final IntensityMergeMode intensityMergeMode = parameters.getParameter(MsMsSpectraMergeParameters.INTENSITY_MERGE_MODE).getValue();
    MergedSpectrum initial = new MergedSpectrum(scansToMerge.get(0), massList);
    // BUGFIX: store the best score, not the index, consistent with the single-scan path above
    initial.bestFragmentScanScore = scores[best];
    // ignore very low masses; never go above 50 Da below the precursor
    final double lowestMassToConsider = Math.min(50d, scans.feature.getMZ() - 50d);
    final DataPoint[] initialMostIntense = ScanUtils.extractMostIntensePeaksAcrossMassRange(initial.data, Range.closed(lowestMassToConsider, 150d), 6);
    double lowestIntensityToConsider;
    final int mostIntensPeakWithin = ScanUtils.findMostIntensePeakWithin(initialMostIntense, Range.closed(lowestMassToConsider, scans.feature.getMZ()));
    // noise floor: 0.5% of the most intense peak below the precursor, if any
    if (mostIntensPeakWithin >= 0)
        lowestIntensityToConsider = 0.005d * initialMostIntense[mostIntensPeakWithin].getIntensity();
    else
        lowestIntensityToConsider = 0d;
    Range<Double> cosineRange = Range.closed(lowestMassToConsider, scans.feature.getMZ() - 20);
    // self-similarity of the seed spectrum, used for cosine normalization
    final double initialCosine = ScanUtils.probabilityProductUnnormalized(initialMostIntense, initialMostIntense, mzTolerance, lowestIntensityToConsider, cosineRange);
    for (int k = 1; k < scansToMerge.size(); ++k) {
        Scan scan = scansToMerge.get(k);
        // reject scans that clearly belong to a different feature
        if (!(scan.getPolarity().equals(initial.polarity) && scan.getPrecursorCharge() == initial.precursorCharge && mzTolerance.checkWithinTolerance(scan.getPrecursorMZ(), initial.precursorMz))) {
            LoggerFactory.getLogger(MsMsSpectraMergeModule.class).warn("Scan " + scan.getScanNumber() + " cannot be merged: it seems to belong to a different feature.");
            continue;
        }
        DataPoint[] dataPoints = scan.getMassList(massList).getDataPoints();
        final DataPoint[] mostIntense = ScanUtils.extractMostIntensePeaksAcrossMassRange(dataPoints, cosineRange, 6);
        final double norm = ScanUtils.probabilityProductUnnormalized(mostIntense, mostIntense, mzTolerance, lowestIntensityToConsider, cosineRange);
        // normalized spectral cosine between the candidate scan and the accumulated spectrum seed
        final double cosine = ScanUtils.probabilityProductUnnormalized(initialMostIntense, mostIntense, mzTolerance, lowestIntensityToConsider, cosineRange) / Math.sqrt(norm * initialCosine);
        if (cosine >= cosineThreshold) {
            initial = merge(initial, scan, dataPoints, mzMergeMode, intensityMergeMode, mzTolerance);
        } else {
            initial.removedScansByLowCosine++;
        }
    }
    initial.removedScansByLowQuality += (totalNumberOfScans - scansToMerge.size());
    return initial;
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine):
class MsMsSpectraMergeModule, method merge.
/**
 * Merges the peaks of one additional scan into an already merged spectrum.
 *
 * @param left the merged spectrum accumulated so far
 * @param right the scan to merge into {@code left}
 * @param rightData mass list data points of {@code right}
 * @param mzMergeMode how the m/z values of matching peaks are combined
 * @param intensityMergeMode how the intensities of matching peaks are combined
 * @param ppm m/z tolerance used to match peaks across the two spectra
 * @return a new MergedSpectrum with combined peaks, origin files and scan numbers; quality
 *         counters and the best-scan score are carried over from {@code left}
 */
private static MergedSpectrum merge(MergedSpectrum left, Scan right, DataPoint[] rightData, MzMergeMode mzMergeMode, IntensityMergeMode intensityMergeMode, MZTolerance ppm) {
    // merge peaks in order of descending intensity so the strongest peaks anchor the matching
    DataPoint[] byInt = rightData.clone();
    Arrays.sort(byInt, (u, v) -> Double.compare(v.getIntensity(), u.getIntensity()));
    MergedDataPoint[] merge = merge(left.data, byInt, mzMergeMode, intensityMergeMode, ppm);
    // collect the set of originating raw data files, avoiding duplicates
    // (removed an unused local "RawDataFile f" that shadowed left.origins[0])
    RawDataFile[] fm;
    if (right.getDataFile().equals(left.origins[0])) {
        fm = left.origins;
    } else {
        HashSet<RawDataFile> rawDataFiles = new HashSet<>(Arrays.asList(left.origins));
        rawDataFiles.add(right.getDataFile());
        fm = rawDataFiles.toArray(new RawDataFile[0]);
    }
    // append the newly merged scan's number to the id list
    int[] scanIds = Arrays.copyOf(left.scanIds, left.scanIds.length + 1);
    scanIds[scanIds.length - 1] = right.getScanNumber();
    return new MergedSpectrum(merge, fm, scanIds, left.precursorMz, left.polarity, left.precursorCharge, left.removedScansByLowQuality, left.removedScansByLowCosine, left.bestFragmentScanScore);
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine):
class Gap, method noMoreOffers.
/**
 * Finalizes gap filling: closes the peak currently under construction and, if a best candidate
 * peak was collected, builds a SimpleFeature from its data points and adds it to the peak list
 * row.
 *
 * Reads instance state declared elsewhere in this class: currentPeakDataPoints,
 * bestPeakDataPoints, rawDataFile, noiseLevel and peakListRow.
 */
public void noMoreOffers() {
// Check peak that was last constructed
if (currentPeakDataPoints != null) {
checkCurrentPeak();
currentPeakDataPoints = null;
}
// If we have best peak candidate, construct a SimpleChromatographicPeak
if (bestPeakDataPoints != null) {
// accumulators: trapezoid area, apex height, summed m/z (averaged later), apex RT
double area = 0, height = 0, mz = 0, rt = 0;
int[] scanNumbers = new int[bestPeakDataPoints.size()];
DataPoint[] finalDataPoint = new DataPoint[bestPeakDataPoints.size()];
Range<Double> finalRTRange = null, finalMZRange = null, finalIntensityRange = null;
int representativeScan = 0;
// Process all datapoints
for (int i = 0; i < bestPeakDataPoints.size(); i++) {
GapDataPoint dp = bestPeakDataPoints.get(i);
if (i == 0) {
// initialize the ranges from the first data point
finalRTRange = Range.singleton(dp.getRT());
finalMZRange = Range.singleton(dp.getMZ());
finalIntensityRange = Range.singleton(dp.getIntensity());
} else {
assert finalRTRange != null && finalMZRange != null && finalIntensityRange != null;
// widen each range to enclose the current data point
finalRTRange = finalRTRange.span(Range.singleton(dp.getRT()));
finalMZRange = finalMZRange.span(Range.singleton(dp.getMZ()));
finalIntensityRange = finalIntensityRange.span(Range.singleton(dp.getIntensity()));
}
scanNumbers[i] = bestPeakDataPoints.get(i).getScanNumber();
finalDataPoint[i] = new SimpleDataPoint(dp.getMZ(), dp.getIntensity());
mz += bestPeakDataPoints.get(i).getMZ();
// Check height: track the apex (maximum intensity) and its RT/scan
if (bestPeakDataPoints.get(i).getIntensity() > height) {
height = bestPeakDataPoints.get(i).getIntensity();
rt = bestPeakDataPoints.get(i).getRT();
representativeScan = bestPeakDataPoints.get(i).getScanNumber();
}
// Skip last data point (no interval to its right for area integration)
if (i == bestPeakDataPoints.size() - 1) {
break;
}
// X axis interval length
double rtDifference = bestPeakDataPoints.get(i + 1).getRT() - bestPeakDataPoints.get(i).getRT();
// Convert the RT scale to seconds (RT is presumably stored in minutes — verify)
rtDifference *= 60d;
// intensity at the beginning and end of the interval
double intensityStart = bestPeakDataPoints.get(i).getIntensity();
double intensityEnd = bestPeakDataPoints.get(i + 1).getIntensity();
// calculate area of the interval (trapezoid rule)
area += (rtDifference * (intensityStart + intensityEnd) / 2);
}
// Calculate average m/z value
mz /= bestPeakDataPoints.size();
// Find the best fragmentation scan, if available
int fragmentScan = ScanUtils.findBestFragmentScan(rawDataFile, finalRTRange, finalMZRange);
// Find all MS2 fragment scans, if available
int[] allMS2fragmentScanNumbers = ScanUtils.findAllMS2FragmentScans(rawDataFile, finalRTRange, finalMZRange);
// Is intensity above the noise level? Otherwise the candidate peak is discarded.
if (height >= noiseLevel) {
SimpleFeature newPeak = new SimpleFeature(rawDataFile, mz, rt, height, area, scanNumbers, finalDataPoint, FeatureStatus.ESTIMATED, representativeScan, fragmentScan, allMS2fragmentScanNumbers, finalRTRange, finalMZRange, finalIntensityRange);
// Fill the gap
peakListRow.addPeak(rawDataFile, newPeak);
}
}
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine):
class LocalMaxMassDetector, method getMassValues.
/**
 * Detects local intensity maxima in a spectrum and returns those above the configured noise
 * level.
 *
 * Walks the data points left to right, tracking whether the signal is currently ascending.
 * When the signal stops ascending, the current point becomes the candidate peak top; the peak
 * is closed (and possibly emitted) when the signal starts rising again or drops to zero.
 *
 * @param dataPoints spectrum data points, assumed ordered by m/z
 * @param parameters parameter set providing the noise level threshold
 * @return the detected peak tops above the noise level, in input order
 */
public DataPoint[] getMassValues(DataPoint[] dataPoints, ParameterSet parameters) {
    double noiseLevel = parameters.getParameter(LocalMaxMassDetectorParameters.noiseLevel).getValue();
    // List of found mz peaks
    ArrayList<DataPoint> mzPeaks = new ArrayList<DataPoint>();
    // Top data point of current m/z peak; non-null whenever ascending == false
    DataPoint currentMzPeakTop = null;
    // True if we haven't reached the current local maximum yet
    boolean ascending = true;
    // Iterate through all data points (the last point has no right neighbor and is never a top)
    for (int i = 0; i < dataPoints.length - 1; i++) {
        boolean nextIsBigger = dataPoints[i + 1].getIntensity() > dataPoints[i].getIntensity();
        boolean nextIsZero = dataPoints[i + 1].getIntensity() == 0;
        boolean currentIsZero = dataPoints[i].getIntensity() == 0;
        // Ignore zero intensity regions
        if (currentIsZero)
            continue;
        // Check for local maximum
        if (ascending && (!nextIsBigger)) {
            currentMzPeakTop = dataPoints[i];
            ascending = false;
            continue;
        }
        // Check for the end of the peak
        if ((!ascending) && (nextIsBigger || nextIsZero)) {
            // BUGFIX: this assertion used to sit on the fall-through path before this branch,
            // where it fired (with -ea) on a strictly ascending prefix even though the peak top
            // was legitimately not yet assigned. Here ascending == false guarantees it was set.
            assert currentMzPeakTop != null;
            // Add the m/z peak if it is above the noise level
            if (currentMzPeakTop.getIntensity() > noiseLevel) {
                mzPeaks.add(currentMzPeakTop);
            }
            // Reset and start with new peak
            ascending = true;
        }
    }
    return mzPeaks.toArray(new DataPoint[0]);
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine):
class RecursiveMassDetector, method recursiveThreshold.
/**
 * This function searches for maxima from given part of a spectrum.
 *
 * Scans [startInd, stopInd) for contiguous runs of points above curentNoiseLevel. A run whose
 * m/z width lies within [minimumMZPeakWidth, maximumMZPeakWidth] contributes its highest data
 * point to mzPeaks; a run that is too wide is re-searched recursively with the noise level
 * raised to the run's local minimum.
 *
 * @param mzPeaks accumulator for detected peak-top data points (sorted set)
 * @param dataPoints spectrum data points, assumed ordered by m/z
 * @param startInd first index (inclusive) of the segment to search
 * @param stopInd last index (exclusive) of the segment to search
 * @param curentNoiseLevel intensity threshold for this recursion level
 * @param minimumMZPeakWidth minimum accepted peak width in m/z units
 * @param maximumMZPeakWidth maximum accepted peak width in m/z units
 * @param recuLevel recursion depth; at depth > 0 the method returns after the first accepted peak
 * @return index where scanning stopped (start of the next region, or stopInd)
 */
private int recursiveThreshold(TreeSet<DataPoint> mzPeaks, DataPoint[] dataPoints, int startInd, int stopInd, double curentNoiseLevel, double minimumMZPeakWidth, double maximumMZPeakWidth, int recuLevel) {
// logger.finest(" Level of recursion " + recuLevel);
// NOTE(review): this vector is filled and cleared below but its contents are never read —
// effectively dead code kept for behavior parity.
Vector<DataPoint> RawDataPointsInds = new Vector<DataPoint>();
int peakStartInd, peakStopInd, peakMaxInd;
double peakWidthMZ;
for (int ind = startInd; ind < stopInd; ind++) {
boolean currentIsBiggerNoise = dataPoints[ind].getIntensity() > curentNoiseLevel;
double localMinimum = Double.MAX_VALUE;
// Ignore intensities below curentNoiseLevel
if (!currentIsBiggerNoise) {
continue;
}
// Add initial point of the peak
peakStartInd = ind;
peakMaxInd = peakStartInd;
// While peak is on
while ((ind < stopInd) && (dataPoints[ind].getIntensity() > curentNoiseLevel)) {
// NOTE(review): reads dataPoints[ind - 1] and dataPoints[ind + 1]; assumes the segment
// never starts at index 0 and stopInd < dataPoints.length — verify callers guarantee this.
boolean isLocalMinimum = (dataPoints[ind - 1].getIntensity() > dataPoints[ind].getIntensity()) && (dataPoints[ind].getIntensity() < dataPoints[ind + 1].getIntensity());
// Check if this is the minimum point of the peak
if (isLocalMinimum && (dataPoints[ind].getIntensity() < localMinimum))
localMinimum = dataPoints[ind].getIntensity();
// Check if this is the maximum point of the peak
if (dataPoints[ind].getIntensity() > dataPoints[peakMaxInd].getIntensity())
peakMaxInd = ind;
// Forming the DataPoint array that defines this peak
RawDataPointsInds.add(dataPoints[ind]);
ind++;
}
// Add ending point of the peak
peakStopInd = ind;
// NOTE(review): peakStopInd may equal stopInd here; if stopInd == dataPoints.length this
// indexes one past the end — confirm against callers.
peakWidthMZ = dataPoints[peakStopInd].getMZ() - dataPoints[peakStartInd].getMZ();
// Verify width of the peak
if ((peakWidthMZ >= minimumMZPeakWidth) && (peakWidthMZ <= maximumMZPeakWidth)) {
// Declare a new MzPeak with intensity equal to max intensity
// data point
mzPeaks.add(dataPoints[peakMaxInd]);
if (recuLevel > 0) {
// return stop index and beginning of the next peak
return ind;
}
}
RawDataPointsInds.clear();
// Peak is too wide: re-search it recursively with the noise level raised to its local minimum
if (peakWidthMZ > maximumMZPeakWidth) {
if (localMinimum < Double.MAX_VALUE) {
ind = recursiveThreshold(mzPeaks, dataPoints, peakStartInd, peakStopInd, localMinimum, minimumMZPeakWidth, maximumMZPeakWidth, recuLevel + 1);
}
}
}
// return stop index
return stopInd;
}
End of aggregated usage examples.