Usage example of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
From the class SavitzkyGolayPeakDetector, method resolvePeaks:
@Override
public ResolvedPeak[] resolvePeaks(final Feature chromatogram, ParameterSet parameters,
    RSessionWrapper rSession, CenterFunction mzCenterFunction, double msmsRange,
    double rTRangeMSMS) {

  // Gather per-scan retention times and intensities for the chromatogram;
  // scans without a data point contribute zero intensity.
  final int[] scanNumbers = chromatogram.getScanNumbers();
  final int scanCount = scanNumbers.length;
  final RawDataFile dataFile = chromatogram.getDataFile();
  final double[] retentionTimes = new double[scanCount];
  final double[] intensities = new double[scanCount];
  for (int index = 0; index < scanCount; index++) {
    final int scan = scanNumbers[index];
    retentionTimes[index] = dataFile.getScan(scan).getRetentionTime();
    final DataPoint point = chromatogram.getDataPoint(scan);
    intensities[index] = (point == null) ? 0.0 : point.getIntensity();
  }

  // Intensity statistics: maximum and mean over all scans.
  double highestIntensity = 0.0;
  double meanIntensity = 0.0;
  for (final double value : intensities) {
    if (value > highestIntensity) {
      highestIntensity = value;
    }
    meanIntensity += value;
  }
  meanIntensity /= scanCount;

  final List<Feature> resolvedPeaks = new ArrayList<Feature>(2);

  // Chromatograms whose mean intensity exceeds half the maximum are treated
  // as noise; for those the (still empty) result list is returned as-is.
  if (meanIntensity <= highestIntensity / 2.0) {

    // Second derivative of the intensity profile (Savitzky-Golay filter).
    final double[] secondDerivative =
        SGDerivative.calculateDerivative(intensities, false, SG_FILTER_LEVEL);

    // Noise threshold applied to the derivative signal.
    final double noiseThreshold = calcDerivativeThreshold(secondDerivative,
        parameters.getParameter(DERIVATIVE_THRESHOLD_LEVEL).getValue());

    // Detect candidate peaks; peaksSearch expects ascending scan numbers.
    Arrays.sort(scanNumbers);
    final Feature[] resolvedOriginalPeaks = peaksSearch(chromatogram, scanNumbers,
        secondDerivative, noiseThreshold, mzCenterFunction, msmsRange, rTRangeMSMS);

    final Range<Double> peakDuration = parameters.getParameter(PEAK_DURATION).getValue();
    final double minimumPeakHeight = parameters.getParameter(MIN_PEAK_HEIGHT).getValue();

    // Keep only candidates that satisfy the duration and height constraints.
    for (final Feature candidate : resolvedOriginalPeaks) {
      if (peakDuration.contains(RangeUtils.rangeLength(candidate.getRawDataPointsRTRange()))
          && candidate.getHeight() >= minimumPeakHeight) {
        resolvedPeaks.add(candidate);
      }
    }
  }

  return resolvedPeaks.toArray(new ResolvedPeak[resolvedPeaks.size()]);
}
Usage example of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
From the class QualityParameters, method PeakFindRTs:
/**
 * Finds the two retention times at which the peak's intensity profile crosses
 * the given intensity level — one on the rising edge (RT &lt;= apex) and one on
 * the falling edge (RT &gt;= apex) — by linear interpolation between the
 * bracketing data points.
 *
 * @param intensity target intensity level to locate on both edges
 * @param rt retention time of the peak apex (used to split the two edges)
 * @param peak the feature whose intensity profile is searched
 * @return a two-element array { risingEdgeRT, fallingEdgeRT }; falls back to
 *         the peak's raw RT range endpoints when no bracket is found
 */
private static double[] PeakFindRTs(double intensity, double rt, Feature peak) {
  double x1 = 0, x2 = 0, x3 = 0, x4 = 0, y1 = 0, y2 = 0, y3 = 0, y4 = 0;
  double lastDiff1 = intensity, lastDiff2 = intensity;
  final int[] scanNumbers = peak.getScanNumbers();
  final RawDataFile dataFile = peak.getDataFile();

  // Scan the interior of the profile for the data points whose intensity is
  // closest to the target, on either side of the apex retention time.
  for (int i = 1; i < scanNumbers.length - 1; i++) {
    final DataPoint dp = peak.getDataPoint(scanNumbers[i]);
    if (dp == null) {
      continue;
    }
    final double currentDiff = Math.abs(intensity - dp.getIntensity());
    final double currentRT = dataFile.getScan(scanNumbers[i]).getRetentionTime();
    // Short-circuit && (the original used bitwise &) so the neighbour
    // data-point lookups run only when the cheap numeric checks already hold.
    if (currentDiff < lastDiff1 && currentDiff > 0 && currentRT <= rt
        && peak.getDataPoint(scanNumbers[i + 1]) != null) {
      // Best bracket so far on the rising edge: (x1,y1) -> (x2,y2).
      x1 = currentRT;
      y1 = dp.getIntensity();
      x2 = dataFile.getScan(scanNumbers[i + 1]).getRetentionTime();
      y2 = peak.getDataPoint(scanNumbers[i + 1]).getIntensity();
      lastDiff1 = currentDiff;
    } else if (currentDiff < lastDiff2 && currentDiff > 0 && currentRT >= rt
        && peak.getDataPoint(scanNumbers[i - 1]) != null) {
      // Best bracket so far on the falling edge: (x3,y3) -> (x4,y4).
      x3 = dataFile.getScan(scanNumbers[i - 1]).getRetentionTime();
      y3 = peak.getDataPoint(scanNumbers[i - 1]).getIntensity();
      x4 = currentRT;
      y4 = dp.getIntensity();
      lastDiff2 = currentDiff;
    }
  }

  // Interpolate the RT at the target intensity on each edge via the line
  // through the bracketing points.
  final double rt1;
  if (y1 > 0) {
    final double slope = (y2 - y1) / (x2 - x1);
    final double intercept = y1 - (slope * x1);
    rt1 = (intensity - intercept) / slope;
  } else if (x2 > 0) {
    // Straight drop of peak to 0 intensity: use the bracketing scan's RT.
    rt1 = x2;
  } else {
    // No bracket found; fall back to the raw RT range boundary.
    rt1 = peak.getRawDataPointsRTRange().lowerEndpoint();
  }

  final double rt2;
  if (y4 > 0) {
    final double slope = (y4 - y3) / (x4 - x3);
    final double intercept = y3 - (slope * x3);
    rt2 = (intensity - intercept) / slope;
  } else if (x3 > 0) {
    // Straight drop of peak to 0 intensity: use the bracketing scan's RT.
    rt2 = x3;
  } else {
    rt2 = peak.getRawDataPointsRTRange().upperEndpoint();
  }

  return new double[] { rt1, rt2 };
}
Usage example of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
From the class ScatterPlotAxisSelection, method generateOptionsForPeakList:
/**
 * Builds the selectable axis options for the scatter plot: one option per raw
 * data file of the peak list, followed by one option per (combo parameter,
 * choice) pair defined in the current project.
 *
 * @param peakList the peak list whose raw data files seed the options
 * @return all generated options, in file order then parameter/choice order
 */
static ScatterPlotAxisSelection[] generateOptionsForPeakList(PeakList peakList) {
  // ArrayList instead of the legacy synchronized Vector: the collection is
  // method-local, so no synchronization is needed.
  final List<ScatterPlotAxisSelection> options = new ArrayList<ScatterPlotAxisSelection>();

  for (RawDataFile dataFile : peakList.getRawDataFiles()) {
    options.add(new ScatterPlotAxisSelection(dataFile));
  }

  for (UserParameter<?, ?> parameter : MZmineCore.getProjectManager().getCurrentProject()
      .getParameters()) {
    // Only combo parameters offer an enumerable set of choices.
    if (!(parameter instanceof ComboParameter)) {
      continue;
    }
    for (Object value : ((ComboParameter<?>) parameter).getChoices()) {
      options.add(new ScatterPlotAxisSelection(parameter, value));
    }
  }

  return options.toArray(new ScatterPlotAxisSelection[0]);
}
Usage example of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
From the class ScatterPlotAxisSelection, method getValue:
/**
 * Returns the value this axis selection represents for the given row: the
 * peak area in the configured file, or — for parameter-based selections — the
 * average peak area over all files whose project parameter value matches the
 * configured choice.
 *
 * @param row the peak list row to evaluate
 * @return the selected area value, or 0 when no matching peak exists
 */
public double getValue(PeakListRow row) {
  // File-based selection: report that file's peak area (0 when absent).
  if (file != null) {
    final Feature peak = row.getPeak(file);
    return (peak == null) ? 0 : peak.getArea();
  }

  // Parameter-based selection: average the areas of the matching files.
  double areaSum = 0;
  int matchingFiles = 0;
  for (RawDataFile dataFile : row.getRawDataFiles()) {
    final Object fileValue =
        MZmineCore.getProjectManager().getCurrentProject().getParameterValue(parameter, dataFile);
    // Skip files without a value or whose value differs from the selection.
    if (fileValue == null || !fileValue.toString().equals(parameterValue.toString())) {
      continue;
    }
    final Feature peak = row.getPeak(dataFile);
    if (peak != null && peak.getArea() > 0) {
      areaSum += peak.getArea();
      matchingFiles++;
    }
  }

  return (matchingFiles == 0) ? 0 : areaSum / matchingFiles;
}
Usage example of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
From the class PeakFinderTask, method fillList:
/**
 * Fills the processed peak list: existing peaks are copied through, and for
 * every row that lacks a peak in a given raw data file a Gap is created whose
 * expected retention time is predicted by a per-file-pair RT regression.
 * Each file's scans are then offered to its gaps so they can estimate the
 * missing peaks.
 *
 * @param masterList when true, the master sample's file is the fill target
 *        (datafile1) paired against each other file; when false the roles of
 *        the two files in each pair are swapped
 */
public void fillList(boolean masterList) {
// Pair every non-master raw data file with the master sample.
for (int i = 0; i < peakList.getNumberOfRawDataFiles(); i++) {
if (i != masterSample) {
RawDataFile datafile1;
RawDataFile datafile2;
// Direction of the pairing depends on the fill direction: datafile1 is
// always the file being gap-filled, datafile2 provides the reference RTs.
if (masterList) {
datafile1 = peakList.getRawDataFile(masterSample);
datafile2 = peakList.getRawDataFile(i);
} else {
datafile1 = peakList.getRawDataFile(i);
datafile2 = peakList.getRawDataFile(masterSample);
}
// Build an RT regression from the rows that have peaks in both files.
RegressionInfo info = new RegressionInfo();
for (PeakListRow row : peakList.getRows()) {
Feature peaki = row.getPeak(datafile1);
Feature peake = row.getPeak(datafile2);
if (peaki != null && peake != null) {
info.addData(peake.getRT(), peaki.getRT());
}
}
info.setFunction();
// Canceled?
if (isCanceled()) {
return;
}
// Collect a Gap for every row missing a peak in datafile1; rows that
// already have a peak are copied into the processed list unchanged.
Vector<Gap> gaps = new Vector<Gap>();
// if necessary
for (int row = 0; row < peakList.getNumberOfRows(); row++) {
PeakListRow sourceRow = peakList.getRow(row);
PeakListRow newRow = processedPeakList.getRow(row);
Feature sourcePeak = sourceRow.getPeak(datafile1);
if (sourcePeak == null) {
// Create a new gap
double mz = sourceRow.getAverageMZ();
// rt2 stays -1 when no reference peak exists in the counterpart file.
double rt2 = -1;
// The reference RT comes from the counterpart file; which list holds
// it depends on the fill direction chosen above.
if (!masterList) {
if (processedPeakList.getRow(row).getPeak(datafile2) != null) {
rt2 = processedPeakList.getRow(row).getPeak(datafile2).getRT();
}
} else {
if (peakList.getRow(row).getPeak(datafile2) != null) {
rt2 = peakList.getRow(row).getPeak(datafile2).getRT();
}
}
if (rt2 > -1) {
// Predict where the missing peak should elute in datafile1.
double rt = info.predict(rt2);
if (rt != -1) {
Range<Double> mzRange = mzTolerance.getToleranceRange(mz);
Range<Double> rtRange = rtTolerance.getToleranceRange(rt);
Gap newGap = new Gap(newRow, datafile1, mzRange, rtRange, intTolerance);
gaps.add(newGap);
}
}
} else {
newRow.addPeak(datafile1, sourcePeak);
}
}
// Stop processing this file if there are no gaps
if (gaps.size() == 0) {
// Still advance the progress counter by this file's scan count.
processedScans.addAndGet(datafile1.getNumOfScans());
continue;
}
// Get all scans of this data file
// NOTE(review): argument 1 presumably selects MS level 1 scans — confirm
// against RawDataFile.getScanNumbers(int).
int[] scanNumbers = datafile1.getScanNumbers(1);
// Process each scan
for (int scanNumber : scanNumbers) {
// Canceled?
if (isCanceled()) {
return;
}
// Get the scan
Scan scan = datafile1.getScan(scanNumber);
// Feed this scan to all gaps
for (Gap gap : gaps) {
gap.offerNextScan(scan);
}
processedScans.incrementAndGet();
}
// Finalize gaps
for (Gap gap : gaps) {
gap.noMoreOffers();
}
}
}
}
End of aggregated usage examples.