Use of net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod in project mzmine2 by mzmine.
The class SmoothingTask, method run.
@Override
public void run() {
setStatus(TaskStatus.PROCESSING);
try {
// Get filter weights.
final double[] filterWeights = SavitzkyGolayFilter.getNormalizedWeights(filterWidth);
// Create new feature list
newPeakList = new SimplePeakList(origPeakList + " " + suffix, origPeakList.getRawDataFiles());
// Process each row.
for (final PeakListRow row : origPeakList.getRows()) {
if (!isCanceled()) {
// Create a new peak-list row.
final int originalID = row.getID();
final PeakListRow newRow = new SimplePeakListRow(originalID);
// Process each peak.
for (final Feature peak : row.getPeaks()) {
if (!isCanceled()) {
// Copy original peak intensities.
final int[] scanNumbers = peak.getScanNumbers();
final int numScans = scanNumbers.length;
final double[] intensities = new double[numScans];
for (int i = 0; i < numScans; i++) {
final DataPoint dataPoint = peak.getDataPoint(scanNumbers[i]);
intensities[i] = dataPoint == null ? 0.0 : dataPoint.getIntensity();
}
// Smooth peak.
final double[] smoothed = convolve(intensities, filterWeights);
// Measure peak (max, ranges, area etc.)
final RawDataFile dataFile = peak.getDataFile();
final DataPoint[] newDataPoints = new DataPoint[numScans];
double maxIntensity = 0.0;
int maxScanNumber = -1;
DataPoint maxDataPoint = null;
Range<Double> intensityRange = null;
double area = 0.0;
for (int i = 0; i < numScans; i++) {
final int scanNumber = scanNumbers[i];
final DataPoint dataPoint = peak.getDataPoint(scanNumber);
final double intensity = smoothed[i];
if (dataPoint != null && intensity > 0.0) {
// Create a new data point.
final double mz = dataPoint.getMZ();
final double rt = dataFile.getScan(scanNumber).getRetentionTime();
final DataPoint newDataPoint = new SimpleDataPoint(mz, intensity);
newDataPoints[i] = newDataPoint;
// Track maximum intensity data point.
if (intensity > maxIntensity) {
maxIntensity = intensity;
maxScanNumber = scanNumber;
maxDataPoint = newDataPoint;
}
// Update ranges.
if (intensityRange == null) {
intensityRange = Range.singleton(intensity);
} else {
intensityRange = intensityRange.span(Range.singleton(intensity));
}
// Accumulate peak area.
if (i != 0) {
final DataPoint lastDP = newDataPoints[i - 1];
final double lastIntensity = lastDP == null ? 0.0 : lastDP.getIntensity();
final double lastRT = dataFile.getScan(scanNumbers[i - 1]).getRetentionTime();
area += (rt - lastRT) * 60d * (intensity + lastIntensity) / 2.0;
}
}
}
assert maxDataPoint != null;
if (!isCanceled() && maxScanNumber >= 0) {
// Create a new peak.
newRow.addPeak(dataFile, new SimpleFeature(dataFile, maxDataPoint.getMZ(), peak.getRT(), maxIntensity, area, scanNumbers, newDataPoints, peak.getFeatureStatus(), maxScanNumber, peak.getMostIntenseFragmentScanNumber(), peak.getAllMS2FragmentScanNumbers(), peak.getRawDataPointsRTRange(), peak.getRawDataPointsMZRange(), intensityRange));
}
}
}
newPeakList.addRow(newRow);
progress++;
}
}
// Finish up.
if (!isCanceled()) {
// Add new peak-list to the project.
project.addPeakList(newPeakList);
// Add quality parameters to peaks
QualityParameters.calculateQualityParameters(newPeakList);
// Remove the original peak-list if requested.
if (removeOriginal) {
project.removePeakList(origPeakList);
}
// Copy previously applied methods
for (final PeakListAppliedMethod method : origPeakList.getAppliedMethods()) {
newPeakList.addDescriptionOfAppliedTask(method);
}
// Add task description to peak-list.
newPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Peaks smoothed by Savitzky-Golay filter", parameters));
LOG.finest("Finished peak smoothing: " + progress + " rows processed");
setStatus(TaskStatus.FINISHED);
}
} catch (Throwable t) {
LOG.log(Level.SEVERE, "Smoothing error", t);
setErrorMessage(t.getMessage());
setStatus(TaskStatus.ERROR);
}
}
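The run method above relies on a private convolve helper that is not part of this snippet. As a rough sketch of what such a helper can look like (assuming a simple weighted sum with the window clamped at the chromatogram edges; the actual mzmine2 implementation may differ), the normalized Savitzky-Golay weights would be applied like this:

// Hedged sketch of the convolution helper used in "final double[] smoothed = convolve(...)" above.
// Assumes weights.length is odd and the weights are already normalized.
private static double[] convolve(final double[] intensities, final double[] weights) {
    final int fullWidth = weights.length;
    final int halfWidth = (fullWidth - 1) / 2;
    final int numPoints = intensities.length;
    final double[] convolved = new double[numPoints];
    for (int i = 0; i < numPoints; i++) {
        double sum = 0.0;
        final int k = i - halfWidth; // index of the left-most intensity under the filter window
        // Clamp j so that k + j never falls outside the intensities array.
        for (int j = Math.max(0, -k); j < Math.min(fullWidth, numPoints - k); j++) {
            sum += intensities[k + j] * weights[j];
        }
        convolved[i] = sum;
    }
    return convolved;
}

Near the chromatogram edges this sketch simply drops the out-of-range weights, so edge intensities are slightly attenuated; other edge-handling strategies (mirroring, zero padding) are equally possible.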
Use of net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod in project mzmine2 by mzmine.
The class DeconvolutionTask, method resolvePeaks.
/**
* Deconvolve each chromatogram in the feature list into separate peaks.
*
* @param peakList the feature list holding the chromatograms to deconvolve.
* @param rSession the R session wrapper passed to R-dependent peak resolvers.
* @return a new feature list holding the resolved peaks.
* @throws RSessionWrapperException
*/
private PeakList resolvePeaks(final PeakList peakList, RSessionWrapper rSession) throws RSessionWrapperException {
// Get data file information.
final RawDataFile dataFile = peakList.getRawDataFile(0);
// Peak resolver.
final MZmineProcessingStep<PeakResolver> resolver = parameters.getParameter(PEAK_RESOLVER).getValue();
// Set the MS/MS pairing range
this.setMSMSRange = parameters.getParameter(mzRangeMSMS).getValue();
if (setMSMSRange)
this.msmsRange = parameters.getParameter(mzRangeMSMS).getEmbeddedParameter().getValue();
else
this.msmsRange = 0;
this.setMSMSRT = parameters.getParameter(RetentionTimeMSMS).getValue();
if (setMSMSRT)
this.RTRangeMSMS = parameters.getParameter(RetentionTimeMSMS).getEmbeddedParameter().getValue();
else
this.RTRangeMSMS = 0;
// Create new feature list.
final PeakList resolvedPeaks = new SimplePeakList(peakList + " " + parameters.getParameter(SUFFIX).getValue(), dataFile);
// Load previous applied methods.
for (final PeakListAppliedMethod method : peakList.getAppliedMethods()) {
resolvedPeaks.addDescriptionOfAppliedTask(method);
}
// Add task description to feature list.
resolvedPeaks.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Peak deconvolution by " + resolver, resolver.getParameterSet()));
// Initialise counters.
processedRows = 0;
totalRows = peakList.getNumberOfRows();
int peakId = 1;
// Process each chromatogram.
final PeakListRow[] peakListRows = peakList.getRows();
final int chromatogramCount = peakListRows.length;
for (int index = 0; !isCanceled() && index < chromatogramCount; index++) {
final PeakListRow currentRow = peakListRows[index];
final Feature chromatogram = currentRow.getPeak(dataFile);
// Resolve peaks.
final PeakResolver resolverModule = resolver.getModule();
final ParameterSet resolverParams = resolver.getParameterSet();
final ResolvedPeak[] peaks = resolverModule.resolvePeaks(chromatogram, resolverParams, rSession, mzCenterFunction, msmsRange, RTRangeMSMS);
// Add peaks to the new feature list.
for (final ResolvedPeak peak : peaks) {
peak.setParentChromatogramRowID(currentRow.getID());
final PeakListRow newRow = new SimplePeakListRow(peakId++);
newRow.addPeak(dataFile, peak);
newRow.setPeakInformation(peak.getPeakInformation());
resolvedPeaks.addRow(newRow);
}
processedRows++;
}
return resolvedPeaks;
}
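The central call, resolverModule.resolvePeaks(chromatogram, resolverParams, rSession, mzCenterFunction, msmsRange, RTRangeMSMS), is the only place this task touches the resolver. Read off that call site, the contract it assumes looks roughly like the hypothetical interface below; this is an inference for orientation only, not the actual net.sf.mzmine PeakResolver interface, whose parameter types and ordering may differ:

// Hypothetical contract reconstructed from the call site above (names and types are assumptions).
public interface ChromatogramResolver {
    /**
     * Splits one chromatogram (a Feature spanning many scans) into individual resolved peaks.
     * The R session may be unused by resolvers that do not call out to R; the last two values
     * carry the optional MS/MS pairing tolerances configured in resolvePeaks(...) above.
     */
    ResolvedPeak[] resolvePeaks(Feature chromatogram, ParameterSet parameters,
            RSessionWrapper rSession, CenterFunction mzCenterFunction,
            double msmsPairingMzRange, double msmsPairingRtRange) throws RSessionWrapperException;
}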
Use of net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod in project mzmine2 by mzmine.
The class LinearNormalizerTask, method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Running linear normalizer");
// This hashtable maps rows from original alignment result to rows of
// the normalized alignment
Hashtable<PeakListRow, SimplePeakListRow> rowMap = new Hashtable<PeakListRow, SimplePeakListRow>();
// Create new feature list
normalizedPeakList = new SimplePeakList(originalPeakList + " " + suffix, originalPeakList.getRawDataFiles());
// Loop through all raw data files and find the peak with the biggest height
double maxOriginalHeight = 0.0;
for (RawDataFile file : originalPeakList.getRawDataFiles()) {
for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
Feature p = originalpeakListRow.getPeak(file);
if (p != null) {
if (maxOriginalHeight <= p.getHeight())
maxOriginalHeight = p.getHeight();
}
}
}
// Loop through all raw data files, and normalize peak values
for (RawDataFile file : originalPeakList.getRawDataFiles()) {
// Cancel?
if (isCanceled()) {
return;
}
// Determine normalization type and calculate normalization factor
double normalizationFactor = 1.0;
// - normalization by average peak intensity
if (normalizationType == NormalizationType.AverageIntensity) {
double intensitySum = 0;
int intensityCount = 0;
for (PeakListRow peakListRow : originalPeakList.getRows()) {
Feature p = peakListRow.getPeak(file);
if (p != null) {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
intensitySum += p.getHeight();
} else {
intensitySum += p.getArea();
}
intensityCount++;
}
}
normalizationFactor = intensitySum / (double) intensityCount;
}
// - normalization by average squared peak intensity
if (normalizationType == NormalizationType.AverageSquaredIntensity) {
double intensitySum = 0.0;
int intensityCount = 0;
for (PeakListRow peakListRow : originalPeakList.getRows()) {
Feature p = peakListRow.getPeak(file);
if (p != null) {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
intensitySum += (p.getHeight() * p.getHeight());
} else {
intensitySum += (p.getArea() * p.getArea());
}
intensityCount++;
}
}
normalizationFactor = intensitySum / (double) intensityCount;
}
// - normalization by maximum peak intensity
if (normalizationType == NormalizationType.MaximumPeakHeight) {
double maximumIntensity = 0.0;
for (PeakListRow peakListRow : originalPeakList.getRows()) {
Feature p = peakListRow.getPeak(file);
if (p != null) {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
if (maximumIntensity < p.getHeight())
maximumIntensity = p.getHeight();
} else {
if (maximumIntensity < p.getArea())
maximumIntensity = p.getArea();
}
}
}
normalizationFactor = maximumIntensity;
}
// - normalization by total raw signal
if (normalizationType == NormalizationType.TotalRawSignal) {
normalizationFactor = 0;
for (int scanNumber : file.getScanNumbers(1)) {
Scan scan = file.getScan(scanNumber);
normalizationFactor += scan.getTIC();
}
}
// Readjust normalization factor so that maximum height will be
// equal to maximumOverallPeakHeightAfterNormalization after
// normalization
double maxNormalizedHeight = maxOriginalHeight / normalizationFactor;
normalizationFactor = normalizationFactor * maxNormalizedHeight / maximumOverallPeakHeightAfterNormalization;
// Normalize all peak intensities using the normalization factor
for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
// Cancel?
if (isCanceled()) {
return;
}
Feature originalPeak = originalpeakListRow.getPeak(file);
if (originalPeak != null) {
SimpleFeature normalizedPeak = new SimpleFeature(originalPeak);
PeakUtils.copyPeakProperties(originalPeak, normalizedPeak);
double normalizedHeight = originalPeak.getHeight() / normalizationFactor;
double normalizedArea = originalPeak.getArea() / normalizationFactor;
normalizedPeak.setHeight(normalizedHeight);
normalizedPeak.setArea(normalizedArea);
SimplePeakListRow normalizedRow = rowMap.get(originalpeakListRow);
if (normalizedRow == null) {
normalizedRow = new SimplePeakListRow(originalpeakListRow.getID());
PeakUtils.copyPeakListRowProperties(originalpeakListRow, normalizedRow);
rowMap.put(originalpeakListRow, normalizedRow);
}
normalizedRow.addPeak(file, normalizedPeak);
}
}
// Progress
processedDataFiles++;
}
// Finally add all normalized rows to normalized alignment result
for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
SimplePeakListRow normalizedRow = rowMap.get(originalpeakListRow);
if (normalizedRow == null)
continue;
normalizedPeakList.addRow(normalizedRow);
}
// Add new peaklist to the project
project.addPeakList(normalizedPeakList);
// Load previous applied methods
for (PeakListAppliedMethod proc : originalPeakList.getAppliedMethods()) {
normalizedPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
normalizedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Linear normalization of by " + normalizationType, parameters));
// Remove the original peaklist if requested
if (removeOriginal)
project.removePeakList(originalPeakList);
logger.info("Finished linear normalizer");
setStatus(TaskStatus.FINISHED);
}
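The factor readjustment in the middle of the loop (maxNormalizedHeight = maxOriginalHeight / normalizationFactor, then rescaling by maximumOverallPeakHeightAfterNormalization) is easier to follow with concrete numbers. A small sketch with made-up values, mirroring the two statements above:

// Illustrative values only; none of these numbers come from a real data set.
double maxOriginalHeight = 4.0e6;                           // tallest peak across all files
double normalizationFactor = 2.0e5;                         // e.g. average peak intensity of this file
double maximumOverallPeakHeightAfterNormalization = 1.0e5;  // configured target maximum height

double maxNormalizedHeight = maxOriginalHeight / normalizationFactor;  // 20.0
normalizationFactor = normalizationFactor * maxNormalizedHeight
        / maximumOverallPeakHeightAfterNormalization;                  // 40.0

// The tallest original peak now normalizes exactly to the configured target:
System.out.println(maxOriginalHeight / normalizationFactor);           // 100000.0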
Use of net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod in project mzmine2 by mzmine.
The class RTCalibrationTask, method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Running retention time normalizer");
// First we need to find standards by iterating through the first feature list
totalRows = originalPeakLists[0].getNumberOfRows();
// Create new feature lists
normalizedPeakLists = new SimplePeakList[originalPeakLists.length];
for (int i = 0; i < originalPeakLists.length; i++) {
normalizedPeakLists[i] = new SimplePeakList(originalPeakLists[i] + " " + suffix, originalPeakLists[i].getRawDataFiles());
// Remember how many rows we need to normalize
totalRows += originalPeakLists[i].getNumberOfRows();
}
// The goodStandards Vector contains identified standard rows, represented
// by arrays. Each array has the same length as the originalPeakLists array;
// its items reference the corresponding standard peak row in each PeakList.
Vector<PeakListRow[]> goodStandards = new Vector<PeakListRow[]>();
// Iterate the first peaklist
standardIteration: for (PeakListRow candidate : originalPeakLists[0].getRows()) {
// Cancel?
if (isCanceled()) {
return;
}
processedRows++;
// Check that all peaks of this row have proper height
for (Feature p : candidate.getPeaks()) {
if (p.getHeight() < minHeight)
continue standardIteration;
}
PeakListRow[] goodStandardCandidate = new PeakListRow[originalPeakLists.length];
goodStandardCandidate[0] = candidate;
double candidateMZ = candidate.getAverageMZ();
double candidateRT = candidate.getAverageRT();
// Find matching rows in remaining peaklists
for (int i = 1; i < originalPeakLists.length; i++) {
Range<Double> rtRange = rtTolerance.getToleranceRange(candidateRT);
Range<Double> mzRange = mzTolerance.getToleranceRange(candidateMZ);
PeakListRow[] matchingRows = originalPeakLists[i].getRowsInsideScanAndMZRange(rtRange, mzRange);
// If there is not exactly one matching row, move to the next standard candidate
if (matchingRows.length != 1)
continue standardIteration;
// Check that all peaks of this row have proper height
for (Feature p : matchingRows[0].getPeaks()) {
if (p.getHeight() < minHeight)
continue standardIteration;
}
// Save reference to matching peak in this feature list
goodStandardCandidate[i] = matchingRows[0];
}
// If we found a match for the same peak in all peak lists, mark it as a good standard
goodStandards.add(goodStandardCandidate);
logger.finest("Found a good standard for RT normalization: " + candidate);
}
// Check if we have any standards
if (goodStandards.size() == 0) {
setStatus(TaskStatus.ERROR);
setErrorMessage("No good standard peak was found");
return;
}
// Calculate average retention times of all standards
double[] averagedRTs = new double[goodStandards.size()];
for (int i = 0; i < goodStandards.size(); i++) {
double rtAverage = 0;
for (PeakListRow row : goodStandards.get(i)) rtAverage += row.getAverageRT();
rtAverage /= (double) originalPeakLists.length;
averagedRTs[i] = rtAverage;
}
// Normalize each feature list
for (int peakListIndex = 0; peakListIndex < originalPeakLists.length; peakListIndex++) {
// Get standard rows for this feature list only
PeakListRow[] standards = new PeakListRow[goodStandards.size()];
for (int i = 0; i < goodStandards.size(); i++) {
standards[i] = goodStandards.get(i)[peakListIndex];
}
normalizePeakList(originalPeakLists[peakListIndex], normalizedPeakLists[peakListIndex], standards, averagedRTs);
}
// Cancel?
if (isCanceled()) {
return;
}
for (int i = 0; i < originalPeakLists.length; i++) {
project.addPeakList(normalizedPeakLists[i]);
// Load previous applied methods
for (PeakListAppliedMethod proc : originalPeakLists[i].getAppliedMethods()) {
normalizedPeakLists[i].addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
normalizedPeakLists[i].addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Retention time normalization", parameters));
// Remove the original peaklists if requested
if (removeOriginal)
project.removePeakList(originalPeakLists[i]);
}
logger.info("Finished retention time normalizer");
setStatus(TaskStatus.FINISHED);
}
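The per-list work is delegated to normalizePeakList(...), which is not included in this snippet. One plausible core of such a method, assuming the standards and their averaged target RTs are ordered by retention time, is a piecewise-linear correction between the two bracketing standards; this is a hedged sketch, not the actual mzmine2 implementation:

// Hedged sketch of an RT correction helper (assumes standards[] and normalizedRTs[] are sorted by RT).
private static double normalizeRT(double originalRT, PeakListRow[] standards, double[] normalizedRTs) {
    int last = standards.length - 1;
    // Before the first or after the last standard, apply that standard's constant shift.
    if (originalRT <= standards[0].getAverageRT())
        return originalRT + (normalizedRTs[0] - standards[0].getAverageRT());
    if (originalRT >= standards[last].getAverageRT())
        return originalRT + (normalizedRTs[last] - standards[last].getAverageRT());
    // Otherwise interpolate linearly between the two bracketing standards.
    int i = 1;
    while (standards[i].getAverageRT() < originalRT) i++;
    double prevRT = standards[i - 1].getAverageRT();
    double nextRT = standards[i].getAverageRT();
    double weight = (originalRT - prevRT) / (nextRT - prevRT);
    return normalizedRTs[i - 1] + weight * (normalizedRTs[i] - normalizedRTs[i - 1]);
}

The real method also has to copy rows and peaks into the new feature list and guard against two standards sharing the same retention time; the sketch only shows the interpolation itself.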
Use of net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod in project mzmine2 by mzmine.
The class StandardCompoundNormalizerTask, method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.finest("Starting standard compound normalization of " + originalPeakList + " using " + normalizationType + " (total " + standardRows.length + " standard peaks)");
// Check if we have standards
if (standardRows.length == 0) {
setErrorMessage("No internal standard peaks selected");
setStatus(TaskStatus.ERROR);
return;
}
// Initialize new alignment result for the normalized result
normalizedPeakList = new SimplePeakList(originalPeakList + " " + suffix, originalPeakList.getRawDataFiles());
// Copy raw data files from original alignment result to new alignment
// result
totalRows = originalPeakList.getNumberOfRows();
// Loop through all rows
rowIteration: for (PeakListRow row : originalPeakList.getRows()) {
// Cancel ?
if (isCanceled()) {
return;
}
// Do not add the standard rows to the new peaklist
for (int i = 0; i < standardRows.length; i++) {
if (row == standardRows[i]) {
processedRows++;
continue rowIteration;
}
}
// Copy comment and identification
SimplePeakListRow normalizedRow = new SimplePeakListRow(row.getID());
PeakUtils.copyPeakListRowProperties(row, normalizedRow);
// Get m/z and RT of the current row
double mz = row.getAverageMZ();
double rt = row.getAverageRT();
// Loop through all raw data files
for (RawDataFile file : originalPeakList.getRawDataFiles()) {
double[] normalizationFactors = null;
double[] normalizationFactorWeights = null;
if (normalizationType == StandardUsageType.Nearest) {
// Search for nearest standard
PeakListRow nearestStandardRow = null;
double nearestStandardRowDistance = Double.MAX_VALUE;
for (int standardRowIndex = 0; standardRowIndex < standardRows.length; standardRowIndex++) {
PeakListRow standardRow = standardRows[standardRowIndex];
double stdMZ = standardRow.getAverageMZ();
double stdRT = standardRow.getAverageRT();
double distance = MZvsRTBalance * Math.abs(mz - stdMZ) + Math.abs(rt - stdRT);
if (distance <= nearestStandardRowDistance) {
nearestStandardRow = standardRow;
nearestStandardRowDistance = distance;
}
}
assert nearestStandardRow != null;
// Calc and store a single normalization factor
normalizationFactors = new double[1];
normalizationFactorWeights = new double[1];
Feature standardPeak = nearestStandardRow.getPeak(file);
if (standardPeak == null) {
// What to do if standard peak is not available?
normalizationFactors[0] = 1.0;
} else {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
normalizationFactors[0] = standardPeak.getHeight();
} else {
normalizationFactors[0] = standardPeak.getArea();
}
}
logger.finest("Normalizing row #" + row.getID() + " using standard peak " + standardPeak + ", factor " + normalizationFactors[0]);
normalizationFactorWeights[0] = 1.0f;
}
if (normalizationType == StandardUsageType.Weighted) {
// Add all standards as factors, and use distance as weight
normalizationFactors = new double[standardRows.length];
normalizationFactorWeights = new double[standardRows.length];
for (int standardRowIndex = 0; standardRowIndex < standardRows.length; standardRowIndex++) {
PeakListRow standardRow = standardRows[standardRowIndex];
double stdMZ = standardRow.getAverageMZ();
double stdRT = standardRow.getAverageRT();
double distance = MZvsRTBalance * Math.abs(mz - stdMZ) + Math.abs(rt - stdRT);
Feature standardPeak = standardRow.getPeak(file);
if (standardPeak == null) {
// What to do if standard peak is not available?
normalizationFactors[standardRowIndex] = 1.0;
normalizationFactorWeights[standardRowIndex] = 0.0;
} else {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
normalizationFactors[standardRowIndex] = standardPeak.getHeight();
} else {
normalizationFactors[standardRowIndex] = standardPeak.getArea();
}
normalizationFactorWeights[standardRowIndex] = 1 / distance;
}
}
}
assert normalizationFactors != null;
assert normalizationFactorWeights != null;
// Calculate a single normalization factor as weighted average
// of all factors
double weightedSum = 0.0f;
double sumOfWeights = 0.0f;
for (int factorIndex = 0; factorIndex < normalizationFactors.length; factorIndex++) {
weightedSum += normalizationFactors[factorIndex] * normalizationFactorWeights[factorIndex];
sumOfWeights += normalizationFactorWeights[factorIndex];
}
double normalizationFactor = weightedSum / sumOfWeights;
// For simple scaling of the normalized values
normalizationFactor = normalizationFactor / 100.0f;
logger.finest("Normalizing row #" + row.getID() + "[" + file + "] using factor " + normalizationFactor);
// How to handle zero normalization factor?
if (normalizationFactor == 0.0)
normalizationFactor = Double.MIN_VALUE;
// Normalize peak
Feature originalPeak = row.getPeak(file);
if (originalPeak != null) {
SimpleFeature normalizedPeak = new SimpleFeature(originalPeak);
PeakUtils.copyPeakProperties(originalPeak, normalizedPeak);
double normalizedHeight = originalPeak.getHeight() / normalizationFactor;
double normalizedArea = originalPeak.getArea() / normalizationFactor;
normalizedPeak.setHeight(normalizedHeight);
normalizedPeak.setArea(normalizedArea);
normalizedRow.addPeak(file, normalizedPeak);
}
}
normalizedPeakList.addRow(normalizedRow);
processedRows++;
}
// Add new peaklist to the project
project.addPeakList(normalizedPeakList);
// Load previous applied methods
for (PeakListAppliedMethod proc : originalPeakList.getAppliedMethods()) {
normalizedPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
normalizedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Standard compound normalization", parameters));
// Remove the original peaklist if requested
if (removeOriginal)
project.removePeakList(originalPeakList);
logger.info("Finished standard compound normalizer");
setStatus(TaskStatus.FINISHED);
}
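In the Weighted mode above, every standard contributes its height or area as a factor, weighted by the inverse of its combined m/z-and-RT distance to the row being normalized; the single factor is then the weighted mean divided by 100. A tiny numeric sketch of that averaging step, with made-up values:

// Illustrative values only.
double[] normalizationFactors = {2.0e5, 8.0e5};     // heights of two standard peaks
double[] normalizationFactorWeights = {0.5, 0.125}; // 1 / distance for each standard

double weightedSum = 0.0;
double sumOfWeights = 0.0;
for (int i = 0; i < normalizationFactors.length; i++) {
    weightedSum += normalizationFactors[i] * normalizationFactorWeights[i]; // 1.0e5 + 1.0e5
    sumOfWeights += normalizationFactorWeights[i];                          // 0.625
}
double normalizationFactor = weightedSum / sumOfWeights; // 320000.0
normalizationFactor /= 100.0;                            // 3200.0, the scaling applied in the task

The nearer standard (weight 0.5) pulls the factor toward its own height, which is the intended effect of the inverse-distance weighting.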