Use of net.sf.mzmine.datamodel.impl.SimplePeakList in project mzmine2 by mzmine.
In class TwoDBottomPanel, method getTopThresholdPeakList:
/**
 * Returns a feature list with the top peaks defined by the parameter "threshold"
 */
PeakList getTopThresholdPeakList(int threshold) {
    PeakList selectedPeakList = (PeakList) peakListSelector.getSelectedItem();
    if (selectedPeakList == null)
        return null;
    SimplePeakList newList =
        new SimplePeakList(selectedPeakList.getName(), selectedPeakList.getRawDataFiles());
    Vector<PeakListRow> peakRows = new Vector<PeakListRow>();
    Range<Double> mzRange = selectedPeakList.getRowsMZRange();
    Range<Double> rtRange = selectedPeakList.getRowsRTRange();
    PeakThresholdMode selectedPeakOption = (PeakThresholdMode) thresholdCombo.getSelectedItem();
    if (selectedPeakOption == PeakThresholdMode.TOP_PEAKS_AREA) {
        mzRange = masterFrame.getPlot().getXYPlot().getAxisRange();
        rtRange = masterFrame.getPlot().getXYPlot().getDomainRange();
    }
    for (PeakListRow peakRow : selectedPeakList.getRows()) {
        if (mzRange.contains(peakRow.getAverageMZ()) && rtRange.contains(peakRow.getAverageRT())) {
            peakRows.add(peakRow);
        }
    }
    Collections.sort(peakRows,
        new PeakListRowSorter(SortingProperty.Intensity, SortingDirection.Descending));
    if (threshold > peakRows.size())
        threshold = peakRows.size();
    for (int i = 0; i < threshold; i++) {
        newList.addRow(peakRows.elementAt(i));
    }
    return newList;
}
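The same filter-sort-truncate logic can be written with Java 8 streams. A minimal sketch under the API shown above (mzmine2's PeakList.getRows() returns a PeakListRow[], PeakListRowSorter implements Comparator<PeakListRow>, and Range is Guava's com.google.common.collect.Range); the helper name topRows is hypothetical:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Hypothetical helper illustrating the same pattern as getTopThresholdPeakList
static List<PeakListRow> topRows(PeakList list, Range<Double> mzRange,
        Range<Double> rtRange, int threshold) {
    return Arrays.stream(list.getRows())
        // Keep rows whose average m/z and RT fall inside the selected ranges
        .filter(r -> mzRange.contains(r.getAverageMZ())
            && rtRange.contains(r.getAverageRT()))
        // Most intense rows first
        .sorted(new PeakListRowSorter(SortingProperty.Intensity,
            SortingDirection.Descending))
        // limit() already copes with threshold exceeding the row count
        .limit(threshold)
        .collect(Collectors.toList());
}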
Use of net.sf.mzmine.datamodel.impl.SimplePeakList in project mzmine2 by mzmine.
In class CameraSearchTask, method groupPeaksByPCGroup:
/**
 * Uses the PCGroup field in PeakIdentity to group peaks and build a spectrum
 *
 * @param peakList a PeakList object
 * @return a new PeakList object
 */
private PeakList groupPeaksByPCGroup(PeakList peakList) {
    // Create new feature list.
    final PeakList combinedPeakList = new SimplePeakList(
        peakList + " " + parameters.getParameter(CameraSearchParameters.SUFFIX).getValue(),
        peakList.getRawDataFiles());
    // Load previously applied methods.
    for (final PeakList.PeakListAppliedMethod method : peakList.getAppliedMethods()) {
        combinedPeakList.addDescriptionOfAppliedTask(method);
    }
    // Add task description to feature list.
    combinedPeakList.addDescriptionOfAppliedTask(
        new SimplePeakListAppliedMethod("Bioconductor CAMERA", parameters));
    // --------------------
    // Find unique PCGroups
    // --------------------
    Set<String> pcGroups = new HashSet<>();
    for (PeakListRow row : peakList.getRows()) {
        PeakIdentity identity = row.getPreferredPeakIdentity();
        if (identity == null)
            continue;
        String groupName = identity.getName();
        if (groupName == null || groupName.length() == 0)
            continue;
        pcGroups.add(groupName);
    }
    List<PeakListRow> groupRows = new ArrayList<>();
    Map<Double, Double> spectrum = new HashMap<>();
    List<PeakListRow> newPeakListRows = new ArrayList<>();
    for (String groupName : pcGroups) {
        // ------------------------------------------
        // Find all peaks belonging to isotope groups
        // ------------------------------------------
        groupRows.clear();
        spectrum.clear();
        double maxIntensity = 0.0;
        PeakListRow groupRow = null;
        for (PeakListRow row : peakList.getRows()) {
            PeakIdentity identity = row.getPreferredPeakIdentity();
            if (identity == null)
                continue;
            // groupName.equals(...) avoids a NullPointerException for
            // identities whose name is null
            if (groupName.equals(identity.getName())) {
                double intensity = row.getAverageHeight();
                groupRows.add(row);
                spectrum.put(row.getAverageMZ(), intensity);
                if (intensity > maxIntensity) {
                    maxIntensity = intensity;
                    groupRow = row;
                }
            }
        }
        if (groupRow == null || spectrum.size() <= 1)
            continue;
        PeakIdentity identity = groupRow.getPreferredPeakIdentity();
        if (identity == null)
            continue;
        DataPoint[] dataPoints = new DataPoint[spectrum.size()];
        int count = 0;
        for (Entry<Double, Double> e : spectrum.entrySet())
            dataPoints[count++] = new SimpleDataPoint(e.getKey(), e.getValue());
        IsotopePattern pattern =
            new SimpleIsotopePattern(dataPoints, IsotopePatternStatus.PREDICTED, "Spectrum");
        groupRow.getBestPeak().setIsotopePattern(pattern);
        newPeakListRows.add(groupRow);
    }
    // ------------------------------------
    // Sort new peak rows by retention time
    // ------------------------------------
    Collections.sort(newPeakListRows, new Comparator<PeakListRow>() {

        @Override
        public int compare(PeakListRow row1, PeakListRow row2) {
            double retTime1 = row1.getAverageRT();
            double retTime2 = row2.getAverageRT();
            return Double.compare(retTime1, retTime2);
        }
    });
    for (PeakListRow row : newPeakListRows)
        combinedPeakList.addRow(row);
    return combinedPeakList;
}
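Since the project targets Java 8, the anonymous Comparator at the end can be replaced by a method reference; a minimal equivalent sketch:

// Sort new peak rows by ascending average retention time; equivalent to
// the anonymous Comparator above (requires java.util.Comparator)
newPeakListRows.sort(Comparator.comparingDouble(PeakListRow::getAverageRT));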
Use of net.sf.mzmine.datamodel.impl.SimplePeakList in project mzmine2 by mzmine.
In class ComplexSearchTask, method run:
/**
 * @see java.lang.Runnable#run()
 */
public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.info("Starting complex search in " + peakList);
    PeakListRow[] rows = peakList.getRows();
    totalRows = rows.length;
    // Sort the array by m/z, so we start with the biggest peak (the
    // possible complex)
    Arrays.sort(rows, new PeakListRowSorter(SortingProperty.MZ, SortingDirection.Descending));
    // Compare each candidate complex row against every pair of rows
    // within the RT tolerance
    for (int i = 0; i < totalRows; i++) {
        Range<Double> testRTRange = rtTolerance.getToleranceRange(rows[i].getAverageRT());
        PeakListRow[] testRows = peakList.getRowsInsideScanRange(testRTRange);
        for (int j = 0; j < testRows.length; j++) {
            for (int k = j; k < testRows.length; k++) {
                // Task canceled?
                if (isCanceled())
                    return;
                // Skip combinations that include the candidate row itself
                if ((rows[i] == testRows[j]) || (rows[i] == testRows[k]))
                    continue;
                if (checkComplex(rows[i], testRows[j], testRows[k]))
                    addComplexInfo(rows[i], testRows[j], testRows[k]);
            }
        }
        finishedRows++;
    }
    // Add task description to peakList
    ((SimplePeakList) peakList).addDescriptionOfAppliedTask(
        new SimplePeakListAppliedMethod("Identification of complexes", parameters));
    // Repaint the window to reflect the change in the feature list
    Desktop desktop = MZmineCore.getDesktop();
    if (!(desktop instanceof HeadLessDesktop))
        desktop.getMainWindow().repaint();
    setStatus(TaskStatus.FINISHED);
    logger.info("Finished complex search in " + peakList);
}
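The descending m/z sort can likewise be expressed with a standard-library comparator instead of PeakListRowSorter; a minimal equivalent sketch:

// Biggest average m/z first, as in the PeakListRowSorter call above
// (requires java.util.Comparator)
Arrays.sort(rows, Comparator.comparingDouble(PeakListRow::getAverageMZ).reversed());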
Use of net.sf.mzmine.datamodel.impl.SimplePeakList in project mzmine2 by mzmine.
In class LinearNormalizerTask, method run:
public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.info("Running linear normalizer");
    // This hashtable maps rows from the original alignment result to rows
    // of the normalized alignment
    Hashtable<PeakListRow, SimplePeakListRow> rowMap =
        new Hashtable<PeakListRow, SimplePeakListRow>();
    // Create new feature list
    normalizedPeakList =
        new SimplePeakList(originalPeakList + " " + suffix, originalPeakList.getRawDataFiles());
    // Loop through all raw data files and find the peak with the biggest height
    double maxOriginalHeight = 0.0;
    for (RawDataFile file : originalPeakList.getRawDataFiles()) {
        for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
            Feature p = originalpeakListRow.getPeak(file);
            if (p != null) {
                if (maxOriginalHeight <= p.getHeight())
                    maxOriginalHeight = p.getHeight();
            }
        }
    }
    // Loop through all raw data files and normalize peak values
    for (RawDataFile file : originalPeakList.getRawDataFiles()) {
        // Cancel?
        if (isCanceled()) {
            return;
        }
        // Determine normalization type and calculate normalization factor
        double normalizationFactor = 1.0;
        // - normalization by average peak intensity
        if (normalizationType == NormalizationType.AverageIntensity) {
            double intensitySum = 0;
            int intensityCount = 0;
            for (PeakListRow peakListRow : originalPeakList.getRows()) {
                Feature p = peakListRow.getPeak(file);
                if (p != null) {
                    if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
                        intensitySum += p.getHeight();
                    } else {
                        intensitySum += p.getArea();
                    }
                    intensityCount++;
                }
            }
            normalizationFactor = intensitySum / (double) intensityCount;
        }
        // - normalization by average squared peak intensity
        if (normalizationType == NormalizationType.AverageSquaredIntensity) {
            double intensitySum = 0.0;
            int intensityCount = 0;
            for (PeakListRow peakListRow : originalPeakList.getRows()) {
                Feature p = peakListRow.getPeak(file);
                if (p != null) {
                    if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
                        intensitySum += (p.getHeight() * p.getHeight());
                    } else {
                        intensitySum += (p.getArea() * p.getArea());
                    }
                    intensityCount++;
                }
            }
            normalizationFactor = intensitySum / (double) intensityCount;
        }
        // - normalization by maximum peak intensity
        if (normalizationType == NormalizationType.MaximumPeakHeight) {
            double maximumIntensity = 0.0;
            for (PeakListRow peakListRow : originalPeakList.getRows()) {
                Feature p = peakListRow.getPeak(file);
                if (p != null) {
                    if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
                        if (maximumIntensity < p.getHeight())
                            maximumIntensity = p.getHeight();
                    } else {
                        if (maximumIntensity < p.getArea())
                            maximumIntensity = p.getArea();
                    }
                }
            }
            normalizationFactor = maximumIntensity;
        }
        // - normalization by total raw signal
        if (normalizationType == NormalizationType.TotalRawSignal) {
            normalizationFactor = 0;
            for (int scanNumber : file.getScanNumbers(1)) {
                Scan scan = file.getScan(scanNumber);
                normalizationFactor += scan.getTIC();
            }
        }
        // Readjust the normalization factor so that the maximum height
        // equals maximumOverallPeakHeightAfterNormalization after
        // normalization
        double maxNormalizedHeight = maxOriginalHeight / normalizationFactor;
        normalizationFactor =
            normalizationFactor * maxNormalizedHeight / maximumOverallPeakHeightAfterNormalization;
        // Normalize all peak intensities using the normalization factor
        for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
            // Cancel?
            if (isCanceled()) {
                return;
            }
            Feature originalPeak = originalpeakListRow.getPeak(file);
            if (originalPeak != null) {
                SimpleFeature normalizedPeak = new SimpleFeature(originalPeak);
                PeakUtils.copyPeakProperties(originalPeak, normalizedPeak);
                double normalizedHeight = originalPeak.getHeight() / normalizationFactor;
                double normalizedArea = originalPeak.getArea() / normalizationFactor;
                normalizedPeak.setHeight(normalizedHeight);
                normalizedPeak.setArea(normalizedArea);
                SimplePeakListRow normalizedRow = rowMap.get(originalpeakListRow);
                if (normalizedRow == null) {
                    normalizedRow = new SimplePeakListRow(originalpeakListRow.getID());
                    PeakUtils.copyPeakListRowProperties(originalpeakListRow, normalizedRow);
                    rowMap.put(originalpeakListRow, normalizedRow);
                }
                normalizedRow.addPeak(file, normalizedPeak);
            }
        }
        // Progress
        processedDataFiles++;
    }
    // Finally, add all normalized rows to the normalized alignment result
    for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
        SimplePeakListRow normalizedRow = rowMap.get(originalpeakListRow);
        if (normalizedRow == null)
            continue;
        normalizedPeakList.addRow(normalizedRow);
    }
    // Add the new peak list to the project
    project.addPeakList(normalizedPeakList);
    // Load previously applied methods
    for (PeakListAppliedMethod proc : originalPeakList.getAppliedMethods()) {
        normalizedPeakList.addDescriptionOfAppliedTask(proc);
    }
    // Add task description to the peak list
    normalizedPeakList.addDescriptionOfAppliedTask(
        new SimplePeakListAppliedMethod("Linear normalization by " + normalizationType, parameters));
    // Remove the original peak list if requested
    if (removeOriginal)
        project.removePeakList(originalPeakList);
    logger.info("Finished linear normalizer");
    setStatus(TaskStatus.FINISHED);
}
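For comparison, the AverageIntensity branch (height measurement) can be condensed with streams. A sketch assuming the same PeakList/Feature API shown above; unlike the loop version, it also avoids the division by zero that occurs when the file contributes no peaks:

import java.util.Arrays;
import java.util.Objects;

// Average peak height for this raw data file; falls back to 1.0 (no
// scaling) when no peaks are found
double normalizationFactor = Arrays.stream(originalPeakList.getRows())
    .map(row -> row.getPeak(file))
    .filter(Objects::nonNull)
    .mapToDouble(Feature::getHeight)
    .average()
    .orElse(1.0);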
Use of net.sf.mzmine.datamodel.impl.SimplePeakList in project mzmine2 by mzmine.
In class RTCalibrationTask, method run:
public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.info("Running retention time normalizer");
    // First we need to find standards by iterating through the first
    // feature list
    totalRows = originalPeakLists[0].getNumberOfRows();
    // Create new feature lists
    normalizedPeakLists = new SimplePeakList[originalPeakLists.length];
    for (int i = 0; i < originalPeakLists.length; i++) {
        normalizedPeakLists[i] = new SimplePeakList(originalPeakLists[i] + " " + suffix,
            originalPeakLists[i].getRawDataFiles());
        // Remember how many rows we need to normalize
        totalRows += originalPeakLists[i].getNumberOfRows();
    }
    // The goodStandards vector contains identified standard rows,
    // represented by arrays. Each array has the same length as the
    // originalPeakLists array, and its items reference the particular
    // standard peak in each PeakList
    Vector<PeakListRow[]> goodStandards = new Vector<PeakListRow[]>();
    // Iterate the first peak list
    standardIteration: for (PeakListRow candidate : originalPeakLists[0].getRows()) {
        // Cancel?
        if (isCanceled()) {
            return;
        }
        processedRows++;
        // Check that all peaks of this row have the required height
        for (Feature p : candidate.getPeaks()) {
            if (p.getHeight() < minHeight)
                continue standardIteration;
        }
        PeakListRow[] goodStandardCandidate = new PeakListRow[originalPeakLists.length];
        goodStandardCandidate[0] = candidate;
        double candidateMZ = candidate.getAverageMZ();
        double candidateRT = candidate.getAverageRT();
        // Find matching rows in the remaining peak lists
        for (int i = 1; i < originalPeakLists.length; i++) {
            Range<Double> rtRange = rtTolerance.getToleranceRange(candidateRT);
            Range<Double> mzRange = mzTolerance.getToleranceRange(candidateMZ);
            PeakListRow[] matchingRows =
                originalPeakLists[i].getRowsInsideScanAndMZRange(rtRange, mzRange);
            // If we do not find exactly one matching row, skip this
            // standard candidate
            if (matchingRows.length != 1)
                continue standardIteration;
            // Check that all peaks of this row have the required height
            for (Feature p : matchingRows[0].getPeaks()) {
                if (p.getHeight() < minHeight)
                    continue standardIteration;
            }
            // Save a reference to the matching peak in this feature list
            goodStandardCandidate[i] = matchingRows[0];
        }
        // If we found a match of the same peak in all peak lists, mark it
        // as a good standard
        goodStandards.add(goodStandardCandidate);
        logger.finest("Found a good standard for RT normalization: " + candidate);
    }
    // Check if we have any standards
    if (goodStandards.size() == 0) {
        setStatus(TaskStatus.ERROR);
        setErrorMessage("No good standard peak was found");
        return;
    }
    // Calculate the average retention time of each standard
    double[] averagedRTs = new double[goodStandards.size()];
    for (int i = 0; i < goodStandards.size(); i++) {
        double rtAverage = 0;
        for (PeakListRow row : goodStandards.get(i))
            rtAverage += row.getAverageRT();
        rtAverage /= (double) originalPeakLists.length;
        averagedRTs[i] = rtAverage;
    }
    // Normalize each feature list
    for (int peakListIndex = 0; peakListIndex < originalPeakLists.length; peakListIndex++) {
        // Get the standard rows for this feature list only
        PeakListRow[] standards = new PeakListRow[goodStandards.size()];
        for (int i = 0; i < goodStandards.size(); i++) {
            standards[i] = goodStandards.get(i)[peakListIndex];
        }
        normalizePeakList(originalPeakLists[peakListIndex], normalizedPeakLists[peakListIndex],
            standards, averagedRTs);
    }
    // Cancel?
    if (isCanceled()) {
        return;
    }
    for (int i = 0; i < originalPeakLists.length; i++) {
        project.addPeakList(normalizedPeakLists[i]);
        // Load previously applied methods
        for (PeakListAppliedMethod proc : originalPeakLists[i].getAppliedMethods()) {
            normalizedPeakLists[i].addDescriptionOfAppliedTask(proc);
        }
        // Add task description to the peak list
        normalizedPeakLists[i].addDescriptionOfAppliedTask(
            new SimplePeakListAppliedMethod("Retention time normalization", parameters));
        // Remove the original peak lists if requested
        if (removeOriginal)
            project.removePeakList(originalPeakLists[i]);
    }
    logger.info("Finished retention time normalizer");
    setStatus(TaskStatus.FINISHED);
}
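The averaged-RT computation can also be written with streams; a minimal sketch (each standards array has exactly originalPeakLists.length entries, so average() matches the explicit division above):

// One averaged retention time per identified standard
// (requires java.util.Arrays)
double[] averagedRTs = goodStandards.stream()
    .mapToDouble(standardRows -> Arrays.stream(standardRows)
        .mapToDouble(PeakListRow::getAverageRT)
        .average()
        .orElse(0.0))
    .toArray();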