Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine): class Candidates, method getAvgPeakHeights.
/**
 * Computes the average peak height for every row ID, using only scans whose mass list
 * contains a data point (above {@code minHeight}) near every requested m/z — i.e. the
 * peaks are averaged only over scans and mass lists they all share.
 *
 * @param ID peak-list row IDs whose heights are averaged
 * @return average heights indexed like {@code ID}, or {@code null} if the rows have no
 *         raw data files or no scan contained all peaks
 */
private double[] getAvgPeakHeights(int[] ID) {
  PeakListRow[] rows = plh.getRowsByID(ID);
  RawDataFile[] raws = rows[0].getRawDataFiles();
  if (raws.length < 1)
    return null;

  // NOTE(review): mzs is sized by ID.length but filled over rows.length —
  // assumes getRowsByID returns exactly one row per ID; confirm in PeakListHandler.
  double[] mzs = new double[ID.length];
  for (int i = 0; i < rows.length; i++)
    mzs[i] = rows[i].getAverageMZ();

  double[] avgHeights = new double[ID.length];
  int pointsAdded = 0; // number of scans that contributed to the averages

  for (RawDataFile raw : raws) {
    // Skip files whose measured m/z range cannot contain the first row's mass.
    if (!raw.getDataMZRange().contains(rows[0].getAverageMZ()))
      continue;

    int[] scanNums = raw.getScanNumbers();
    for (int i = 0; i < scanNums.length; i++) {
      Scan scan = raw.getScan(scanNums[i]);
      MassList list = scan.getMassList(massListName);
      // Only scans whose mass list holds every requested m/z above minHeight qualify.
      if (list == null || !massListContainsEveryMZ(list, mzs, minHeight))
        continue;

      double[] avgBuffer = new double[mzs.length];
      boolean allFound = true;
      for (int j = 0; j < mzs.length; j++) {
        DataPoint[] points =
            getMassListDataPointsByMass(list, mzTolerance.getToleranceRange(mzs[j]));
        if (points.length == 0) {
          // FIX: previously this branch 'continue'd, silently averaging in 0.0 for
          // this m/z; a missing data point must disqualify the scan, consistent
          // with the dp == null branch below.
          allFound = false;
          break;
        }
        DataPoint dp = getClosestDataPoint(points, rows[j].getAverageMZ(), minHeight);
        if (dp == null) {
          // The mass list contained something close to every data point over
          // minHeight, BUT the closest one might not have been over it.
          // That check is done inside getClosestDataPoint().
          allFound = false;
          break;
        }
        avgBuffer[j] = dp.getIntensity();
      }
      // Commit the buffered intensities only if every peak was found in this scan.
      if (allFound) {
        pointsAdded++;
        for (int j = 0; j < mzs.length; j++)
          avgHeights[j] += avgBuffer[j];
      }
    }
  }

  if (pointsAdded == 0) {
    logger.warning("Error: Peaks with ids: " + Arrays.toString(ID) + " were not in same scans at all. Please update the parameters.");
    return null;
  }
  for (int i = 0; i < avgHeights.length; i++)
    avgHeights[i] /= (pointsAdded);
  return avgHeights;
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine): class RTCalibrationTask, method normalizePeakList.
/**
 * Normalizes the retention time of every row in the given feature list and stores the
 * resulting rows in a new feature list.
 *
 * @param originalPeakList feature list to be normalized
 * @param normalizedPeakList new feature list that receives the normalized rows
 * @param standards standard rows in the same feature list
 * @param normalizedStdRTs normalized retention times of the standard rows
 */
private void normalizePeakList(PeakList originalPeakList, PeakList normalizedPeakList, PeakListRow[] standards, double[] normalizedStdRTs) {
  for (PeakListRow sourceRow : originalPeakList.getRows()) {
    // Stop immediately if the task was cancelled.
    if (isCanceled()) {
      return;
    }
    // Normalize this row against the standards.
    PeakListRow resultRow = normalizeRow(sourceRow, standards, normalizedStdRTs);
    // Carry over the comment and all identities to the normalized row.
    resultRow.setComment(sourceRow.getComment());
    for (PeakIdentity identity : sourceRow.getPeakIdentities()) {
      resultRow.addPeakIdentity(identity, false);
    }
    resultRow.setPreferredPeakIdentity(sourceRow.getPreferredPeakIdentity());
    // Store the normalized row in the target feature list.
    normalizedPeakList.addRow(resultRow);
    processedRows++;
  }
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine): class RansacAlignerTask, method run.
/**
 * Runs the RANSAC alignment: collects the raw data files of all source feature lists,
 * aligns their rows into a new aligned feature list, and registers it with the project.
 */
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Running Ransac aligner");

  // Each list is processed twice: first for score calculation, then for the actual alignment.
  for (int i = 0; i < peakLists.length; i++) {
    totalRows += peakLists[i].getNumberOfRows() * 2;
  }

  // Collect all data files; each file may have only one column in the aligned feature list.
  List<RawDataFile> allDataFiles = new ArrayList<RawDataFile>();
  for (PeakList peakList : peakLists) {
    for (RawDataFile dataFile : peakList.getRawDataFiles()) {
      if (allDataFiles.contains(dataFile)) {
        setStatus(TaskStatus.ERROR);
        setErrorMessage("Cannot run alignment, because file " + dataFile + " is present in multiple feature lists");
        return;
      }
      allDataFiles.add(dataFile);
    }
  }

  // Create a new aligned feature list.
  alignedPeakList = new SimplePeakList(peakListName, allDataFiles.toArray(new RawDataFile[0]));

  // Iterate source feature lists and align their rows using the mapping.
  for (PeakList peakList : peakLists) {
    HashMap<PeakListRow, PeakListRow> alignmentMapping = this.getAlignmentMap(peakList);
    PeakListRow[] allRows = peakList.getRows();
    for (PeakListRow row : allRows) {
      PeakListRow targetRow = alignmentMapping.get(row);
      // If we have no mapping for this row, create a fresh aligned row.
      if (targetRow == null) {
        targetRow = new SimplePeakListRow(newRowID);
        newRowID++;
        alignedPeakList.addRow(targetRow);
      }
      // Add all peaks from the original row to the aligned row.
      for (RawDataFile file : row.getRawDataFiles()) {
        targetRow.addPeak(file, row.getPeak(file));
      }
      // Copy identities and other row properties to the aligned row.
      PeakUtils.copyPeakListRowProperties(row, targetRow);
      processedRows++;
    }
  }

  // Add the new aligned feature list to the project.
  project.addPeakList(alignedPeakList);

  // FIX: removed a leftover "Edit by Aleksandr Smirnov" block that built a
  // retention-time-shifted chromatogram for row 1 into a local TreeMap and then
  // discarded it. The computation had no observable effect and would have thrown
  // if the aligned feature list were empty.

  // Record the applied method on the feature list.
  alignedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Ransac aligner", parameters));
  logger.info("Finished RANSAC aligner");
  setStatus(TaskStatus.FINISHED);
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine): class RansacAlignerTask, method getVectorAlignment.
/**
 * Builds the vector of all possible aligned peak pairs between two feature lists.
 *
 * @param peakListX first feature list
 * @param peakListY second feature list
 * @return every candidate aligned peak pair, or {@code null} if the task was cancelled
 */
private List<AlignStructMol> getVectorAlignment(PeakList peakListX, PeakList peakListY) {
  List<AlignStructMol> candidatePairs = new ArrayList<AlignStructMol>();
  for (PeakListRow referenceRow : peakListX.getRows()) {
    if (isCanceled()) {
      return null;
    }
    // m/z and RT windows inside which this row may be aligned.
    Range<Double> mzWindow = mzTolerance.getToleranceRange(referenceRow.getAverageMZ());
    Range<Double> rtWindow = rtToleranceBefore.getToleranceRange(referenceRow.getAverageRT());
    // Pair the row with every row of the other list that falls inside both windows.
    PeakListRow[] matches = peakListY.getRowsInsideScanAndMZRange(rtWindow, mzWindow);
    for (PeakListRow matchRow : matches) {
      candidatePairs.add(new AlignStructMol(referenceRow, matchRow));
    }
  }
  return candidatePairs;
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine): class MultiRawDataLearnerTask, method run.
/**
 * @see Runnable#run()
 */
@Override
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Running learner task on " + peakList);

  // Results are collected into a fresh feature list that is added to the project at the end.
  resultPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());

  /**
   * Data-model primer:<br>
   * - A PeakList is a list of Features (peak in retention time dimension with accurate m/z)<br>
   * ---- contains one or multiple RawDataFiles<br>
   * ---- gives access to mean retention time, mean m/z, maximum intensity, ...<br>
   * - A RawDataFile holds a full chromatographic run with all ms scans<br>
   * ---- each Scan and the underlying raw data can be accessed<br>
   * ---- scans can be filtered by MS level, polarity, ...<br>
   */

  // Fetch all rows and sort them by ascending m/z.
  PeakListRow[] rows = peakList.getRows();
  Arrays.sort(rows, new PeakListRowSorter(SortingProperty.MZ, SortingDirection.Ascending));

  // Before alignment a feature list holds exactly one raw file; afterwards it may hold several.
  RawDataFile[] rawFiles = peakList.getRawDataFiles();
  boolean isAlignedPeakList = rawFiles.length > 1;

  totalRows = rows.length;

  for (PeakListRow row : rows) {
    for (RawDataFile raw : rawFiles) {
      // Stop as soon as the task is cancelled.
      if (isCanceled())
        return;
      // Feature of this row in the current raw file (may be absent).
      Feature peak = row.getPeak(raw);
      if (peak != null) {
        double mz = peak.getMZ();
        double intensity = peak.getHeight();
        double rt = peak.getRT();
        // do stuff
        // ...
      }
    }
    // Update completion rate.
    processedRows++;
  }

  // Register the result feature list with the project.
  addResultToProject();
  logger.info("Finished on " + peakList);
  setStatus(TaskStatus.FINISHED);
}
Aggregations