Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class IsotopePeakScannerTask, method copyPeakRow.
/**
 * Creates a deep copy of a feature list row, including all of its features.
 *
 * @param row the row to duplicate.
 * @return a new row carrying copies of the original row's properties and features.
 */
private static PeakListRow copyPeakRow(final PeakListRow row) {
    // Copy the row itself, keeping the original ID.
    final PeakListRow copy = new SimplePeakListRow(row.getID());
    PeakUtils.copyPeakListRowProperties(row, copy);

    // Duplicate every feature and attach it under its original raw data file.
    for (final Feature feature : row.getPeaks()) {
        final Feature featureCopy = new SimpleFeature(feature);
        PeakUtils.copyPeakProperties(feature, featureCopy);
        copy.addPeak(feature.getDataFile(), featureCopy);
    }
    return copy;
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class IsotopePeakScannerTask, method getRowFromCandidate.
/**
 * Looks up the feature list row that belongs to a candidate peak.
 *
 * @param candidates       the candidates found for each isotope pattern.
 * @param bestPatternIndex index of the isotope pattern that was found to be the
 *                         best fit for the detected pattern.
 * @param peakIndex        index of the candidate peak within that pattern.
 * @param plh              handler used to resolve row IDs to rows.
 * @return the matching feature list row, or null when an index is out of range
 *         or no candidate exists at that position.
 */
@Nullable
private PeakListRow getRowFromCandidate(@Nonnull Candidates[] candidates, int bestPatternIndex,
        int peakIndex, @Nonnull PeakListHandler plh) {
    // Guard against out-of-range pattern or peak indices.
    if (bestPatternIndex >= candidates.length || peakIndex >= candidates[bestPatternIndex].size())
        return null;

    Candidate cand = candidates[bestPatternIndex].get(peakIndex);
    if (cand == null)
        return null;

    // Resolve the candidate's row ID back to its feature list row.
    return plh.getRowByID(cand.getCandID());
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class IsotopePeakScannerTask, method groupPeaks.
/**
 * Collects all rows whose m/z lies in the window above a possible parent peak,
 * i.e. within (pL[parentIndex].mz, pL[parentIndex].mz + maxDiff + mzTolerance].
 * The result always contains the parent row itself.
 *
 * @param pL          feature list rows, sorted by ascending m/z.
 * @param parentIndex index of the possible parent peak.
 * @param maxDiff     maximum mass difference to scan above the parent m/z.
 * @return all rows in the m/z window that also pass the minimum-height and
 *         (optionally) retention-time tolerance checks.
 */
private ArrayList<PeakListRow> groupPeaks(PeakListRow[] pL, int parentIndex, double maxDiff) {
    ArrayList<PeakListRow> group = new ArrayList<PeakListRow>();
    // The parent row is always part of its own group.
    group.add(pL[parentIndex]);

    double parentMZ = pL[parentIndex].getAverageMZ();
    double parentRT = pL[parentIndex].getAverageRT();
    double upperBound = parentMZ + maxDiff + mzTolerance.getMzTolerance();

    // Start after the parent so it is not added a second time.
    for (int i = parentIndex + 1; i < pL.length; i++) {
        PeakListRow candidate = pL[i];
        if (candidate.getAverageHeight() < minHeight)
            continue;

        double candidateMZ = candidate.getAverageMZ();
        boolean inWindow = candidateMZ > parentMZ && candidateMZ <= upperBound;
        if (!inWindow) {
            // pL is sorted by ascending mass, so once we are past the
            // window we can stop scanning entirely.
            if (candidateMZ > (parentMZ + maxDiff))
                return group;
            continue;
        }

        if (checkRT && !rtTolerance.checkWithinTolerance(parentRT, candidate.getAverageRT()))
            continue;

        group.add(candidate);
    }
    return group;
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class SQLExportTask, method run.
/**
 * Exports all rows of the feature list to the configured SQL database.
 * Connects, disables auto-commit, exports each row (or a single empty row when
 * the list is empty and emptyExport is set), then commits. On any SQL error the
 * task status is set to ERROR; the connection is always closed, success or not.
 */
public void run() {
    setStatus(TaskStatus.PROCESSING);

    // Get number of rows (used for progress reporting).
    totalRows = peakList.getNumberOfRows();

    try {
        this.dbConnection = DriverManager.getConnection(connectionString);
    } catch (SQLException e) {
        setStatus(TaskStatus.ERROR);
        setErrorMessage("Error connecting to the SQL database: " + e.toString());
        return;
    }

    PeakListRow[] rows = peakList.getRows();
    try {
        // Batch all inserts into a single transaction.
        dbConnection.setAutoCommit(false);

        // If the feature list is empty and empty export is enabled, still
        // export one (null) row so the table information is written.
        if (rows.length < 1 && emptyExport) {
            exportPeakListRow(null);
        } else {
            for (PeakListRow row : rows) {
                // Stop exporting if the task was canceled or errored.
                if (getStatus() != TaskStatus.PROCESSING)
                    break;
                exportPeakListRow(row);
                processedRows++;
            }
        }

        dbConnection.commit();
    } catch (SQLException e) {
        setStatus(TaskStatus.ERROR);
        setErrorMessage("Error running SQL query: " + e.toString());
        return;
    } finally {
        // BUGFIX: the connection was previously closed only on the success
        // path, leaking it whenever an SQLException occurred mid-export.
        try {
            dbConnection.close();
        } catch (SQLException ignored) {
            // Nothing further we can do if closing fails.
        }
    }

    if (getStatus() == TaskStatus.PROCESSING)
        setStatus(TaskStatus.FINISHED);
}
Usage of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class IsotopeGrouperTask, method run.
/**
 * Runs the isotope grouping (deisotoping): for each peak, in order of
 * descending height, finds the charge state whose fitted isotope pattern
 * contains the most peaks, merges that pattern into a single representative
 * row, and builds a new deisotoped feature list.
 *
 * @see Runnable#run()
 */
public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.info("Running isotopic peak grouper on " + peakList);

    // We assume source peakList contains one datafile
    RawDataFile dataFile = peakList.getRawDataFile(0);

    // Create a new deisotoped peakList
    deisotopedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());

    // Collect all selected charge states: 1 .. maximumCharge
    int[] charges = new int[maximumCharge];
    for (int i = 0; i < maximumCharge; i++) charges[i] = i + 1;

    // Sort peaks by descending height so the most intense peaks are
    // processed first (they anchor the isotope patterns fitted below)
    Feature[] sortedPeaks = peakList.getPeaks(dataFile);
    Arrays.sort(sortedPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));

    // Loop through all peaks
    totalPeaks = sortedPeaks.length;
    for (int ind = 0; ind < totalPeaks; ind++) {
        if (isCanceled())
            return;
        Feature aPeak = sortedPeaks[ind];

        // Check if peak was already deleted (i.e. already assigned to a
        // previously detected isotope pattern and nulled out below)
        if (aPeak == null) {
            processedPeaks++;
            continue;
        }

        // Check which charge state fits best around this peak: the charge
        // yielding the largest fitted pattern wins; on equal score the
        // lower charge is preferred (bestFitCharge > charge tie-break)
        int bestFitCharge = 0;
        int bestFitScore = -1;
        Vector<Feature> bestFitPeaks = null;
        for (int charge : charges) {
            Vector<Feature> fittedPeaks = new Vector<Feature>();
            fittedPeaks.add(aPeak);
            fitPattern(fittedPeaks, aPeak, charge, sortedPeaks);
            int score = fittedPeaks.size();
            if ((score > bestFitScore) || ((score == bestFitScore) && (bestFitCharge > charge))) {
                bestFitScore = score;
                bestFitCharge = charge;
                bestFitPeaks = fittedPeaks;
            }
        }
        PeakListRow oldRow = peakList.getPeakRow(aPeak);
        assert bestFitPeaks != null;

        // If the best fit contains only the peak itself, no isotope pattern
        // was found; keep the original row unchanged in the result list
        if (bestFitPeaks.size() == 1) {
            deisotopedPeakList.addRow(oldRow);
            processedPeaks++;
            continue;
        }

        // Convert the peak pattern to array
        Feature[] originalPeaks = bestFitPeaks.toArray(new Feature[0]);

        // Create a new SimpleIsotopePattern from the fitted peaks
        DataPoint[] isotopes = new DataPoint[bestFitPeaks.size()];
        for (int i = 0; i < isotopes.length; i++) {
            Feature p = originalPeaks[i];
            isotopes[i] = new SimpleDataPoint(p.getMZ(), p.getHeight());
        }
        SimpleIsotopePattern newPattern = new SimpleIsotopePattern(isotopes, IsotopePatternStatus.DETECTED, aPeak.toString());

        // Choose the representative peak of the pattern: either the most
        // intense one or the lowest-m/z one, depending on the user setting
        if (chooseMostIntense) {
            Arrays.sort(originalPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));
        } else {
            Arrays.sort(originalPeaks, new PeakSorter(SortingProperty.MZ, SortingDirection.Ascending));
        }
        Feature newPeak = new SimpleFeature(originalPeaks[0]);
        newPeak.setIsotopePattern(newPattern);
        newPeak.setCharge(bestFitCharge);

        // Keep old ID
        int oldID = oldRow.getID();
        SimplePeakListRow newRow = new SimplePeakListRow(oldID);
        PeakUtils.copyPeakListRowProperties(oldRow, newRow);
        newRow.addPeak(dataFile, newPeak);
        deisotopedPeakList.addRow(newRow);

        // Remove all peaks already assigned to this isotope pattern so they
        // cannot anchor or join another pattern later
        for (int i = 0; i < sortedPeaks.length; i++) {
            if (bestFitPeaks.contains(sortedPeaks[i]))
                sortedPeaks[i] = null;
        }

        // Update completion rate
        processedPeaks++;
    }

    // Add new peakList to the project
    project.addPeakList(deisotopedPeakList);

    // Copy the previously applied methods from the source list
    for (PeakListAppliedMethod proc : peakList.getAppliedMethods()) {
        deisotopedPeakList.addDescriptionOfAppliedTask(proc);
    }

    // Add task description to peakList
    deisotopedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Isotopic peaks grouper", parameters));

    // Remove the original peakList if requested
    if (removeOriginal)
        project.removePeakList(peakList);

    logger.info("Finished isotopic peak grouper on " + peakList);
    setStatus(TaskStatus.FINISHED);
}
Aggregations