
Example 16 with SimplePeakListAppliedMethod

use of net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod in project mzmine2 by mzmine.

the class PeakComparisonRowFilterTask method filterPeakListRows.

/**
 * Filter the feature list rows by comparing peaks within a row.
 *
 * @param peakList feature list to filter.
 * @return a new feature list with rows of the original feature list that pass the filtering.
 */
private PeakList filterPeakListRows(final PeakList peakList) {
    // Create new feature list.
    final PeakList newPeakList = new SimplePeakList(peakList.getName() + ' ' + parameters.getParameter(PeakComparisonRowFilterParameters.SUFFIX).getValue(), peakList.getRawDataFiles());
    // Copy previous applied methods.
    for (final PeakListAppliedMethod method : peakList.getAppliedMethods()) {
        newPeakList.addDescriptionOfAppliedTask(method);
    }
    // Add task description to peakList.
    newPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod(getTaskDescription(), parameters));
    // Get parameters.
    final boolean evalutateFoldChange = parameters.getParameter(PeakComparisonRowFilterParameters.FOLD_CHANGE).getValue();
    final boolean evalutatePPMdiff = parameters.getParameter(PeakComparisonRowFilterParameters.MZ_PPM_DIFF).getValue();
    final boolean evalutateRTdiff = parameters.getParameter(PeakComparisonRowFilterParameters.RT_DIFF).getValue();
    final int columnIndex1 = parameters.getParameter(PeakComparisonRowFilterParameters.COLUMN_INDEX_1).getValue();
    final int columnIndex2 = parameters.getParameter(PeakComparisonRowFilterParameters.COLUMN_INDEX_2).getValue();
    final Range<Double> foldChangeRange = parameters.getParameter(PeakComparisonRowFilterParameters.FOLD_CHANGE).getEmbeddedParameter().getValue();
    final Range<Double> ppmDiffRange = parameters.getParameter(PeakComparisonRowFilterParameters.MZ_PPM_DIFF).getEmbeddedParameter().getValue();
    final Range<Double> rtDiffRange = parameters.getParameter(PeakComparisonRowFilterParameters.RT_DIFF).getEmbeddedParameter().getValue();
    // Setup variables
    final PeakListRow[] rows = peakList.getRows();
    RawDataFile rawDataFile1;
    RawDataFile rawDataFile2;
    Feature peak1;
    Feature peak2;
    totalRows = rows.length;
    final RawDataFile[] rawDataFiles = peakList.getRawDataFiles();
    boolean allCriteriaMatched = true;
    // Error handling: the selected column index doesn't exist in the feature list's raw data files.
    if (columnIndex1 >= rawDataFiles.length) {
        setErrorMessage("Column 1 set too large.");
        setStatus(TaskStatus.ERROR);
        return null;
    }
    if (columnIndex2 >= rawDataFiles.length) {
        setErrorMessage("Column 2 set too large.");
        setStatus(TaskStatus.ERROR);
        return null;
    }
    // Loop over the rows & filter
    for (processedRows = 0; !isCanceled() && processedRows < totalRows; processedRows++) {
        if (isCanceled())
            return null;
        allCriteriaMatched = true;
        // Default value in case of null peak
        double peak1Area = 1.0;
        double peak2Area = 1.0;
        double peak1MZ = -1.0;
        double peak2MZ = -1.0;
        double peak1RT = -1.0;
        double peak2RT = -1.0;
        double foldChange = 0.0;
        double ppmDiff = 0.0;
        double rtDiff = 0.0;
        final PeakListRow row = rows[processedRows];
        rawDataFile1 = rawDataFiles[columnIndex1];
        rawDataFile2 = rawDataFiles[columnIndex2];
        peak1 = row.getPeak(rawDataFile1);
        peak2 = row.getPeak(rawDataFile2);
        if (peak1 != null) {
            peak1Area = peak1.getArea();
            peak1MZ = peak1.getMZ();
            peak1RT = peak1.getRT();
        }
        if (peak2 != null) {
            peak2Area = peak2.getArea();
            peak2MZ = peak2.getMZ();
            peak2RT = peak2.getRT();
        }
        // Fold change criteria checking.
        if (evalutateFoldChange) {
            foldChange = Math.log(peak1Area / peak2Area) / Math.log(2);
            if (!foldChangeRange.contains(foldChange))
                allCriteriaMatched = false;
        }
        // PPM difference evaluation
        if (evalutatePPMdiff) {
            ppmDiff = (peak1MZ - peak2MZ) / peak1MZ * 1E6;
            if (!ppmDiffRange.contains(ppmDiff))
                allCriteriaMatched = false;
        }
        // RT difference evaluation
        if (evalutateRTdiff) {
            rtDiff = peak1RT - peak2RT;
            if (!rtDiffRange.contains(rtDiff))
                allCriteriaMatched = false;
        }
        // Good row?
        if (allCriteriaMatched)
            newPeakList.addRow(copyPeakRow(row));
    }
    return newPeakList;
}
Also used : SimplePeakListAppliedMethod(net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod) PeakListAppliedMethod(net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod) SimpleFeature(net.sf.mzmine.datamodel.impl.SimpleFeature) Feature(net.sf.mzmine.datamodel.Feature) SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) SimplePeakList(net.sf.mzmine.datamodel.impl.SimplePeakList) PeakList(net.sf.mzmine.datamodel.PeakList)
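
The filtering criterion in Example 16 reduces to containment checks on Guava Ranges. Below is a minimal standalone sketch of the log2 fold-change test only, with hypothetical peak areas and range bounds; it assumes nothing beyond Math.log and com.google.common.collect.Range.

import com.google.common.collect.Range;

public class FoldChangeCheckSketch {
    public static void main(String[] args) {
        // Hypothetical peak areas of the same row in two raw data files.
        double peak1Area = 4.0e5;
        double peak2Area = 1.0e5;
        // log2 fold change, computed the same way as in filterPeakListRows() above.
        double foldChange = Math.log(peak1Area / peak2Area) / Math.log(2);
        // Hypothetical user range; the task reads it from the FOLD_CHANGE embedded parameter.
        Range<Double> foldChangeRange = Range.closed(-1.0, 1.0);
        // The row is kept only if every enabled criterion lies inside its range.
        boolean allCriteriaMatched = foldChangeRange.contains(foldChange);
        System.out.println("fold change = " + foldChange + ", matched = " + allCriteriaMatched);
    }
}

Here log2(4.0e5 / 1.0e5) = 2.0, which lies outside the [-1, 1] window, so the row would be dropped.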

Example 17 with SimplePeakListAppliedMethod

use of net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod in project mzmine2 by mzmine.

the class DuplicateFilterTask method filterDuplicatePeakListRows.

/**
 * Filter out duplicate feature list rows.
 *
 * @param origPeakList the original feature list.
 * @param suffix the suffix to apply to the new feature list name.
 * @param mzTolerance m/z tolerance.
 * @param rtTolerance RT tolerance.
 * @param requireSameId whether duplicate peaks must have the same identities.
 * @param mode the duplicate filter mode.
 * @return the filtered feature list.
 */
private PeakList filterDuplicatePeakListRows(final PeakList origPeakList, final String suffix, final MZTolerance mzTolerance, final RTTolerance rtTolerance, final boolean requireSameId, FilterMode mode) {
    final PeakListRow[] peakListRows = origPeakList.getRows();
    final int rowCount = peakListRows.length;
    RawDataFile[] rawFiles = origPeakList.getRawDataFiles();
    // Create the new feature list.
    final PeakList newPeakList = new SimplePeakList(origPeakList + " " + suffix, origPeakList.getRawDataFiles());
    // sort rows
    if (mode.equals(FilterMode.OLD_AVERAGE))
        Arrays.sort(peakListRows, new PeakListRowSorter(SortingProperty.Area, SortingDirection.Descending));
    else
        Arrays.sort(peakListRows, new PeakListRowSorter(SortingProperty.ID, SortingDirection.Ascending));
    // filter by average mz and rt
    boolean filterByAvgRTMZ = !mode.equals(FilterMode.SINGLE_FEATURE);
    // Loop through all feature list rows
    processedRows = 0;
    int n = 0;
    totalRows = rowCount;
    for (int firstRowIndex = 0; !isCanceled() && firstRowIndex < rowCount; firstRowIndex++) {
        final PeakListRow mainRow = peakListRows[firstRowIndex];
        if (mainRow != null) {
            // copy first row
            PeakListRow firstRow = copyRow(mainRow);
            for (int secondRowIndex = firstRowIndex + 1; !isCanceled() && secondRowIndex < rowCount; secondRowIndex++) {
                final PeakListRow secondRow = peakListRows[secondRowIndex];
                if (secondRow != null) {
                    // Compare identifications
                    final boolean sameID = !requireSameId || PeakUtils.compareIdentities(firstRow, secondRow);
                    // Compare m/z and RT, by row average or by single feature
                    final boolean sameMZRT = filterByAvgRTMZ
                            ? checkSameAverageRTMZ(firstRow, secondRow, mzTolerance, rtTolerance)
                            : checkSameSingleFeatureRTMZ(rawFiles, firstRow, secondRow, mzTolerance, rtTolerance);
                    // Duplicate peaks?
                    if (sameID && sameMZRT) {
                        // create consensus row in new filter
                        if (!mode.equals(FilterMode.OLD_AVERAGE)) {
                            // copy all detected features of row2 into row1
                            // to exchange gap-filled against detected features
                            createConsensusFirstRow(rawFiles, firstRow, secondRow);
                        }
                        // second row deleted
                        n++;
                        peakListRows[secondRowIndex] = null;
                    }
                }
            }
            // add to new list
            newPeakList.addRow(firstRow);
        }
        processedRows++;
    }
    // finalize
    if (!isCanceled()) {
        // Load previous applied methods.
        for (final PeakListAppliedMethod method : origPeakList.getAppliedMethods()) {
            newPeakList.addDescriptionOfAppliedTask(method);
        }
        // Add task description to peakList
        newPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Duplicate feature list rows filter", parameters));
        LOG.info("Removed " + n + " duplicate rows");
    }
    return newPeakList;
}
Also used : SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) SimplePeakListAppliedMethod(net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod) PeakListAppliedMethod(net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod) SimplePeakList(net.sf.mzmine.datamodel.impl.SimplePeakList) PeakList(net.sf.mzmine.datamodel.PeakList) PeakListRowSorter(net.sf.mzmine.util.PeakListRowSorter)
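
Example 17 delegates the actual m/z and RT comparison to checkSameAverageRTMZ and checkSameSingleFeatureRTMZ, which are not shown. The following is a hedged sketch of the average-based variant, built only on getToleranceRange() and Range.contains(); the tolerance class import paths are assumptions, and the real method may differ in detail.

import com.google.common.collect.Range;
import net.sf.mzmine.datamodel.PeakListRow;
import net.sf.mzmine.parameters.parametertypes.tolerances.MZTolerance;
import net.sf.mzmine.parameters.parametertypes.tolerances.RTTolerance;

public class SameAverageRTMZSketch {
    // Sketch: two rows are treated as duplicates when the second row's average
    // m/z and RT fall inside tolerance windows built around the first row's averages.
    static boolean sameAverageRTMZ(PeakListRow firstRow, PeakListRow secondRow,
            MZTolerance mzTolerance, RTTolerance rtTolerance) {
        Range<Double> mzRange = mzTolerance.getToleranceRange(firstRow.getAverageMZ());
        Range<Double> rtRange = rtTolerance.getToleranceRange(firstRow.getAverageRT());
        return mzRange.contains(secondRow.getAverageMZ())
                && rtRange.contains(secondRow.getAverageRT());
    }
}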

Example 18 with SimplePeakListAppliedMethod

use of net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod in project mzmine2 by mzmine.

the class CameraSearchTask method groupPeaksByIsotope.

/**
 * Uses the Isotope field of PeakIdentity to group isotopes and build an isotope-pattern spectrum.
 *
 * @param peakList the feature list to process.
 * @return a new feature list with isotope patterns attached to the grouped rows.
 */
private PeakList groupPeaksByIsotope(PeakList peakList) {
    // Create new feature list.
    final PeakList combinedPeakList = new SimplePeakList(peakList + " " + parameters.getParameter(CameraSearchParameters.SUFFIX).getValue(), peakList.getRawDataFiles());
    // Load previous applied methods.
    for (final PeakList.PeakListAppliedMethod method : peakList.getAppliedMethods()) {
        combinedPeakList.addDescriptionOfAppliedTask(method);
    }
    // Add task description to feature list.
    combinedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Bioconductor CAMERA", parameters));
    // ------------------------------------------------
    // Find unique isotopes belonging to the same group
    // ------------------------------------------------
    Set<String> isotopeGroups = new HashSet<>();
    for (PeakListRow row : peakList.getRows()) {
        PeakIdentity identity = row.getPreferredPeakIdentity();
        if (identity == null)
            continue;
        String isotope = identity.getPropertyValue("Isotope");
        if (isotope == null)
            continue;
        String isotopeGroup = isotope.substring(1, isotope.indexOf("]"));
        if (isotopeGroup == null || isotopeGroup.length() == 0)
            continue;
        isotopeGroups.add(isotopeGroup);
    }
    List<PeakListRow> groupRows = new ArrayList<>();
    Set<String> groupNames = new HashSet<>();
    Map<Double, Double> spectrum = new HashMap<>();
    List<PeakListRow> newPeakListRows = new ArrayList<>();
    for (String isotopeGroup : isotopeGroups) {
        // -----------------------------------------
        // Find all peaks belonging to isotopeGroups
        // -----------------------------------------
        groupRows.clear();
        groupNames.clear();
        spectrum.clear();
        int minLength = Integer.MAX_VALUE;
        PeakListRow groupRow = null;
        for (PeakListRow row : peakList.getRows()) {
            PeakIdentity identity = row.getPreferredPeakIdentity();
            if (identity == null)
                continue;
            String isotope = identity.getPropertyValue("Isotope");
            if (isotope == null)
                continue;
            String isoGroup = isotope.substring(1, isotope.indexOf("]"));
            if (isoGroup == null)
                continue;
            if (isoGroup.equals(isotopeGroup)) {
                groupRows.add(row);
                groupNames.add(identity.getName());
                spectrum.put(row.getAverageMZ(), row.getAverageHeight());
                if (isoGroup.length() < minLength) {
                    minLength = isoGroup.length();
                    groupRow = row;
                }
            }
        }
        // Skip peaks that have different identity names (belong to different pcgroup)
        if (groupRow == null || groupNames.size() != 1)
            continue;
        PeakIdentity identity = groupRow.getPreferredPeakIdentity();
        if (identity == null)
            continue;
        DataPoint[] dataPoints = new DataPoint[spectrum.size()];
        int count = 0;
        for (Entry<Double, Double> e : spectrum.entrySet()) dataPoints[count++] = new SimpleDataPoint(e.getKey(), e.getValue());
        IsotopePattern pattern = new SimpleIsotopePattern(dataPoints, IsotopePatternStatus.PREDICTED, "Spectrum");
        groupRow.getBestPeak().setIsotopePattern(pattern);
        // combinedPeakList.addRow(groupRow);
        newPeakListRows.add(groupRow);
    }
    if (includeSingletons) {
        for (PeakListRow row : peakList.getRows()) {
            PeakIdentity identity = row.getPreferredPeakIdentity();
            if (identity == null)
                continue;
            String isotope = identity.getPropertyValue("Isotope");
            if (isotope == null || isotope.length() == 0) {
                DataPoint[] dataPoints = new DataPoint[1];
                dataPoints[0] = new SimpleDataPoint(row.getAverageMZ(), row.getAverageHeight());
                IsotopePattern pattern = new SimpleIsotopePattern(dataPoints, IsotopePatternStatus.PREDICTED, "Spectrum");
                row.getBestPeak().setIsotopePattern(pattern);
                newPeakListRows.add(row);
            }
        }
    }
    // ------------------------------------
    // Sort new peak rows by retention time
    // ------------------------------------
    Collections.sort(newPeakListRows, new Comparator<PeakListRow>() {

        @Override
        public int compare(PeakListRow row1, PeakListRow row2) {
            double retTime1 = row1.getAverageRT();
            double retTime2 = row2.getAverageRT();
            return Double.compare(retTime1, retTime2);
        }
    });
    for (PeakListRow row : newPeakListRows) combinedPeakList.addRow(row);
    return combinedPeakList;
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IsotopePattern(net.sf.mzmine.datamodel.IsotopePattern) SimpleIsotopePattern(net.sf.mzmine.datamodel.impl.SimpleIsotopePattern) SimplePeakListAppliedMethod(net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod) DataPoint(net.sf.mzmine.datamodel.DataPoint) SimpleDataPoint(net.sf.mzmine.datamodel.impl.SimpleDataPoint) SimplePeakIdentity(net.sf.mzmine.datamodel.impl.SimplePeakIdentity) PeakIdentity(net.sf.mzmine.datamodel.PeakIdentity) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) SimplePeakList(net.sf.mzmine.datamodel.impl.SimplePeakList) PeakList(net.sf.mzmine.datamodel.PeakList) HashSet(java.util.HashSet)
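
The grouping in Example 18 hinges on pulling the group id out of the CAMERA "Isotope" annotation. A minimal sketch of that substring step, using a hypothetical annotation value for illustration:

public class IsotopeGroupSketch {
    public static void main(String[] args) {
        // Hypothetical CAMERA-style annotation: group id in the first bracket
        // pair, isotope label in the second.
        String isotope = "[15][M+1]";
        // Same extraction as groupPeaksByIsotope(): the text between the
        // leading '[' and the first ']'.
        String isotopeGroup = isotope.substring(1, isotope.indexOf("]"));
        System.out.println(isotopeGroup); // prints "15"
    }
}

Rows sharing the extracted group id (and a single identity name) are merged into one isotope pattern on the group's representative row.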

Example 19 with SimplePeakListAppliedMethod

use of net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod in project mzmine2 by mzmine.

the class AdductSearchTask method run.

@Override
public void run() {
    setStatus(TaskStatus.PROCESSING);
    LOG.info("Starting adducts search in " + peakList);
    try {
        // Search the feature list for adducts.
        searchAdducts();
        if (!isCanceled()) {
            // Add task description to peakList.
            peakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Identification of adducts", parameters));
            // Repaint the window to reflect the change in the feature list
            Desktop desktop = MZmineCore.getDesktop();
            if (!(desktop instanceof HeadLessDesktop))
                desktop.getMainWindow().repaint();
            // Done.
            setStatus(TaskStatus.FINISHED);
            LOG.info("Finished adducts search in " + peakList);
        }
    } catch (Throwable t) {
        LOG.log(Level.SEVERE, "Adduct search error", t);
        setStatus(TaskStatus.ERROR);
        setErrorMessage(t.getMessage());
    }
}
Also used : HeadLessDesktop(net.sf.mzmine.desktop.impl.HeadLessDesktop) Desktop(net.sf.mzmine.desktop.Desktop) SimplePeakListAppliedMethod(net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod)
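
The headless guard around the repaint call is the reusable part of Example 19. A minimal sketch of that pattern, using only the calls shown above (the net.sf.mzmine.main location of MZmineCore is an assumption):

import net.sf.mzmine.desktop.Desktop;
import net.sf.mzmine.desktop.impl.HeadLessDesktop;
import net.sf.mzmine.main.MZmineCore;

public class RepaintGuardSketch {
    // Repaint the main window only when a GUI desktop is present, so the same
    // task can also run in batch/headless mode where no main window exists.
    static void repaintIfGui() {
        Desktop desktop = MZmineCore.getDesktop();
        if (!(desktop instanceof HeadLessDesktop)) {
            desktop.getMainWindow().repaint();
        }
    }
}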

Example 20 with SimplePeakListAppliedMethod

use of net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod in project mzmine2 by mzmine.

the class TargetedPeakDetectionModuleTask method run.

public void run() {
    setStatus(TaskStatus.PROCESSING);
    // Calculate the total number of MS level 1 scans in the data file
    totalScans = dataFile.getNumOfScans(1);
    // Create new feature list
    processedPeakList = new SimplePeakList(dataFile.getName() + " " + suffix, dataFile);
    List<PeakInformation> peaks = this.readFile();
    if (peaks == null || peaks.isEmpty()) {
        setStatus(TaskStatus.ERROR);
        setErrorMessage("Could not read file or the file is empty ");
        return;
    }
    // Fill new feature list with empty rows
    for (int row = 0; row < peaks.size(); row++) {
        PeakListRow newRow = new SimplePeakListRow(ID++);
        processedPeakList.addRow(newRow);
    }
    // Canceled?
    if (isCanceled()) {
        return;
    }
    List<Gap> gaps = new ArrayList<Gap>();
    // Create a gap for each target compound
    for (int row = 0; row < peaks.size(); row++) {
        PeakListRow newRow = processedPeakList.getRow(row);
        // Create a new gap
        Range<Double> mzRange = mzTolerance.getToleranceRange(peaks.get(row).getMZ());
        Range<Double> rtRange = rtTolerance.getToleranceRange(peaks.get(row).getRT());
        newRow.addPeakIdentity(new SimplePeakIdentity(peaks.get(row).getName()), true);
        Gap newGap = new Gap(newRow, dataFile, mzRange, rtRange, intTolerance, noiseLevel);
        gaps.add(newGap);
    }
    // Stop processing this file if there are no gaps
    if (gaps.isEmpty()) {
        processedScans += dataFile.getNumOfScans();
    }
    // Get all scans of this data file
    int[] scanNumbers = dataFile.getScanNumbers(msLevel);
    if (scanNumbers == null) {
        logger.log(Level.WARNING, "Could not read file with the MS level of " + msLevel);
        setStatus(TaskStatus.ERROR);
        return;
    }
    // Process each scan
    for (int scanNumber : scanNumbers) {
        // Canceled?
        if (isCanceled()) {
            return;
        }
        // Get the scan
        Scan scan = dataFile.getScan(scanNumber);
        // Feed this scan to all gaps
        for (Gap gap : gaps) {
            gap.offerNextScan(scan);
        }
        processedScans++;
    }
    // Finalize gaps
    for (Gap gap : gaps) {
        gap.noMoreOffers();
    }
    // Append processed feature list to the project
    project.addPeakList(processedPeakList);
    // Add quality parameters to peaks
    QualityParameters.calculateQualityParameters(processedPeakList);
    // Add task description to peakList
    processedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Targeted feature detection ", parameters));
    logger.log(Level.INFO, "Finished targeted feature detection on {0}", this.dataFile);
    setStatus(TaskStatus.FINISHED);
}
Also used : ArrayList(java.util.ArrayList) SimplePeakIdentity(net.sf.mzmine.datamodel.impl.SimplePeakIdentity) SimplePeakListAppliedMethod(net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod) SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) Scan(net.sf.mzmine.datamodel.Scan) SimplePeakList(net.sf.mzmine.datamodel.impl.SimplePeakList)
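
readFile() and the PeakInformation type are not shown in Example 20; the task only calls getName(), getMZ() and getRT() on each target. A hypothetical minimal holder with exactly that shape, for illustration only:

// Hypothetical stand-in for the target records returned by readFile();
// only the accessors used in run() above are modelled.
public class TargetCompound {
    private final String name;
    private final double mz;
    private final double rt;

    public TargetCompound(String name, double mz, double rt) {
        this.name = name;
        this.mz = mz;
        this.rt = rt;
    }

    public String getName() {
        return name;
    }

    public double getMZ() {
        return mz;
    }

    public double getRT() {
        return rt;
    }
}

Each such target is then expanded into an m/z window and an RT window via mzTolerance.getToleranceRange() and rtTolerance.getToleranceRange() and wrapped in a Gap, as the task above shows.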

Aggregations

SimplePeakListAppliedMethod (net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod): 42 uses
SimplePeakList (net.sf.mzmine.datamodel.impl.SimplePeakList): 29 uses
PeakListRow (net.sf.mzmine.datamodel.PeakListRow): 26 uses
PeakListAppliedMethod (net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod): 20 uses
RawDataFile (net.sf.mzmine.datamodel.RawDataFile): 19 uses
SimplePeakListRow (net.sf.mzmine.datamodel.impl.SimplePeakListRow): 18 uses
Feature (net.sf.mzmine.datamodel.Feature): 16 uses
DataPoint (net.sf.mzmine.datamodel.DataPoint): 15 uses
PeakList (net.sf.mzmine.datamodel.PeakList): 11 uses
Desktop (net.sf.mzmine.desktop.Desktop): 10 uses
HeadLessDesktop (net.sf.mzmine.desktop.impl.HeadLessDesktop): 10 uses
SimpleFeature (net.sf.mzmine.datamodel.impl.SimpleFeature): 9 uses
SimpleDataPoint (net.sf.mzmine.datamodel.impl.SimpleDataPoint): 8 uses
ArrayList (java.util.ArrayList): 7 uses
PeakIdentity (net.sf.mzmine.datamodel.PeakIdentity): 6 uses
Vector (java.util.Vector): 4 uses
IsotopePattern (net.sf.mzmine.datamodel.IsotopePattern): 4 uses
Scan (net.sf.mzmine.datamodel.Scan): 4 uses
SimpleIsotopePattern (net.sf.mzmine.datamodel.impl.SimpleIsotopePattern): 4 uses
IOException (java.io.IOException): 3 uses