
Example 16 with PeakIdentity

Use of net.sf.mzmine.datamodel.PeakIdentity in project mzmine2 by mzmine.

The class PeakFinderTask, method run().

public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.info("Running gap filler on " + peakList);
    // Calculate total number of scans in all files
    for (RawDataFile dataFile : peakList.getRawDataFiles()) {
        totalScans += dataFile.getNumOfScans(1);
    }
    processedScans = new AtomicInteger();
    // Create new feature list
    processedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());
    // Fill new feature list with empty rows
    for (int row = 0; row < peakList.getNumberOfRows(); row++) {
        PeakListRow sourceRow = peakList.getRow(row);
        PeakListRow newRow = new SimplePeakListRow(sourceRow.getID());
        newRow.setComment(sourceRow.getComment());
        for (PeakIdentity ident : sourceRow.getPeakIdentities()) {
            newRow.addPeakIdentity(ident, false);
        }
        if (sourceRow.getPreferredPeakIdentity() != null) {
            newRow.setPreferredPeakIdentity(sourceRow.getPreferredPeakIdentity());
        }
        processedPeakList.addRow(newRow);
    }
    if (rtCorrection) {
        totalScans *= 2;
        // Fill the gaps of a random sample using all the other samples and
        // take it as master list
        // to fill the gaps of the other samples
        masterSample = (int) Math.floor(Math.random() * peakList.getNumberOfRawDataFiles());
        fillList(MASTERLIST);
        // Process all raw data files
        fillList(!MASTERLIST);
    } else {
        // Process all raw data files
        IntStream rawStream = IntStream.range(0, peakList.getNumberOfRawDataFiles());
        if (useParallelStream)
            rawStream = rawStream.parallel();
        rawStream.forEach(i -> {
            // Canceled?
            if (isCanceled()) {
                // inside stream - only skips this element
                return;
            }
            RawDataFile dataFile = peakList.getRawDataFile(i);
            List<Gap> gaps = new ArrayList<Gap>();
            // if necessary
            for (int row = 0; row < peakList.getNumberOfRows(); row++) {
                // Canceled?
                if (isCanceled()) {
                    // inside stream - only skips this element
                    return;
                }
                PeakListRow sourceRow = peakList.getRow(row);
                PeakListRow newRow = processedPeakList.getRow(row);
                Feature sourcePeak = sourceRow.getPeak(dataFile);
                if (sourcePeak == null) {
                    // Create a new gap
                    Range<Double> mzRange = mzTolerance.getToleranceRange(sourceRow.getAverageMZ());
                    Range<Double> rtRange = rtTolerance.getToleranceRange(sourceRow.getAverageRT());
                    Gap newGap = new Gap(newRow, dataFile, mzRange, rtRange, intTolerance);
                    gaps.add(newGap);
                } else {
                    newRow.addPeak(dataFile, sourcePeak);
                }
            }
            // Stop processing this file if there are no gaps
            if (gaps.size() == 0) {
                processedScans.addAndGet(dataFile.getNumOfScans(1));
                return;
            }
            // Get all scans of this data file
            int[] scanNumbers = dataFile.getScanNumbers(1);
            // Process each scan
            for (int scanNumber : scanNumbers) {
                // Canceled?
                if (isCanceled()) {
                    // inside stream - only skips this element
                    return;
                }
                // Get the scan
                Scan scan = dataFile.getScan(scanNumber);
                // Feed this scan to all gaps
                for (Gap gap : gaps) {
                    gap.offerNextScan(scan);
                }
                processedScans.incrementAndGet();
            }
            // Finalize gaps
            for (Gap gap : gaps) {
                gap.noMoreOffers();
            }
        });
    }
    // check cancellation again here - inside the stream, 'return' only skipped single elements
    if (isCanceled())
        return;
    // Append processed feature list to the project
    project.addPeakList(processedPeakList);
    // Add quality parameters to peaks
    QualityParameters.calculateQualityParameters(processedPeakList);
    // Add task description to peakList
    processedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Gap filling ", parameters));
    // Remove the original peaklist if requested
    if (removeOriginal)
        project.removePeakList(peakList);
    logger.info("Finished gap-filling on " + peakList);
    setStatus(TaskStatus.FINISHED);
}
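Note that the return statements inside the forEach lambda only skip the current element; they cannot terminate the stream, which is why isCanceled() is checked once more after the stream finishes. A minimal, self-contained sketch of that behaviour (not mzmine code; all names here are illustrative):

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;

public class StreamCancelSketch {
    public static void main(String[] args) {
        AtomicBoolean canceled = new AtomicBoolean(false);
        AtomicInteger processed = new AtomicInteger();
        IntStream.range(0, 100).parallel().forEach(i -> {
            // 'return' here only skips this element; the stream keeps running
            if (canceled.get()) {
                return;
            }
            if (i == 10) {
                // simulate a cancel request arriving mid-run
                canceled.set(true);
            }
            processed.incrementAndGet();
        });
        // The stream always runs to completion, so the cancel flag has to be
        // re-checked once after the stream, exactly as PeakFinderTask does.
        System.out.println("processed " + processed.get() + " of 100, canceled=" + canceled.get());
    }
}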
Also used : ArrayList(java.util.ArrayList) SimplePeakListAppliedMethod(net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod) SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) Feature(net.sf.mzmine.datamodel.Feature) PeakIdentity(net.sf.mzmine.datamodel.PeakIdentity) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Scan(net.sf.mzmine.datamodel.Scan) SimplePeakList(net.sf.mzmine.datamodel.impl.SimplePeakList) IntStream(java.util.stream.IntStream)

Example 17 with PeakIdentity

Use of net.sf.mzmine.datamodel.PeakIdentity in project mzmine2 by mzmine.

The class MultiThreadPeakFinderMainTask, method createResultsPeakList().

private PeakList createResultsPeakList() {
    SimplePeakList processedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());
    // Fill new feature list with empty rows
    for (int row = 0; row < peakList.getNumberOfRows(); row++) {
        PeakListRow sourceRow = peakList.getRow(row);
        PeakListRow newRow = new SimplePeakListRow(sourceRow.getID());
        newRow.setComment(sourceRow.getComment());
        for (PeakIdentity ident : sourceRow.getPeakIdentities()) {
            newRow.addPeakIdentity(ident, false);
        }
        if (sourceRow.getPreferredPeakIdentity() != null) {
            newRow.setPreferredPeakIdentity(sourceRow.getPreferredPeakIdentity());
        }
        processedPeakList.addRow(newRow);
    }
    return processedPeakList;
}
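The copy loop above is the same identity-preserving pattern used in Example 16. A possible extraction into a per-row helper, using only the calls already shown (the helper itself is hypothetical, and the boolean passed to addPeakIdentity is assumed to be the "set as preferred" flag):

// Hypothetical helper; not part of MultiThreadPeakFinderMainTask.
private static PeakListRow copyRowWithIdentities(PeakListRow sourceRow) {
    PeakListRow newRow = new SimplePeakListRow(sourceRow.getID());
    newRow.setComment(sourceRow.getComment());
    // copy every identity without promoting it to preferred (assumed meaning of 'false')
    for (PeakIdentity ident : sourceRow.getPeakIdentities()) {
        newRow.addPeakIdentity(ident, false);
    }
    // carry the preferred identity over explicitly
    if (sourceRow.getPreferredPeakIdentity() != null) {
        newRow.setPreferredPeakIdentity(sourceRow.getPreferredPeakIdentity());
    }
    return newRow;
}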
Also used : PeakIdentity(net.sf.mzmine.datamodel.PeakIdentity) SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) SimplePeakList(net.sf.mzmine.datamodel.impl.SimplePeakList)

Example 18 with PeakIdentity

Use of net.sf.mzmine.datamodel.PeakIdentity in project mzmine2 by mzmine.

The class PeaklistClearAnnotationsTask, method run().

@Override
public void run() {
    try {
        setStatus(TaskStatus.PROCESSING);
        LOG.info("Filtering feature list rows");
        totalRows = origPeakList.getRows().length;
        // Filter the feature list.
        for (PeakListRow row : origPeakList.getRows()) {
            if (parameters.getParameter(PeaklistClearAnnotationsParameters.CLEAR_IDENTITY).getValue()) {
                for (PeakIdentity identity : row.getPeakIdentities()) row.removePeakIdentity(identity);
            }
            if (parameters.getParameter(PeaklistClearAnnotationsParameters.CLEAR_COMMENT).getValue()) {
                row.setComment("");
            }
            processedRows += 1;
        }
        if (getStatus() == TaskStatus.ERROR)
            return;
        if (isCanceled())
            return;
        // Add new peaklist to the project
        project.addPeakList(filteredPeakList);
        // Remove the original peaklist if requested
        /*
         * if (parameters.getParameter(PeaklistClearAnnotationsParameters.AUTO_REMOVE).getValue()) {
         *   project.removePeakList(origPeakList);
         * }
         */
        setStatus(TaskStatus.FINISHED);
        LOG.info("Finished peak comparison rows filter");
    } catch (Throwable t) {
        t.printStackTrace();
        setErrorMessage(t.getMessage());
        setStatus(TaskStatus.ERROR);
        LOG.log(Level.SEVERE, "Peak comparison row filter error", t);
    }
}
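The per-row work could also be isolated in a small helper; a sketch using only the row API already shown above (the helper name and signature are hypothetical):

// Hypothetical helper mirroring the loop body above.
private static void clearRowAnnotations(PeakListRow row, boolean clearIdentities, boolean clearComment) {
    if (clearIdentities) {
        // getPeakIdentities() is assumed to return an array/copy, so removing
        // entries inside the loop is safe, matching the original code above.
        for (PeakIdentity identity : row.getPeakIdentities()) {
            row.removePeakIdentity(identity);
        }
    }
    if (clearComment) {
        row.setComment("");
    }
}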
Also used : PeakIdentity(net.sf.mzmine.datamodel.PeakIdentity) SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) PeakListRow(net.sf.mzmine.datamodel.PeakListRow)

Example 19 with PeakIdentity

Use of net.sf.mzmine.datamodel.PeakIdentity in project mzmine2 by mzmine.

The class MetaboAnalystExportTask, method generateUniquePeakListRowName().

/**
 * Generates a unique name for each feature list row
 */
private String generateUniquePeakListRowName(PeakListRow row) {
    final double mz = row.getAverageMZ();
    final double rt = row.getAverageRT();
    final int rowId = row.getID();
    String generatedName = rowId + "/" + MZmineCore.getConfiguration().getMZFormat().format(mz) + "mz/" + MZmineCore.getConfiguration().getRTFormat().format(rt) + "min";
    PeakIdentity peakIdentity = row.getPreferredPeakIdentity();
    if (peakIdentity == null)
        return generatedName;
    String idName = peakIdentity.getPropertyValue(PeakIdentity.PROPERTY_NAME);
    if (idName == null)
        return generatedName;
    idName = idName.replace('"', '\'');
    generatedName = generatedName + " (" + idName + ")";
    return generatedName;
}
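As a concrete illustration of the output format, here is a standalone sketch that reproduces the same naming scheme with explicit number formats instead of MZmineCore.getConfiguration() (the formats and the sample values are assumptions, not the configured ones):

import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;

public class RowNameSketch {
    public static void main(String[] args) {
        DecimalFormat mzFormat = new DecimalFormat("0.0000", DecimalFormatSymbols.getInstance(Locale.ROOT));
        DecimalFormat rtFormat = new DecimalFormat("0.00", DecimalFormatSymbols.getInstance(Locale.ROOT));
        int rowId = 91;
        double mz = 273.0912;
        double rt = 5.43;
        // would normally come from PeakIdentity.PROPERTY_NAME
        String idName = "Caffeine";
        String name = rowId + "/" + mzFormat.format(mz) + "mz/" + rtFormat.format(rt) + "min";
        if (idName != null) {
            // double quotes in identity names are turned into single quotes, as above
            name = name + " (" + idName.replace('"', '\'') + ")";
        }
        // prints: 91/273.0912mz/5.43min (Caffeine)
        System.out.println(name);
    }
}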
Also used : PeakIdentity(net.sf.mzmine.datamodel.PeakIdentity)

Example 20 with PeakIdentity

Use of net.sf.mzmine.datamodel.PeakIdentity in project mzmine2 by mzmine.

The class MzTabExportTask, method run().

public void run() {
    setStatus(TaskStatus.PROCESSING);
    // Shall export several files?
    boolean substitute = fileName.getPath().contains(plNamePattern);
    // Total number of rows
    for (PeakList peakList : peakLists) {
        totalRows += peakList.getNumberOfRows();
    }
    // Process feature lists
    for (PeakList peakList : peakLists) {
        File curFile = fileName;
        try {
            // Filename
            if (substitute) {
                // Cleanup from illegal filename characters
                String cleanPlName = peakList.getName().replaceAll("[^a-zA-Z0-9.-]", "_");
                // Substitute
                String newFilename = fileName.getPath().replaceAll(Pattern.quote(plNamePattern), cleanPlName);
                curFile = new File(newFilename);
            }
            // Open file
            FileWriter writer;
            try {
                writer = new FileWriter(curFile);
            } catch (Exception e) {
                setStatus(TaskStatus.ERROR);
                setErrorMessage("Could not open file " + curFile + " for writing.");
                return;
            }
            // Metadata
            Metadata mtd = new Metadata();
            mtd.setMZTabMode(MZTabDescription.Mode.Summary);
            mtd.setMZTabType(MZTabDescription.Type.Quantification);
            mtd.setDescription(peakList.getName());
            mtd.addSoftwareParam(1, new CVParam("MS", "MS:1002342", "MZmine", MZmineCore.getMZmineVersion()));
            mtd.setSmallMoleculeQuantificationUnit(new CVParam("PRIDE", "PRIDE:0000330", "Arbitrary quantification unit", null));
            mtd.addSmallMoleculeSearchEngineScoreParam(1, new CVParam("MS", "MS:1001153", "search engine specific score", null));
            mtd.addFixedModParam(1, new CVParam("MS", "MS:1002453", "No fixed modifications searched", null));
            mtd.addVariableModParam(1, new CVParam("MS", "MS:1002454", "No variable modifications searched", null));
            // Create stable columns
            MZTabColumnFactory factory = MZTabColumnFactory.getInstance(Section.Small_Molecule);
            factory.addDefaultStableColumns();
            // Add optional columns which have stable order
            factory.addURIOptionalColumn();
            factory.addBestSearchEngineScoreOptionalColumn(SmallMoleculeColumn.BEST_SEARCH_ENGINE_SCORE, 1);
            final RawDataFile[] rawDataFiles = peakList.getRawDataFiles();
            int fileCounter = 0;
            for (RawDataFile file : rawDataFiles) {
                fileCounter++;
                // TODO: Add path to original imported raw file to MZmine and write it out here instead
                // MS run location
                MsRun msRun = new MsRun(fileCounter);
                msRun.setLocation(new URL("file:///" + file.getName()));
                mtd.addMsRun(msRun);
                mtd.addAssayMsRun(fileCounter, msRun);
                // Add samples to study variable assay
                for (UserParameter<?, ?> p : project.getParameters()) {
                    Assay assay = mtd.getAssayMap().get(fileCounter);
                    for (StudyVariable studyVariable : mtd.getStudyVariableMap().values()) {
                        if (studyVariable.getDescription().equals(String.valueOf(p) + ": " + String.valueOf(project.getParameterValue(p, file)))) {
                            mtd.addStudyVariableAssay(studyVariable.getId(), assay);
                        }
                    }
                }
                // Additional columns
                factory.addAbundanceOptionalColumn(new Assay(fileCounter));
                factory.addOptionalColumn(new Assay(fileCounter), "peak_mz", String.class);
                factory.addOptionalColumn(new Assay(fileCounter), "peak_rt", String.class);
                factory.addOptionalColumn(new Assay(fileCounter), "peak_height", String.class);
            }
            // Variable descriptions
            int parameterCounter = 0;
            for (UserParameter<?, ?> p : project.getParameters()) {
                for (Object e : ((ComboParameter<?>) p).getChoices()) {
                    parameterCounter++;
                    mtd.addStudyVariableDescription(parameterCounter, String.valueOf(p) + ": " + String.valueOf(e));
                    StudyVariable studyVariable = new StudyVariable(parameterCounter);
                    factory.addAbundanceOptionalColumn(studyVariable);
                }
            }
            // Write to file
            BufferedWriter out = new BufferedWriter(writer);
            out.write(mtd.toString());
            out.write(newLine);
            out.write(factory.toString());
            out.write(newLine);
            // Write data rows
            for (PeakListRow peakListRow : peakList.getRows()) {
                // Cancel?
                if (isCanceled()) {
                    return;
                }
                PeakIdentity peakIdentity = peakListRow.getPreferredPeakIdentity();
                if (exportall || peakIdentity != null) {
                    SmallMolecule sm = new SmallMolecule(factory, mtd);
                    if (peakIdentity != null) {
                        // Identity information
                        String identifier = escapeString(peakIdentity.getPropertyValue("ID"));
                        String database = peakIdentity.getPropertyValue("Identification method");
                        String formula = peakIdentity.getPropertyValue("Molecular formula");
                        String description = escapeString(peakIdentity.getPropertyValue("Name"));
                        String url = peakIdentity.getPropertyValue("URL");
                        if (identifier != null) {
                            sm.setIdentifier(identifier);
                        }
                        if (database != null) {
                            sm.setDatabase(database);
                        }
                        if (formula != null) {
                            sm.setChemicalFormula(formula);
                        }
                        if (description != null) {
                            sm.setDescription(description);
                        }
                        if (url != null) {
                            sm.setURI(url);
                        }
                    }
                    Double rowMZ = peakListRow.getAverageMZ();
                    int rowCharge = peakListRow.getRowCharge();
                    String rowRT = String.valueOf(peakListRow.getAverageRT());
                    if (rowMZ != null) {
                        sm.setExpMassToCharge(rowMZ);
                    }
                    if (rowCharge > 0) {
                        sm.setCharge(rowCharge);
                    }
                    if (rowRT != null) {
                        sm.setRetentionTime(rowRT);
                    }
                    int dataFileCount = 0;
                    for (RawDataFile dataFile : rawDataFiles) {
                        dataFileCount++;
                        Feature peak = peakListRow.getPeak(dataFile);
                        if (peak != null) {
                            String peakMZ = String.valueOf(peak.getMZ());
                            String peakRT = String.valueOf(peak.getRT());
                            String peakHeight = String.valueOf(peak.getHeight());
                            Double peakArea = peak.getArea();
                            sm.setOptionColumnValue(new Assay(dataFileCount), "peak_mz", peakMZ);
                            sm.setOptionColumnValue(new Assay(dataFileCount), "peak_rt", peakRT);
                            sm.setOptionColumnValue(new Assay(dataFileCount), "peak_height", peakHeight);
                            sm.setAbundanceColumnValue(new Assay(dataFileCount), peakArea);
                        }
                    }
                    out.write(sm.toString());
                    out.write(newLine);
                }
            }
            out.flush();
            out.close();
            writer.close();
        } catch (Exception e) {
            e.printStackTrace();
            setStatus(TaskStatus.ERROR);
            setErrorMessage("Could not export feature list to file " + curFile + ": " + e.getMessage());
            return;
        }
    }
    if (getStatus() == TaskStatus.PROCESSING)
        setStatus(TaskStatus.FINISHED);
}
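The per-feature-list filename substitution near the top of the method can be tried in isolation. A standalone sketch, assuming plNamePattern is the usual "{}" placeholder and using an illustrative path:

import java.io.File;
import java.util.regex.Pattern;

public class FilenameSubstitutionSketch {
    public static void main(String[] args) {
        // assumption: the placeholder pattern configured in the export dialog
        String plNamePattern = "{}";
        File fileName = new File("/tmp/export_{}.mzTab");
        String peakListName = "Aligned peak list (RT corrected)";
        boolean substitute = fileName.getPath().contains(plNamePattern);
        if (substitute) {
            // strip characters that are not safe in file names, as the exporter does
            String cleanPlName = peakListName.replaceAll("[^a-zA-Z0-9.-]", "_");
            String newFilename = fileName.getPath().replaceAll(Pattern.quote(plNamePattern), cleanPlName);
            // prints (on a Unix-style path): /tmp/export_Aligned_peak_list__RT_corrected_.mzTab
            System.out.println(new File(newFilename).getPath());
        }
    }
}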
Also used : FileWriter(java.io.FileWriter) Metadata(uk.ac.ebi.pride.jmztab.model.Metadata) CVParam(uk.ac.ebi.pride.jmztab.model.CVParam) Feature(net.sf.mzmine.datamodel.Feature) URL(java.net.URL) ComboParameter(net.sf.mzmine.parameters.parametertypes.ComboParameter) BufferedWriter(java.io.BufferedWriter) Assay(uk.ac.ebi.pride.jmztab.model.Assay) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) MsRun(uk.ac.ebi.pride.jmztab.model.MsRun) StudyVariable(uk.ac.ebi.pride.jmztab.model.StudyVariable) MZTabColumnFactory(uk.ac.ebi.pride.jmztab.model.MZTabColumnFactory) PeakIdentity(net.sf.mzmine.datamodel.PeakIdentity) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) SmallMolecule(uk.ac.ebi.pride.jmztab.model.SmallMolecule) PeakList(net.sf.mzmine.datamodel.PeakList) File(java.io.File)

Aggregations

PeakIdentity (net.sf.mzmine.datamodel.PeakIdentity) 32
PeakListRow (net.sf.mzmine.datamodel.PeakListRow) 19
Feature (net.sf.mzmine.datamodel.Feature) 14
ArrayList (java.util.ArrayList) 9
RawDataFile (net.sf.mzmine.datamodel.RawDataFile) 9
SimplePeakListRow (net.sf.mzmine.datamodel.impl.SimplePeakListRow) 9
PeakList (net.sf.mzmine.datamodel.PeakList) 8
HashMap (java.util.HashMap) 7
SimplePeakIdentity (net.sf.mzmine.datamodel.impl.SimplePeakIdentity) 7
SimplePeakList (net.sf.mzmine.datamodel.impl.SimplePeakList) 7
DataPoint (net.sf.mzmine.datamodel.DataPoint) 6
SimplePeakListAppliedMethod (net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod) 6
IOException (java.io.IOException) 5
List (java.util.List) 5
IsotopePattern (net.sf.mzmine.datamodel.IsotopePattern) 5
File (java.io.File) 4
HashSet (java.util.HashSet) 4
Scan (net.sf.mzmine.datamodel.Scan) 4
SimpleDataPoint (net.sf.mzmine.datamodel.impl.SimpleDataPoint) 4
Range (com.google.common.collect.Range) 3