
Example 1 with SimplePeakInformation

Use of net.sf.mzmine.datamodel.impl.SimplePeakInformation in project mzmine2 by mzmine.

In class ADAP3DecompositionV1_5Task, method decomposePeaks:

private PeakList decomposePeaks(PeakList peakList) throws CloneNotSupportedException, IOException {
    RawDataFile dataFile = peakList.getRawDataFile(0);
    // Create new feature list.
    final PeakList resolvedPeakList = new SimplePeakList(peakList + " " + parameters.getParameter(ADAP3DecompositionV1_5Parameters.SUFFIX).getValue(), dataFile);
    // Load previously applied methods.
    for (final PeakList.PeakListAppliedMethod method : peakList.getAppliedMethods()) {
        resolvedPeakList.addDescriptionOfAppliedTask(method);
    }
    // Add task description to feature list.
    resolvedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Peak deconvolution by ADAP-3", parameters));
    // Collect peak information
    List<Peak> peaks = getPeaks(peakList, this.parameters.getParameter(ADAP3DecompositionV1_5Parameters.EDGE_TO_HEIGHT_RATIO).getValue(), this.parameters.getParameter(ADAP3DecompositionV1_5Parameters.DELTA_TO_HEIGHT_RATIO).getValue());
    // Find components (a.k.a. clusters of peaks with fragmentation spectra)
    List<Component> components = getComponents(peaks);
    // Create a PeakListRow for each component
    List<PeakListRow> newPeakListRows = new ArrayList<>();
    int rowID = 0;
    for (final Component component : components) {
        if (component.getSpectrum().isEmpty())
            continue;
        PeakListRow row = new SimplePeakListRow(++rowID);
        // Add the reference peak ('originalPeakList' is the task's input feature list,
        // held as a field of the enclosing class)
        PeakListRow refPeakRow = originalPeakList.getRow(component.getBestPeak().getInfo().peakID);
        Feature refPeak = new SimpleFeature(refPeakRow.getBestPeak());
        // Add spectrum
        List<DataPoint> dataPoints = new ArrayList<>();
        for (Map.Entry<Double, Double> entry : component.getSpectrum().entrySet()) {
            dataPoints.add(new SimpleDataPoint(entry.getKey(), entry.getValue()));
        }
        refPeak.setIsotopePattern(new SimpleIsotopePattern(dataPoints.toArray(new DataPoint[dataPoints.size()]), IsotopePattern.IsotopePatternStatus.PREDICTED, "Spectrum"));
        row.addPeak(dataFile, refPeak);
        // Add PeakInformation
        if (refPeakRow.getPeakInformation() != null) {
            SimplePeakInformation information = new SimplePeakInformation(new HashMap<>(refPeakRow.getPeakInformation().getAllProperties()));
            row.setPeakInformation(information);
        }
        // Set row properties
        row.setAverageMZ(refPeakRow.getAverageMZ());
        row.setAverageRT(refPeakRow.getAverageRT());
        // resolvedPeakList.addRow(row);
        newPeakListRows.add(row);
    }
    // ------------------------------------
    // Sort new peak rows by retention time
    // ------------------------------------
    Collections.sort(newPeakListRows, new Comparator<PeakListRow>() {

        @Override
        public int compare(PeakListRow row1, PeakListRow row2) {
            double retTime1 = row1.getAverageRT();
            double retTime2 = row2.getAverageRT();
            return Double.compare(retTime1, retTime2);
        }
    });
    for (PeakListRow row : newPeakListRows) resolvedPeakList.addRow(row);
    return resolvedPeakList;
}
Also used : ArrayList(java.util.ArrayList) SimplePeakListAppliedMethod(net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod) SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) Feature(net.sf.mzmine.datamodel.Feature) SimpleFeature(net.sf.mzmine.datamodel.impl.SimpleFeature) SimpleDataPoint(net.sf.mzmine.datamodel.impl.SimpleDataPoint) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) DataPoint(net.sf.mzmine.datamodel.DataPoint) Peak(dulab.adap.datamodel.Peak) SimplePeakList(net.sf.mzmine.datamodel.impl.SimplePeakList) SimpleIsotopePattern(net.sf.mzmine.datamodel.impl.SimpleIsotopePattern) Component(dulab.adap.datamodel.Component) SimplePeakInformation(net.sf.mzmine.datamodel.impl.SimplePeakInformation) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) PeakList(net.sf.mzmine.datamodel.PeakList) HashMap(java.util.HashMap) Map(java.util.Map) NavigableMap(java.util.NavigableMap) TreeMap(java.util.TreeMap)
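The SimplePeakInformation usage worth noting here is copying the reference row's properties onto the newly created row. A minimal sketch of that pattern, assuming only the Map-based constructor and getAllProperties() shown in the example above (the class and method names in this sketch are illustrative, not part of mzmine2):

import java.util.HashMap;

import net.sf.mzmine.datamodel.PeakListRow;
import net.sf.mzmine.datamodel.impl.SimplePeakInformation;

class PeakInformationCopySketch {

    // Copy the source row's PeakInformation onto the target row, if present.
    static void copyInformation(PeakListRow source, PeakListRow target) {
        if (source.getPeakInformation() != null) {
            // Wrap the properties in a fresh HashMap so the copy shares no state with the source row.
            SimplePeakInformation information = new SimplePeakInformation(
                    new HashMap<>(source.getPeakInformation().getAllProperties()));
            target.setPeakInformation(information);
        }
    }
}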

Example 2 with SimplePeakInformation

Use of net.sf.mzmine.datamodel.impl.SimplePeakInformation in project mzmine2 by mzmine.

In class SignificanceTask, method calculateSignificance:

private void calculateSignificance() throws IllegalStateException {
    if (peakListRows.length == 0) {
        return;
    }
    List<Set<RawDataFile>> groups = getGroups(userParameter);
    finishedPercentage = 0.0;
    final double finishedStep = 1.0 / peakListRows.length;
    for (PeakListRow row : peakListRows) {
        if (isCanceled()) {
            break;
        }
        finishedPercentage += finishedStep;
        double[][] intensityGroups = new double[groups.size()][];
        for (int i = 0; i < groups.size(); ++i) {
            Set<RawDataFile> groupFiles = groups.get(i);
            intensityGroups[i] = Arrays.stream(row.getPeaks()).filter(peak -> groupFiles.contains(peak.getDataFile())).mapToDouble(Feature::getHeight).toArray();
        }
        Double pValue = oneWayAnova(intensityGroups);
        // Save results
        PeakInformation peakInformation = row.getPeakInformation();
        if (peakInformation == null) {
            peakInformation = new SimplePeakInformation();
        }
        peakInformation.getAllProperties().put(P_VALUE_KEY, pValue == null ? EMPTY_STRING : pValue.toString());
        row.setPeakInformation(peakInformation);
    }
}
Also used : IntStream(java.util.stream.IntStream) net.sf.mzmine.datamodel(net.sf.mzmine.datamodel) java.util(java.util) MZmineCore(net.sf.mzmine.main.MZmineCore) TaskStatus(net.sf.mzmine.taskcontrol.TaskStatus) Logger(java.util.logging.Logger) Collectors(java.util.stream.Collectors) Level(java.util.logging.Level) SimplePeakInformation(net.sf.mzmine.datamodel.impl.SimplePeakInformation) ParameterSet(net.sf.mzmine.parameters.ParameterSet) AbstractTask(net.sf.mzmine.taskcontrol.AbstractTask) Entry(java.util.Map.Entry) UserParameter(net.sf.mzmine.parameters.UserParameter) FDistribution(org.apache.commons.math3.distribution.FDistribution) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable) MathIllegalArgumentException(org.apache.commons.math3.exception.MathIllegalArgumentException)
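The pattern in this example is create-if-missing: the row's PeakInformation is instantiated only when absent, and the computed p-value is stored as a string under a fixed key. A minimal sketch of just that step, under the same assumptions as the example above (the helper class, method, and key names are illustrative):

import net.sf.mzmine.datamodel.PeakInformation;
import net.sf.mzmine.datamodel.PeakListRow;
import net.sf.mzmine.datamodel.impl.SimplePeakInformation;

class StoreResultSketch {

    // Store a computed value on the row, creating the PeakInformation container if needed.
    static void storeValue(PeakListRow row, String key, Double value) {
        PeakInformation information = row.getPeakInformation();
        if (information == null) {
            information = new SimplePeakInformation();
        }
        // Properties are plain strings; a missing value is stored as an empty string,
        // mirroring the ANOVA example above.
        information.getAllProperties().put(key, value == null ? "" : value.toString());
        row.setPeakInformation(information);
    }
}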

Example 3 with SimplePeakInformation

Use of net.sf.mzmine.datamodel.impl.SimplePeakInformation in project mzmine2 by mzmine.

In class RowsFilterTask, method copyPeakRow:

/**
 * Create a copy of a feature list row.
 *
 * @param row the row to copy.
 * @return the newly created copy.
 */
private static PeakListRow copyPeakRow(final PeakListRow row) {
    // Copy the feature list row.
    final PeakListRow newRow = new SimplePeakListRow(row.getID());
    PeakUtils.copyPeakListRowProperties(row, newRow);
    // Copy the peaks.
    for (final Feature peak : row.getPeaks()) {
        final Feature newPeak = new SimpleFeature(peak);
        PeakUtils.copyPeakProperties(peak, newPeak);
        newRow.addPeak(peak.getDataFile(), newPeak);
    }
    // Add PeakInformation
    if (row.getPeakInformation() != null) {
        SimplePeakInformation information = new SimplePeakInformation(new HashMap<>(row.getPeakInformation().getAllProperties()));
        newRow.setPeakInformation(information);
    }
    return newRow;
}
Also used : SimplePeakListRow(net.sf.mzmine.datamodel.impl.SimplePeakListRow) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) Feature(net.sf.mzmine.datamodel.Feature) SimpleFeature(net.sf.mzmine.datamodel.impl.SimpleFeature) SimplePeakInformation(net.sf.mzmine.datamodel.impl.SimplePeakInformation)

Example 4 with SimplePeakInformation

Use of net.sf.mzmine.datamodel.impl.SimplePeakInformation in project mzmine2 by mzmine.

In class ADAPDetector, method resolvePeaks:

@Override
public ResolvedPeak[] resolvePeaks(final Feature chromatogram, final ParameterSet parameters, RSessionWrapper rSession, CenterFunction mzCenterFunction, double msmsRange, double rTRangeMSMS) throws RSessionWrapperException {
    int[] scanNumbers = chromatogram.getScanNumbers();
    final int scanCount = scanNumbers.length;
    double[] retentionTimes = new double[scanCount];
    double[] intensities = new double[scanCount];
    RawDataFile dataFile = chromatogram.getDataFile();
    for (int i = 0; i < scanCount; i++) {
        final int scanNum = scanNumbers[i];
        retentionTimes[i] = dataFile.getScan(scanNum).getRetentionTime();
        DataPoint dp = chromatogram.getDataPoint(scanNum);
        if (dp != null)
            intensities[i] = dp.getIntensity();
        else
            intensities[i] = 0.0;
    }
    // List<PeakInfo> ADAPPeaks = new ArrayList<PeakInfo>();
    List<PeakInfo> ADAPPeaks = null;
    Range<Double> peakDuration = parameters.getParameter(PEAK_DURATION).getValue();
    final MZmineProcessingStep<SNEstimatorChoice> signalNoiseEstimator = parameters.getParameter(SN_ESTIMATORS).getValue();
    String SNCode = signalNoiseEstimator.getModule().getSNCode();
    double signalNoiseWindowMult = -1.0;
    boolean absWavCoeffs = false;
    Map<String, Object> informationSN = new HashMap<String, Object>();
    if (SNCode == "Wavelet Coefficient Estimator") {
        informationSN.put("code", "Wavelet Coefficient Estimator");
        signalNoiseWindowMult = signalNoiseEstimator.getParameterSet().getParameter(HALF_WAVELET_WINDOW).getValue();
        absWavCoeffs = signalNoiseEstimator.getParameterSet().getParameter(ABS_WAV_COEFFS).getValue();
        informationSN.put("multiplier", signalNoiseWindowMult);
        informationSN.put("absolutewavecoeffs", absWavCoeffs);
    }
    if (SNCode == "Intensity Window Estimator") {
        informationSN.put("code", "Intensity Window Estimator");
    }
    // get the average rt spacing
    double rtSum = 0.0;
    for (int i = 0; i < retentionTimes.length - 1; i++) {
        rtSum += retentionTimes[i + 1] - retentionTimes[i];
    }
    double avgRTInterval = rtSum / (retentionTimes.length - 1);
    // Change the lower and upper bounds for the wavelet scales from retention times to number of
    // scans.
    Range<Double> rtRangeForCWTScales = parameters.getParameter(RT_FOR_CWT_SCALES_DURATION).getValue();
    double rtLow = rtRangeForCWTScales.lowerEndpoint();
    double rtHigh = rtRangeForCWTScales.upperEndpoint();
    int numScansRTLow = (int) Math.round(rtLow / avgRTInterval);
    int numScansRTHigh = (int) Math.round(rtHigh / avgRTInterval);
    if (numScansRTLow < 1) {
        numScansRTLow = 1;
    }
    if (numScansRTHigh >= retentionTimes.length) {
        numScansRTHigh = retentionTimes.length;
    }
    ADAPPeaks = DeconvoluteSignal(retentionTimes, intensities, chromatogram.getMZ(), parameters.getParameter(SN_THRESHOLD).getValue(), parameters.getParameter(MIN_FEAT_HEIGHT).getValue(), peakDuration, parameters.getParameter(COEF_AREA_THRESHOLD).getValue(), numScansRTLow, numScansRTHigh, informationSN);
    final List<ResolvedPeak> resolvedPeaks;
    if (ADAPPeaks == null) {
        resolvedPeaks = new ArrayList<ResolvedPeak>(0);
    } else {
        // Process peak matrix.
        resolvedPeaks = new ArrayList<ResolvedPeak>(ADAPPeaks.size());
        // for (final double[] peakRow : peakMatrix) {
        for (int i = 0; i < ADAPPeaks.size(); i++) {
            PeakInfo curPeak = ADAPPeaks.get(i);
            SimplePeakInformation information = new SimplePeakInformation();
            information.addProperty("Signal-to-Noise", Double.toString(curPeak.signalToNoiseRatio));
            information.addProperty("Coefficient-over-area", Double.toString(curPeak.coeffOverArea));
            // information.addProperty("index",
            // //Integer.toString(scans[(int) peakIndex[j] - 1])); // Substract one because r-indices
            // start from 1
            // Integer.toString((int) curPeak.peakIndex));
            // information.addProperty("sharpness",
            // Double.toString(curPeak.sharpness));
            // information.addProperty("signalToNoiseRatio",
            // Double.toString(curPeak.signalToNoiseRatio));
            // information.addProperty("isShared",
            // Boolean.toString(curPeak.isShared));
            // //Boolean.toString(1.0 == curPeak.isShared));
            // information.addProperty("offset",
            // Integer.toString((int) curPeak.offset));
            ResolvedPeak peak = new ResolvedPeak(chromatogram, curPeak.leftApexIndex, curPeak.rightApexIndex, mzCenterFunction, msmsRange, rTRangeMSMS);
            peak.setPeakInformation(information);
            resolvedPeaks.add(peak);
        // resolvedPeaks.add(new ResolvedPeak(chromatogram,curPeak.leftApexIndex,
        // curPeak.rightApexIndex));
        }
    }
    return resolvedPeaks.toArray(new ResolvedPeak[resolvedPeaks.size()]);
}
Also used : HashMap(java.util.HashMap) PeakInfo(dulab.adap.datamodel.PeakInfo) DataPoint(net.sf.mzmine.datamodel.DataPoint) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) ResolvedPeak(net.sf.mzmine.modules.peaklistmethods.peakpicking.deconvolution.ResolvedPeak) SimplePeakInformation(net.sf.mzmine.datamodel.impl.SimplePeakInformation)
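Here SimplePeakInformation serves as a small key/value container for per-peak quality metrics, filled with addProperty and attached to each resolved peak. A minimal sketch of that step, assuming the no-argument constructor and addProperty(String, String) used above (the method and variable names are illustrative):

import net.sf.mzmine.datamodel.impl.SimplePeakInformation;

class PeakDiagnosticsSketch {

    // Build a PeakInformation holding the quality metrics reported by the resolver.
    static SimplePeakInformation buildDiagnostics(double signalToNoise, double coeffOverArea) {
        SimplePeakInformation information = new SimplePeakInformation();
        // All values are stored as strings, mirroring the example above.
        information.addProperty("Signal-to-Noise", Double.toString(signalToNoise));
        information.addProperty("Coefficient-over-area", Double.toString(coeffOverArea));
        return information;
    }
}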

Example 5 with SimplePeakInformation

Use of net.sf.mzmine.datamodel.impl.SimplePeakInformation in project mzmine2 by mzmine.

In class PeakListOpenHandler_2_5, method endElement:

/**
 * @see org.xml.sax.helpers.DefaultHandler#endElement(java.lang.String, java.lang.String,
 *      java.lang.String)
 */
@Override
public void endElement(String namespaceURI, String sName, String qName) throws SAXException {
    if (canceled)
        throw new SAXException("Parsing canceled");
    // <NAME>
    if (qName.equals(PeakListElementName_2_5.PEAKLIST_NAME.getElementName())) {
        name = getTextOfElement();
        logger.info("Loading feature list: " + name);
        peakListName = name;
    }
    // <PEAKLIST_DATE>
    if (qName.equals(PeakListElementName_2_5.PEAKLIST_DATE.getElementName())) {
        dateCreated = getTextOfElement();
    }
    // <QUANTITY>
    if (qName.equals(PeakListElementName_2_5.QUANTITY.getElementName())) {
        String text = getTextOfElement();
        totalRows = Integer.parseInt(text);
    }
    // <RAW_FILE>
    if (qName.equals(PeakListElementName_2_5.RAWFILE.getElementName())) {
        rawDataFileID = getTextOfElement();
        RawDataFile dataFile = dataFilesIDMap.get(rawDataFileID);
        if (dataFile == null) {
            throw new SAXException("Cannot open feature list, because raw data file " + rawDataFileID + " is missing.");
        }
        currentPeakListDataFiles.add(dataFile);
    }
    // <SCAN_ID>
    if (qName.equals(PeakListElementName_2_5.SCAN_ID.getElementName())) {
        byte[] bytes = Base64.decodeToBytes(getTextOfElement());
        // make a data input stream
        DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(bytes));
        scanNumbers = new int[numOfMZpeaks];
        for (int i = 0; i < numOfMZpeaks; i++) {
            try {
                scanNumbers[i] = dataInputStream.readInt();
            } catch (IOException ex) {
                throw new SAXException(ex);
            }
        }
    }
    // <REPRESENTATIVE_SCAN>
    if (qName.equals(PeakListElementName_2_5.REPRESENTATIVE_SCAN.getElementName())) {
        representativeScan = Integer.valueOf(getTextOfElement());
    }
    // <FRAGMENT_SCAN>
    if (qName.equals(PeakListElementName_2_5.FRAGMENT_SCAN.getElementName())) {
        fragmentScan = Integer.valueOf(getTextOfElement());
    }
    // <All_MS2_FRAGMENT_SCANS>
    if (qName.equals(PeakListElementName_2_5.ALL_MS2_FRAGMENT_SCANS.getElementName())) {
        Integer fragmentNumber = Integer.valueOf(getTextOfElement());
        currentAllMS2FragmentScans.add(fragmentNumber);
    }
    // <MASS>
    if (qName.equals(PeakListElementName_2_5.MZ.getElementName())) {
        byte[] bytes = Base64.decodeToBytes(getTextOfElement());
        // make a data input stream
        DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(bytes));
        masses = new double[numOfMZpeaks];
        for (int i = 0; i < numOfMZpeaks; i++) {
            try {
                masses[i] = dataInputStream.readFloat();
            } catch (IOException ex) {
                throw new SAXException(ex);
            }
        }
    }
    // <HEIGHT>
    if (qName.equals(PeakListElementName_2_5.HEIGHT.getElementName())) {
        byte[] bytes = Base64.decodeToBytes(getTextOfElement());
        // make a data input stream
        DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(bytes));
        intensities = new double[numOfMZpeaks];
        for (int i = 0; i < numOfMZpeaks; i++) {
            try {
                intensities[i] = dataInputStream.readFloat();
            } catch (IOException ex) {
                throw new SAXException(ex);
            }
        }
    }
    // <PEAK>
    if (qName.equals(PeakListElementName_2_5.PEAK.getElementName())) {
        DataPoint[] mzPeaks = new DataPoint[numOfMZpeaks];
        Range<Double> peakRTRange = null, peakMZRange = null, peakIntensityRange = null;
        RawDataFile dataFile = dataFilesIDMap.get(peakColumnID);
        if (dataFile == null)
            throw new SAXException("Error in project: data file " + peakColumnID + " not found");
        for (int i = 0; i < numOfMZpeaks; i++) {
            Scan sc = dataFile.getScan(scanNumbers[i]);
            double retentionTime = sc.getRetentionTime();
            double mz = masses[i];
            double intensity = intensities[i];
            if (peakIntensityRange == null) {
                peakIntensityRange = Range.singleton(intensity);
            } else {
                peakIntensityRange = peakIntensityRange.span(Range.singleton(intensity));
            }
            if (intensity > 0) {
                if (peakRTRange == null) {
                    peakRTRange = Range.singleton(retentionTime);
                } else {
                    peakRTRange = peakRTRange.span(Range.singleton(retentionTime));
                }
            }
            if (mz > 0.0) {
                mzPeaks[i] = new SimpleDataPoint(mz, intensity);
                if (peakMZRange == null)
                    peakMZRange = Range.singleton(mz);
                else
                    peakMZRange = peakMZRange.span(Range.singleton(mz));
            }
        }
        FeatureStatus status = FeatureStatus.valueOf(peakStatus);
        // convert vector of allMS2FragmentScans to array
        allMS2FragmentScanNumbers = new int[currentAllMS2FragmentScans.size()];
        for (int i = 0; i < allMS2FragmentScanNumbers.length; i++) {
            allMS2FragmentScanNumbers[i] = currentAllMS2FragmentScans.get(i);
        }
        // clear all MS2 fragment scan numbers list for next peak
        currentAllMS2FragmentScans.clear();
        SimpleFeature peak = new SimpleFeature(dataFile, mass, rt, height, area, scanNumbers, mzPeaks, status, representativeScan, fragmentScan, allMS2FragmentScanNumbers, peakRTRange, peakMZRange, peakIntensityRange);
        peak.setCharge(currentPeakCharge);
        if (currentIsotopes.size() > 0) {
            SimpleIsotopePattern newPattern = new SimpleIsotopePattern(currentIsotopes.toArray(new DataPoint[0]), currentIsotopePatternStatus, currentIsotopePatternDescription);
            peak.setIsotopePattern(newPattern);
            currentIsotopes.clear();
        }
        peak.setParentChromatogramRowID(parentChromatogramRowID);
        buildingRow.addPeak(dataFile, peak);
    }
    // <IDENTITY_PROPERTY>
    if (qName.equals(PeakListElementName_2_5.IDPROPERTY.getElementName())) {
        identityProperties.put(identityPropertyName, getTextOfElement());
    }
    // <INFO_PROPERTY>
    if (qName.equals(PeakListElementName_2_5.INFO_PROPERTY.getElementName())) {
        informationProperties.put(infoPropertyName, getTextOfElement());
    }
    // <PEAK_IDENTITY>
    if (qName.equals(PeakListElementName_2_5.PEAK_IDENTITY.getElementName())) {
        SimplePeakIdentity identity = new SimplePeakIdentity(identityProperties);
        buildingRow.addPeakIdentity(identity, preferred);
    }
    if (qName.equals(PeakListElementName_2_5.PEAK_INFORMATION.getElementName())) {
        PeakInformation information = new SimplePeakInformation(informationProperties);
        buildingRow.setPeakInformation(information);
    }
    // <ROW>
    if (qName.equals(PeakListElementName_2_5.ROW.getElementName())) {
        buildingPeakList.addRow(buildingRow);
        buildingRow = null;
        parsedRows++;
    }
    // <ISOTOPE>
    if (qName.equals(PeakListElementName_2_5.ISOTOPE.getElementName())) {
        String text = getTextOfElement();
        String[] items = text.split(":");
        double mz = Double.valueOf(items[0]);
        double intensity = Double.valueOf(items[1]);
        DataPoint isotope = new SimpleDataPoint(mz, intensity);
        currentIsotopes.add(isotope);
    }
    if (qName.equals(PeakListElementName_2_5.METHOD_NAME.getElementName())) {
        String appliedMethod = getTextOfElement();
        appliedMethods.add(appliedMethod);
    }
    if (qName.equals(PeakListElementName_2_5.METHOD_PARAMETERS.getElementName())) {
        String appliedMethodParam = getTextOfElement();
        appliedMethodParameters.add(appliedMethodParam);
    }
}
Also used : FeatureStatus(net.sf.mzmine.datamodel.Feature.FeatureStatus) IOException(java.io.IOException) SimplePeakIdentity(net.sf.mzmine.datamodel.impl.SimplePeakIdentity) DataInputStream(java.io.DataInputStream) DataPoint(net.sf.mzmine.datamodel.DataPoint) SimpleDataPoint(net.sf.mzmine.datamodel.impl.SimpleDataPoint) SimpleFeature(net.sf.mzmine.datamodel.impl.SimpleFeature) SAXException(org.xml.sax.SAXException) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) ByteArrayInputStream(java.io.ByteArrayInputStream) Scan(net.sf.mzmine.datamodel.Scan) SimplePeakInformation(net.sf.mzmine.datamodel.impl.SimplePeakInformation) PeakInformation(net.sf.mzmine.datamodel.PeakInformation) SimpleIsotopePattern(net.sf.mzmine.datamodel.impl.SimpleIsotopePattern)
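During project loading, the <INFO_PROPERTY> elements are accumulated into a map of strings and wrapped in a SimplePeakInformation once the enclosing element closes. A minimal sketch of that reconstruction step, assuming the Map-based constructor shown above (the field and method names here are illustrative, not the handler's actual members):

import java.util.HashMap;
import java.util.Map;

import net.sf.mzmine.datamodel.PeakInformation;
import net.sf.mzmine.datamodel.impl.SimplePeakInformation;

class InfoPropertyParsingSketch {

    // Properties collected from <INFO_PROPERTY> elements while parsing a row.
    private final Map<String, String> informationProperties = new HashMap<>();

    // Called for each parsed property name/value pair.
    void onInfoProperty(String name, String value) {
        informationProperties.put(name, value);
    }

    // Called when the information element closes: wrap the collected properties.
    PeakInformation buildInformation() {
        return new SimplePeakInformation(new HashMap<>(informationProperties));
    }
}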

Aggregations

SimplePeakInformation (net.sf.mzmine.datamodel.impl.SimplePeakInformation): 5 uses
DataPoint (net.sf.mzmine.datamodel.DataPoint): 3 uses
RawDataFile (net.sf.mzmine.datamodel.RawDataFile): 3 uses
SimpleFeature (net.sf.mzmine.datamodel.impl.SimpleFeature): 3 uses
HashMap (java.util.HashMap): 2 uses
Feature (net.sf.mzmine.datamodel.Feature): 2 uses
PeakListRow (net.sf.mzmine.datamodel.PeakListRow): 2 uses
SimpleDataPoint (net.sf.mzmine.datamodel.impl.SimpleDataPoint): 2 uses
SimpleIsotopePattern (net.sf.mzmine.datamodel.impl.SimpleIsotopePattern): 2 uses
SimplePeakListRow (net.sf.mzmine.datamodel.impl.SimplePeakListRow): 2 uses
Component (dulab.adap.datamodel.Component): 1 use
Peak (dulab.adap.datamodel.Peak): 1 use
PeakInfo (dulab.adap.datamodel.PeakInfo): 1 use
ByteArrayInputStream (java.io.ByteArrayInputStream): 1 use
DataInputStream (java.io.DataInputStream): 1 use
IOException (java.io.IOException): 1 use
java.util (java.util): 1 use
ArrayList (java.util.ArrayList): 1 use
Map (java.util.Map): 1 use
Entry (java.util.Map.Entry): 1 use