Search in sources:

Example 81 with RawDataFile

use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.

The class MassDetectionParameters, method showSetupDialog:

@Override
public ExitCode showSetupDialog(Window parent, boolean valueCheckRequired) {
    final ExitCode exitCode = super.showSetupDialog(parent, valueCheckRequired);
    // Stop immediately unless the user confirmed a complete parameter set.
    if (exitCode != ExitCode.OK)
        return exitCode;
    final RawDataFile[] files = getParameter(dataFiles).getValue().getMatchingRawDataFiles();
    // Nothing selected (e.g. batch mode setup) -- nothing further to validate.
    if (files == null || files.length == 0)
        return exitCode;
    // Count centroided vs. profile scans across the selection so we can warn
    // about a mismatch between spectrum type and the chosen mass detector.
    long centroidCount = 0;
    long profileCount = 0;
    final ScanSelection selection = getParameter(scanSelection).getValue();
    for (RawDataFile file : files) {
        for (Scan scan : selection.getMatchingScans(file)) {
            if (scan.getSpectrumType() == MassSpectrumType.CENTROIDED) {
                centroidCount++;
            } else {
                profileCount++;
            }
        }
    }
    final long totalScans = centroidCount + profileCount;
    // No matching scans: there is no basis for a warning.
    if (totalScans == 0)
        return exitCode;
    // Are the selected scans predominantly centroided?
    final double centroidFraction = (double) centroidCount / totalScans;
    final boolean mostlyCentroided = centroidFraction > 0.5;
    logger.finest("Proportion of scans estimated to be centroided: " + centroidFraction);
    // Warn when the selected detector does not match the dominant scan type.
    final String detectorName = getParameter(massDetector).getValue().toString();
    final boolean centroidDetector = detectorName.startsWith("Centroid");
    if (mostlyCentroided && !centroidDetector) {
        String msg = "MZmine thinks you are running the profile mode mass detector on (mostly) centroided scans. This will likely produce wrong results. Try the Centroid mass detector instead.";
        MZmineCore.getDesktop().displayMessage(null, msg);
    }
    if (!mostlyCentroided && centroidDetector) {
        String msg = "MZmine thinks you are running the centroid mass detector on (mostly) profile scans. This will likely produce wrong results.";
        MZmineCore.getDesktop().displayMessage(null, msg);
    }
    return exitCode;
}
Also used : ScanSelection(net.sf.mzmine.parameters.parametertypes.selectors.ScanSelection) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) ExitCode(net.sf.mzmine.util.ExitCode) Scan(net.sf.mzmine.datamodel.Scan)

Example 82 with RawDataFile

use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.

The class MsMsSpectraMergeModule, method merge:

/**
 * Merges one additional scan into an already merged spectrum.
 *
 * @param right              the scan to merge into {@code left}
 * @param rightData          the scan's data points (not mutated; cloned before sorting)
 * @param mzMergeMode        how m/z values of matched peaks are combined
 * @param intensityMergeMode how intensities of matched peaks are combined
 * @param ppm                m/z tolerance used to match peaks between the two spectra
 * @return a new MergedSpectrum carrying the combined data points, the union of
 *         origin files and the extended scan-id list; {@code left} is not modified
 */
private static MergedSpectrum merge(MergedSpectrum left, Scan right, DataPoint[] rightData, MzMergeMode mzMergeMode, IntensityMergeMode intensityMergeMode, MZTolerance ppm) {
    // Sort a copy of the new scan's points by descending intensity before merging.
    DataPoint[] byInt = rightData.clone();
    Arrays.sort(byInt, (u, v) -> Double.compare(v.getIntensity(), u.getIntensity()));
    MergedDataPoint[] merge = merge(left.data, byInt, mzMergeMode, intensityMergeMode, ppm);
    // Extend the origin list only if the scan comes from a different file than
    // origins[0]; the HashSet deduplicates files already present elsewhere in
    // the array. (Removed an unused local that aliased left.origins[0].)
    RawDataFile[] fm;
    if (right.getDataFile().equals(left.origins[0])) {
        fm = left.origins;
    } else {
        HashSet<RawDataFile> rawDataFiles = new HashSet<>(Arrays.asList(left.origins));
        rawDataFiles.add(right.getDataFile());
        fm = rawDataFiles.toArray(new RawDataFile[0]);
    }
    // Append the merged scan's number to the scan-id list.
    int[] scanIds = Arrays.copyOf(left.scanIds, left.scanIds.length + 1);
    scanIds[scanIds.length - 1] = right.getScanNumber();
    return new MergedSpectrum(merge, fm, scanIds, left.precursorMz, left.polarity, left.precursorCharge, left.removedScansByLowQuality, left.removedScansByLowCosine, left.bestFragmentScanScore);
}
Also used : DataPoint(net.sf.mzmine.datamodel.DataPoint) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) HashSet(java.util.HashSet)

Example 83 with RawDataFile

use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.

The class MsMsPeakPickerModule, method runModule:

@Override
@Nonnull
public ExitCode runModule(@Nonnull MZmineProject project, @Nonnull ParameterSet parameters, @Nonnull Collection<Task> tasks) {
    // Spawn one MS/MS peak-picking task per selected raw data file.
    final RawDataFile[] selectedFiles = parameters.getParameter(MsMsPeakPickerParameters.dataFiles).getValue().getMatchingRawDataFiles();
    for (RawDataFile file : selectedFiles) {
        tasks.add(new MsMsPeakPickingTask(project, file, parameters));
    }
    return ExitCode.OK;
}
Also used : Task(net.sf.mzmine.taskcontrol.Task) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) Nonnull(javax.annotation.Nonnull)

Example 84 with RawDataFile

use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.

The class CSVExportTask, method exportPeakList:

/**
 * Exports one peak list as CSV: a header row followed by one data row per
 * peak-list row that passes the row filter. On any write failure the task is
 * put into ERROR state and the method returns early.
 *
 * @param peakList the peak list to export
 * @param writer   destination writer (not closed here)
 * @param fileName used only for the error message on write failure
 */
private void exportPeakList(PeakList peakList, FileWriter writer, File fileName) {
    RawDataFile[] rawDataFiles = peakList.getRawDataFiles();
    // Reusable buffer for one output line. StringBuilder replaces the original
    // StringBuffer: no other thread touches this buffer, so the synchronization
    // was pure overhead. (Also removed an unused NumberFormat local.)
    StringBuilder line = new StringBuilder();
    // ---- Header row ----
    // Common (per-row) column headers.
    int length = commonElements.length;
    String name;
    for (int i = 0; i < length; i++) {
        name = commonElements[i].toString();
        name = name.replace("Export ", "");
        name = escapeStringForCSV(name);
        line.append(name).append(fieldSeparator);
    }
    // Union of peak-information property keys over all rows that pass the
    // filter; iterated again below for the data rows, so header and data
    // columns stay aligned (same set instance, same iteration order).
    Set<String> peakInformationFields = new HashSet<>();
    for (PeakListRow row : peakList.getRows()) {
        if (!filter.filter(row))
            continue;
        if (row.getPeakInformation() != null) {
            peakInformationFields.addAll(row.getPeakInformation().getAllProperties().keySet());
        }
    }
    if (exportAllPeakInfo) {
        for (String field : peakInformationFields) {
            line.append(field).append(fieldSeparator);
        }
    }
    // Per-data-file column headers: "<file name> <element name>".
    length = dataFileElements.length;
    for (int df = 0; df < peakList.getNumberOfRawDataFiles(); df++) {
        for (int i = 0; i < length; i++) {
            name = rawDataFiles[df].getName() + " " + dataFileElements[i].toString();
            line.append(escapeStringForCSV(name)).append(fieldSeparator);
        }
    }
    line.append("\n");
    try {
        writer.write(line.toString());
    } catch (Exception e) {
        setStatus(TaskStatus.ERROR);
        setErrorMessage("Could not write to file " + fileName);
        return;
    }
    // ---- Data rows ----
    for (PeakListRow peakListRow : peakList.getRows()) {
        // Filtered-out rows still count toward progress.
        if (!filter.filter(peakListRow)) {
            processedRows++;
            continue;
        }
        // Cancel?
        if (isCanceled()) {
            return;
        }
        // Reuse the buffer for this row.
        line.setLength(0);
        // Common elements.
        length = commonElements.length;
        for (int i = 0; i < length; i++) {
            switch (commonElements[i]) {
                case ROW_ID:
                    line.append(peakListRow.getID()).append(fieldSeparator);
                    break;
                case ROW_MZ:
                    line.append(peakListRow.getAverageMZ()).append(fieldSeparator);
                    break;
                case ROW_RT:
                    line.append(peakListRow.getAverageRT()).append(fieldSeparator);
                    break;
                case ROW_IDENTITY:
                    // Preferred identity, or an empty column if there is none.
                    PeakIdentity peakId = peakListRow.getPreferredPeakIdentity();
                    if (peakId == null) {
                        line.append(fieldSeparator);
                        break;
                    }
                    String propertyValue = peakId.toString();
                    propertyValue = escapeStringForCSV(propertyValue);
                    line.append(propertyValue).append(fieldSeparator);
                    break;
                case ROW_IDENTITY_ALL:
                    // All identities, joined by idSeparator in one column.
                    PeakIdentity[] peakIdentities = peakListRow.getPeakIdentities();
                    propertyValue = "";
                    for (int x = 0; x < peakIdentities.length; x++) {
                        if (x > 0)
                            propertyValue += idSeparator;
                        propertyValue += peakIdentities[x].toString();
                    }
                    propertyValue = escapeStringForCSV(propertyValue);
                    line.append(propertyValue).append(fieldSeparator);
                    break;
                case ROW_IDENTITY_DETAILS:
                    peakId = peakListRow.getPreferredPeakIdentity();
                    if (peakId == null) {
                        line.append(fieldSeparator);
                        break;
                    }
                    propertyValue = peakId.getDescription();
                    // Multi-line descriptions are flattened onto one CSV line.
                    if (propertyValue != null)
                        propertyValue = propertyValue.replaceAll("\\n", ";");
                    propertyValue = escapeStringForCSV(propertyValue);
                    line.append(propertyValue).append(fieldSeparator);
                    break;
                case ROW_COMMENT:
                    String comment = escapeStringForCSV(peakListRow.getComment());
                    line.append(comment).append(fieldSeparator);
                    break;
                case ROW_PEAK_NUMBER:
                    // Count only peaks actually detected (not gap-filled etc.).
                    int numDetected = 0;
                    for (Feature p : peakListRow.getPeaks()) {
                        if (p.getFeatureStatus() == FeatureStatus.DETECTED) {
                            numDetected++;
                        }
                    }
                    line.append(numDetected).append(fieldSeparator);
                    break;
            }
        }
        // Peak-information columns, in the same order as the header.
        if (exportAllPeakInfo) {
            if (peakListRow.getPeakInformation() != null) {
                Map<String, String> allPropertiesMap = peakListRow.getPeakInformation().getAllProperties();
                for (String key : peakInformationFields) {
                    String value = allPropertiesMap.get(key);
                    if (value == null)
                        value = "";
                    line.append(value).append(fieldSeparator);
                }
            }
        }
        // Per-data-file columns.
        length = dataFileElements.length;
        for (RawDataFile dataFile : rawDataFiles) {
            for (int i = 0; i < length; i++) {
                Feature peak = peakListRow.getPeak(dataFile);
                if (peak != null) {
                    switch (dataFileElements[i]) {
                        case PEAK_STATUS:
                            line.append(peak.getFeatureStatus()).append(fieldSeparator);
                            break;
                        case PEAK_NAME:
                            line.append(PeakUtils.peakToString(peak)).append(fieldSeparator);
                            break;
                        case PEAK_MZ:
                            line.append(peak.getMZ()).append(fieldSeparator);
                            break;
                        case PEAK_RT:
                            line.append(peak.getRT()).append(fieldSeparator);
                            break;
                        case PEAK_RT_START:
                            line.append(peak.getRawDataPointsRTRange().lowerEndpoint()).append(fieldSeparator);
                            break;
                        case PEAK_RT_END:
                            line.append(peak.getRawDataPointsRTRange().upperEndpoint()).append(fieldSeparator);
                            break;
                        case PEAK_DURATION:
                            line.append(RangeUtils.rangeLength(peak.getRawDataPointsRTRange())).append(fieldSeparator);
                            break;
                        case PEAK_HEIGHT:
                            line.append(peak.getHeight()).append(fieldSeparator);
                            break;
                        case PEAK_AREA:
                            line.append(peak.getArea()).append(fieldSeparator);
                            break;
                        case PEAK_CHARGE:
                            line.append(peak.getCharge()).append(fieldSeparator);
                            break;
                        case PEAK_DATAPOINTS:
                            line.append(peak.getScanNumbers().length).append(fieldSeparator);
                            break;
                        case PEAK_FWHM:
                            line.append(peak.getFWHM()).append(fieldSeparator);
                            break;
                        case PEAK_TAILINGFACTOR:
                            line.append(peak.getTailingFactor()).append(fieldSeparator);
                            break;
                        case PEAK_ASYMMETRYFACTOR:
                            line.append(peak.getAsymmetryFactor()).append(fieldSeparator);
                            break;
                        case PEAK_MZMIN:
                            line.append(peak.getRawDataPointsMZRange().lowerEndpoint()).append(fieldSeparator);
                            break;
                        case PEAK_MZMAX:
                            line.append(peak.getRawDataPointsMZRange().upperEndpoint()).append(fieldSeparator);
                            break;
                    }
                } else {
                    // Missing peak: UNKNOWN status, "0" for every other column.
                    switch (dataFileElements[i]) {
                        case PEAK_STATUS:
                            line.append(FeatureStatus.UNKNOWN).append(fieldSeparator);
                            break;
                        default:
                            line.append("0").append(fieldSeparator);
                            break;
                    }
                }
            }
        }
        line.append("\n");
        try {
            writer.write(line.toString());
        } catch (Exception e) {
            setStatus(TaskStatus.ERROR);
            setErrorMessage("Could not write to file " + fileName);
            return;
        }
        processedRows++;
    }
}
Also used : Feature(net.sf.mzmine.datamodel.Feature) PeakIdentity(net.sf.mzmine.datamodel.PeakIdentity) PeakListRow(net.sf.mzmine.datamodel.PeakListRow) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) NumberFormat(java.text.NumberFormat) HashSet(java.util.HashSet)

Example 85 with RawDataFile

use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.

The class SiriusExportTask, method writeHeader:

/**
 * Writes the MGF-style header section ("BEGIN IONS" followed by KEY=VALUE
 * lines) for one spectrum entry to the given writer.
 *
 * @param writer     destination writer; nothing is flushed or closed here
 * @param row        peak list row the spectrum belongs to
 * @param raw        raw data file whose feature supplies the retention time
 * @param polarity   '-' marks a negative charge; any other value leaves the
 *                   charge unsigned
 * @param msType     selects the MSLEVEL/SPECTYPE lines written (see switch)
 * @param scanNumber optional scan number; SCANS line is omitted when null
 * @param sources    optional list of source file names for the FILENAME line
 * @throws IOException if any write fails
 */
private void writeHeader(BufferedWriter writer, PeakListRow row, RawDataFile raw, char polarity, MsType msType, Integer scanNumber, List<String> sources) throws IOException {
    // NOTE(review): feature is dereferenced below (getRT) without a null
    // check — assumes the row always has a peak for this raw file; confirm
    // against callers.
    final Feature feature = row.getPeak(raw);
    writer.write("BEGIN IONS");
    writer.newLine();
    writer.write("FEATURE_ID=");
    writer.write(String.valueOf(row.getID()));
    writer.newLine();
    writer.write("PEPMASS=");
    writer.write(String.valueOf(row.getBestPeak().getMZ()));
    writer.newLine();
    // Charge is written as magnitude, with a leading '-' only for negative
    // polarity.
    writer.write("CHARGE=");
    if (polarity == '-')
        writer.write("-");
    writer.write(String.valueOf(Math.abs(row.getRowCharge())));
    writer.newLine();
    // RT multiplied by 60 — presumably stored in minutes, emitted in seconds.
    writer.write("RTINSECONDS=");
    writer.write(String.valueOf(feature.getRT() * 60d));
    writer.newLine();
    switch(msType) {
        case CORRELATED:
            writer.write("SPECTYPE=CORRELATED MS");
            writer.newLine();
        // intentional fall-through: CORRELATED also gets MSLEVEL=1
        case MS:
            writer.write("MSLEVEL=1");
            writer.newLine();
            break;
        case MSMS:
            writer.write("MSLEVEL=2");
            writer.newLine();
    }
    // FILENAME: explicit sources win; otherwise for CORRELATED spectra list
    // every raw file of the row; otherwise the feature's own file. Duplicates
    // are removed via a HashSet, values joined with ';'.
    writer.write("FILENAME=");
    if (sources != null) {
        final String[] uniqSources = new HashSet<>(sources).toArray(new String[0]);
        writer.write(escape(uniqSources[0], ";"));
        for (int i = 1; i < uniqSources.length; ++i) {
            writer.write(";");
            writer.write(escape(uniqSources[i], ";"));
        }
        writer.newLine();
    } else if (msType == MsType.CORRELATED) {
        RawDataFile[] raws = row.getRawDataFiles();
        final Set<String> set = new HashSet<>();
        for (RawDataFile f : raws) set.add(f.getName());
        final String[] uniqSources = set.toArray(new String[0]);
        writer.write(escape(uniqSources[0], ";"));
        for (int i = 1; i < uniqSources.length; ++i) {
            writer.write(";");
            writer.write(escape(uniqSources[i], ";"));
        }
        writer.newLine();
    } else {
        writer.write(feature.getDataFile().getName());
        writer.newLine();
    }
    // SCANS line only when a scan number was supplied.
    if (scanNumber != null) {
        writer.write("SCANS=");
        writer.write(String.valueOf(scanNumber));
        writer.newLine();
    }
}
Also used : HashSet(java.util.HashSet) ParameterSet(net.sf.mzmine.parameters.ParameterSet) Set(java.util.Set) RawDataFile(net.sf.mzmine.datamodel.RawDataFile) Feature(net.sf.mzmine.datamodel.Feature) DataPoint(net.sf.mzmine.datamodel.DataPoint)

Aggregations

RawDataFile (net.sf.mzmine.datamodel.RawDataFile)185 Feature (net.sf.mzmine.datamodel.Feature)59 PeakListRow (net.sf.mzmine.datamodel.PeakListRow)52 DataPoint (net.sf.mzmine.datamodel.DataPoint)51 Scan (net.sf.mzmine.datamodel.Scan)40 ArrayList (java.util.ArrayList)33 PeakList (net.sf.mzmine.datamodel.PeakList)33 Nonnull (javax.annotation.Nonnull)24 SimpleDataPoint (net.sf.mzmine.datamodel.impl.SimpleDataPoint)24 SimplePeakList (net.sf.mzmine.datamodel.impl.SimplePeakList)24 SimplePeakListRow (net.sf.mzmine.datamodel.impl.SimplePeakListRow)22 Task (net.sf.mzmine.taskcontrol.Task)20 SimplePeakListAppliedMethod (net.sf.mzmine.datamodel.impl.SimplePeakListAppliedMethod)19 SimpleFeature (net.sf.mzmine.datamodel.impl.SimpleFeature)17 IOException (java.io.IOException)14 ParameterSet (net.sf.mzmine.parameters.ParameterSet)14 File (java.io.File)13 MassList (net.sf.mzmine.datamodel.MassList)13 PeakListAppliedMethod (net.sf.mzmine.datamodel.PeakList.PeakListAppliedMethod)13 TreeMap (java.util.TreeMap)10