Use of net.sf.mzmine.datamodel.impl.SimpleFeature in project mzmine2 by mzmine.
In the class PeakUtils, the method copyPeakRow:
/**
* Creates a copy of a PeakListRow.
* @param row A row.
* @return A copy of row.
*/
public static PeakListRow copyPeakRow(final PeakListRow row) {
  // Copy the feature list row.
  final PeakListRow newRow = new SimplePeakListRow(row.getID());
  PeakUtils.copyPeakListRowProperties(row, newRow);
  // Copy the peaks.
  for (final Feature peak : row.getPeaks()) {
    final Feature newPeak = new SimpleFeature(peak);
    PeakUtils.copyPeakProperties(peak, newPeak);
    newRow.addPeak(peak.getDataFile(), newPeak);
  }
  return newRow;
}
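A minimal usage sketch, assuming an existing PeakList named sourceList; the variable names and the " copy" suffix are illustrative, not taken from the project:

// Sketch: duplicate every row of an existing feature list via copyPeakRow.
// sourceList is an assumed, pre-existing PeakList; " copy" is an illustrative suffix.
PeakList copiedList =
    new SimplePeakList(sourceList.getName() + " copy", sourceList.getRawDataFiles());
for (PeakListRow row : sourceList.getRows()) {
  copiedList.addRow(PeakUtils.copyPeakRow(row));
}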
Use of net.sf.mzmine.datamodel.impl.SimpleFeature in project mzmine2 by mzmine.
In the class ADAPInterface, the method peakToFeature:
@Nonnull
public static Feature peakToFeature(@Nonnull RawDataFile file, @Nonnull BetterPeak peak) {
  Chromatogram chromatogram = peak.chromatogram;
  // Retrieve scan numbers
  int representativeScan = 0;
  int[] scanNumbers = new int[chromatogram.length];
  int count = 0;
  for (int num : file.getScanNumbers()) {
    double retTime = file.getScan(num).getRetentionTime();
    Double intensity = chromatogram.getIntensity(retTime, false);
    if (intensity != null)
      scanNumbers[count++] = num;
    if (retTime == peak.getRetTime())
      representativeScan = num;
  }
  // Calculate peak area
  double area = 0.0;
  for (int i = 1; i < chromatogram.length; ++i) {
    double base = (chromatogram.xs[i] - chromatogram.xs[i - 1]) * 60d;
    double height = 0.5 * (chromatogram.ys[i] + chromatogram.ys[i - 1]);
    area += base * height;
  }
  // Create array of DataPoints
  DataPoint[] dataPoints = new DataPoint[chromatogram.length];
  count = 0;
  for (double intensity : chromatogram.ys) {
    dataPoints[count++] = new SimpleDataPoint(peak.getMZ(), intensity);
  }
  return new SimpleFeature(file, peak.getMZ(), peak.getRetTime(), peak.getIntensity(), area,
      scanNumbers, dataPoints, Feature.FeatureStatus.ESTIMATED, representativeScan,
      representativeScan, new int[] {},
      Range.closed(peak.getFirstRetTime(), peak.getLastRetTime()),
      Range.closed(peak.getMZ() - 0.01, peak.getMZ() + 0.01),
      Range.closed(0.0, peak.getIntensity()));
}
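The peak area above is a trapezoidal integration over the chromatogram, with retention times (stored in minutes) converted to seconds by the factor 60d. A standalone sketch of the same computation, using plain arrays in place of the ADAP Chromatogram fields xs and ys:

// Sketch: trapezoidal area under an intensity trace sampled at retention times (minutes).
// rtMinutes and intensities are illustrative stand-ins for chromatogram.xs and chromatogram.ys.
static double trapezoidalArea(double[] rtMinutes, double[] intensities) {
  double area = 0.0;
  for (int i = 1; i < rtMinutes.length; i++) {
    double base = (rtMinutes[i] - rtMinutes[i - 1]) * 60d; // minutes -> seconds
    double height = 0.5 * (intensities[i] + intensities[i - 1]);
    area += base * height;
  }
  return area;
}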
Use of net.sf.mzmine.datamodel.impl.SimpleFeature in project mzmine2 by mzmine.
In the class MzTabImportTask, the method importSmallMolecules:
private void importSmallMolecules(PeakList newPeakList, MZTabFile mzTabFile,
    Map<Integer, RawDataFile> rawDataFiles) {
  SortedMap<Integer, Assay> assayMap = mzTabFile.getMetadata().getAssayMap();
  Collection<SmallMolecule> smallMolecules = mzTabFile.getSmallMolecules();
  // Loop through SML data
  String formula, description, database, url = "";
  double mzExp = 0, abundance = 0, peak_mz = 0, peak_rt = 0, peak_height = 0, rtValue = 0;
  // int charge = 0;
  int rowCounter = 0;
  for (SmallMolecule smallMolecule : smallMolecules) {
    // Stop the process if cancel() was called
    if (isCanceled())
      return;
    rowCounter++;
    formula = smallMolecule.getChemicalFormula();
    // smile = smallMolecule.getSmiles();
    // inchiKey = smallMolecule.getInchiKey();
    description = smallMolecule.getDescription();
    // species = smallMolecule.getSpecies();
    database = smallMolecule.getDatabase();
    if (smallMolecule.getURI() != null) {
      url = smallMolecule.getURI().toString();
    }
    String identifier = smallMolecule.getIdentifier().toString();
    SplitList<Double> rt = smallMolecule.getRetentionTime();
    if (smallMolecule.getExpMassToCharge() != null) {
      mzExp = smallMolecule.getExpMassToCharge();
    }
    // Calculate average RT if multiple values are available
    if (rt != null && !rt.isEmpty()) {
      rtValue = DoubleMath.mean(rt);
    }
    if ((url != null) && (url.equals("null"))) {
      url = null;
    }
    if (identifier.equals("null")) {
      identifier = null;
    }
    if (description == null && identifier != null) {
      description = identifier;
    }

    // Add shared information to row
    SimplePeakListRow newRow = new SimplePeakListRow(rowCounter);
    newRow.setAverageMZ(mzExp);
    newRow.setAverageRT(rtValue);
    if (description != null) {
      SimplePeakIdentity newIdentity =
          new SimplePeakIdentity(description, formula, database, identifier, url);
      newRow.addPeakIdentity(newIdentity, false);
    }

    // Add raw data file entries to row
    for (Entry<Integer, RawDataFile> rawDataEntry : rawDataFiles.entrySet()) {
      RawDataFile rawData = rawDataEntry.getValue();
      Assay dataFileAssay = assayMap.get(rawDataEntry.getKey());
      abundance = 0;
      peak_mz = 0;
      peak_rt = 0;
      peak_height = 0;
      if (smallMolecule.getAbundanceColumnValue(dataFileAssay) != null) {
        abundance = smallMolecule.getAbundanceColumnValue(dataFileAssay);
      }
      if (smallMolecule.getOptionColumnValue(dataFileAssay, "peak_mz") != null) {
        peak_mz = Double.parseDouble(smallMolecule.getOptionColumnValue(dataFileAssay, "peak_mz"));
      } else {
        peak_mz = mzExp;
      }
      if (smallMolecule.getOptionColumnValue(dataFileAssay, "peak_rt") != null) {
        peak_rt = Double.parseDouble(smallMolecule.getOptionColumnValue(dataFileAssay, "peak_rt"));
      } else {
        peak_rt = rtValue;
      }
      if (smallMolecule.getOptionColumnValue(dataFileAssay, "peak_height") != null) {
        peak_height =
            Double.parseDouble(smallMolecule.getOptionColumnValue(dataFileAssay, "peak_height"));
      } else {
        peak_height = 0.0;
      }
      int[] scanNumbers = {};
      DataPoint[] finalDataPoint = new DataPoint[1];
      finalDataPoint[0] = new SimpleDataPoint(peak_mz, peak_height);
      int representativeScan = 0;
      int fragmentScan = 0;
      int[] allFragmentScans = new int[] { 0 };
      Range<Double> finalRTRange = Range.singleton(peak_rt);
      Range<Double> finalMZRange = Range.singleton(peak_mz);
      Range<Double> finalIntensityRange = Range.singleton(peak_height);
      FeatureStatus status = FeatureStatus.DETECTED;
      Feature peak = new SimpleFeature(rawData, peak_mz, peak_rt, peak_height, abundance,
          scanNumbers, finalDataPoint, status, representativeScan, fragmentScan, allFragmentScans,
          finalRTRange, finalMZRange, finalIntensityRange);
      if (abundance > 0) {
        newRow.addPeak(rawData, peak);
      }
    }

    // Add row to feature list
    newPeakList.addRow(newRow);
  }
}
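The three option-column lookups above repeat the same null-check-then-parse pattern. A hypothetical helper (parseOptionOrDefault is an illustrative name, not part of the project) could express the fallback once; getOptionColumnValue and the column names are taken directly from the code above:

// Sketch of a hypothetical helper: parse an optional mzTab column value for an assay,
// falling back to a default when the column is absent.
private static double parseOptionOrDefault(SmallMolecule molecule, Assay assay, String column,
    double defaultValue) {
  String value = molecule.getOptionColumnValue(assay, column);
  return value != null ? Double.parseDouble(value) : defaultValue;
}

// Usage mirroring the lookups above:
// peak_mz = parseOptionOrDefault(smallMolecule, dataFileAssay, "peak_mz", mzExp);
// peak_rt = parseOptionOrDefault(smallMolecule, dataFileAssay, "peak_rt", rtValue);
// peak_height = parseOptionOrDefault(smallMolecule, dataFileAssay, "peak_height", 0.0);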
Use of net.sf.mzmine.datamodel.impl.SimpleFeature in project mzmine2 by mzmine.
In the class PeakListOpenHandler_2_5, the method endElement:
/**
* @see org.xml.sax.helpers.DefaultHandler#endElement(java.lang.String, java.lang.String,
* java.lang.String)
*/
@Override
public void endElement(String namespaceURI, String sName, String qName) throws SAXException {
  if (canceled)
    throw new SAXException("Parsing canceled");

  // <NAME>
  if (qName.equals(PeakListElementName_2_5.PEAKLIST_NAME.getElementName())) {
    name = getTextOfElement();
    logger.info("Loading feature list: " + name);
    peakListName = name;
  }

  // <PEAKLIST_DATE>
  if (qName.equals(PeakListElementName_2_5.PEAKLIST_DATE.getElementName())) {
    dateCreated = getTextOfElement();
  }

  // <QUANTITY>
  if (qName.equals(PeakListElementName_2_5.QUANTITY.getElementName())) {
    String text = getTextOfElement();
    totalRows = Integer.parseInt(text);
  }

  // <RAW_FILE>
  if (qName.equals(PeakListElementName_2_5.RAWFILE.getElementName())) {
    rawDataFileID = getTextOfElement();
    RawDataFile dataFile = dataFilesIDMap.get(rawDataFileID);
    if (dataFile == null) {
      throw new SAXException(
          "Cannot open feature list, because raw data file " + rawDataFileID + " is missing.");
    }
    currentPeakListDataFiles.add(dataFile);
  }

  // <SCAN_ID>
  if (qName.equals(PeakListElementName_2_5.SCAN_ID.getElementName())) {
    byte[] bytes = Base64.decodeToBytes(getTextOfElement());
    // make a data input stream
    DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(bytes));
    scanNumbers = new int[numOfMZpeaks];
    for (int i = 0; i < numOfMZpeaks; i++) {
      try {
        scanNumbers[i] = dataInputStream.readInt();
      } catch (IOException ex) {
        throw new SAXException(ex);
      }
    }
  }

  // <REPRESENTATIVE_SCAN>
  if (qName.equals(PeakListElementName_2_5.REPRESENTATIVE_SCAN.getElementName())) {
    representativeScan = Integer.valueOf(getTextOfElement());
  }

  // <FRAGMENT_SCAN>
  if (qName.equals(PeakListElementName_2_5.FRAGMENT_SCAN.getElementName())) {
    fragmentScan = Integer.valueOf(getTextOfElement());
  }

  // <ALL_MS2_FRAGMENT_SCANS>
  if (qName.equals(PeakListElementName_2_5.ALL_MS2_FRAGMENT_SCANS.getElementName())) {
    Integer fragmentNumber = Integer.valueOf(getTextOfElement());
    currentAllMS2FragmentScans.add(fragmentNumber);
  }

  // <MASS>
  if (qName.equals(PeakListElementName_2_5.MZ.getElementName())) {
    byte[] bytes = Base64.decodeToBytes(getTextOfElement());
    // make a data input stream
    DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(bytes));
    masses = new double[numOfMZpeaks];
    for (int i = 0; i < numOfMZpeaks; i++) {
      try {
        masses[i] = dataInputStream.readFloat();
      } catch (IOException ex) {
        throw new SAXException(ex);
      }
    }
  }

  // <HEIGHT>
  if (qName.equals(PeakListElementName_2_5.HEIGHT.getElementName())) {
    byte[] bytes = Base64.decodeToBytes(getTextOfElement());
    // make a data input stream
    DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(bytes));
    intensities = new double[numOfMZpeaks];
    for (int i = 0; i < numOfMZpeaks; i++) {
      try {
        intensities[i] = dataInputStream.readFloat();
      } catch (IOException ex) {
        throw new SAXException(ex);
      }
    }
  }

  // <PEAK>
  if (qName.equals(PeakListElementName_2_5.PEAK.getElementName())) {
    DataPoint[] mzPeaks = new DataPoint[numOfMZpeaks];
    Range<Double> peakRTRange = null, peakMZRange = null, peakIntensityRange = null;
    RawDataFile dataFile = dataFilesIDMap.get(peakColumnID);
    if (dataFile == null)
      throw new SAXException("Error in project: data file " + peakColumnID + " not found");
    for (int i = 0; i < numOfMZpeaks; i++) {
      Scan sc = dataFile.getScan(scanNumbers[i]);
      double retentionTime = sc.getRetentionTime();
      double mz = masses[i];
      double intensity = intensities[i];
      if (peakIntensityRange == null) {
        peakIntensityRange = Range.singleton(intensity);
      } else {
        peakIntensityRange = peakIntensityRange.span(Range.singleton(intensity));
      }
      if (intensity > 0) {
        if (peakRTRange == null) {
          peakRTRange = Range.singleton(retentionTime);
        } else {
          peakRTRange = peakRTRange.span(Range.singleton(retentionTime));
        }
      }
      if (mz > 0.0) {
        mzPeaks[i] = new SimpleDataPoint(mz, intensity);
        if (peakMZRange == null)
          peakMZRange = Range.singleton(mz);
        else
          peakMZRange = peakMZRange.span(Range.singleton(mz));
      }
    }
    FeatureStatus status = FeatureStatus.valueOf(peakStatus);
    // convert vector of allMS2FragmentScans to array
    allMS2FragmentScanNumbers = new int[currentAllMS2FragmentScans.size()];
    for (int i = 0; i < allMS2FragmentScanNumbers.length; i++) {
      allMS2FragmentScanNumbers[i] = currentAllMS2FragmentScans.get(i);
    }
    // clear all MS2 fragment scan numbers list for next peak
    currentAllMS2FragmentScans.clear();
    SimpleFeature peak = new SimpleFeature(dataFile, mass, rt, height, area, scanNumbers, mzPeaks,
        status, representativeScan, fragmentScan, allMS2FragmentScanNumbers, peakRTRange,
        peakMZRange, peakIntensityRange);
    peak.setCharge(currentPeakCharge);
    if (currentIsotopes.size() > 0) {
      SimpleIsotopePattern newPattern = new SimpleIsotopePattern(
          currentIsotopes.toArray(new DataPoint[0]), currentIsotopePatternStatus,
          currentIsotopePatternDescription);
      peak.setIsotopePattern(newPattern);
      currentIsotopes.clear();
    }
    peak.setParentChromatogramRowID(parentChromatogramRowID);
    buildingRow.addPeak(dataFile, peak);
  }

  // <IDENTITY_PROPERTY>
  if (qName.equals(PeakListElementName_2_5.IDPROPERTY.getElementName())) {
    identityProperties.put(identityPropertyName, getTextOfElement());
  }

  // <INFO_PROPERTY>
  if (qName.equals(PeakListElementName_2_5.INFO_PROPERTY.getElementName())) {
    informationProperties.put(infoPropertyName, getTextOfElement());
  }

  // <PEAK_IDENTITY>
  if (qName.equals(PeakListElementName_2_5.PEAK_IDENTITY.getElementName())) {
    SimplePeakIdentity identity = new SimplePeakIdentity(identityProperties);
    buildingRow.addPeakIdentity(identity, preferred);
  }

  if (qName.equals(PeakListElementName_2_5.PEAK_INFORMATION.getElementName())) {
    PeakInformation information = new SimplePeakInformation(informationProperties);
    buildingRow.setPeakInformation(information);
  }

  // <ROW>
  if (qName.equals(PeakListElementName_2_5.ROW.getElementName())) {
    buildingPeakList.addRow(buildingRow);
    buildingRow = null;
    parsedRows++;
  }

  // <ISOTOPE>
  if (qName.equals(PeakListElementName_2_5.ISOTOPE.getElementName())) {
    String text = getTextOfElement();
    String[] items = text.split(":");
    double mz = Double.valueOf(items[0]);
    double intensity = Double.valueOf(items[1]);
    DataPoint isotope = new SimpleDataPoint(mz, intensity);
    currentIsotopes.add(isotope);
  }

  if (qName.equals(PeakListElementName_2_5.METHOD_NAME.getElementName())) {
    String appliedMethod = getTextOfElement();
    appliedMethods.add(appliedMethod);
  }

  if (qName.equals(PeakListElementName_2_5.METHOD_PARAMETERS.getElementName())) {
    String appliedMethodParam = getTextOfElement();
    appliedMethodParameters.add(appliedMethodParam);
  }
}
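The SCAN_ID, MASS, and HEIGHT branches all follow the same pattern: decode a Base64 string into a byte array, wrap it in a DataInputStream, and read fixed-width values from it. A self-contained sketch of that pattern, assuming the JDK's java.util.Base64 in place of the project's Base64.decodeToBytes helper:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Base64;

final class Base64FloatDecoder {

  // Sketch: decode a Base64-encoded block of 4-byte floats into a double[],
  // mirroring the <MASS> and <HEIGHT> handling above. Uses java.util.Base64
  // rather than the project's Base64 helper; 'encoded' and 'count' are illustrative inputs.
  static double[] decodeFloats(String encoded, int count) throws IOException {
    byte[] bytes = Base64.getDecoder().decode(encoded);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    double[] values = new double[count];
    for (int i = 0; i < count; i++) {
      values[i] = in.readFloat(); // widened to double on assignment
    }
    return values;
  }
}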
Use of net.sf.mzmine.datamodel.impl.SimpleFeature in project mzmine2 by mzmine.
In the class SmoothingTask, the method run:
@Override
public void run() {
  setStatus(TaskStatus.PROCESSING);
  try {
    // Get filter weights.
    final double[] filterWeights = SavitzkyGolayFilter.getNormalizedWeights(filterWidth);

    // Create new feature list
    newPeakList = new SimplePeakList(origPeakList + " " + suffix, origPeakList.getRawDataFiles());

    // Process each row.
    for (final PeakListRow row : origPeakList.getRows()) {
      if (!isCanceled()) {
        // Create a new peak-list row.
        final int originalID = row.getID();
        final PeakListRow newRow = new SimplePeakListRow(originalID);

        // Process each peak.
        for (final Feature peak : row.getPeaks()) {
          if (!isCanceled()) {
            // Copy original peak intensities.
            final int[] scanNumbers = peak.getScanNumbers();
            final int numScans = scanNumbers.length;
            final double[] intensities = new double[numScans];
            for (int i = 0; i < numScans; i++) {
              final DataPoint dataPoint = peak.getDataPoint(scanNumbers[i]);
              intensities[i] = dataPoint == null ? 0.0 : dataPoint.getIntensity();
            }

            // Smooth peak.
            final double[] smoothed = convolve(intensities, filterWeights);

            // Measure peak (max, ranges, area etc.)
            final RawDataFile dataFile = peak.getDataFile();
            final DataPoint[] newDataPoints = new DataPoint[numScans];
            double maxIntensity = 0.0;
            int maxScanNumber = -1;
            DataPoint maxDataPoint = null;
            Range<Double> intensityRange = null;
            double area = 0.0;
            for (int i = 0; i < numScans; i++) {
              final int scanNumber = scanNumbers[i];
              final DataPoint dataPoint = peak.getDataPoint(scanNumber);
              final double intensity = smoothed[i];
              if (dataPoint != null && intensity > 0.0) {
                // Create a new data point.
                final double mz = dataPoint.getMZ();
                final double rt = dataFile.getScan(scanNumber).getRetentionTime();
                final DataPoint newDataPoint = new SimpleDataPoint(mz, intensity);
                newDataPoints[i] = newDataPoint;

                // Track maximum intensity data point.
                if (intensity > maxIntensity) {
                  maxIntensity = intensity;
                  maxScanNumber = scanNumber;
                  maxDataPoint = newDataPoint;
                }

                // Update ranges.
                if (intensityRange == null) {
                  intensityRange = Range.singleton(intensity);
                } else {
                  intensityRange = intensityRange.span(Range.singleton(intensity));
                }

                // Accumulate peak area.
                if (i != 0) {
                  final DataPoint lastDP = newDataPoints[i - 1];
                  final double lastIntensity = lastDP == null ? 0.0 : lastDP.getIntensity();
                  final double lastRT = dataFile.getScan(scanNumbers[i - 1]).getRetentionTime();
                  area += (rt - lastRT) * 60d * (intensity + lastIntensity) / 2.0;
                }
              }
            }

            assert maxDataPoint != null;

            if (!isCanceled() && maxScanNumber >= 0) {
              // Create a new peak.
              newRow.addPeak(dataFile,
                  new SimpleFeature(dataFile, maxDataPoint.getMZ(), peak.getRT(), maxIntensity,
                      area, scanNumbers, newDataPoints, peak.getFeatureStatus(), maxScanNumber,
                      peak.getMostIntenseFragmentScanNumber(), peak.getAllMS2FragmentScanNumbers(),
                      peak.getRawDataPointsRTRange(), peak.getRawDataPointsMZRange(),
                      intensityRange));
            }
          }
        }

        newPeakList.addRow(newRow);
        progress++;
      }
    }

    // Finish up.
    if (!isCanceled()) {
      // Add new peak-list to the project.
      project.addPeakList(newPeakList);

      // Add quality parameters to peaks
      QualityParameters.calculateQualityParameters(newPeakList);

      // Remove the original peak-list if requested.
      if (removeOriginal) {
        project.removePeakList(origPeakList);
      }

      // Copy previously applied methods
      for (final PeakListAppliedMethod method : origPeakList.getAppliedMethods()) {
        newPeakList.addDescriptionOfAppliedTask(method);
      }

      // Add task description to peak-list.
      newPeakList.addDescriptionOfAppliedTask(
          new SimplePeakListAppliedMethod("Peaks smoothed by Savitzky-Golay filter", parameters));

      LOG.finest("Finished peak smoothing: " + progress + " rows processed");
      setStatus(TaskStatus.FINISHED);
    }
  } catch (Throwable t) {
    LOG.log(Level.SEVERE, "Smoothing error", t);
    setErrorMessage(t.getMessage());
    setStatus(TaskStatus.ERROR);
  }
}
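The convolve helper called above is not shown in this snippet. A minimal sketch of a symmetric weighted moving average that such a call might perform with normalized Savitzky-Golay weights; this is an assumption about the helper's behavior, not the project's actual implementation:

// Sketch: convolve an intensity trace with a symmetric, normalized weight vector.
// Edge samples are renormalized over the in-range weights. Illustrative stand-in only,
// not SmoothingTask's actual convolve implementation.
static double[] convolve(double[] intensities, double[] weights) {
  final int half = weights.length / 2;
  final double[] smoothed = new double[intensities.length];
  for (int i = 0; i < intensities.length; i++) {
    double sum = 0.0;
    double weightSum = 0.0;
    for (int j = -half; j <= half; j++) {
      final int k = i + j;
      if (k >= 0 && k < intensities.length) {
        final double w = weights[j + half];
        sum += w * intensities[k];
        weightSum += w;
      }
    }
    smoothed[i] = weightSum > 0.0 ? sum / weightSum : intensities[i];
  }
  return smoothed;
}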