Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine): class StorableScan, method getDataPointsByMass.
/**
 * Collects the subset of this scan's data points whose m/z values fall inside the supplied
 * range. Data points are stored in ascending m/z order, so the matching points form one
 * contiguous run that is copied out into a fresh array.
 *
 * @param mzRange the inclusive m/z range to filter by
 * @return a newly allocated array holding the data points within {@code mzRange};
 *         empty (never null) when no point matches
 */
@Nonnull
public DataPoint[] getDataPointsByMass(@Nonnull Range<Double> mzRange) {
    final DataPoint[] allPoints = getDataPoints();
    // Advance to the first point at or above the lower endpoint.
    int first = 0;
    while (first < allPoints.length && allPoints[first].getMZ() < mzRange.lowerEndpoint()) {
        first++;
    }
    // Advance past every point at or below the upper endpoint.
    int last = first;
    while (last < allPoints.length && allPoints[last].getMZ() <= mzRange.upperEndpoint()) {
        last++;
    }
    final DataPoint[] pointsWithinRange = new DataPoint[last - first];
    System.arraycopy(allPoints, first, pointsWithinRange, 0, last - first);
    return pointsWithinRange;
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine): class SpectralDBPeakIdentity, method getQueryDataPoints.
/**
 * Returns the query spectrum's data points for the requested processing stage.
 *
 * @param tag which representation of the query spectrum to return
 * @return the data points for the given stage; an empty array (never null) for
 *         {@code MERGED} or when no original data points are available
 */
public DataPoint[] getQueryDataPoints(DataPointsTag tag) {
    switch (tag) {
        case ORIGINAL:
            DataPoint[] dp = getQueryDataPoints();
            if (dp == null)
                return new DataPoint[0];
            // Sort a defensive copy: sorting in place would mutate the array owned by
            // getQueryDataPoints(), a surprising side effect for an accessor.
            dp = dp.clone();
            Arrays.sort(dp, new DataPointSorter(SortingProperty.MZ, SortingDirection.Ascending));
            return dp;
        case FILTERED:
            return similarity.getQuery();
        case ALIGNED:
            // Index 1 holds the query side of the aligned pair — TODO confirm against
            // getAlignedDataPoints()'s contract.
            return similarity.getAlignedDataPoints()[1];
        case MERGED:
        default:
            // MERGED has no representation here; unknown tags also yield an empty result.
            return new DataPoint[0];
    }
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine): class GnpsJsonParser, method getDBEntry.
/**
 * Builds a spectral-database entry from one GNPS JSON record: extracts the spectrum's
 * data points plus every metadata field that declares a GNPS JSON identifier.
 *
 * @param main the GNPS JSON object for a single library spectrum
 * @return the parsed entry, or null when the record contains no data points
 */
public SpectralDBEntry getDBEntry(JsonObject main) {
    // Spectrum data points are mandatory; bail out if they cannot be parsed.
    DataPoint[] dps = getDataPoints(main);
    if (dps == null)
        return null;

    Map<DBEntryField, Object> fields = new EnumMap<>(DBEntryField.class);
    for (DBEntryField field : DBEntryField.values()) {
        String jsonId = field.getGnpsJsonID();
        // Skip fields that have no GNPS JSON mapping.
        if (jsonId == null || jsonId.isEmpty())
            continue;
        try {
            Class<?> type = field.getObjectClass();
            boolean numeric =
                type == Double.class || type == Integer.class || type == Float.class;
            Object value;
            if (numeric) {
                value = main.getJsonNumber(jsonId);
            } else {
                value = main.getString(jsonId, null);
                // GNPS encodes missing string values as the literal "N/A".
                if ("N/A".equals(value))
                    value = null;
            }
            if (value == null)
                continue;
            // Unwrap JSON numbers into plain boxed Java numbers. Note: Float fields are
            // unwrapped as Double here — presumably consumers accept that; verify.
            if (value instanceof JsonNumber) {
                value = type.equals(Integer.class)
                    ? ((JsonNumber) value).intValue()
                    : ((JsonNumber) value).doubleValue();
            }
            fields.put(field, value);
        } catch (Exception e) {
            logger.log(Level.WARNING, "Cannot convert value to its type", e);
        }
    }
    return new SpectralDBEntry(fields, dps);
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine): class RansacAlignerTask, method run.
/**
 * Aligns rows across the input feature lists with the RANSAC aligner: builds a
 * per-list alignment map, merges mapped rows into a new aligned feature list, and
 * registers the result with the project.
 */
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Running Ransac aligner");
// Each source row is visited twice:
// twice, first for score calculation, second for actual alignment.
for (int i = 0; i < peakLists.length; i++) {
totalRows += peakLists[i].getNumberOfRows() * 2;
}
// Collect all data files
List<RawDataFile> allDataFiles = new ArrayList<RawDataFile>();
for (PeakList peakList : peakLists) {
for (RawDataFile dataFile : peakList.getRawDataFiles()) {
// Each data file can only have one column in aligned feature list
if (allDataFiles.contains(dataFile)) {
setStatus(TaskStatus.ERROR);
setErrorMessage("Cannot run alignment, because file " + dataFile + " is present in multiple feature lists");
return;
}
allDataFiles.add(dataFile);
}
}
// Create a new aligned feature list
alignedPeakList = new SimplePeakList(peakListName, allDataFiles.toArray(new RawDataFile[0]));
// Iterate source feature lists
for (PeakList peakList : peakLists) {
// Maps each source row to the aligned row it should merge into (null = no match).
HashMap<PeakListRow, PeakListRow> alignmentMapping = this.getAlignmentMap(peakList);
PeakListRow[] allRows = peakList.getRows();
// Align all rows using mapping
for (PeakListRow row : allRows) {
PeakListRow targetRow = alignmentMapping.get(row);
// If we have no mapping for this row, add a new one
if (targetRow == null) {
targetRow = new SimplePeakListRow(newRowID);
newRowID++;
alignedPeakList.addRow(targetRow);
}
// Add all peaks from the original row to the aligned row
for (RawDataFile file : row.getRawDataFiles()) {
targetRow.addPeak(file, row.getPeak(file));
}
// Add all non-existing identities from the original row to the
// aligned row
PeakUtils.copyPeakListRowProperties(row, targetRow);
processedRows++;
}
}
// Next feature list
// Add new aligned feature list to the project
project.addPeakList(alignedPeakList);
// Edit by Aleksandr Smirnov
// NOTE(review): getRow(1) will NPE/fail if the aligned list is empty — confirm a
// non-empty result is guaranteed here, or guard before dereferencing.
PeakListRow row = alignedPeakList.getRow(1);
double alignedRetTime = row.getAverageRT();
for (Feature peak : row.getPeaks()) {
double retTimeDelta = alignedRetTime - peak.getRT();
RawDataFile dataFile = peak.getDataFile();
// NOTE(review): this chromatogram (shifted RT -> intensity) is built but never
// read or stored — the whole loop appears to be dead code; confirm before removing.
SortedMap<Double, Double> chromatogram = new TreeMap<>();
for (int scan : peak.getScanNumbers()) {
DataPoint dataPoint = peak.getDataPoint(scan);
double retTime = dataFile.getScan(scan).getRetentionTime() + retTimeDelta;
if (dataPoint != null)
chromatogram.put(retTime, dataPoint.getIntensity());
}
}
// End of Edit
// Add task description to peakList
alignedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Ransac aligner", parameters));
logger.info("Finished RANSAC aligner");
setStatus(TaskStatus.FINISHED);
}
Usage of net.sf.mzmine.datamodel.DataPoint in project mzmine2 (by mzmine): class ManualPickerTask, method run.
/**
 * Manually picks one peak per raw data file inside the user-selected RT and m/z ranges:
 * for each scan in range, takes the most intense data point within the m/z window
 * (or a zero-intensity placeholder when the scan has no signal there), finalizes the
 * peak, and updates the feature list and GUI.
 */
public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.finest("Starting manual peak picker, RT: " + rtRange + ", m/z: " + mzRange);

    // Count the scans to process, for progress reporting.
    for (RawDataFile file : dataFiles) {
        totalScans += file.getScanNumbers(1, rtRange).length;
    }

    // Build one manual peak per data file.
    for (RawDataFile file : dataFiles) {
        ManualPeak pickedPeak = new ManualPeak(file);
        boolean foundRealSignal = false;
        for (int scanNumber : file.getScanNumbers(1, rtRange)) {
            if (isCanceled())
                return;
            Scan currentScan = file.getScan(scanNumber);
            // Use the most intense data point inside the m/z window, if any.
            DataPoint topPoint = ScanUtils.findBasePeak(currentScan, mzRange);
            if (topPoint == null) {
                // No signal in this scan: insert a zero-intensity placeholder at the
                // center of the m/z window so the peak shape stays continuous.
                final double centerMz = (mzRange.lowerEndpoint() + mzRange.upperEndpoint()) / 2.0;
                pickedPeak.addDatapoint(currentScan.getScanNumber(), new SimpleDataPoint(centerMz, 0));
            } else {
                if (topPoint.getIntensity() > 0)
                    foundRealSignal = true;
                pickedPeak.addDatapoint(currentScan.getScanNumber(), topPoint);
            }
            processedScans++;
        }
        if (!foundRealSignal) {
            // Nothing above zero intensity anywhere in range: drop any existing peak.
            peakListRow.removePeak(file);
        } else {
            pickedPeak.finalizePeak();
            if (pickedPeak.getArea() > 0)
                peakListRow.addPeak(file, pickedPeak);
        }
    }

    // Notify the GUI that the feature list contents have changed.
    if (peakList != null) {
        // Add the row to the feature list if it is not already a member.
        if (!Arrays.asList(peakList.getRows()).contains(peakListRow)) {
            peakList.addRow(peakListRow);
        }
        QualityParameters.calculateQualityParameters(peakList);
        project.notifyObjectChanged(peakList, true);
    }
    if (table != null) {
        ((AbstractTableModel) table.getModel()).fireTableDataChanged();
    }
    logger.finest("Finished manual peak picker, " + processedScans + " scans processed");
    setStatus(TaskStatus.FINISHED);
}
Aggregations