Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class IsotopeGrouperTask, method run.
/**
* @see Runnable#run()
*/
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Running isotopic peak grouper on " + peakList);
// We assume the source peakList contains only one data file
RawDataFile dataFile = peakList.getRawDataFile(0);
// Create a new deisotoped peakList
deisotopedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());
// Collect all selected charge states
int[] charges = new int[maximumCharge];
for (int i = 0; i < maximumCharge; i++) charges[i] = i + 1;
// Sort peaks by descending height
Feature[] sortedPeaks = peakList.getPeaks(dataFile);
Arrays.sort(sortedPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));
// Loop through all peaks
totalPeaks = sortedPeaks.length;
for (int ind = 0; ind < totalPeaks; ind++) {
if (isCanceled())
return;
Feature aPeak = sortedPeaks[ind];
// Check if peak was already deleted
if (aPeak == null) {
processedPeaks++;
continue;
}
// Check which charge state fits best around this peak
int bestFitCharge = 0;
int bestFitScore = -1;
Vector<Feature> bestFitPeaks = null;
for (int charge : charges) {
Vector<Feature> fittedPeaks = new Vector<Feature>();
fittedPeaks.add(aPeak);
fitPattern(fittedPeaks, aPeak, charge, sortedPeaks);
int score = fittedPeaks.size();
// Prefer the charge that fits the most isotope peaks; on a tie, prefer the lower charge
if ((score > bestFitScore) || ((score == bestFitScore) && (bestFitCharge > charge))) {
bestFitScore = score;
bestFitCharge = charge;
bestFitPeaks = fittedPeaks;
}
}
PeakListRow oldRow = peakList.getPeakRow(aPeak);
assert bestFitPeaks != null;
// If no isotope pattern was fitted (only the peak itself), keep the original row in the feature list
if (bestFitPeaks.size() == 1) {
deisotopedPeakList.addRow(oldRow);
processedPeaks++;
continue;
}
// Convert the peak pattern to array
Feature[] originalPeaks = bestFitPeaks.toArray(new Feature[0]);
// Create a new SimpleIsotopePattern
DataPoint[] isotopes = new DataPoint[bestFitPeaks.size()];
for (int i = 0; i < isotopes.length; i++) {
Feature p = originalPeaks[i];
isotopes[i] = new SimpleDataPoint(p.getMZ(), p.getHeight());
}
SimpleIsotopePattern newPattern = new SimpleIsotopePattern(isotopes, IsotopePatternStatus.DETECTED, aPeak.toString());
// Depending on the user's choice, represent the pattern by either the most intense peak or the lowest m/z peak
if (chooseMostIntense) {
Arrays.sort(originalPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));
} else {
Arrays.sort(originalPeaks, new PeakSorter(SortingProperty.MZ, SortingDirection.Ascending));
}
Feature newPeak = new SimpleFeature(originalPeaks[0]);
newPeak.setIsotopePattern(newPattern);
newPeak.setCharge(bestFitCharge);
// Keep old ID
int oldID = oldRow.getID();
SimplePeakListRow newRow = new SimplePeakListRow(oldID);
PeakUtils.copyPeakListRowProperties(oldRow, newRow);
newRow.addPeak(dataFile, newPeak);
deisotopedPeakList.addRow(newRow);
// Remove all peaks already assigned to isotope pattern
for (int i = 0; i < sortedPeaks.length; i++) {
if (bestFitPeaks.contains(sortedPeaks[i]))
sortedPeaks[i] = null;
}
// Update completion rate
processedPeaks++;
}
// Add new peakList to the project
project.addPeakList(deisotopedPeakList);
// Copy the methods previously applied to the original peakList
for (PeakListAppliedMethod proc : peakList.getAppliedMethods()) {
deisotopedPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
deisotopedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Isotopic peaks grouper", parameters));
// Remove the original peakList if requested
if (removeOriginal)
project.removePeakList(peakList);
logger.info("Finished isotopic peak grouper on " + peakList);
setStatus(TaskStatus.FINISHED);
}
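fitPattern is called above but not shown. The core check it needs is whether a candidate peak lies a whole number of isotope steps away from the main peak, where one step is roughly 1.0033 m/z (the C13 to C12 mass difference) divided by the charge. A minimal sketch of such a check, using the hypothetical names ISOTOPE_DISTANCE, mzTolerance and matchesIsotopeSpacing, none of which appear in the code above:
private static final double ISOTOPE_DISTANCE = 1.0033; // approximate C13 - C12 mass difference

// Hypothetical helper: is mzCandidate a whole number of isotope steps away
// from mzMain for this charge state, within mzTolerance?
private boolean matchesIsotopeSpacing(double mzMain, double mzCandidate, int charge, double mzTolerance) {
  // Distance between the peaks, measured in isotope steps for this charge
  double steps = Math.abs(mzCandidate - mzMain) * charge / ISOTOPE_DISTANCE;
  // Deviation from the nearest whole step, converted back to m/z units
  double deviation = Math.abs(steps - Math.round(steps)) * ISOTOPE_DISTANCE / charge;
  return deviation <= mzTolerance;
}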
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class RawDataFileImpl, method readDataPoints.
public synchronized DataPoint[] readDataPoints(int ID) throws IOException {
final Long currentOffset = dataPointsOffsets.get(ID);
final Integer numOfDataPoints = dataPointsLengths.get(ID);
if ((currentOffset == null) || (numOfDataPoints == null)) {
throw new IllegalArgumentException("Unknown storage ID " + ID);
}
// Each data point occupies two 4-byte floats: m/z and intensity
final int numOfBytes = numOfDataPoints * 2 * 4;
if (buffer.capacity() < numOfBytes) {
buffer = ByteBuffer.allocate(numOfBytes * 2);
} else {
// JDK 9 breaks compatibility with JRE8: need to cast
// https://stackoverflow.com/questions/48693695/java-nio-buffer-not-loading-clear-method-on-runtime
((Buffer) buffer).clear();
}
dataPointsFile.seek(currentOffset);
// readFully (rather than read) guarantees the whole block is read
dataPointsFile.readFully(buffer.array(), 0, numOfBytes);
FloatBuffer floatBuffer = buffer.asFloatBuffer();
DataPoint[] dataPoints = new DataPoint[numOfDataPoints];
for (int i = 0; i < numOfDataPoints; i++) {
float mz = floatBuffer.get();
float intensity = floatBuffer.get();
dataPoints[i] = new SimpleDataPoint(mz, intensity);
}
return dataPoints;
}
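The write path that fills dataPointsOffsets and dataPointsLengths is not shown. A minimal sketch of a compatible writer, assuming the same layout readDataPoints expects (two 4-byte floats per data point, m/z then intensity); lastStorageID is an assumed counter field, the other fields match those used above:
public synchronized int storeDataPoints(DataPoint[] dataPoints) throws IOException {
  final long offset = dataPointsFile.length();
  dataPointsFile.seek(offset);
  final ByteBuffer outBuffer = ByteBuffer.allocate(dataPoints.length * 2 * 4);
  final FloatBuffer outFloats = outBuffer.asFloatBuffer();
  for (DataPoint dp : dataPoints) {
    outFloats.put((float) dp.getMZ());
    outFloats.put((float) dp.getIntensity());
  }
  dataPointsFile.write(outBuffer.array());
  final int storageID = ++lastStorageID; // assumed ID counter, not shown above
  dataPointsOffsets.put(storageID, offset);
  dataPointsLengths.put(storageID, dataPoints.length);
  return storageID;
}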
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class ManualPickerTask, method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.finest("Starting manual peak picker, RT: " + rtRange + ", m/z: " + mzRange);
// Calculate total number of scans to process
for (RawDataFile dataFile : dataFiles) {
int[] scanNumbers = dataFile.getScanNumbers(1, rtRange);
totalScans += scanNumbers.length;
}
// Find peak in each data file
for (RawDataFile dataFile : dataFiles) {
ManualPeak newPeak = new ManualPeak(dataFile);
boolean dataPointFound = false;
int[] scanNumbers = dataFile.getScanNumbers(1, rtRange);
for (int scanNumber : scanNumbers) {
if (isCanceled())
return;
// Get next scan
Scan scan = dataFile.getScan(scanNumber);
// Find most intense m/z peak
DataPoint basePeak = ScanUtils.findBasePeak(scan, mzRange);
if (basePeak != null) {
if (basePeak.getIntensity() > 0)
dataPointFound = true;
newPeak.addDatapoint(scan.getScanNumber(), basePeak);
} else {
final double mzCenter = (mzRange.lowerEndpoint() + mzRange.upperEndpoint()) / 2.0;
DataPoint fakeDataPoint = new SimpleDataPoint(mzCenter, 0);
newPeak.addDatapoint(scan.getScanNumber(), fakeDataPoint);
}
processedScans++;
}
if (dataPointFound) {
newPeak.finalizePeak();
if (newPeak.getArea() > 0)
peakListRow.addPeak(dataFile, newPeak);
} else {
peakListRow.removePeak(dataFile);
}
}
// Notify the GUI that peaklist contents have changed
if (peakList != null) {
// Check if the feature list row has been added to the feature list, and
// if it has not, add it
List<PeakListRow> rows = Arrays.asList(peakList.getRows());
if (!rows.contains(peakListRow)) {
peakList.addRow(peakListRow);
}
// Add quality parameters to peaks
QualityParameters.calculateQualityParameters(peakList);
project.notifyObjectChanged(peakList, true);
}
if (table != null) {
((AbstractTableModel) table.getModel()).fireTableDataChanged();
}
logger.finest("Finished manual peak picker, " + processedScans + " scans processed");
setStatus(TaskStatus.FINISHED);
}
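ScanUtils.findBasePeak does the per-scan work above but its body is not shown. A minimal sketch of its contract, returning the most intense data point inside the m/z range or null when the scan has none there, assuming the getDataPointsByMass accessor on Scan:
public static DataPoint findBasePeak(Scan scan, Range<Double> mzRange) {
  DataPoint best = null;
  for (DataPoint dp : scan.getDataPointsByMass(mzRange)) {
    if ((best == null) || (dp.getIntensity() > best.getIntensity()))
      best = dp;
  }
  return best; // null when no data point falls inside mzRange
}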
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class PearsonCorrelation, method run.
/**
* @see Runnable#run()
*/
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Started Scan Alignment on " + dataFile);
scanNumbers = dataFile.getScanNumbers(1);
totalScans = scanNumbers.length;
RawDataFileWriter newRDFW = null;
try {
newRDFW = MZmineCore.createNewFile(dataFile.getName() + ' ' + suffix);
// Data points for each scan in the current window: [relative scan index][data point index]
DataPoint[][] mzValues = null;
int i, j, si, sj, ii, k, shift, ks;
int[] shiftedScans = new int[mzSpan * 2 + 1]; // histogram of the shift chosen for each scan
for (i = 0; i < totalScans; i++) {
if (isCanceled())
return;
Scan scan = dataFile.getScan(scanNumbers[i]);
// Select a window of 2 * scanSpan + 1 scans centered on scan i, shifted inward at the file edges
si = (int) Math.max(0, i - scanSpan);
sj = (int) (si + 2 * scanSpan);
if (sj >= totalScans) {
si = (int) Math.max(0, si - (sj - totalScans + 1));
sj = (int) (si + 2 * scanSpan);
}
if (scan != null) {
// Allocate
if (mzValues == null || mzValues.length < sj - si + 1)
mzValues = new DataPoint[sj - si + 1][];
// Load Data Points
for (j = si; j <= sj; j++) {
Scan xscan = dataFile.getScan(scanNumbers[j]);
mzValues[j - si] = xscan.getDataPoints();
}
// Estimate correlations; ii is the index of the current scan within the window
ii = i - si;
final SimpleScan newScan = new SimpleScan(scan);
DataPoint[] newDP = new DataPoint[mzValues[ii].length];
int maxShift = 0;
double maxCorrelation = 0;
int ndp = mzValues[ii].length;
// Try each candidate shift and keep the one whose intensities correlate best with the neighboring scans
for (shift = -mzSpan; shift <= mzSpan; shift++) {
PearsonCorrelation thisShift = new PearsonCorrelation();
for (k = 0; k < ndp; k++) {
ks = k + shift;
if (ks >= 0 && ks < ndp && mzValues[ii][ks].getIntensity() >= minimumHeight) {
DataPoint dp = mzValues[ii][k];
double mz = dp.getMZ();
int f = 0;
for (j = 0; j < mzValues.length; j++) {
if (j != ii) {
if (mzValues[j].length > k && Math.abs(mzValues[j][k].getMZ() - mz) < 1e-10) {
f = k;
} else {
f = findFirstMass(mz, mzValues[j]);
if (Math.abs(mzValues[j][f].getMZ() - mz) > 1e-10) {
f = -f;
}
}
if (f >= 0) {
if (logScale) {
thisShift.enter(Math.log(mzValues[j][f].getIntensity()), Math.log(mzValues[ii][ks].getIntensity()));
} else {
thisShift.enter(mzValues[j][f].getIntensity(), mzValues[ii][ks].getIntensity());
}
}
}
}
}
}
if (thisShift.correlation() > maxCorrelation) {
maxShift = shift;
maxCorrelation = thisShift.correlation();
}
}
// Copy DataPoints with maxShift as the shift
shift = maxShift;
shiftedScans[maxShift + mzSpan]++;
for (k = 0; k < ndp; k++) {
ks = k + shift;
if (ks >= 0 && ks < ndp) {
newDP[k] = new SimpleDataPoint(mzValues[ii][k].getMZ(), mzValues[ii][ks].getIntensity());
} else {
newDP[k] = new SimpleDataPoint(mzValues[ii][k].getMZ(), 0);
}
}
newScan.setDataPoints(newDP);
newRDFW.addScan(newScan);
}
processedScans++;
}
if (!isCanceled()) {
// Finalize writing
newRDF = newRDFW.finishWriting();
// Add the newly created file to the project
project.addFile(newRDF);
// Remove the original data file if requested
if (removeOriginal) {
project.removeFile(dataFile);
}
setStatus(TaskStatus.FINISHED);
String shifts = "";
for (i = -mzSpan; i <= mzSpan; i++) {
shifts = shifts + i + ":" + shiftedScans[i + mzSpan] + " | ";
}
logger.info("Finished Scan Alignment on " + dataFile + ". Scans per shift = " + shifts);
}
} catch (IOException e) {
e.printStackTrace();
// Report the failure instead of leaving the task in the PROCESSING state
setStatus(TaskStatus.ERROR);
setErrorMessage(e.getMessage());
}
}
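The PearsonCorrelation accumulator that drives the shift selection is not shown above. A minimal sketch consistent with how it is used here, where enter(x, y) feeds one paired observation and correlation() returns Pearson's r computed from running sums:
class PearsonCorrelation {
  private int count = 0;
  private double sumX = 0, sumY = 0, sumXX = 0, sumYY = 0, sumXY = 0;

  void enter(double x, double y) {
    count++;
    sumX += x;
    sumY += y;
    sumXX += x * x;
    sumYY += y * y;
    sumXY += x * y;
  }

  double correlation() {
    if (count < 2)
      return 0;
    double covariance = sumXY - sumX * sumY / count;
    double varianceX = sumXX - sumX * sumX / count;
    double varianceY = sumYY - sumY * sumY / count;
    return (varianceX <= 0 || varianceY <= 0) ? 0 : covariance / Math.sqrt(varianceX * varianceY);
  }
}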
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class BaselineCorrector, method subtractTICBaselines.
/**
* Perform baseline correction in bins (TIC).
*
* @param origDataFile dataFile of concern.
* @param dataPoints input data points to correct.
* @param baselines the baselines - one per m/z bin.
* @param numBins the number of m/z bins.
* @param scanIndex the current scan index that these data points come from.
* @return the corrected data points.
*/
private DataPoint[] subtractTICBaselines(final RawDataFile origDataFile, final DataPoint[] dataPoints, final double[][] baselines, final int numBins, final int scanIndex) {
// Create an array for the corrected data points.
final DataPoint[] newDataPoints = new DataPoint[dataPoints.length];
// Determine MZ range.
final Range<Double> mzRange = origDataFile.getDataMZRange();
// Loop through all original data points.
int i = 0;
for (final DataPoint dp : dataPoints) {
// Subtract the baseline; here the baseline level acts as a fractional scale,
// so the intensity is multiplied by (1 - baseline).
final double mz = dp.getMZ();
final int bin = RangeUtils.binNumber(mzRange, numBins, mz);
final double baselineIntensity = baselines[bin][scanIndex];
newDataPoints[i++] = baselineIntensity <= 0.0 ? new SimpleDataPoint(dp)
: new SimpleDataPoint(mz, Math.max(0.0, dp.getIntensity() * (1.0 - baselineIntensity)));
}
// Return the new data points.
return newDataPoints;
}
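RangeUtils.binNumber performs the m/z-to-bin mapping used above. A minimal sketch of that mapping, assuming numBins equal-width bins across the range, with the result clamped so the exact upper endpoint falls into the last bin:
public static int binNumber(Range<Double> range, int numBins, double value) {
  final double binWidth = (range.upperEndpoint() - range.lowerEndpoint()) / numBins;
  final int bin = (int) ((value - range.lowerEndpoint()) / binWidth);
  return Math.min(Math.max(bin, 0), numBins - 1); // clamp into [0, numBins - 1]
}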