Usage of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 (by mzmine).
From the class ScanSmoothingTask, method run():
/**
 * Executes the scan-smoothing task: every MS level-1 scan is optionally
 * smoothed in time (retention-time / scan-number) space and then in m/z
 * space, and the smoothed scans are written to a new raw data file.
 *
 * @see Runnable#run()
 */
public void run() {
    setStatus(TaskStatus.PROCESSING);
    logger.info("Started Scan Smoothing on " + dataFile);

    scanNumbers = dataFile.getScanNumbers(1);
    totalScans = scanNumbers.length;

    RawDataFileWriter newRDFW = null;
    int timepassed = 0; // number of scans smoothed in time space
    int mzpassed = 0; // number of scans smoothed in m/z space
    try {
        newRDFW = MZmineCore.createNewFile(dataFile.getName() + ' ' + suffix);

        // Data points of the scans in the current time window:
        // [relative scan][data point index]. The buffer is reused across
        // iterations when it is large enough.
        DataPoint[][] mzValues = null;
        int i, j, si, sj, ii, k, ssi, ssj;

        for (i = 0; i < totalScans; i++) {
            if (isCanceled())
                return;

            // Smoothing in TIME space
            Scan scan = dataFile.getScan(scanNumbers[i]);
            if (scan != null) {
                double rt = scan.getRetentionTime();
                final SimpleScan newScan = new SimpleScan(scan);
                DataPoint[] newDP = null;
                sj = si = i;
                ssi = ssj = i;

                if (timeSpan > 0 || scanSpan > 0) {
                    // m/z tolerance used to match peaks across neighboring
                    // scans; never smaller than 1e-5
                    double timeMZtol = Math.max(mzTol, 1e-5);

                    // Expand [si, sj] to all scans whose retention time lies
                    // within +/- timeSpan/2 of the current scan
                    for (si = i; si > 1; si--) {
                        Scan scanS = dataFile.getScan(scanNumbers[si - 1]);
                        if (scanS == null || scanS.getRetentionTime() < rt - timeSpan / 2) {
                            break;
                        }
                    }
                    for (sj = i; sj < totalScans - 1; sj++) {
                        Scan scanS = dataFile.getScan(scanNumbers[sj + 1]);
                        if (scanS == null || scanS.getRetentionTime() >= rt + timeSpan / 2) {
                            break;
                        }
                    }

                    // Fixed-size scan-count window [ssi, ssj], shifted when
                    // necessary to stay inside [0, totalScans - 1]
                    ssi = i - (scanSpan - 1) / 2;
                    ssj = i + (scanSpan - 1) / 2;
                    if (ssi < 0) {
                        ssj += -ssi;
                        ssi = 0;
                    }
                    if (ssj >= totalScans) {
                        ssi -= (ssj - totalScans + 1);
                        ssj = totalScans - 1;
                    }

                    // Fall back to the scan-count window when the time
                    // window covers fewer scans than the requested span
                    if (sj - si + 1 < scanSpan) {
                        si = ssi;
                        sj = ssj;
                    }

                    if (sj > si) {
                        timepassed++;
                        // Allocate (or reuse) the window buffer
                        if (mzValues == null || mzValues.length < sj - si + 1)
                            mzValues = new DataPoint[sj - si + 1][];
                        // Load data points of every scan in the window
                        for (j = si; j <= sj; j++) {
                            Scan xscan = dataFile.getScan(scanNumbers[j]);
                            mzValues[j - si] = xscan.getDataPoints();
                        }
                        // For every peak of the current scan, average the
                        // matching peaks (within timeMZtol) of the window
                        ii = i - si;
                        newDP = new DataPoint[mzValues[ii].length];
                        for (k = 0; k < mzValues[ii].length; k++) {
                            DataPoint dp = mzValues[ii][k];
                            double mz = dp.getMZ();
                            double intensidad = 0;
                            if (dp.getIntensity() > 0) {
                                // only process those > 0
                                double a = 0; // intensity sum
                                short c = 0; // number of contributing scans
                                int f = 0; // matched peak index (< 0 => no match)
                                for (j = 0; j < mzValues.length; j++) {
                                    if (mzValues[j].length > k && Math.abs(mzValues[j][k].getMZ() - mz) < timeMZtol) {
                                        // Same array index already matches
                                        f = k;
                                    } else {
                                        // Search for the closest mass.
                                        // NOTE(review): if the closest mass is
                                        // at index 0, the sign flip below
                                        // cannot mark it as "no match"
                                        // (-0 == 0); also verify that
                                        // findFirstMass never returns an
                                        // out-of-range index.
                                        f = findFirstMass(mz, mzValues[j]);
                                        if (Math.abs(mzValues[j][f].getMZ() - mz) > timeMZtol) {
                                            f = -f;
                                        }
                                    }
                                    if (f >= 0 && mzValues[j][f].getIntensity() >= minimumHeight) {
                                        a += mzValues[j][f].getIntensity();
                                        c++;
                                    }
                                }
                                intensidad = c > 0 ? a / c : 0;
                            }
                            newDP[k] = new SimpleDataPoint(mz, intensidad);
                        }
                    }
                } else {
                    // No time-space smoothing requested: start from the
                    // original data points (we are already inside the
                    // scan != null branch, so no extra null check needed)
                    newDP = scan.getDataPoints();
                }

                // Smoothing in M/Z space (moving average over neighboring
                // masses). Guard against newDP == null, which happens when
                // the time window degenerated to a single scan (sj <= si);
                // the original code would throw an NPE here in that case.
                if (newDP != null && (mzTol > 0 || mzPoints > 0)) {
                    mzpassed++;
                    DataPoint[] updatedDP = new DataPoint[newDP.length];
                    for (k = 0; k < newDP.length; k++) {
                        double mz = newDP[k].getMZ();
                        double intensidad = 0;
                        if (newDP[k].getIntensity() > 0) {
                            // Expand [si, sj] to neighbors within mzTol of
                            // this mass or within mzPoints positions
                            for (si = k; si > 0 && (newDP[si].getMZ() + mzTol >= mz || k - si <= mzPoints); si--);
                            for (sj = k; sj < newDP.length - 1 && (newDP[sj].getMZ() - mzTol <= mz || sj - k <= mzPoints); sj++);
                            double sum = 0;
                            for (j = si; j <= sj; j++) {
                                sum += newDP[j].getIntensity();
                            }
                            intensidad = sum / (sj - si + 1);
                        }
                        updatedDP[k] = new SimpleDataPoint(mz, intensidad);
                    }
                    newDP = updatedDP;
                }

                // Register new smoothing data
                if (newDP != null) {
                    newScan.setDataPoints(newDP);
                    newRDFW.addScan(newScan);
                }
            }
            processedScans++;
        }

        if (!isCanceled()) {
            // Finalize writing
            newRDF = newRDFW.finishWriting();
            // Add the newly created file to the project
            project.addFile(newRDF);
            // Remove the original data file if requested
            if (removeOriginal) {
                project.removeFile(dataFile);
            }
            setStatus(TaskStatus.FINISHED);
            if (mzpassed + timepassed < totalScans / 2) {
                logger.warning("It seems that parameters were not properly set. Scans processed : time=" + timepassed + ", mz=" + mzpassed);
            }
            logger.info("Finished Scan Smoothing on " + dataFile);
        }
    } catch (IOException e) {
        // NOTE(review): a failure should arguably also set
        // TaskStatus.ERROR; kept as logging-only to preserve behavior.
        e.printStackTrace();
    }
}
Usage of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 (by mzmine).
From the class BaselineCorrector, method subtractBasePeakBaselines():
/**
 * Perform baseline correction in bins (base peak).
 *
 * @param origDataFile dataFile of concern.
 * @param dataPoints input data points to correct.
 * @param baselines the baselines - one per m/z bin.
 * @param numBins the number of m/z bins.
 * @param scanIndex the current scan index that these data points come from.
 * @return the corrected data points.
 */
private DataPoint[] subtractBasePeakBaselines(final RawDataFile origDataFile, final DataPoint[] dataPoints, final double[][] baselines, final int numBins, final int scanIndex) {

    // Result array: one corrected point per input point.
    final DataPoint[] corrected = new DataPoint[dataPoints.length];

    // Overall m/z range of the file determines the bin boundaries.
    final Range<Double> mzRange = origDataFile.getDataMZRange();

    for (int index = 0; index < dataPoints.length; index++) {
        final DataPoint point = dataPoints[index];
        final double mz = point.getMZ();

        // Baseline intensity of the bin this m/z falls into, for this scan.
        final int bin = RangeUtils.binNumber(mzRange, numBins, mz);
        final double baseline = baselines[bin][scanIndex];

        // Subtract the baseline, clamping the result at zero; copy the
        // point unchanged when the bin has no positive baseline.
        if (baseline <= 0.0) {
            corrected[index] = new SimpleDataPoint(point);
        } else {
            corrected[index] = new SimpleDataPoint(mz, Math.max(0.0, point.getIntensity() - baseline));
        }
    }

    // Return the new data points.
    return corrected;
}
Usage of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 (by mzmine).
From the class BaselineCorrector, method copyScansToWriter():
/**
 * Copy scans to RawDataFileWriter.
 *
 * @param origDataFile dataFile of concern.
 * @param writer writer to copy scans to.
 * @param level MS-level of scans to copy.
 * @throws IOException if there are i/o problems.
 */
private void copyScansToWriter(final RawDataFile origDataFile, final RawDataFileWriter writer, final int level) throws IOException {

    LOG.finest("Copy scans");

    // Scan numbers at the requested MS-level.
    final int[] scanNumbers = origDataFile.getScanNumbers(level);

    // Copy each scan, stopping early if the job is aborted.
    for (int scanIndex = 0; !isAborted(origDataFile) && scanIndex < scanNumbers.length; scanIndex++) {

        // Original scan and its (m/z, intensity) pairs.
        final Scan origScan = origDataFile.getScan(scanNumbers[scanIndex]);
        final DataPoint[] origDataPoints = origScan.getDataPoints();

        // Deep-copy every data point.
        final DataPoint[] copiedPoints = new DataPoint[origDataPoints.length];
        for (int p = 0; p < origDataPoints.length; p++) {
            copiedPoints[p] = new SimpleDataPoint(origDataPoints[p]);
        }

        // Write out a new scan holding the copied data points.
        final SimpleScan copy = new SimpleScan(origScan);
        copy.setDataPoints(copiedPoints);
        writer.addScan(copy);

        // Track per-file progress.
        progressMap.get(origDataFile)[0]++;
    }
}
Usage of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 (by mzmine).
From the class RndResampleFilter, method filterScan():
/**
 * Resamples a scan onto integer (unit) m/z values. Profile scans are first
 * centroided; optionally zero-intensity points are removed; each m/z is
 * rounded to the nearest integer, and runs of points that collapse onto the
 * same integer m/z are merged by sum or by average.
 *
 * @param scan the scan to filter.
 * @param parameters filter parameters (SUM_DUPLICATES, REMOVE_ZERO_INTENSITY).
 * @return a new CENTROIDED scan with unit-m/z data points.
 */
public Scan filterScan(Scan scan, ParameterSet parameters) {

    boolean sum_duplicates = parameters.getParameter(RndResampleFilterParameters.SUM_DUPLICATES).getValue();
    boolean remove_zero_intensity = parameters.getParameter(RndResampleFilterParameters.REMOVE_ZERO_INTENSITY).getValue();

    // If CENTROIDED scan, use it as-is
    Scan inputScan;
    if (scan.getSpectrumType() == MassSpectrumType.CENTROIDED)
        inputScan = scan;
    else
        // Otherwise, detect local maxima
        inputScan = new LocMaxCentroidingAlgorithm(scan).centroidScan();

    DataPoint[] dps = inputScan.getDataPoints();

    // Cleanup first: Remove zero intensity data points (if requested).
    // Reuse dps array by compacting kept points to the front.
    int newNumOfDataPoints = 0;
    for (int i = 0; i < dps.length; ++i) {
        if (!remove_zero_intensity || dps[i].getIntensity() > 0.0) {
            dps[newNumOfDataPoints] = dps[i];
            ++newNumOfDataPoints;
        }
    }

    // Round every m/z to the nearest integer (intensity untouched).
    SimpleDataPoint[] newDps = new SimpleDataPoint[newNumOfDataPoints];
    for (int i = 0; i < newNumOfDataPoints; ++i) {
        int newMz = (int) Math.round(dps[i].getMZ());
        newDps[i] = new SimpleDataPoint(newMz, dps[i].getIntensity());
    }

    // Post-treatment: merge runs of points sharing the same integer m/z.
    // BUG FIXES vs. the original implementation:
    //  (1) the first point's intensity was never added to its run (the
    //      running sum started at 0.0 for index 0), and
    //  (2) in average mode the final run was never divided by its length.
    // Both are fixed by tracking each run explicitly and closing it once.
    newNumOfDataPoints = 0;
    double runMz = -1.0; // m/z shared by the current run
    double runSum = 0.0; // intensity sum of the current run
    double runLen = 0.0; // number of points in the current run
    for (int i = 0; i < newDps.length; ++i) {
        double curMz = newDps[i].getMZ();
        if (newNumOfDataPoints > 0 && curMz == runMz) {
            // Duplicate m/z: extend the current run.
            runSum += newDps[i].getIntensity();
            runLen += 1.0;
        } else {
            // Close the previous run (sum or average) before starting anew.
            if (newNumOfDataPoints > 0) {
                dps[newNumOfDataPoints - 1] = new SimpleDataPoint(runMz, sum_duplicates ? runSum : runSum / runLen);
            }
            runMz = curMz;
            runSum = newDps[i].getIntensity();
            runLen = 1.0;
            dps[newNumOfDataPoints] = newDps[i];
            ++newNumOfDataPoints;
        }
    }
    // Close the final run (was left un-merged by the original code).
    if (newNumOfDataPoints > 0) {
        dps[newNumOfDataPoints - 1] = new SimpleDataPoint(runMz, sum_duplicates ? runSum : runSum / runLen);
    }

    // Create updated scan
    SimpleScan newScan = new SimpleScan(inputScan);
    newScan.setDataPoints(Arrays.copyOfRange(dps, 0, newNumOfDataPoints));
    newScan.setSpectrumType(MassSpectrumType.CENTROIDED);

    return newScan;
}
Usage of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 (by mzmine).
From the class SGFilter, method filterScan():
/**
 * Applies a Savitzky-Golay smoothing filter to the scan's intensities.
 * The margin points at both ends of the spectrum (where the symmetric
 * window does not fit) are dropped from the output.
 *
 * @param scan the scan to smooth.
 * @param parameters filter parameters (number of data points in the window).
 * @return a new scan with smoothed intensities, or the input scan when it
 *         is too short to filter.
 */
@Override
public Scan filterScan(Scan scan, ParameterSet parameters) {

    final int numOfDataPoints = parameters.getParameter(SGFilterParameters.datapoints).getValue();
    assert Avalues.containsKey(numOfDataPoints);
    assert Hvalues.containsKey(numOfDataPoints);

    // Savitzky-Golay convolution coefficients and normalization factor.
    final int[] aVals = Avalues.get(numOfDataPoints);
    final int h = Hvalues.get(numOfDataPoints).intValue();

    // changed to also allow MS2 if selected in ScanSelection
    final int margin = (numOfDataPoints + 1) / 2 - 1;

    final DataPoint[] oldPoints = scan.getDataPoints();
    final int newLength = oldPoints.length - 2 * margin;

    // only process scans with datapoints
    if (newLength < 1) {
        return scan;
    }

    final DataPoint[] smoothed = new DataPoint[newLength];
    for (int center = margin; center < oldPoints.length - margin; center++) {

        final DataPoint dp = oldPoints[center];

        // zero intensity data points must be left unchanged
        if (dp.getIntensity() == 0) {
            smoothed[center - margin] = dp;
            continue;
        }

        // Weighted sum over the symmetric window around the center point.
        double weighted = aVals[0] * dp.getIntensity();
        for (int offset = 1; offset <= margin; offset++) {
            weighted += aVals[offset] * (oldPoints[center + offset].getIntensity() + oldPoints[center - offset].getIntensity());
        }

        // Normalize by h and clamp negative results to zero.
        double value = weighted / h;
        if (value < 0) {
            value = 0;
        }
        smoothed[center - margin] = new SimpleDataPoint(dp.getMZ(), value);
    }

    final SimpleScan result = new SimpleScan(scan);
    result.setDataPoints(smoothed);
    return result;
}
End of aggregated SimpleDataPoint usage examples.