Use of net.sf.mzmine.datamodel.impl.SimpleScan in project mzmine2 by mzmine.
From the class PearsonCorrelation, method run().
/**
 * Runs the scan-alignment task: for every MS level 1 scan, each candidate
 * data-point shift in [-mzSpan, mzSpan] is scored by the Pearson correlation
 * between the shifted intensities of the current scan and the mass-matched
 * intensities of the neighbouring scans; the best-scoring shift is applied
 * and the shifted scan is written to a new raw data file.
 *
 * @see Runnable#run()
 */
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started Scan Alignment on " + dataFile);
  // Only MS level 1 scans are processed.
  scanNumbers = dataFile.getScanNumbers(1);
  totalScans = scanNumbers.length;
  RawDataFileWriter newRDFW = null;
  try {
    newRDFW = MZmineCore.createNewFile(dataFile.getName() + ' ' + suffix);
    // [relative scan][j value]
    DataPoint[][] mzValues = null;
    int i, j, si, sj, ii, k, shift, ks;
    // Histogram: how many scans ended up aligned with each shift value.
    int[] shiftedScans = new int[mzSpan * 2 + 1];
    for (i = 0; i < totalScans; i++) {
      if (isCanceled())
        return;
      Scan scan = dataFile.getScan(scanNumbers[i]);
      // [si, sj] = window of 2*scanSpan+1 scan indices around i, clamped so
      // it does not run past the last scan.
      si = (int) Math.max(0, i - scanSpan);
      sj = (int) (si + 2 * scanSpan);
      if (sj >= totalScans) {
        si = (int) Math.max(0, si - (sj - totalScans + 1));
        sj = (int) (si + 2 * scanSpan);
      }
      if (scan != null) {
        // Allocate (buffer is reused across iterations when large enough)
        if (mzValues == null || mzValues.length < sj - si + 1)
          mzValues = new DataPoint[sj - si + 1][];
        // Load Data Points of every scan in the window
        for (j = si; j <= sj; j++) {
          Scan xscan = dataFile.getScan(scanNumbers[j]);
          mzValues[j - si] = xscan.getDataPoints();
        }
        // Estimate Correlations; ii = the current scan's index inside the window
        ii = i - si;
        final SimpleScan newScan = new SimpleScan(scan);
        DataPoint[] newDP = new DataPoint[mzValues[ii].length];
        int maxShift = 0;
        double maxCorrelation = 0;
        int ndp = mzValues[ii].length;
        // System.out.print("Scan="+i);
        for (shift = -mzSpan; shift <= mzSpan; shift++) {
          // Score this shift: correlate the shifted intensities of the current
          // scan against mass-matched intensities of every other window scan.
          PearsonCorrelation thisShift = new PearsonCorrelation();
          for (k = 0; k < ndp; k++) {
            ks = k + shift;
            if (ks >= 0 && ks < ndp && mzValues[ii][ks].getIntensity() >= minimumHeight) {
              DataPoint dp = mzValues[ii][k];
              double mz = dp.getMZ();
              // f = index of the data point in a neighbour scan matching dp's m/z
              int f = 0;
              for (j = 0; j < mzValues.length; j++) {
                // System.out.println(j);
                if (j != ii) {
                  if (mzValues[j].length > k && Math.abs(mzValues[j][k].getMZ() - mz) < 1e-10) {
                    f = k;
                  } else {
                    f = findFirstMass(mz, mzValues[j]);
                    if (Math.abs(mzValues[j][f].getMZ() - mz) > 1e-10) {
                      // NOTE(review): negating marks "no match", but -0 == 0,
                      // so a non-matching index 0 still passes the f >= 0 test
                      // below — confirm whether this is intended.
                      f = -f;
                    }
                  }
                  if (f >= 0) {
                    if (logScale) {
                      thisShift.enter(Math.log(mzValues[j][f].getIntensity()), Math.log(mzValues[ii][ks].getIntensity()));
                    } else {
                      thisShift.enter(mzValues[j][f].getIntensity(), mzValues[ii][ks].getIntensity());
                    }
                  }
                }
              }
            }
          }
          // correlation="+Math.round(thisShift.correlation()*1000)/1000.0);
          // Keep the shift with the highest correlation seen so far.
          if (thisShift.correlation() > maxCorrelation) {
            maxShift = shift;
            maxCorrelation = thisShift.correlation();
          }
          // newDP[k] = new SimpleDataPoint(mz, c > 0 ? a/c : 0);
        }
        // Copy DataPoints with maxShift as the shift
        shift = maxShift;
        // System.out.println("\nScan="+i+", Shift="+maxShift+", Correlation="+maxCorrelation);
        shiftedScans[maxShift + mzSpan]++;
        for (k = 0; k < ndp; k++) {
          ks = k + shift;
          if (ks >= 0 && ks < ndp) {
            // Keep the original m/z axis; take the intensity from the shifted index.
            newDP[k] = new SimpleDataPoint(mzValues[ii][k].getMZ(), mzValues[ii][ks].getIntensity());
          } else {
            // Shifted index falls outside the scan: pad with zero intensity.
            newDP[k] = new SimpleDataPoint(mzValues[ii][k].getMZ(), 0);
          }
        }
        newScan.setDataPoints(newDP);
        newRDFW.addScan(newScan);
      }
      processedScans++;
    }
    if (!isCanceled()) {
      // Finalize writing
      newRDF = newRDFW.finishWriting();
      // Add the newly created file to the project
      project.addFile(newRDF);
      // Remove the original data file if requested
      if (removeOriginal) {
        project.removeFile(dataFile);
      }
      setStatus(TaskStatus.FINISHED);
      // Build a human-readable shift histogram, e.g. "-1:3 | 0:120 | 1:5 | ".
      String shifts = "";
      for (i = -mzSpan; i <= mzSpan; i++) {
        shifts = shifts + i + ":" + shiftedScans[i + mzSpan] + " | ";
      }
      logger.info("Finished Scan Alignment on " + dataFile + ". Scans per shift = " + shifts);
    }
  } catch (IOException e) {
    // NOTE(review): the task status is never set to an error state here, so a
    // failed write leaves the task in PROCESSING — confirm intended behaviour.
    e.printStackTrace();
  }
}
Use of net.sf.mzmine.datamodel.impl.SimpleScan in project mzmine2 by mzmine.
From the class ScanSmoothingTask, method run().
/**
 * Runs the scan-smoothing task: each MS level 1 scan is optionally averaged
 * with its neighbouring scans (time/scan-span smoothing) and then with its
 * neighbouring data points (m/z smoothing), and the result is written to a
 * new raw data file.
 *
 * @see Runnable#run()
 */
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started Scan Smoothing on " + dataFile);
  // Only MS level 1 scans are processed.
  scanNumbers = dataFile.getScanNumbers(1);
  totalScans = scanNumbers.length;
  RawDataFileWriter newRDFW = null;
  // Counters of how many scans actually went through each smoothing stage.
  int timepassed = 0;
  int mzpassed = 0;
  try {
    newRDFW = MZmineCore.createNewFile(dataFile.getName() + ' ' + suffix);
    // [relative scan][j value]
    DataPoint[][] mzValues = null;
    int i, j, si, sj, ii, k, ssi, ssj;
    for (i = 0; i < totalScans; i++) {
      if (isCanceled())
        return;
      // Smoothing in TIME space
      Scan scan = dataFile.getScan(scanNumbers[i]);
      if (scan != null) {
        double rt = scan.getRetentionTime();
        final SimpleScan newScan = new SimpleScan(scan);
        DataPoint[] newDP = null;
        sj = si = i;
        ssi = ssj = i;
        if (timeSpan > 0 || scanSpan > 0) {
          // m/z tolerance deciding whether two data points are "the same mass".
          double timeMZtol = Math.max(mzTol, 1e-5);
          // Grow [si, sj] to cover all scans whose retention time lies within
          // +/- timeSpan/2 of the current scan's retention time.
          for (si = i; si > 1; si--) {
            Scan scanS = dataFile.getScan(scanNumbers[si - 1]);
            if (scanS == null || scanS.getRetentionTime() < rt - timeSpan / 2) {
              break;
            }
          }
          for (sj = i; sj < totalScans - 1; sj++) {
            Scan scanS = dataFile.getScan(scanNumbers[sj + 1]);
            if (scanS == null || scanS.getRetentionTime() >= rt + timeSpan / 2) {
              break;
            }
          }
          // [ssi, ssj]: fixed-size window of about scanSpan scans centred on
          // i, clamped to the valid index range.
          ssi = i - (scanSpan - 1) / 2;
          ssj = i + (scanSpan - 1) / 2;
          if (ssi < 0) {
            ssj += -ssi;
            ssi = 0;
          }
          if (ssj >= totalScans) {
            ssi -= (ssj - totalScans + 1);
            ssj = totalScans - 1;
          }
          // Fall back to the scan-count window when the time window is smaller.
          if (sj - si + 1 < scanSpan) {
            si = ssi;
            sj = ssj;
            // si = Math.min(si, ssi);
            // sj = Math.max(sj, ssj);
          }
          if (sj > si) {
            timepassed++;
            // Allocate (buffer is reused across iterations when large enough)
            if (mzValues == null || mzValues.length < sj - si + 1)
              mzValues = new DataPoint[sj - si + 1][];
            // Load Data Points of every scan in the window
            for (j = si; j <= sj; j++) {
              Scan xscan = dataFile.getScan(scanNumbers[j]);
              mzValues[j - si] = xscan.getDataPoints();
            }
            // Estimate Averages; ii = the current scan's index inside the window
            ii = i - si;
            newDP = new DataPoint[mzValues[ii].length];
            for (k = 0; k < mzValues[ii].length; k++) {
              DataPoint dp = mzValues[ii][k];
              double mz = dp.getMZ();
              double intensidad = 0;
              if (dp.getIntensity() > 0) {
                // only process those > 0
                double a = 0; // running sum of matched intensities
                short c = 0; // number of scans that contributed a match
                int f = 0; // matching data-point index in a neighbour scan
                for (j = 0; j < mzValues.length; j++) {
                  // System.out.println(j);
                  if (mzValues[j].length > k && Math.abs(mzValues[j][k].getMZ() - mz) < timeMZtol) {
                    f = k;
                  } else {
                    f = findFirstMass(mz, mzValues[j]);
                    if (Math.abs(mzValues[j][f].getMZ() - mz) > timeMZtol) {
                      // NOTE(review): negating marks "no match", but -0 == 0,
                      // so a non-matching index 0 still passes the f >= 0 test
                      // below — confirm whether this is intended.
                      f = -f;
                    }
                  }
                  if (f >= 0 && mzValues[j][f].getIntensity() >= minimumHeight) {
                    a += mzValues[j][f].getIntensity();
                    c++;
                  } else {
                    // no contribution from this scan (statement is a no-op)
                    c = (short) (c + 0);
                  }
                }
                // Average of the matched intensities (0 when nothing matched).
                intensidad = c > 0 ? a / c : 0;
              }
              newDP[k] = new SimpleDataPoint(mz, intensidad);
            }
          }
        } else if (scan != null) {
          newDP = scan.getDataPoints();
        }
        // Smoothing in M/Z space: moving average over a window bounded by
        // mzTol (mass tolerance) and/or mzPoints (data-point count).
        // NOTE(review): if time smoothing was enabled but produced no window
        // (sj <= si), newDP is still null here and newDP.length would throw a
        // NullPointerException — confirm whether that case can occur.
        if ((mzTol > 0 || mzPoints > 0)) {
          mzpassed++;
          DataPoint[] updatedDP = new DataPoint[newDP.length];
          for (k = 0; k < newDP.length; k++) {
            double mz = newDP[k].getMZ();
            double intensidad = 0;
            if (newDP[k].getIntensity() > 0) {
              // Expand [si, sj] while m/z stays within mzTol of mz or within
              // mzPoints data points of k (loop bodies empty on purpose).
              for (si = k; si > 0 && (newDP[si].getMZ() + mzTol >= mz || k - si <= mzPoints); si--) ;
              for (sj = k; sj < newDP.length - 1 && (newDP[sj].getMZ() - mzTol <= mz || sj - k <= mzPoints); sj++) ;
              double sum = 0;
              for (j = si; j <= sj; j++) {
                sum += newDP[j].getIntensity();
              }
              intensidad = sum / (sj - si + 1);
            }
            updatedDP[k] = new SimpleDataPoint(mz, intensidad);
          }
          newDP = updatedDP;
        }
        // Register new smoothing data
        if (scan != null && newDP != null) {
          newScan.setDataPoints(newDP);
          newRDFW.addScan(newScan);
        }
      }
      processedScans++;
    }
    if (!isCanceled()) {
      // Finalize writing
      newRDF = newRDFW.finishWriting();
      // Add the newly created file to the project
      project.addFile(newRDF);
      // Remove the original data file if requested
      if (removeOriginal) {
        project.removeFile(dataFile);
      }
      setStatus(TaskStatus.FINISHED);
      // Warn when most scans skipped both smoothing stages — that usually
      // means the parameters were left at ineffective values.
      if (mzpassed + timepassed < totalScans / 2) {
        logger.warning("It seems that parameters were not properly set. Scans processed : time=" + timepassed + ", mz=" + mzpassed);
      }
      logger.info("Finished Scan Smoothing on " + dataFile);
    }
  } catch (IOException e) {
    // NOTE(review): the task status is never set to an error state here, so a
    // failed write leaves the task in PROCESSING — confirm intended behaviour.
    e.printStackTrace();
  }
}
Use of net.sf.mzmine.datamodel.impl.SimpleScan in project mzmine2 by mzmine.
From the class BaselineCorrector, method correctBasePeakBaselines().
/**
 * Corrects the baselines of one raw data file using base peak chromatograms:
 * a baseline is computed per m/z bin (in R via the given session) and then
 * subtracted from every scan, writing the corrected scans to {@code writer}.
 *
 * @param rSession R session used to compute the baselines.
 * @param origDataFile dataFile of concern.
 * @param writer data file writer.
 * @param level the MS level.
 * @param numBins number of m/z bins.
 * @param parameters parameters specific to the actual method for baseline computing.
 * @throws IOException if there are i/o problems.
 * @throws RSessionWrapperException
 */
private void correctBasePeakBaselines(final RSessionWrapper rSession, final RawDataFile origDataFile, final RawDataFileWriter writer, final int level, final int numBins, final ParameterSet parameters) throws IOException, RSessionWrapperException {
  // Scan numbers of the requested MS level in the original file.
  final int[] scans = origDataFile.getScanNumbers(level);
  // One base peak chromatogram per m/z bin.
  LOG.finest("Building base peak chromatograms.");
  final double[][] chromatograms = buildBasePeakChromatograms(origDataFile, level, numBins);
  // Compute one baseline per bin; each chromatogram is overwritten in place
  // with its baseline to save memory.
  LOG.finest("Calculating baselines.");
  int bin = 0;
  while (!isAborted(origDataFile) && bin < numBins) {
    chromatograms[bin] = computeBaseline(rSession, origDataFile, chromatograms[bin], parameters);
    progressMap.get(origDataFile)[0]++;
    bin++;
  }
  // Subtract the baselines scan by scan and write out the corrected scans.
  LOG.finest("Subtracting baselines.");
  int idx = 0;
  while (!isAborted(origDataFile) && idx < scans.length) {
    // Original scan and its (m/z, intensity) data points.
    final Scan source = origDataFile.getScan(scans[idx]);
    final DataPoint[] sourcePoints = source.getDataPoints();
    // Corrected copy of the scan.
    final SimpleScan corrected = new SimpleScan(source);
    corrected.setDataPoints(subtractBasePeakBaselines(origDataFile, sourcePoints, chromatograms, numBins, idx));
    writer.addScan(corrected);
    progressMap.get(origDataFile)[0]++;
    idx++;
  }
}
Use of net.sf.mzmine.datamodel.impl.SimpleScan in project mzmine2 by mzmine.
From the class BaselineCorrector, method correctTICBaselines().
/**
 * Corrects the baselines of one raw data file using TIC chromatograms:
 * a baseline is computed per m/z bin, normalized against the bin's TIC
 * chromatogram, and then subtracted from every scan; corrected scans are
 * written to {@code writer}.
 *
 * @param rSession R session used to compute the baselines.
 * @param origDataFile dataFile of concern.
 * @param writer data file writer.
 * @param level the MS level.
 * @param numBins number of m/z bins.
 * @param parameters parameters specific to the actual method for baseline computing.
 * @throws IOException if there are i/o problems.
 * @throws RSessionWrapperException
 */
private void correctTICBaselines(final RSessionWrapper rSession, final RawDataFile origDataFile, final RawDataFileWriter writer, final int level, final int numBins, final ParameterSet parameters) throws IOException, RSessionWrapperException {
  // Scan numbers of the requested MS level in the original file.
  final int[] scans = origDataFile.getScanNumbers(level);
  final int nScans = scans.length;
  // One TIC chromatogram per m/z bin.
  LOG.finest("Building TIC chromatograms.");
  final double[][] chromatograms = buildTICChromatograms(origDataFile, level, numBins);
  // Compute and normalize one baseline per bin; chromatograms are
  // overwritten in place to save memory.
  LOG.finest("Calculating baselines.");
  for (int bin = 0; !isAborted(origDataFile) && bin < numBins; bin++) {
    // Baseline of this bin's chromatogram.
    final double[] baseline = computeBaseline(rSession, origDataFile, chromatograms[bin], parameters);
    // Normalize the baseline w.r.t. the chromatogram (TIC): store the ratio
    // baseline/chromatogram wherever the chromatogram is non-zero.
    for (int s = 0; !isAborted(origDataFile) && s < nScans; s++) {
      final double tic = chromatograms[bin][s];
      if (tic != 0.0) {
        chromatograms[bin][s] = baseline[s] / tic;
      }
    }
    progressMap.get(origDataFile)[0]++;
  }
  // Subtract the baselines scan by scan and write out the corrected scans.
  LOG.finest("Subtracting baselines.");
  for (int s = 0; !isAborted(origDataFile) && s < nScans; s++) {
    // Original scan and its (m/z, intensity) data points.
    final Scan source = origDataFile.getScan(scans[s]);
    final DataPoint[] sourcePoints = source.getDataPoints();
    // Corrected copy of the scan.
    final SimpleScan corrected = new SimpleScan(source);
    corrected.setDataPoints(subtractTICBaselines(origDataFile, sourcePoints, chromatograms, numBins, s));
    writer.addScan(corrected);
    progressMap.get(origDataFile)[0]++;
  }
}
Use of net.sf.mzmine.datamodel.impl.SimpleScan in project mzmine2 by mzmine.
From the class BaselineCorrector, method copyScansToWriter().
/**
 * Copies all scans of one MS level to a RawDataFileWriter, deep-copying each
 * scan's data points so the new file shares no DataPoint instances with the
 * original.
 *
 * @param origDataFile dataFile of concern.
 * @param writer writer to copy scans to.
 * @param level MS-level of scans to copy.
 * @throws IOException if there are i/o problems.
 */
private void copyScansToWriter(final RawDataFile origDataFile, final RawDataFileWriter writer, final int level) throws IOException {
  LOG.finest("Copy scans");
  // Scan numbers for the requested MS level.
  final int[] scans = origDataFile.getScanNumbers(level);
  // Copy each scan unless the task has been aborted.
  for (int idx = 0; !isAborted(origDataFile) && idx < scans.length; idx++) {
    final Scan source = origDataFile.getScan(scans[idx]);
    final DataPoint[] sourcePoints = source.getDataPoints();
    // Deep-copy the (m/z, intensity) pairs.
    final DataPoint[] copiedPoints = new DataPoint[sourcePoints.length];
    for (int p = 0; p < sourcePoints.length; p++) {
      copiedPoints[p] = new SimpleDataPoint(sourcePoints[p]);
    }
    // Write the copied scan and advance this file's progress counter.
    final SimpleScan copy = new SimpleScan(source);
    copy.setDataPoints(copiedPoints);
    writer.addScan(copy);
    progressMap.get(origDataFile)[0]++;
  }
}
Aggregations