Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class NativeFileReadTask, method readRAWDump.
/**
 * This method reads the dump of the RAW data file produced by the RAWdump.exe utility (see
 * RAWdump.cpp source for details).
 */
private void readRAWDump(InputStream dumpStream) throws IOException {

  String line;
  byte[] byteBuffer = new byte[100000];
  double[] mzValuesBuffer = new double[10000];
  double[] intensityValuesBuffer = new double[10000];

  while ((line = TextUtils.readLineFromStream(dumpStream)) != null) {

    if (isCanceled()) {
      return;
    }

    if (line.startsWith("ERROR: ")) {
      throw (new IOException(line.substring("ERROR: ".length())));
    }

    if (line.startsWith("NUMBER OF SCANS: ")) {
      totalScans = Integer.parseInt(line.substring("NUMBER OF SCANS: ".length()));
    }

    if (line.startsWith("SCAN NUMBER: ")) {
      scanNumber = Integer.parseInt(line.substring("SCAN NUMBER: ".length()));
    }

    if (line.startsWith("SCAN ID: ")) {
      scanId = line.substring("SCAN ID: ".length());
    }

    if (line.startsWith("MS LEVEL: ")) {
      msLevel = Integer.parseInt(line.substring("MS LEVEL: ".length()));
    }

    if (line.startsWith("POLARITY: ")) {
      if (line.contains("-"))
        polarity = PolarityType.NEGATIVE;
      else if (line.contains("+"))
        polarity = PolarityType.POSITIVE;
      else
        polarity = PolarityType.UNKNOWN;
      // In such case, we can parse it from the scan filter line (scanId).
      if ((polarity == PolarityType.UNKNOWN) && (fileType == RawDataFileType.THERMO_RAW)
          && (!Strings.isNullOrEmpty(scanId))) {
        if (scanId.startsWith("-"))
          polarity = PolarityType.NEGATIVE;
        else if (scanId.startsWith("+"))
          polarity = PolarityType.POSITIVE;
      }
    }

    if (line.startsWith("RETENTION TIME: ")) {
      // Retention time is reported in minutes.
      retentionTime = Double.parseDouble(line.substring("RETENTION TIME: ".length()));
    }

    if (line.startsWith("PRECURSOR: ")) {
      String[] tokens = line.split(" ");
      double token2 = Double.parseDouble(tokens[1]);
      int token3 = Integer.parseInt(tokens[2]);
      precursorMZ = token2;
      precursorCharge = token3;
      // FTMS + p ESI d Full ms2 279.16@hcd25.00 [50.00-305.00]
      if (precursorMZ == 0.0 && fileType == RawDataFileType.THERMO_RAW
          && (!Strings.isNullOrEmpty(scanId))) {
        Pattern precursorPattern = Pattern.compile(".* ms\\d+ (\\d+\\.\\d+)[@ ]");
        Matcher m = precursorPattern.matcher(scanId);
        if (m.find()) {
          String precursorMzString = m.group(1);
          try {
            precursorMZ = Double.parseDouble(precursorMzString);
          } catch (Exception e) {
            e.printStackTrace();
            // ignore
          }
        }
      }
    }

    if (line.startsWith("MASS VALUES: ")) {
      Pattern p = Pattern.compile("MASS VALUES: (\\d+) x (\\d+) BYTES");
      Matcher m = p.matcher(line);
      if (!m.matches())
        throw new IOException("Could not parse line " + line);
      numOfDataPoints = Integer.parseInt(m.group(1));
      final int byteSize = Integer.parseInt(m.group(2));
      final int numOfBytes = numOfDataPoints * byteSize;
      if (byteBuffer.length < numOfBytes)
        byteBuffer = new byte[numOfBytes * 2];
      // InputStream.read() may return fewer bytes than requested, so keep reading
      // until the whole block is available
      int bytesRead = 0;
      while (bytesRead < numOfBytes) {
        int read = dumpStream.read(byteBuffer, bytesRead, numOfBytes - bytesRead);
        if (read < 0)
          throw new IOException("Unexpected end of the RAW dump stream");
        bytesRead += read;
      }
      ByteBuffer mzByteBuffer =
          ByteBuffer.wrap(byteBuffer, 0, numOfBytes).order(ByteOrder.LITTLE_ENDIAN);
      if (mzValuesBuffer.length < numOfDataPoints)
        mzValuesBuffer = new double[numOfDataPoints * 2];
      for (int i = 0; i < numOfDataPoints; i++) {
        double newValue;
        if (byteSize == 8)
          newValue = mzByteBuffer.getDouble();
        else
          newValue = mzByteBuffer.getFloat();
        mzValuesBuffer[i] = newValue;
      }
    }
if (line.startsWith("INTENSITY VALUES: ")) {
Pattern p = Pattern.compile("INTENSITY VALUES: (\\d+) x (\\d+) BYTES");
Matcher m = p.matcher(line);
if (!m.matches())
throw new IOException("Could not parse line " + line);
// VALUES
if (numOfDataPoints != Integer.parseInt(m.group(1))) {
throw new IOException("Scan " + scanNumber + " contained " + numOfDataPoints + " mass values, but " + m.group(1) + " intensity values");
}
final int byteSize = Integer.parseInt(m.group(2));
final int numOfBytes = numOfDataPoints * byteSize;
if (byteBuffer.length < numOfBytes)
byteBuffer = new byte[numOfBytes * 2];
      // InputStream.read() may return fewer bytes than requested, so keep reading
      // until the whole block is available
      int bytesRead = 0;
      while (bytesRead < numOfBytes) {
        int read = dumpStream.read(byteBuffer, bytesRead, numOfBytes - bytesRead);
        if (read < 0)
          throw new IOException("Unexpected end of the RAW dump stream");
        bytesRead += read;
      }
      ByteBuffer intensityByteBuffer =
          ByteBuffer.wrap(byteBuffer, 0, numOfBytes).order(ByteOrder.LITTLE_ENDIAN);
      if (intensityValuesBuffer.length < numOfDataPoints)
        intensityValuesBuffer = new double[numOfDataPoints * 2];
      for (int i = 0; i < numOfDataPoints; i++) {
        double newValue;
        if (byteSize == 8)
          newValue = intensityByteBuffer.getDouble();
        else
          newValue = intensityByteBuffer.getFloat();
        intensityValuesBuffer[i] = newValue;
      }

      // INTENSITY VALUES was the last item of the scan, so now we can
      // convert the data to DataPoint[] array and create a new scan
      DataPoint[] dataPoints = new DataPoint[numOfDataPoints];
      for (int i = 0; i < numOfDataPoints; i++) {
        dataPoints[i] = new SimpleDataPoint(mzValuesBuffer[i], intensityValuesBuffer[i]);
      }

      // Auto-detect whether this scan is centroided
      MassSpectrumType spectrumType = ScanUtils.detectSpectrumType(dataPoints);

      SimpleScan newScan = new SimpleScan(null, scanNumber, msLevel, retentionTime, precursorMZ,
          precursorCharge, null, dataPoints, spectrumType, polarity, scanId, mzRange);

      newMZmineFile.addScan(newScan);
      parsedScans++;

      // Clean the variables for next scan
      scanNumber = 0;
      scanId = null;
      polarity = null;
      mzRange = null;
      msLevel = 0;
      retentionTime = 0;
      precursorMZ = 0;
      precursorCharge = 0;
      numOfDataPoints = 0;
    }
  }
}
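For illustration, the binary decoding used above can be reproduced in isolation: each "MASS VALUES: N x M BYTES" / "INTENSITY VALUES: N x M BYTES" header is followed by a little-endian block of N values, each either an 8-byte double or a 4-byte float. The following standalone sketch is not part of mzmine2 (the class and method names are made up); it only mirrors the decoding loop under that assumption.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Illustrative helper, not mzmine2 code: decodes a little-endian block of
// numOfDataPoints values stored as 8-byte doubles or 4-byte floats.
public class RawDumpDecodeExample {

  static double[] decodeValues(byte[] raw, int numOfDataPoints, int byteSize) {
    ByteBuffer buf = ByteBuffer.wrap(raw, 0, numOfDataPoints * byteSize)
        .order(ByteOrder.LITTLE_ENDIAN);
    double[] values = new double[numOfDataPoints];
    for (int i = 0; i < numOfDataPoints; i++) {
      values[i] = (byteSize == 8) ? buf.getDouble() : buf.getFloat();
    }
    return values;
  }

  public static void main(String[] args) {
    // Encode three m/z values as little-endian doubles, then decode them back
    double[] mzs = {100.0512, 250.1023, 499.9876};
    ByteBuffer out = ByteBuffer.allocate(mzs.length * 8).order(ByteOrder.LITTLE_ENDIAN);
    for (double mz : mzs)
      out.putDouble(mz);
    double[] decoded = decodeValues(out.array(), mzs.length, 8);
    for (double mz : decoded)
      System.out.println(mz);
  }
}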
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class NetCDFReadTask, method readNextScan.
/**
 * Reads one scan from the file. Requires that general information has already been read.
 */
private Scan readNextScan() throws IOException {

  // Get scan starting position and length
  int[] scanStartPosition = new int[1];
  int[] scanLength = new int[1];
  Integer[] startAndLength = scansIndex.get(scanNum);

  // End of file
  if (startAndLength == null) {
    return null;
  }
  scanStartPosition[0] = startAndLength[0];
  scanLength[0] = startAndLength[1];

  // Get retention time of the scan
  Double retentionTime = scansRetentionTimes.get(scanNum);
  if (retentionTime == null) {
    logger.severe("Could not find retention time for scan " + scanNum);
    throw (new IOException("Could not find retention time for scan " + scanNum));
  }
  // An empty scan needs special attention
  if (scanLength[0] == 0) {
    scanNum++;
    return new SimpleScan(null, scanNum, 1, retentionTime.doubleValue(), 0, 0, null,
        new DataPoint[0], MassSpectrumType.CENTROIDED, PolarityType.UNKNOWN, "", null);
  }

  // Is there a way to extract the polarity from netCDF?
  PolarityType polarity = PolarityType.UNKNOWN;

  // Is there a way to extract the scan definition from netCDF?
  String scanDefinition = "";
  // Read mass and intensity values
  Array massValueArray;
  Array intensityValueArray;
  try {
    massValueArray = massValueVariable.read(scanStartPosition, scanLength);
    intensityValueArray = intensityValueVariable.read(scanStartPosition, scanLength);
  } catch (Exception e) {
    logger.log(Level.SEVERE,
        "Could not read from variables mass_values and/or intensity_values.", e);
    throw (new IOException("Could not read from variables mass_values and/or intensity_values."));
  }

  Index massValuesIndex = massValueArray.getIndex();
  Index intensityValuesIndex = intensityValueArray.getIndex();

  int arrayLength = massValueArray.getShape()[0];
  DataPoint[] dataPoints = new DataPoint[arrayLength];

  for (int j = 0; j < arrayLength; j++) {
    Index massIndex0 = massValuesIndex.set0(j);
    Index intensityIndex0 = intensityValuesIndex.set0(j);
    double mz = massValueArray.getDouble(massIndex0) * massValueScaleFactor;
    double intensity = intensityValueArray.getDouble(intensityIndex0) * intensityValueScaleFactor;
    dataPoints[j] = new SimpleDataPoint(mz, intensity);
  }

  scanNum++;

  // Auto-detect whether this scan is centroided
  MassSpectrumType spectrumType = ScanUtils.detectSpectrumType(dataPoints);

  SimpleScan buildingScan = new SimpleScan(null, scanNum, 1, retentionTime.doubleValue(), 0, 0,
      null, dataPoints, spectrumType, polarity, scanDefinition, null);

  return buildingScan;
}
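For illustration, the core of readNextScan — slicing the flat mass_values / intensity_values arrays with a per-scan (start, length) index and applying the scale factors — can be sketched without the netCDF library. This is not mzmine2 code; the class name, array contents, and scale factors below are all made up.

// Illustrative sketch, not mzmine2 code: a per-scan (start, length) index plus
// scale factors turn flat value arrays into (m/z, intensity) pairs.
public class NetcdfScanSliceExample {

  public static void main(String[] args) {
    // Flat arrays as they would be stored in the netCDF variables
    double[] massValues = {100.1, 200.2, 300.3, 150.5, 250.7};
    double[] intensityValues = {1000, 2000, 500, 800, 1200};

    // Index entry for one scan: starting position and number of points (assumed values)
    int scanStartPosition = 2;
    int scanLength = 3;

    // Scale factors as read from the variable attributes (assumed values)
    double massValueScaleFactor = 1.0;
    double intensityValueScaleFactor = 1.0;

    for (int j = 0; j < scanLength; j++) {
      double mz = massValues[scanStartPosition + j] * massValueScaleFactor;
      double intensity = intensityValues[scanStartPosition + j] * intensityValueScaleFactor;
      System.out.println("m/z " + mz + " -> intensity " + intensity);
    }
  }
}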
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class GridMassTask, method moveProbeToCenter.
void moveProbeToCenter(Probe p, int sRadius, double mzRadius) {
  int i, j, k;
  double maxMZ, minMZ;
  boolean move = true;
  Datum max = new Datum(new SimpleDataPoint(0, -1), 0, new SimpleDataPoint(0, -1));
  while (move) {
    k = Math.min(totalScans - 1, p.scanCenter + sRadius);
    for (i = Math.max(p.scanCenter - sRadius, 0); i <= k; i++) {
      Datum[] di = roi[i];
      if (di != null && di.length > 0) {
        minMZ = p.mzCenter - mzRadius;
        int idx = findFirstMass(minMZ, di);
        maxMZ = p.mzCenter + mzRadius;
        for (j = idx; j < di.length && di[j].mz <= maxMZ; j++) {
          Datum d = di[j];
          if (d.intensity > max.intensity && d.mz >= minMZ) {
            max = d;
          }
        }
      }
    }
    if (max.intensity >= 0 && (max.mz != p.mzCenter || max.scan != p.scanCenter)) {
      p.mzCenter = max.mz;
      p.scanCenter = max.scan;
      p.intensityCenter = max.intensity;
      // p.moves++;
    } else {
      move = false;
    }
  }
}
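The probe-centering loop above is a hill climb: it repeatedly jumps to the most intense point inside the current (scan, m/z) window and stops when the center no longer moves. Below is a minimal standalone sketch of the same idea on a plain 2D intensity grid; it is not mzmine2 code, and the class name and values are made up.

// Illustrative sketch, not mzmine2 code: move a probe to the highest intensity
// inside a square window and repeat until it stops moving.
public class ProbeCenteringExample {

  public static void main(String[] args) {
    double[][] intensity = {
        {1, 2, 3, 2, 1},
        {2, 4, 6, 4, 2},
        {3, 6, 9, 6, 3},   // maximum at (2, 2)
        {2, 4, 6, 4, 2},
    };
    int row = 0, col = 0;   // probe start
    int radius = 1;         // search window half-width
    boolean moved = true;
    while (moved) {
      moved = false;
      int bestRow = row, bestCol = col;
      for (int r = Math.max(0, row - radius); r <= Math.min(intensity.length - 1, row + radius); r++) {
        for (int c = Math.max(0, col - radius); c <= Math.min(intensity[r].length - 1, col + radius); c++) {
          if (intensity[r][c] > intensity[bestRow][bestCol]) {
            bestRow = r;
            bestCol = c;
          }
        }
      }
      if (bestRow != row || bestCol != col) {
        row = bestRow;
        col = bestCol;
        moved = true;
      }
    }
    System.out.println("Probe settled at (" + row + ", " + col + ") with intensity "
        + intensity[row][col]);
  }
}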
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class GridMassTask, method smoothDataPoints.
public IndexedDataPoint[][] smoothDataPoints(RawDataFile dataFile, double timeSpan,
    double timeMZSpan, int scanSpan, double mzTol, int mzPoints, double minimumHeight) {
  int[] scanNumbers = dataFile.getScanNumbers(1);
  int totalScans = scanNumbers.length;
  // [relative scan][j value]
  DataPoint[][] mzValues = null;
  DataPoint[] mzValuesJ = null;
  int[] mzValuesScan = null;
  int[] mzValuesMZidx = null;
  IndexedDataPoint[][] newMZValues = null;
  IndexedDataPoint[] tmpDP = new IndexedDataPoint[0];
  newMZValues = new IndexedDataPoint[totalScans][];
  int i, j, si, sj, ii, k, ssi, ssj, m;
  double timeSmoothingMZtol = Math.max(timeMZSpan, 1e-6);
  int modts = Math.max(1, totalScans / 10);
  for (i = 0; i < totalScans; i++) {
    if (i % 100 == 0 && isCanceled())
      return null;
    // Smoothing in TIME space
    Scan scan = dataFile.getScan(scanNumbers[i]);
    double rt = retentiontime[i];
    DataPoint[] xDP = null;
    IndexedDataPoint[] iDP = null;
    sj = si = i;
    ssi = ssj = i;
    int t = 0;
    if (timeSpan > 0 || scanSpan > 0) {
      if (scan != null) {
        for (si = i; si > 1; si--) {
          if (retentiontime[si - 1] < rt - timeSpan / 2) {
            break;
          }
        }
        for (sj = i; sj < totalScans - 1; sj++) {
          if (retentiontime[sj + 1] >= rt + timeSpan / 2) {
            break;
          }
        }
        ssi = i - (scanSpan - 1) / 2;
        ssj = i + (scanSpan - 1) / 2;
        if (ssi < 0) {
          ssj += -ssi;
          ssi = 0;
        }
        if (ssj >= totalScans) {
          ssi -= (ssj - totalScans + 1);
          ssj = totalScans - 1;
        }
        if (sj - si + 1 < scanSpan) {
          si = ssi;
          sj = ssj;
        }
      }
      if (scan != null && sj > si) {
        // Allocate
        if (mzValues == null || mzValues.length < sj - si + 1) {
          mzValues = new DataPoint[sj - si + 1][];
          mzValuesScan = new int[sj - si + 1];
          mzValuesMZidx = new int[sj - si + 1];
        }
        // Load Data Points
        for (j = si; j <= sj; j++) {
          int jsi = j - si;
          if (mzValues[jsi] == null || jsi >= mzValuesScan.length - 1
              || mzValuesScan[jsi + 1] != scanNumbers[j]) {
            Scan xscan = dataFile.getScan(scanNumbers[j]);
            mzValues[jsi] = xscan.getDataPoints();
            mzValuesScan[jsi] = scanNumbers[j];
          } else {
            mzValues[jsi] = mzValues[jsi + 1];
            mzValuesScan[jsi] = mzValuesScan[jsi + 1];
          }
          mzValuesMZidx[jsi] = 0;
        }
        // Estimate Averages
        ii = i - si;
        if (tmpDP.length < mzValues[ii].length)
          tmpDP = new IndexedDataPoint[mzValues[ii].length * 3 / 2];
        for (k = 0; k < mzValues[ii].length; k++) {
          DataPoint dp = mzValues[ii][k];
          double mz = dp.getMZ();
          double intensidad = 0;
          if (dp.getIntensity() > 0) {
            // only process those > 0
            double a = 0;
            short c = 0;
            int f = 0;
            for (j = 0; j <= sj - si; j++) {
              for (mzValuesJ = mzValues[j]; mzValuesMZidx[j] < mzValuesJ.length - 1
                  && mzValuesJ[mzValuesMZidx[j] + 1].getMZ() < mz - timeSmoothingMZtol;
                  mzValuesMZidx[j]++);
              f = mzValuesMZidx[j];
              for (m = mzValuesMZidx[j] + 1; m < mzValuesJ.length
                  && mzValuesJ[m].getMZ() < mz + timeSmoothingMZtol; m++) {
                if (Math.abs(mzValuesJ[m].getMZ() - mz) < Math.abs(mzValuesJ[f].getMZ() - mz)) {
                  f = m;
                } else {
                  // stop the search
                  break;
                }
              }
              if (f > 0 && f < mzValuesJ.length
                  && Math.abs(mzValuesJ[f].getMZ() - mz) <= timeSmoothingMZtol
                  && mzValuesJ[f].getIntensity() > 0) {
                // >= minimumHeight ?
                // System.out.println("mz=" + mz + "; Closer=" + mzValuesJ[f].getMZ()
                //     + ", f=" + f + ", Intensity=" + mzValuesJ[f].getIntensity());
                a += mzValuesJ[f].getIntensity();
                c++;
              }
            }
            intensidad = c > 0 ? a / c : 0;
            if (intensidad >= minimumHeight) {
              tmpDP[t++] = new IndexedDataPoint(k, new SimpleDataPoint(mz, intensidad));
            }
          }
        }
      }
    } else if (scan != null) {
      xDP = scan.getDataPoints();
      if (tmpDP.length < xDP.length)
        tmpDP = new IndexedDataPoint[xDP.length];
      for (k = 0; k < xDP.length; k++) {
        if (xDP[k].getIntensity() >= minimumHeight) {
          tmpDP[t++] = new IndexedDataPoint(k, xDP[k]);
        }
      }
    }
    iDP = new IndexedDataPoint[t];
    for (k = 0; k < t; k++) {
      iDP[k] = tmpDP[k];
    }
    newMZValues[i] = iDP;
    setProcedure(i, totalScans, 0);
    if (i % modts == 0) {
      logger.info("Smoothing/Caching " + dataFile + "..." + (i / modts) * 10 + "%");
    }
  }
  return newMZValues;
}
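The time-space smoothing above averages, for each data point, the intensity of the closest point (within an m/z tolerance) found in each neighboring scan, then keeps the result only if it exceeds the minimum height. Below is a minimal standalone sketch of that averaging step for a single m/z value; it is not mzmine2 code, and the class name and data are made up.

// Illustrative sketch, not mzmine2 code: average the intensity of the closest point
// (within an m/z tolerance) across a few neighboring scans.
public class TimeSmoothingExample {

  public static void main(String[] args) {
    // Neighboring scans: [scan][point] = {mz, intensity}
    double[][][] scans = {
        {{100.000, 900}, {200.000, 50}},
        {{100.002, 1100}, {180.000, 70}},
        {{100.001, 1000}, {220.000, 60}},
    };
    double mz = 100.001;   // m/z of the point being smoothed
    double mzTol = 0.01;   // m/z tolerance for matching points across scans

    double sum = 0;
    int count = 0;
    for (double[][] scan : scans) {
      double[] closest = null;
      for (double[] point : scan) {
        if (Math.abs(point[0] - mz) <= mzTol
            && (closest == null || Math.abs(point[0] - mz) < Math.abs(closest[0] - mz))) {
          closest = point;
        }
      }
      if (closest != null && closest[1] > 0) {
        sum += closest[1];
        count++;
      }
    }
    double smoothed = count > 0 ? sum / count : 0;
    System.out.println("Smoothed intensity at m/z " + mz + " = " + smoothed);
  }
}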
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in project mzmine2 by mzmine.
The class GridMassTask, method intensities.
Spot intensities(int l, int r, double min, double max, Chromatogram chr, PearsonCorrelation stats,
    int spotId) {
  boolean passSpot = false;
  Spot s = new Spot();
  if (r >= scans.length)
    r = scans.length - 1;
  if (l < 0)
    l = 0;
  for (int i = l; i <= r; i++) {
    Datum[] mzs = roi[i];
    if (mzs != null) {
      Datum mzMax = null;
      for (int j = findFirstMass(min, mzs); j < mzs.length; j++) {
        double mass = mzs[j].mz;
        double mjint = mzs[j].intensity;
        if (mass >= min) {
          if (mass <= max) {
            if (mzs[j].spotId == spotId || passSpot) {
              s.addPoint(i, mass, (mjint >= minimumHeight ? mjint : -mjint));
              if (mjint >= minimumHeight) {
                if (mzMax == null || mjint > mzMax.intensity) {
                  mzMax = mzs[j];
                }
              }
            } else {
              if (mjint >= minimumHeight)
                s.pointsNoSpot++;
            }
          } else {
            break;
          }
        }
      }
      if (chr != null && mzMax != null) {
        // Add ONLY THE MAX INTENSITY PER SCAN
        // mzMax
        chr.addMzPeak(scans[i].getScanNumber(), new SimpleDataPoint(mzMax.mz, mzMax.intensity));
      }
      if (stats != null && mzMax != null) {
        stats.enter(i, mzMax.mz);
      }
    }
  }
  return s;
}
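For the chromatogram, intensities() keeps only the most intense point per scan inside the spot's m/z window. Below is a standalone sketch of that per-scan selection; it is not mzmine2 code, and the class name and data are made up.

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch, not mzmine2 code: keep only the most intense point per scan
// inside an m/z window.
public class MaxPerScanExample {

  public static void main(String[] args) {
    // [scan][point] = {mz, intensity}
    double[][][] scans = {
        {{299.9, 100}, {300.1, 400}, {300.2, 250}},
        {{300.0, 900}, {300.3, 100}},
        {{299.8, 50}},
    };
    double min = 299.95, max = 300.25;   // m/z window of the spot

    List<double[]> chromatogram = new ArrayList<>();
    for (double[][] scan : scans) {
      double[] best = null;
      for (double[] point : scan) {
        if (point[0] >= min && point[0] <= max && (best == null || point[1] > best[1])) {
          best = point;
        }
      }
      if (best != null) {
        chromatogram.add(best);   // only the max intensity per scan
      }
    }
    for (double[] p : chromatogram) {
      System.out.println("m/z " + p[0] + " intensity " + p[1]);
    }
  }
}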