Usage example of net.sf.mzmine.datamodel.impl.SimpleScan in the mzmine2 project (by mzmine):
from the class RndResampleFilter, method filterScan.
/**
 * Resamples the scan onto a unit (integer) m/z grid: each data point's m/z is
 * rounded to the nearest integer and points that collide on the same integer
 * are merged, either by summing or by averaging their intensities (per the
 * SUM_DUPLICATES parameter). Zero-intensity points are optionally removed
 * first (REMOVE_ZERO_INTENSITY parameter).
 *
 * @param scan the scan to filter; centroided scans are used as-is, profile
 *        scans are centroided via LocMaxCentroidingAlgorithm first
 * @param parameters the RndResampleFilterParameters set
 * @return a new CENTROIDED SimpleScan holding the resampled data points
 */
public Scan filterScan(Scan scan, ParameterSet parameters) {
  boolean sum_duplicates =
      parameters.getParameter(RndResampleFilterParameters.SUM_DUPLICATES).getValue();
  boolean remove_zero_intensity =
      parameters.getParameter(RndResampleFilterParameters.REMOVE_ZERO_INTENSITY).getValue();

  // If CENTROIDED scan, use it as-is; otherwise, detect local maxima.
  Scan inputScan;
  if (scan.getSpectrumType() == MassSpectrumType.CENTROIDED)
    inputScan = scan;
  else
    inputScan = new LocMaxCentroidingAlgorithm(scan).centroidScan();

  DataPoint[] dps = inputScan.getDataPoints();

  // Cleanup first: remove zero intensity data points (if requested).
  // The dps array is compacted in place.
  int newNumOfDataPoints = 0;
  for (int i = 0; i < dps.length; ++i) {
    if (!remove_zero_intensity || dps[i].getIntensity() > 0.0) {
      dps[newNumOfDataPoints] = dps[i];
      ++newNumOfDataPoints;
    }
  }

  // Round every m/z to the nearest integer / unit value (intensity untouched).
  SimpleDataPoint[] newDps = new SimpleDataPoint[newNumOfDataPoints];
  for (int i = 0; i < newNumOfDataPoints; ++i) {
    int newMz = (int) Math.round(dps[i].getMZ());
    newDps[i] = new SimpleDataPoint(newMz, dps[i].getIntensity());
  }

  // Post-treatment: merge runs of points that rounded to the same unit m/z.
  // newDps is sorted by m/z (rounding preserves order), so duplicates are
  // adjacent. The dps array is reused for the merged output.
  double prevMz = -1.0;
  double runIntensity = 0.0; // accumulated intensity of the current duplicate run
  double divider = 1.0; // number of points merged into the current run
  newNumOfDataPoints = 0;
  for (int i = 0; i < newDps.length; ++i) {
    double curMz = newDps[i].getMZ();
    if (newNumOfDataPoints > 0 && curMz == prevMz) {
      // Duplicate m/z: accumulate into the current run (sum for now; the
      // division for averaging is applied when the run is closed).
      runIntensity += newDps[i].getIntensity();
      dps[newNumOfDataPoints - 1] = new SimpleDataPoint(prevMz, runIntensity);
      if (!sum_duplicates)
        divider += 1.0; // averaging mode: count this extra point
    } else {
      // Close the previous run: apply the average division if needed.
      if (newNumOfDataPoints > 0 && !sum_duplicates && divider > 1.0)
        dps[newNumOfDataPoints - 1] = new SimpleDataPoint(prevMz, runIntensity / divider);
      // Start a new run with this point.
      // BUGFIX: the original left runIntensity at 0.0 for the very first
      // point, which dropped its intensity from sums/averages and could even
      // zero it out entirely when the next point had a different m/z.
      dps[newNumOfDataPoints] = newDps[i];
      ++newNumOfDataPoints;
      runIntensity = newDps[i].getIntensity();
      divider = 1.0;
    }
    prevMz = curMz;
  }
  // BUGFIX: close the trailing run — the original loop only finalized a run
  // when a different m/z followed, so the last group was never averaged.
  if (newNumOfDataPoints > 0 && !sum_duplicates && divider > 1.0)
    dps[newNumOfDataPoints - 1] = new SimpleDataPoint(prevMz, runIntensity / divider);

  // Create updated scan
  SimpleScan newScan = new SimpleScan(inputScan);
  newScan.setDataPoints(Arrays.copyOfRange(dps, 0, newNumOfDataPoints));
  newScan.setSpectrumType(MassSpectrumType.CENTROIDED);
  return newScan;
}
Usage example of net.sf.mzmine.datamodel.impl.SimpleScan in the mzmine2 project (by mzmine):
from the class SGFilter, method filterScan.
/**
 * Applies Savitzky-Golay smoothing to the scan's intensity values using the
 * precomputed coefficient table (Avalues) and normalization table (Hvalues)
 * for the selected window width. Data points within the window margin at
 * either end of the spectrum are dropped from the result.
 *
 * @param scan the scan to smooth
 * @param parameters the SGFilterParameters set (window width)
 * @return a new SimpleScan with smoothed intensities, or the input scan
 *         unchanged when it has too few data points to smooth
 */
@Override
public Scan filterScan(Scan scan, ParameterSet parameters) {
  int numOfDataPoints = parameters.getParameter(SGFilterParameters.datapoints).getValue();

  assert Avalues.containsKey(numOfDataPoints);
  assert Hvalues.containsKey(numOfDataPoints);

  int[] coefficients = Avalues.get(numOfDataPoints);
  int normalization = Hvalues.get(numOfDataPoints).intValue();

  // changed to also allow MS2 if selected in ScanSelection
  int margin = (numOfDataPoints + 1) / 2 - 1;

  DataPoint[] oldPoints = scan.getDataPoints();
  int smoothedLength = oldPoints.length - (margin * 2);

  // Only process scans with enough data points to fill one window.
  if (smoothedLength < 1)
    return scan;

  DataPoint[] smoothedPoints = new DataPoint[smoothedLength];
  for (int center = margin; center < oldPoints.length - margin; center++) {
    DataPoint centerPoint = oldPoints[center];

    // Zero intensity data points must be left unchanged.
    if (centerPoint.getIntensity() == 0) {
      smoothedPoints[center - margin] = centerPoint;
      continue;
    }

    // Weighted sum over the symmetric window around the center point.
    double weighted = coefficients[0] * centerPoint.getIntensity();
    for (int offset = 1; offset <= margin; offset++) {
      weighted += coefficients[offset] * (oldPoints[center + offset].getIntensity()
          + oldPoints[center - offset].getIntensity());
    }
    double smoothed = weighted / normalization;

    // Clamp negative smoothed intensities to zero.
    if (smoothed < 0)
      smoothed = 0;

    smoothedPoints[center - margin] = new SimpleDataPoint(centerPoint.getMZ(), smoothed);
  }

  SimpleScan newScan = new SimpleScan(scan);
  newScan.setDataPoints(smoothedPoints);
  return newScan;
}
Usage example of net.sf.mzmine.datamodel.impl.SimpleScan in the mzmine2 project (by mzmine):
from the class CropFilterTask, method run.
/**
 * Runs the crop filter: copies the scans matching the scan selection into a
 * new raw data file, cropping each scan's data points to the configured m/z
 * range, then registers the new file with the project.
 *
 * @see Runnable#run()
 */
@Override
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started crop filter on " + dataFile);

  scans = scanSelection.getMatchingScans(dataFile);
  totalScans = scans.length;

  // Abort early when no scans match the selection criteria.
  if (totalScans == 0) {
    setStatus(TaskStatus.ERROR);
    setErrorMessage("No scans match the selected criteria");
    return;
  }

  try {
    RawDataFileWriter rawDataFileWriter =
        MZmineCore.createNewFile(dataFile.getName() + " " + suffix);

    for (Scan scan : scans) {
      SimpleScan scanCopy = new SimpleScan(scan);

      // Crop only when the scan extends beyond the requested m/z range.
      boolean needsCropping = !mzRange.encloses(scan.getDataPointMZRange());
      if (needsCropping) {
        DataPoint[] croppedDataPoints = scan.getDataPointsByMass(mzRange);
        scanCopy.setDataPoints(croppedDataPoints);
      }

      rawDataFileWriter.addScan(scanCopy);
      processedScans++;
    }

    RawDataFile filteredRawDataFile = rawDataFileWriter.finishWriting();
    project.addFile(filteredRawDataFile);

    // Remove the original file if requested.
    if (removeOriginal)
      project.removeFile(dataFile);

    setStatus(TaskStatus.FINISHED);
  } catch (Exception e) {
    setStatus(TaskStatus.ERROR);
    setErrorMessage(e.toString());
    e.printStackTrace();
  }
}
Usage example of net.sf.mzmine.datamodel.impl.SimpleScan in the mzmine2 project (by mzmine):
from the class NativeFileReadTask, method readRAWDump.
/**
 * This method reads the dump of the RAW data file produced by RAWdump.exe utility (see
 * RAWdump.cpp source for details). The dump is a line-oriented text protocol whose
 * "MASS VALUES:" and "INTENSITY VALUES:" headers are each followed by a raw
 * little-endian binary array (4-byte floats or 8-byte doubles). A scan is emitted
 * once its intensity block (the last item per scan) has been read.
 *
 * @param dumpStream the stdout stream of the RAWdump process
 * @throws IOException on a reported "ERROR:" line, an unparseable header line,
 *         mismatched mass/intensity counts, or premature end of stream
 */
private void readRAWDump(InputStream dumpStream) throws IOException {

  String line;
  // Reusable buffers, grown on demand to avoid per-scan allocation.
  byte[] byteBuffer = new byte[100000];
  double[] mzValuesBuffer = new double[10000];
  double[] intensityValuesBuffer = new double[10000];

  while ((line = TextUtils.readLineFromStream(dumpStream)) != null) {

    if (isCanceled()) {
      return;
    }

    if (line.startsWith("ERROR: ")) {
      throw (new IOException(line.substring("ERROR: ".length())));
    }

    if (line.startsWith("NUMBER OF SCANS: ")) {
      totalScans = Integer.parseInt(line.substring("NUMBER OF SCANS: ".length()));
    }

    if (line.startsWith("SCAN NUMBER: ")) {
      scanNumber = Integer.parseInt(line.substring("SCAN NUMBER: ".length()));
    }

    if (line.startsWith("SCAN ID: ")) {
      scanId = line.substring("SCAN ID: ".length());
    }

    if (line.startsWith("MS LEVEL: ")) {
      msLevel = Integer.parseInt(line.substring("MS LEVEL: ".length()));
    }

    if (line.startsWith("POLARITY: ")) {
      if (line.contains("-"))
        polarity = PolarityType.NEGATIVE;
      else if (line.contains("+"))
        polarity = PolarityType.POSITIVE;
      else
        polarity = PolarityType.UNKNOWN;
      // If still unknown, try to parse polarity from the Thermo scan filter
      // line (scanId), which starts with "+" or "-".
      if ((polarity == PolarityType.UNKNOWN) && (fileType == RawDataFileType.THERMO_RAW)
          && (!Strings.isNullOrEmpty(scanId))) {
        if (scanId.startsWith("-"))
          polarity = PolarityType.NEGATIVE;
        else if (scanId.startsWith("+"))
          polarity = PolarityType.POSITIVE;
      }
    }

    if (line.startsWith("RETENTION TIME: ")) {
      // Retention time is reported in minutes.
      retentionTime = Double.parseDouble(line.substring("RETENTION TIME: ".length()));
    }

    if (line.startsWith("PRECURSOR: ")) {
      String[] tokens = line.split(" ");
      double token2 = Double.parseDouble(tokens[1]);
      int token3 = Integer.parseInt(tokens[2]);
      precursorMZ = token2;
      precursorCharge = token3;
      // A missing precursor m/z can sometimes be recovered from the Thermo
      // scan filter, e.g.:
      // FTMS + p ESI d Full ms2 279.16@hcd25.00 [50.00-305.00]
      if (precursorMZ == 0.0 && fileType == RawDataFileType.THERMO_RAW
          && (!Strings.isNullOrEmpty(scanId))) {
        Pattern precursorPattern = Pattern.compile(".* ms\\d+ (\\d+\\.\\d+)[@ ]");
        Matcher m = precursorPattern.matcher(scanId);
        if (m.find()) {
          String precursorMzString = m.group(1);
          try {
            precursorMZ = Double.parseDouble(precursorMzString);
          } catch (Exception e) {
            e.printStackTrace();
            // ignore
          }
        }
      }
    }

    if (line.startsWith("MASS VALUES: ")) {
      Pattern p = Pattern.compile("MASS VALUES: (\\d+) x (\\d+) BYTES");
      Matcher m = p.matcher(line);
      if (!m.matches())
        throw new IOException("Could not parse line " + line);
      numOfDataPoints = Integer.parseInt(m.group(1));
      final int byteSize = Integer.parseInt(m.group(2));
      final int numOfBytes = numOfDataPoints * byteSize;
      if (byteBuffer.length < numOfBytes)
        byteBuffer = new byte[numOfBytes * 2];
      // BUGFIX: InputStream.read() may return fewer bytes than requested;
      // loop until the whole binary block has been consumed.
      readFully(dumpStream, byteBuffer, numOfBytes);
      ByteBuffer mzByteBuffer =
          ByteBuffer.wrap(byteBuffer, 0, numOfBytes).order(ByteOrder.LITTLE_ENDIAN);
      if (mzValuesBuffer.length < numOfDataPoints)
        mzValuesBuffer = new double[numOfDataPoints * 2];
      for (int i = 0; i < numOfDataPoints; i++) {
        double newValue;
        if (byteSize == 8)
          newValue = mzByteBuffer.getDouble();
        else
          newValue = mzByteBuffer.getFloat();
        mzValuesBuffer[i] = newValue;
      }
    }

    if (line.startsWith("INTENSITY VALUES: ")) {
      Pattern p = Pattern.compile("INTENSITY VALUES: (\\d+) x (\\d+) BYTES");
      Matcher m = p.matcher(line);
      if (!m.matches())
        throw new IOException("Could not parse line " + line);
      // The number of intensity values must match the number of mass values.
      if (numOfDataPoints != Integer.parseInt(m.group(1))) {
        throw new IOException("Scan " + scanNumber + " contained " + numOfDataPoints
            + " mass values, but " + m.group(1) + " intensity values");
      }
      final int byteSize = Integer.parseInt(m.group(2));
      final int numOfBytes = numOfDataPoints * byteSize;
      if (byteBuffer.length < numOfBytes)
        byteBuffer = new byte[numOfBytes * 2];
      // BUGFIX: same partial-read issue as above.
      readFully(dumpStream, byteBuffer, numOfBytes);
      ByteBuffer intensityByteBuffer =
          ByteBuffer.wrap(byteBuffer, 0, numOfBytes).order(ByteOrder.LITTLE_ENDIAN);
      if (intensityValuesBuffer.length < numOfDataPoints)
        intensityValuesBuffer = new double[numOfDataPoints * 2];
      for (int i = 0; i < numOfDataPoints; i++) {
        double newValue;
        if (byteSize == 8)
          newValue = intensityByteBuffer.getDouble();
        else
          newValue = intensityByteBuffer.getFloat();
        intensityValuesBuffer[i] = newValue;
      }

      // INTENSITY VALUES was the last item of the scan, so now we can
      // convert the data to DataPoint[] array and create a new scan
      DataPoint[] dataPoints = new DataPoint[numOfDataPoints];
      for (int i = 0; i < numOfDataPoints; i++) {
        dataPoints[i] = new SimpleDataPoint(mzValuesBuffer[i], intensityValuesBuffer[i]);
      }

      // Auto-detect whether this scan is centroided
      MassSpectrumType spectrumType = ScanUtils.detectSpectrumType(dataPoints);

      SimpleScan newScan = new SimpleScan(null, scanNumber, msLevel, retentionTime, precursorMZ,
          precursorCharge, null, dataPoints, spectrumType, polarity, scanId, mzRange);
      newMZmineFile.addScan(newScan);
      parsedScans++;

      // Clean the variables for next scan
      scanNumber = 0;
      scanId = null;
      polarity = null;
      mzRange = null;
      msLevel = 0;
      retentionTime = 0;
      precursorMZ = 0;
      precursorCharge = 0;
      numOfDataPoints = 0;
    }
  }
}

/**
 * Reads exactly {@code length} bytes from {@code stream} into {@code buffer},
 * looping over partial reads (InputStream.read() guarantees only that at
 * least one byte is read per call).
 *
 * @throws IOException if the stream ends before {@code length} bytes are read
 */
private static void readFully(InputStream stream, byte[] buffer, int length) throws IOException {
  int offset = 0;
  while (offset < length) {
    int bytesRead = stream.read(buffer, offset, length - offset);
    if (bytesRead < 0)
      throw new IOException("Unexpected end of stream while reading scan data");
    offset += bytesRead;
  }
}
Usage example of net.sf.mzmine.datamodel.impl.SimpleScan in the mzmine2 project (by mzmine):
from the class NetCDFReadTask, method readNextScan.
/**
 * Reads one scan from the netCDF file and advances the internal scan counter.
 * Requires that general information (scan index and retention-time maps, the
 * mass/intensity variables, and their scale factors) has already been read.
 *
 * @return the next scan as a SimpleScan, or null when the scan index map has
 *         no entry for the current scan number (end of file)
 * @throws IOException if the retention time is missing or the netCDF
 *         variables cannot be read
 */
private Scan readNextScan() throws IOException {
// Get scan starting position and length
int[] scanStartPosition = new int[1];
int[] scanLength = new int[1];
Integer[] startAndLength = scansIndex.get(scanNum);
// End of file
if (startAndLength == null) {
return null;
}
scanStartPosition[0] = startAndLength[0];
scanLength[0] = startAndLength[1];
// Get retention time of the scan
Double retentionTime = scansRetentionTimes.get(scanNum);
if (retentionTime == null) {
logger.severe("Could not find retention time for scan " + scanNum);
throw (new IOException("Could not find retention time for scan " + scanNum));
}
// An empty scan needs special attention: emit a scan with no data points.
// Note the counter is incremented BEFORE building the scan, so the emitted
// scan number is scanNum + 1 relative to the index lookup above.
if (scanLength[0] == 0) {
scanNum++;
return new SimpleScan(null, scanNum, 1, retentionTime.doubleValue(), 0, 0, null, new DataPoint[0], MassSpectrumType.CENTROIDED, PolarityType.UNKNOWN, "", null);
}
// Is there any way how to extract polarity from netcdf?
PolarityType polarity = PolarityType.UNKNOWN;
// Is there any way how to extract scan definition from netcdf?
String scanDefinition = "";
// Read mass and intensity values
Array massValueArray;
Array intensityValueArray;
try {
massValueArray = massValueVariable.read(scanStartPosition, scanLength);
intensityValueArray = intensityValueVariable.read(scanStartPosition, scanLength);
} catch (Exception e) {
logger.log(Level.SEVERE, "Could not read from variables mass_values and/or intensity_values.", e);
throw (new IOException("Could not read from variables mass_values and/or intensity_values."));
}
Index massValuesIndex = massValueArray.getIndex();
Index intensityValuesIndex = intensityValueArray.getIndex();
int arrayLength = massValueArray.getShape()[0];
DataPoint[] dataPoints = new DataPoint[arrayLength];
// Convert the raw netCDF values to data points, applying the scale factors
// (netCDF-MS files may store values in a scaled integer representation).
for (int j = 0; j < arrayLength; j++) {
Index massIndex0 = massValuesIndex.set0(j);
Index intensityIndex0 = intensityValuesIndex.set0(j);
double mz = massValueArray.getDouble(massIndex0) * massValueScaleFactor;
double intensity = intensityValueArray.getDouble(intensityIndex0) * intensityValueScaleFactor;
dataPoints[j] = new SimpleDataPoint(mz, intensity);
}
scanNum++;
// Auto-detect whether this scan is centroided
MassSpectrumType spectrumType = ScanUtils.detectSpectrumType(dataPoints);
// MS level is hard-coded to 1 and precursor m/z/charge to 0 — presumably
// because the netCDF format carries no MS^n metadata; TODO confirm.
SimpleScan buildingScan = new SimpleScan(null, scanNum, 1, retentionTime.doubleValue(), 0, 0, null, dataPoints, spectrumType, polarity, scanDefinition, null);
return buildingScan;
}
Aggregations (end of collected usage examples).