use of net.sf.mzmine.datamodel.impl.SimpleMassList in project mzmine2 by mzmine.
the class MassDetectionTask method run.
/**
* @see Runnable#run()
*/
public void run() {
  // Arrays to hold everything needed for the netCDF export
  ArrayList<Integer> pointsInScans = new ArrayList<>();
  ArrayList<Double> allMZ = new ArrayList<>();
  ArrayList<Double> allIntensities = new ArrayList<>();
  // Indices into the full mass list where each scan starts
  ArrayList<Integer> startIndex = new ArrayList<>();
  ArrayList<Double> scanAcquisitionTime = new ArrayList<>();
  // XCMS needs this one
  ArrayList<Double> totalIntensity = new ArrayList<>();
  double curTotalIntensity;
  int lastPointCount = 0;
  startIndex.add(0);
  try {
    setStatus(TaskStatus.PROCESSING);
    logger.info("Started mass detector on " + dataFile);
    final Scan[] scans = scanSelection.getMatchingScans(dataFile);
    totalScans = scans.length;
    // Process scans one by one
    for (Scan scan : scans) {
      if (isCanceled())
        return;
      MassDetector detector = massDetector.getModule();
      DataPoint[] mzPeaks = detector.getMassValues(scan, massDetector.getParameterSet());
      SimpleMassList newMassList = new SimpleMassList(name, scan, mzPeaks);
      // Add new mass list to the scan
      scan.addMassList(newMassList);
      if (this.saveToCDF) {
        curTotalIntensity = 0;
        for (int a = 0; a < mzPeaks.length; a++) {
          DataPoint curMzPeak = mzPeaks[a];
          allMZ.add(curMzPeak.getMZ());
          allIntensities.add(curMzPeak.getIntensity());
          curTotalIntensity += curMzPeak.getIntensity();
        }
        scanAcquisitionTime.add(scan.getRetentionTime());
        pointsInScans.add(0);
        startIndex.add(mzPeaks.length + lastPointCount);
        totalIntensity.add(curTotalIntensity);
        lastPointCount = mzPeaks.length + lastPointCount;
      }
      processedScans++;
    }
    // Update the GUI with all new mass lists
    MZmineProjectImpl project =
        (MZmineProjectImpl) MZmineCore.getProjectManager().getCurrentProject();
    final RawDataTreeModel treeModel = project.getRawDataTreeModel();
    treeModel.updateGUIWithNewObjects();
    if (this.saveToCDF) {
      // ************** write mass list *******************************
      final String outFileNamePath = outFilename.getPath();
      logger.info("Saving mass detector results to netCDF file " + outFileNamePath);
      NetcdfFileWriter writer = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3,
          outFileNamePath, null);
      Dimension dim_massValues = writer.addDimension(null, "mass_values", allMZ.size());
      Dimension dim_intensityValues =
          writer.addDimension(null, "intensity_values", allIntensities.size());
      Dimension dim_scanIndex = writer.addDimension(null, "scan_index", startIndex.size() - 1);
      Dimension dim_scanAcquisitionTime =
          writer.addDimension(null, "scan_acquisition_time", scanAcquisitionTime.size());
      Dimension dim_totalIntensity =
          writer.addDimension(null, "total_intensity", totalIntensity.size());
      Dimension dim_pointsInScans = writer.addDimension(null, "point_count", pointsInScans.size());
      // add dimensions to list
      List<Dimension> dims = new ArrayList<>();
      dims.add(dim_massValues);
      dims.add(dim_intensityValues);
      dims.add(dim_scanIndex);
      dims.add(dim_scanAcquisitionTime);
      dims.add(dim_totalIntensity);
      dims.add(dim_pointsInScans);
      // Create the variables that hold the actual data
      Variable var_massValues =
          writer.addVariable(null, "mass_values", DataType.DOUBLE, "mass_values");
      Variable var_intensityValues =
          writer.addVariable(null, "intensity_values", DataType.DOUBLE, "intensity_values");
      Variable var_scanIndex = writer.addVariable(null, "scan_index", DataType.INT, "scan_index");
      Variable var_scanAcquisitionTime = writer.addVariable(null, "scan_acquisition_time",
          DataType.DOUBLE, "scan_acquisition_time");
      Variable var_totalIntensity =
          writer.addVariable(null, "total_intensity", DataType.DOUBLE, "total_intensity");
      Variable var_pointsInScans =
          writer.addVariable(null, "point_count", DataType.INT, "point_count");
      var_massValues.addAttribute(new Attribute("units", "M/Z"));
      var_intensityValues.addAttribute(new Attribute("units", "Arbitrary Intensity Units"));
      var_scanIndex.addAttribute(new Attribute("units", "index"));
      var_scanAcquisitionTime.addAttribute(new Attribute("units", "seconds"));
      var_totalIntensity.addAttribute(new Attribute("units", "Arbitrary Intensity Units"));
      var_pointsInScans.addAttribute(new Attribute("units", "count"));
      var_massValues.addAttribute(new Attribute("scale_factor", 1.0));
      var_intensityValues.addAttribute(new Attribute("scale_factor", 1.0));
      var_scanIndex.addAttribute(new Attribute("scale_factor", 1.0));
      var_scanAcquisitionTime.addAttribute(new Attribute("scale_factor", 1.0));
      var_totalIntensity.addAttribute(new Attribute("scale_factor", 1.0));
      var_pointsInScans.addAttribute(new Attribute("scale_factor", 1.0));
      // create file
      writer.create();
      ArrayDouble.D1 arr_massValues = new ArrayDouble.D1(dim_massValues.getLength());
      ArrayDouble.D1 arr_intensityValues = new ArrayDouble.D1(dim_intensityValues.getLength());
      ArrayDouble.D1 arr_scanIndex = new ArrayDouble.D1(dim_scanIndex.getLength());
      ArrayDouble.D1 arr_scanAcquisitionTime =
          new ArrayDouble.D1(dim_scanAcquisitionTime.getLength());
      ArrayDouble.D1 arr_totalIntensity = new ArrayDouble.D1(dim_totalIntensity.getLength());
      ArrayDouble.D1 arr_pointsInScans = new ArrayDouble.D1(dim_pointsInScans.getLength());
      for (int i = 0; i < allMZ.size(); i++) {
        arr_massValues.set(i, allMZ.get(i));
        arr_intensityValues.set(i, allIntensities.get(i));
      }
      int i = 0;
      for (; i < scanAcquisitionTime.size(); i++) {
        arr_scanAcquisitionTime.set(i, scanAcquisitionTime.get(i) * 60);
        arr_pointsInScans.set(i, pointsInScans.get(i));
        arr_scanIndex.set(i, startIndex.get(i));
        arr_totalIntensity.set(i, totalIntensity.get(i));
      }
      // arr_scanIndex.set(i, startIndex.get(i));
      // For tiny test file
      // arr_intensityValues.set(0, 200);
      // arr_scanIndex.set(0, 0);
      // arr_scanAcquisitionTime.set(0, 10);
      // arr_totalIntensity.set(0, 200);
      // arr_pointsInScans.set(0, 0);
      // arr_intensityValues.set(1, 300);
      // arr_scanIndex.set(1, 1);
      // arr_scanAcquisitionTime.set(1, 20);
      // arr_totalIntensity.set(1, 300);
      // arr_pointsInScans.set(1, 0);
      writer.write(var_massValues, arr_massValues);
      writer.write(var_intensityValues, arr_intensityValues);
      writer.write(var_scanIndex, arr_scanIndex);
      writer.write(var_scanAcquisitionTime, arr_scanAcquisitionTime);
      writer.write(var_totalIntensity, arr_totalIntensity);
      writer.write(var_pointsInScans, arr_pointsInScans);
      writer.close();
    }
  } catch (Exception e) {
    e.printStackTrace();
    setErrorMessage(e.getMessage());
    setStatus(TaskStatus.ERROR);
    // Return here so the error status is not overwritten by FINISHED below
    return;
  }
  setStatus(TaskStatus.FINISHED);
  logger.info("Finished mass detector on " + dataFile);
}
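The export above uses the NetCDF-Java (ucar.nc2) define-then-write sequence: declare dimensions and variables, call create() to leave define mode, then write the arrays and close the writer. Below is a minimal standalone sketch of that sequence using the same API calls as the task; the class name, output path, and sample values are placeholders and not part of MZmine.

import ucar.ma2.ArrayDouble;
import ucar.ma2.DataType;
import ucar.nc2.Attribute;
import ucar.nc2.Dimension;
import ucar.nc2.NetcdfFileWriter;
import ucar.nc2.Variable;

public class NetcdfWriteSketch {
  public static void main(String[] args) throws Exception {
    double[] masses = {100.05, 250.10, 399.95};
    NetcdfFileWriter writer =
        NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, "example.cdf", null);
    // Define mode: declare one dimension and one variable laid out along it
    Dimension dimMass = writer.addDimension(null, "mass_values", masses.length);
    Variable varMass = writer.addVariable(null, "mass_values", DataType.DOUBLE, "mass_values");
    varMass.addAttribute(new Attribute("units", "M/Z"));
    // Leave define mode so data can be written
    writer.create();
    // Fill a 1-D array and write it to the declared variable
    ArrayDouble.D1 arr = new ArrayDouble.D1(dimMass.getLength());
    for (int i = 0; i < masses.length; i++)
      arr.set(i, masses[i]);
    writer.write(varMass, arr);
    writer.close();
  }
}

The same pattern scales to the six variables written by the task above; each writer.write call pairs a Variable declared before create() with an Array of matching shape.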
use of net.sf.mzmine.datamodel.impl.SimpleMassList in project mzmine2 by mzmine.
the class ShoulderPeaksFilterTask method run.
/**
* @see Runnable#run()
*/
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started mass filter on " + dataFile);
  scanNumbers = dataFile.getScanNumbers();
  totalScans = scanNumbers.length;
  // Check if we have at least one scan with a mass list of given name
  boolean haveMassList = false;
  for (int i = 0; i < totalScans; i++) {
    Scan scan = dataFile.getScan(scanNumbers[i]);
    MassList massList = scan.getMassList(massListName);
    if (massList != null) {
      haveMassList = true;
      break;
    }
  }
  if (!haveMassList) {
    setStatus(TaskStatus.ERROR);
    setErrorMessage(dataFile.getName() + " has no mass list called '" + massListName + "'");
    return;
  }
  // Process all scans
  for (int i = 0; i < totalScans; i++) {
    if (isCanceled())
      return;
    Scan scan = dataFile.getScan(scanNumbers[i]);
    MassList massList = scan.getMassList(massListName);
    // Skip those scans which do not have a mass list of given name
    if (massList == null) {
      processedScans++;
      continue;
    }
    DataPoint[] mzPeaks = massList.getDataPoints();
    DataPoint[] newMzPeaks = ShoulderPeaksFilter.filterMassValues(mzPeaks, parameters);
    SimpleMassList newMassList =
        new SimpleMassList(massListName + " " + suffix, scan, newMzPeaks);
    scan.addMassList(newMassList);
    // Remove old mass list
    if (autoRemove)
      scan.removeMassList(massList);
    processedScans++;
  }
  setStatus(TaskStatus.FINISHED);
  logger.info("Finished shoulder peaks filter on " + dataFile);
}
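Both snippets above end with the same pattern: build a SimpleMassList from processed data points, attach it with scan.addMassList(), and optionally remove the old list. Below is a hypothetical helper that captures that recurring pattern; the class and method names are illustrative only, while the Scan, MassList, and SimpleMassList calls are the ones shown in the snippets.

import java.util.function.UnaryOperator;
import net.sf.mzmine.datamodel.DataPoint;
import net.sf.mzmine.datamodel.MassList;
import net.sf.mzmine.datamodel.Scan;
import net.sf.mzmine.datamodel.impl.SimpleMassList;

public final class MassListUtils {

  /** Applies a filter to an existing mass list and stores the result under a suffixed name. */
  public static void replaceMassList(Scan scan, String massListName, String suffix,
      UnaryOperator<DataPoint[]> filter, boolean removeOld) {
    MassList oldList = scan.getMassList(massListName);
    if (oldList == null)
      return; // nothing to do for scans without the requested mass list
    DataPoint[] filtered = filter.apply(oldList.getDataPoints());
    SimpleMassList newList = new SimpleMassList(massListName + " " + suffix, scan, filtered);
    scan.addMassList(newList);
    if (removeOld)
      scan.removeMassList(oldList);
  }
}

For the shoulder peaks case this could be invoked as replaceMassList(scan, massListName, suffix, dp -> ShoulderPeaksFilter.filterMassValues(dp, parameters), autoRemove).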
use of net.sf.mzmine.datamodel.impl.SimpleMassList in project mzmine2 by mzmine.
the class RawDataFileOpenHandler_2_3 method startElement.
/**
* @see org.xml.sax.helpers.DefaultHandler#startElement(java.lang.String, java.lang.String,
* java.lang.String, org.xml.sax.Attributes)
*/
public void startElement(String namespaceURI, String lName, String qName, Attributes attrs) throws SAXException {
  if (canceled)
    throw new SAXException("Parsing canceled");
  // This will remove any remaining characters from previous elements
  getTextOfElement();
  if (qName.equals(RawDataElementName_2_3.QUANTITY_FRAGMENT_SCAN.getElementName())) {
    numberOfFragments =
        Integer.parseInt(attrs.getValue(RawDataElementName_2_3.QUANTITY.getElementName()));
    if (numberOfFragments > 0) {
      fragmentScan = new int[numberOfFragments];
      fragmentCount = 0;
    }
  }
  if (qName.equals(RawDataElementName_2_3.MASS_LIST.getElementName())) {
    String name = attrs.getValue(RawDataElementName_2_3.NAME.getElementName());
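    // Scan and data points are not known yet; endElement() fills them in when the
    // MASS_LIST text and the enclosing SCAN element are processed.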
    SimpleMassList newML = new SimpleMassList(name, null, null);
    currentMassLists.add(newML);
  }
}
use of net.sf.mzmine.datamodel.impl.SimpleMassList in project mzmine2 by mzmine.
the class RawDataFileOpenHandler_2_3 method endElement.
/**
* @see org.xml.sax.helpers.DefaultHandler#endElement(java.lang.String, java.lang.String,
* java.lang.String)
*/
public void endElement(String namespaceURI, String sName, String qName) throws SAXException {
  if (canceled)
    throw new SAXException("Parsing canceled");
  // <NAME>
  if (qName.equals(RawDataElementName_2_3.NAME.getElementName())) {
    // Adds the scan file and the name to the new raw data file
    String name = getTextOfElement();
    logger.info("Loading raw data file: " + name);
    newRawDataFile.setName(name);
  }
  if (qName.equals(RawDataElementName_2_3.QUANTITY_SCAN.getElementName())) {
    // number of scans - actually not used for anything
    Integer.parseInt(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.SCAN_ID.getElementName())) {
    scanNumber = Integer.parseInt(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.MS_LEVEL.getElementName())) {
    msLevel = Integer.parseInt(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.PARENT_SCAN.getElementName())) {
    Integer.parseInt(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.PRECURSOR_MZ.getElementName())) {
    precursorMZ = Double.parseDouble(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.PRECURSOR_CHARGE.getElementName())) {
    precursorCharge = Integer.parseInt(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.RETENTION_TIME.getElementName())) {
    // Before MZmine 2.6 retention time was saved in seconds, but now we use minutes,
    // so we need to divide by 60
    retentionTime = Double.parseDouble(getTextOfElement()) / 60d;
  }
  if (qName.equals(RawDataElementName_2_3.CENTROIDED.getElementName())) {
    boolean centroided = Boolean.parseBoolean(getTextOfElement());
    if (centroided)
      spectrumType = MassSpectrumType.CENTROIDED;
    else
      spectrumType = MassSpectrumType.PROFILE;
  }
  if (qName.equals(RawDataElementName_2_3.QUANTITY_DATAPOINTS.getElementName())) {
    dataPointsNumber = Integer.parseInt(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.FRAGMENT_SCAN.getElementName())) {
    fragmentScan[fragmentCount++] = Integer.parseInt(getTextOfElement());
  }
  if (qName.equals(RawDataElementName_2_3.MASS_LIST.getElementName())) {
    char[] encodedDataPoints = getTextOfElement().toCharArray();
    DataPoint[] dataPoints = ScanUtils.decodeDataPointsBase64(encodedDataPoints);
    SimpleMassList newML = currentMassLists.get(currentMassLists.size() - 1);
    newML.setDataPoints(dataPoints);
  }
  if (qName.equals(RawDataElementName_2_3.SCAN.getElementName())) {
    try {
      int newStorageID = 1;
      TreeMap<Integer, Long> dataPointsOffsets = newRawDataFile.getDataPointsOffsets();
      TreeMap<Integer, Integer> dataPointsLengths = newRawDataFile.getDataPointsLengths();
      if (!dataPointsOffsets.isEmpty())
        newStorageID = dataPointsOffsets.lastKey().intValue() + 1;
      StorableScan storableScan = new StorableScan(newRawDataFile, newStorageID, dataPointsNumber,
          scanNumber, msLevel, retentionTime, precursorMZ, precursorCharge, fragmentScan,
          spectrumType, PolarityType.UNKNOWN, "", null);
      newRawDataFile.addScan(storableScan);
      dataPointsOffsets.put(newStorageID, storageFileOffset);
      dataPointsLengths.put(newStorageID, dataPointsNumber);
      for (SimpleMassList newML : currentMassLists) {
        newML.setScan(storableScan);
      }
      allMassLists.addAll(currentMassLists);
      currentMassLists.clear();
    } catch (IOException e) {
      throw new SAXException(e);
    }
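    // Advance by 2 values * 4 bytes per data point (m/z and intensity stored as 4-byte floats)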
    storageFileOffset += dataPointsNumber * 4 * 2;
  }
}
use of net.sf.mzmine.datamodel.impl.SimpleMassList in project mzmine2 by mzmine.
the class RawDataFileOpenHandler_2_3 method readRawDataFile.
/**
* Extracts the scan file and copies it into the temporary folder. Creates a new raw data file
* using the information from the XML raw data description file.
*
* @param is input stream of the XML raw data description
* @param scansFile the scan data file in the temporary folder
* @throws SAXException
* @throws ParserConfigurationException
*/
public RawDataFile readRawDataFile(InputStream is, File scansFile) throws IOException, ParserConfigurationException, SAXException {
  storageFileOffset = 0;
  charBuffer = new StringBuffer();
  currentMassLists = new ArrayList<SimpleMassList>();
  allMassLists = new ArrayList<SimpleMassList>();
  newRawDataFile = (RawDataFileImpl) MZmineCore.createNewFile(null);
  newRawDataFile.openDataPointsFile(scansFile);
  // Reads the XML file (raw data description)
  SAXParserFactory factory = SAXParserFactory.newInstance();
  SAXParser saxParser = factory.newSAXParser();
  saxParser.parse(is, this);
  // Attach the mass lists to their scans now; they come from the XML description
  // instead of being part of the data points file.
  for (SimpleMassList ml : allMassLists) {
    Scan s = ml.getScan();
    s.addMassList(ml);
  }
  RawDataFile rawDataFile = newRawDataFile.finishWriting();
  return rawDataFile;
}