Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
Example: the class SQLExportTask, method exportPeakListRow.
/**
 * Exports a single feature-list row into the configured database table, or — when {@code row}
 * is null — a single record containing only the constant / raw-file columns.
 *
 * <p>The INSERT statement is rebuilt from {@code exportColumns} on every call: column names come
 * from model column 0, the {@link SQLExportDataType} from column 1 and a free-text value (used by
 * CONSTANT and as the mass-list name for MSMS) from column 2.
 *
 * <p>For a non-null row, one record is inserted per raw data file of the row — but only if at
 * least one exported column is per-file (PEAK*, RAWFILE, ...); otherwise the loop stops after the
 * first file ({@code loopDataFiles} stays false).
 *
 * @param row the feature-list row to export, or null to export an "empty" constants-only record
 * @throws SQLException if preparing or executing the INSERT fails
 */
private void exportPeakListRow(PeakListRow row) throws SQLException {

    // Cancel?
    if (isCanceled()) {
        return;
    }

    // Set to true as soon as a per-raw-file column is written; controls whether we insert
    // one record per raw data file or just one record total.
    boolean loopDataFiles = false;

    // Build "INSERT INTO <table> (c1,c2,...) VALUES (?,?,...)" from the export column model.
    StringBuilder sql = new StringBuilder();
    sql.append("INSERT INTO ");
    sql.append(tableName);
    sql.append(" (");
    for (int i = 0; i < exportColumns.getRowCount(); i++) {
        sql.append(exportColumns.getValueAt(i, 0));
        if (i < exportColumns.getRowCount() - 1)
            sql.append(",");
    }
    sql.append(" ) VALUES (");
    for (int i = 0; i < exportColumns.getRowCount(); i++) {
        sql.append("?");
        if (i < exportColumns.getRowCount() - 1)
            sql.append(",");
    }
    sql.append(")");

    // FIX: the PreparedStatement was never closed (leaked on every call and on any
    // SQLException). try-with-resources guarantees it is released.
    try (PreparedStatement statement = dbConnection.prepareStatement(sql.toString())) {

        if (row == null) {
            // "Empty" export: only CONSTANT and RAWFILE columns carry data, everything else
            // is written as SQL NULL.
            for (int i = 0; i < exportColumns.getRowCount(); i++) {
                SQLExportDataType dataType = (SQLExportDataType) exportColumns.getValueAt(i, 1);
                String dataValue = (String) exportColumns.getValueAt(i, 2);
                switch (dataType) {
                    case CONSTANT:
                        statement.setString(i + 1, dataValue);
                        break;
                    case RAWFILE:
                        // With no row available, fall back to the first raw data file of the
                        // whole feature list.
                        RawDataFile[] rawdatafiles = peakList.getRawDataFiles();
                        statement.setString(i + 1, rawdatafiles[0].getName());
                        break;
                    default:
                        statement.setString(i + 1, null);
                        break;
                }
            }
            statement.executeUpdate();
        } else {
            for (RawDataFile rawDataFile : row.getRawDataFiles()) {
                Feature peak = row.getPeak(rawDataFile);

                for (int i = 0; i < exportColumns.getRowCount(); i++) {
                    SQLExportDataType dataType = (SQLExportDataType) exportColumns.getValueAt(i, 1);
                    String dataValue = (String) exportColumns.getValueAt(i, 2);
                    switch (dataType) {
                        case CONSTANT:
                            statement.setString(i + 1, dataValue);
                            break;
                        case MZ:
                            statement.setDouble(i + 1, row.getAverageMZ());
                            break;
                        case RT:
                            statement.setDouble(i + 1, row.getAverageRT());
                            break;
                        case ID:
                            statement.setInt(i + 1, row.getID());
                            break;
                        case PEAKCHARGE:
                            statement.setDouble(i + 1, peak.getCharge());
                            loopDataFiles = true;
                            break;
                        case PEAKDURATION:
                            statement.setDouble(i + 1,
                                RangeUtils.rangeLength(peak.getRawDataPointsRTRange()));
                            loopDataFiles = true;
                            break;
                        case PEAKSTATUS:
                            statement.setString(i + 1, peak.getFeatureStatus().name());
                            loopDataFiles = true;
                            break;
                        case PEAKMZ:
                            statement.setDouble(i + 1, peak.getMZ());
                            loopDataFiles = true;
                            break;
                        case PEAKRT:
                            statement.setDouble(i + 1, peak.getRT());
                            loopDataFiles = true;
                            break;
                        case PEAKRT_START:
                            statement.setDouble(i + 1, peak.getRawDataPointsRTRange().lowerEndpoint());
                            loopDataFiles = true;
                            break;
                        case PEAKRT_END:
                            statement.setDouble(i + 1, peak.getRawDataPointsRTRange().upperEndpoint());
                            loopDataFiles = true;
                            break;
                        case PEAKHEIGHT:
                            statement.setDouble(i + 1, peak.getHeight());
                            loopDataFiles = true;
                            break;
                        case PEAKAREA:
                            statement.setDouble(i + 1, peak.getArea());
                            loopDataFiles = true;
                            break;
                        case DATAPOINTS:
                            statement.setDouble(i + 1, peak.getScanNumbers().length);
                            loopDataFiles = true;
                            break;
                        case FWHM:
                            statement.setDouble(i + 1, peak.getFWHM());
                            loopDataFiles = true;
                            break;
                        case TAILINGFACTOR:
                            statement.setDouble(i + 1, peak.getTailingFactor());
                            loopDataFiles = true;
                            break;
                        case ASYMMETRYFACTOR:
                            statement.setDouble(i + 1, peak.getAsymmetryFactor());
                            loopDataFiles = true;
                            break;
                        case RAWFILE:
                            statement.setString(i + 1, rawDataFile.getName());
                            loopDataFiles = true;
                            break;
                        case HEIGHT:
                            statement.setDouble(i + 1, row.getAverageHeight());
                            break;
                        case AREA:
                            statement.setDouble(i + 1, row.getAverageArea());
                            break;
                        case COMMENT:
                            statement.setString(i + 1, row.getComment());
                            break;
                        case IDENTITY:
                            PeakIdentity id = row.getPreferredPeakIdentity();
                            if (id != null) {
                                statement.setString(i + 1, id.getName());
                            } else {
                                statement.setNull(i + 1, Types.VARCHAR);
                            }
                            break;
                        case ISOTOPEPATTERN:
                            // Serialize the best isotope pattern's data points into a BLOB.
                            IsotopePattern isotopes = row.getBestIsotopePattern();
                            if (isotopes == null) {
                                statement.setNull(i + 1, Types.BLOB);
                                break;
                            }
                            DataPoint[] dataPoints = isotopes.getDataPoints();
                            byte[] bytes = ScanUtils.encodeDataPointsToBytes(dataPoints);
                            ByteArrayInputStream is = new ByteArrayInputStream(bytes);
                            statement.setBlob(i + 1, is);
                            break;
                        case MSMS:
                            int msmsScanNum = row.getBestPeak().getMostIntenseFragmentScanNumber();
                            // Check if there is any MS/MS scan
                            if (msmsScanNum <= 0) {
                                statement.setNull(i + 1, Types.BLOB);
                                break;
                            }
                            RawDataFile dataFile = row.getBestPeak().getDataFile();
                            Scan msmsScan = dataFile.getScan(msmsScanNum);
                            // dataValue holds the mass-list name for MSMS columns.
                            MassList msmsMassList = msmsScan.getMassList(dataValue);
                            // Check if there is a masslist for the scan
                            if (msmsMassList == null) {
                                statement.setNull(i + 1, Types.BLOB);
                                break;
                            }
                            dataPoints = msmsMassList.getDataPoints();
                            bytes = ScanUtils.encodeDataPointsToBytes(dataPoints);
                            is = new ByteArrayInputStream(bytes);
                            statement.setBlob(i + 1, is);
                            break;
                        default:
                            break;
                    }
                }
                statement.executeUpdate();

                // If no per-raw-file column was exported, one record is enough — stop after
                // the first data file.
                if (!loopDataFiles) {
                    break;
                }
            }
        }
    }
}
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
Example: the class XMLExportTask, method run.
/**
 * Saves every selected feature list to an XML file, optionally zip-compressed. When the file
 * name contains the substitution pattern, one file per feature list is written (with the
 * cleaned-up list name substituted in); otherwise each list overwrites the same file.
 *
 * @see java.lang.Runnable#run()
 */
public void run() {
    setStatus(TaskStatus.PROCESSING);

    // Shall export several files? (true when the name contains the substitution pattern)
    boolean substitute = fileName.getPath().contains(plNamePattern);

    // Process feature lists
    for (int i = 0; i < peakLists.length; i++) {
        PeakList peakList = peakLists[i];
        File curFile = fileName;
        try {
            // Filename
            if (substitute) {
                // Cleanup from illegal filename characters
                String cleanPlName = peakList.getName().replaceAll("[^a-zA-Z0-9.-]", "_");
                // Substitute
                String newFilename =
                    fileName.getPath().replaceAll(Pattern.quote(plNamePattern), cleanPlName);
                curFile = new File(newFilename);
            }

            logger.info("Started saving feature list " + peakList.getName());

            // Open the output file. FIX: the previous version first opened a throw-away
            // FileWriter as a writability probe and never closed it — a resource leak that
            // also truncated the file twice. Open the real stream once and report failure
            // as a task error, exactly as before.
            FileOutputStream fos;
            try {
                fos = new FileOutputStream(curFile);
            } catch (Exception e) {
                setStatus(TaskStatus.ERROR);
                setErrorMessage("Could not open file " + curFile + " for writing.");
                return;
            }

            OutputStream finalStream = fos;
            try {
                if (compression) {
                    // Closing finalStream later closes the wrapped FileOutputStream too.
                    ZipOutputStream zos = new ZipOutputStream(fos);
                    zos.setLevel(9);
                    zos.putNextEntry(new ZipEntry(fileName.getName()));
                    finalStream = zos;
                }

                // Map each raw data file to the identifier used inside the saved XML.
                Hashtable<RawDataFile, String> dataFilesIDMap =
                    new Hashtable<RawDataFile, String>();
                for (RawDataFile file : peakList.getRawDataFiles()) {
                    dataFilesIDMap.put(file, file.getName());
                }

                PeakListSaveHandler peakListSaveHandler =
                    new PeakListSaveHandler(finalStream, dataFilesIDMap);
                peakListSaveHandlers[i] = peakListSaveHandler;
                peakListSaveHandler.savePeakList(peakList);
            } finally {
                // FIX: close the stream even when savePeakList throws (was leaked before).
                finalStream.close();
            }
        } catch (Exception e) {
            /* we may already have set the status to CANCELED */
            if (getStatus() == TaskStatus.PROCESSING) {
                setStatus(TaskStatus.ERROR);
            }
            setErrorMessage(e.toString());
            e.printStackTrace();
            return;
        }

        logger.info("Finished saving " + peakList.getName());
    }

    // FIX: mark the task FINISHED only after ALL feature lists are saved; previously the
    // status was set inside the loop, right after the first list.
    setStatus(TaskStatus.FINISHED);
}
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
Example: the class IsotopeGrouperTask, method run.
/**
 * Deisotopes the task's feature list: for every peak (highest first) it searches for the charge
 * state whose isotope pattern fits best, collapses the fitted peaks into a single representative
 * peak carrying the detected isotope pattern, and adds it to a new feature list.
 *
 * @see Runnable#run()
 */
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Running isotopic peak grouper on " + peakList);
// We assume source peakList contains one datafile
RawDataFile dataFile = peakList.getRawDataFile(0);
// Create a new deisotoped peakList
deisotopedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());
// Collect all selected charge states (1..maximumCharge)
int[] charges = new int[maximumCharge];
for (int i = 0; i < maximumCharge; i++) charges[i] = i + 1;
// Sort peaks by descending height so the most intense peak seeds each isotope pattern
Feature[] sortedPeaks = peakList.getPeaks(dataFile);
Arrays.sort(sortedPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));
// Loop through all peaks
totalPeaks = sortedPeaks.length;
for (int ind = 0; ind < totalPeaks; ind++) {
if (isCanceled())
return;
Feature aPeak = sortedPeaks[ind];
// Check if peak was already deleted (i.e. consumed by a previous isotope pattern)
if (aPeak == null) {
processedPeaks++;
continue;
}
// Check which charge state fits best around this peak; score = number of fitted peaks.
// Ties are broken in favor of the LOWER charge (bestFitCharge > charge).
int bestFitCharge = 0;
int bestFitScore = -1;
Vector<Feature> bestFitPeaks = null;
for (int charge : charges) {
Vector<Feature> fittedPeaks = new Vector<Feature>();
fittedPeaks.add(aPeak);
fitPattern(fittedPeaks, aPeak, charge, sortedPeaks);
int score = fittedPeaks.size();
if ((score > bestFitScore) || ((score == bestFitScore) && (bestFitCharge > charge))) {
bestFitScore = score;
bestFitCharge = charge;
bestFitPeaks = fittedPeaks;
}
}
PeakListRow oldRow = peakList.getPeakRow(aPeak);
// bestFitPeaks always contains at least the seed peak itself
assert bestFitPeaks != null;
// If only the seed peak fits (no isotope partners found), keep the original row
// unchanged in the deisotoped feature list.
if (bestFitPeaks.size() == 1) {
deisotopedPeakList.addRow(oldRow);
processedPeaks++;
continue;
}
// Convert the peak pattern to array
Feature[] originalPeaks = bestFitPeaks.toArray(new Feature[0]);
// Create a new SimpleIsotopePattern from the (m/z, height) of every fitted peak
DataPoint[] isotopes = new DataPoint[bestFitPeaks.size()];
for (int i = 0; i < isotopes.length; i++) {
Feature p = originalPeaks[i];
isotopes[i] = new SimpleDataPoint(p.getMZ(), p.getHeight());
}
SimpleIsotopePattern newPattern = new SimpleIsotopePattern(isotopes, IsotopePatternStatus.DETECTED, aPeak.toString());
// Choose the representative peak: either the most intense one, or the one with
// the lowest m/z
if (chooseMostIntense) {
Arrays.sort(originalPeaks, new PeakSorter(SortingProperty.Height, SortingDirection.Descending));
} else {
Arrays.sort(originalPeaks, new PeakSorter(SortingProperty.MZ, SortingDirection.Ascending));
}
Feature newPeak = new SimpleFeature(originalPeaks[0]);
newPeak.setIsotopePattern(newPattern);
newPeak.setCharge(bestFitCharge);
// Keep old ID
int oldID = oldRow.getID();
SimplePeakListRow newRow = new SimplePeakListRow(oldID);
PeakUtils.copyPeakListRowProperties(oldRow, newRow);
newRow.addPeak(dataFile, newPeak);
deisotopedPeakList.addRow(newRow);
// Remove all peaks already assigned to isotope pattern, so they never seed or join
// another pattern
for (int i = 0; i < sortedPeaks.length; i++) {
if (bestFitPeaks.contains(sortedPeaks[i]))
sortedPeaks[i] = null;
}
// Update completion rate
processedPeaks++;
}
// Add new peakList to the project
project.addPeakList(deisotopedPeakList);
// Load previous applied methods
for (PeakListAppliedMethod proc : peakList.getAppliedMethods()) {
deisotopedPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
deisotopedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Isotopic peaks grouper", parameters));
// Remove the original peakList if requested
if (removeOriginal)
project.removePeakList(peakList);
logger.info("Finished isotopic peak grouper on " + peakList);
setStatus(TaskStatus.FINISHED);
}
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
Example: the class Candidates, method getAvgPeakHeights.
/**
 * Averages the peak heights of the rows with the given IDs, using only scans whose mass list
 * contains a matching data point for every requested m/z — so all averages are computed over
 * exactly the same set of scans.
 *
 * @param ID feature-list row IDs whose average heights shall be computed
 * @return average heights in the order of {@code ID}, or null if no rows / raw data files were
 *         found or no scan contained all requested m/z values
 */
private double[] getAvgPeakHeights(int[] ID) {
    PeakListRow[] rows = plh.getRowsByID(ID);
    // FIX: guard against an empty lookup result before dereferencing rows[0]
    // (previously this threw ArrayIndexOutOfBoundsException).
    if (rows.length == 0)
        return null;
    RawDataFile[] raws = rows[0].getRawDataFiles();
    if (raws.length < 1)
        return null;

    // Average m/z of every requested row. Assumes rows.length == ID.length, as returned by
    // plh.getRowsByID — TODO confirm against PeakListHandler.
    double[] mzs = new double[ID.length];
    for (int i = 0; i < rows.length; i++) mzs[i] = rows[i].getAverageMZ();

    double[] avgHeights = new double[ID.length];
    int pointsAdded = 0;     // number of scans that contributed to the averages

    for (RawDataFile raw : raws) {
        // Skip files whose overall m/z range cannot contain the first row at all.
        if (!raw.getDataMZRange().contains(rows[0].getAverageMZ()))
            continue;

        int[] scanNums = raw.getScanNumbers();
        for (int i = 0; i < scanNums.length; i++) {
            Scan scan = raw.getScan(scanNums[i]);
            MassList list = scan.getMassList(massListName);
            // Only consider scans whose mass list contains every m/z above minHeight.
            if (list == null || !massListContainsEveryMZ(list, mzs, minHeight))
                continue;

            double[] avgBuffer = new double[mzs.length];
            boolean allFound = true;
            for (int j = 0; j < mzs.length; j++) {
                DataPoint[] points =
                    getMassListDataPointsByMass(list, mzTolerance.getToleranceRange(mzs[j]));
                if (points.length == 0)
                    continue;
                DataPoint dp = getClosestDataPoint(points, rows[j].getAverageMZ(), minHeight);
                if (dp == null) {
                    // The mass list contained something close to every requested m/z above
                    // minHeight, BUT the closest point may still have failed the check done
                    // inside getClosestDataPoint() — skip this scan entirely.
                    allFound = false;
                    break;
                }
                avgBuffer[j] = dp.getIntensity();
            }

            // Accumulate only scans that supplied a data point for EVERY m/z.
            if (allFound) {
                pointsAdded++;
                for (int j = 0; j < mzs.length; j++) avgHeights[j] += avgBuffer[j];
            }
        }
    }

    if (pointsAdded == 0) {
        logger.warning("Error: Peaks with ids: " + Arrays.toString(ID) + " were not in same scans at all. Please update the parameters.");
        return null;
    }
    for (int i = 0; i < avgHeights.length; i++) avgHeights[i] /= (pointsAdded);
    return avgHeights;
}
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 (by mzmine).
Example: the class ADAP3DecompositionV2Module, method runModule.
@Override
@Nonnull
public ExitCode runModule(@Nonnull MZmineProject project, @Nonnull ParameterSet parameters, @Nonnull Collection<Task> tasks) {
    // Schedule one decomposition task for every chromatogram/peak-list pair declared in
    // the parameter set, keyed by raw data file.
    Map<RawDataFile, ChromatogramPeakPair> pairsByFile =
        ChromatogramPeakPair.fromParameterSet(parameters);
    pairsByFile.values()
        .forEach(pair -> tasks.add(new ADAP3DecompositionV2Task(project, pair, parameters)));
    return ExitCode.OK;
}
Aggregations