Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.
Class ProjectTreeMouseHandler, method handleDoubleClickEvent.
private void handleDoubleClickEvent(MouseEvent e) {
    TreePath clickedPath = tree.getPathForLocation(e.getX(), e.getY());
    if (clickedPath == null)
        return;
    DefaultMutableTreeNode node = (DefaultMutableTreeNode) clickedPath.getLastPathComponent();
    Object clickedObject = node.getUserObject();
    // Open the visualizer that matches the type of the double-clicked tree node
    if (clickedObject instanceof RawDataFile) {
        RawDataFile clickedFile = (RawDataFile) clickedObject;
        TICVisualizerModule.setupNewTICVisualizer(clickedFile);
    }
    if (clickedObject instanceof PeakList) {
        PeakList clickedPeakList = (PeakList) clickedObject;
        PeakListTableModule.showNewPeakListVisualizerWindow(clickedPeakList);
    }
    if (clickedObject instanceof Scan) {
        Scan clickedScan = (Scan) clickedObject;
        SpectraVisualizerModule.showNewSpectrumWindow(clickedScan.getDataFile(), clickedScan.getScanNumber());
    }
    if (clickedObject instanceof MassList) {
        MassList clickedMassList = (MassList) clickedObject;
        Scan clickedScan = clickedMassList.getScan();
        SpectraVisualizerWindow window = SpectraVisualizerModule.showNewSpectrumWindow(clickedScan.getDataFile(), clickedScan.getScanNumber());
        MassListDataSet dataset = new MassListDataSet(clickedMassList);
        window.addDataSet(dataset, Color.green);
    }
    if (clickedObject instanceof PeakListRow) {
        PeakListRow clickedPeak = (PeakListRow) clickedObject;
        PeakSummaryVisualizerModule.showNewPeakSummaryWindow(clickedPeak);
    }
}
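A minimal sketch of how a handler like this is typically attached to the project JTree from inside the same class; the MouseAdapter wrapper and the double-click count check are assumptions for illustration, not part of the snippet above:
tree.addMouseListener(new java.awt.event.MouseAdapter() {
    @Override
    public void mouseClicked(java.awt.event.MouseEvent e) {
        // Delegate only genuine double clicks to the handler shown above
        if (e.getClickCount() == 2)
            handleDoubleClickEvent(e);
    }
});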
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.
Class RawDataTreeModel, method valueForPathChanged.
public void valueForPathChanged(TreePath path, Object value) {
    DefaultMutableTreeNode node = (DefaultMutableTreeNode) path.getLastPathComponent();
    Object object = node.getUserObject();
    String newName = (String) value;
    // Rename the underlying raw data file or feature list
    if (object instanceof RawDataFile) {
        RawDataFile df = (RawDataFile) object;
        df.setName(newName);
    }
    if (object instanceof PeakList) {
        PeakList pl = (PeakList) object;
        pl.setName(newName);
    }
}
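For context, Swing calls valueForPathChanged on the tree model when the user finishes in-place editing of a node label, so enabling editing is enough for a rename to reach RawDataFile.setName or PeakList.setName. The direct call below is only a hypothetical way to trigger the same path programmatically; treeModel and somePath are assumed variables:
tree.setEditable(true);
// Hypothetical programmatic rename of the node at "somePath"
treeModel.valueForPathChanged(somePath, "My renamed data file");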
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.
Class SimplePeakList, method addRow.
@Override
public void addRow(PeakListRow row) {
    List<RawDataFile> myFiles = Arrays.asList(this.getRawDataFiles());
    // Reject rows that reference data files not belonging to this feature list
    for (RawDataFile testFile : row.getRawDataFiles()) {
        if (!myFiles.contains(testFile))
            throw (new IllegalArgumentException("Data file " + testFile + " is not in this feature list"));
    }
    peakListRows.add(row);
    if (row.getDataPointMaxIntensity() > maxDataPointIntensity) {
        maxDataPointIntensity = row.getDataPointMaxIntensity();
    }
    // Extend the m/z and RT ranges to cover the new row
    if (mzRange == null) {
        mzRange = Range.singleton(row.getAverageMZ());
        rtRange = Range.singleton(row.getAverageRT());
    } else {
        mzRange = mzRange.span(Range.singleton(row.getAverageMZ()));
        rtRange = rtRange.span(Range.singleton(row.getAverageRT()));
    }
}
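A hedged usage sketch: copying rows from an existing feature list (here called sourceList, an assumed variable) into a new SimplePeakList backed by the same raw data file. The data-file check above is what would reject a row that comes from a different file:
RawDataFile dataFile = sourceList.getRawDataFile(0);
PeakList copy = new SimplePeakList(sourceList + " copy", dataFile);
for (PeakListRow row : sourceList.getRows()) {
    // addRow(...) also updates the list's maximum intensity and its m/z and RT ranges
    copy.addRow(row);
}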
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.
Class ADAP3DecompositionV1_5Task, method getPeaks.
/**
* Convert MZmine PeakList to a list of ADAP Peaks
*
* @param peakList MZmine PeakList object
* @param edgeToHeightThreshold edge-to-height threshold to determine peaks that can be merged
* @param deltaToHeightThreshold delta-to-height threshold to determine peaks that can be merged
* @return list of ADAP Peaks
*/
@Nonnull
public static List<Peak> getPeaks(final PeakList peakList, final double edgeToHeightThreshold, final double deltaToHeightThreshold) {
    RawDataFile dataFile = peakList.getRawDataFile(0);
    List<Peak> peaks = new ArrayList<>();
    for (PeakListRow row : peakList.getRows()) {
        Feature peak = row.getBestPeak();
        int[] scanNumbers = peak.getScanNumbers();
        // Build chromatogram (retention time -> intensity)
        NavigableMap<Double, Double> chromatogram = new TreeMap<>();
        for (int scanNumber : scanNumbers) {
            DataPoint dataPoint = peak.getDataPoint(scanNumber);
            if (dataPoint != null)
                chromatogram.put(dataFile.getScan(scanNumber).getRetentionTime(), dataPoint.getIntensity());
        }
        if (chromatogram.size() <= 1)
            continue;
        // Fill out PeakInfo
        PeakInfo info = new PeakInfo();
        try {
            // Note: info.peakID is the index of PeakListRow in PeakList.peakListRows (starts from 0),
            // whereas row.getID() is row.myID (starts from 1)
            info.peakID = row.getID() - 1;
            // Find the scan with the highest intensity (the apex)
            double height = Double.NEGATIVE_INFINITY;
            for (int scan : scanNumbers) {
                double intensity = peak.getDataPoint(scan).getIntensity();
                if (intensity > height) {
                    height = intensity;
                    info.peakIndex = scan;
                }
            }
            info.leftApexIndex = scanNumbers[0];
            info.rightApexIndex = scanNumbers[scanNumbers.length - 1];
            info.retTime = peak.getRT();
            info.mzValue = peak.getMZ();
            info.intensity = peak.getHeight();
            info.leftPeakIndex = info.leftApexIndex;
            info.rightPeakIndex = info.rightApexIndex;
        } catch (Exception e) {
            LOG.info("Skipping " + row + ": " + e.getMessage());
            continue;
        }
        peaks.add(new Peak(chromatogram, info));
    }
    FeatureTools.correctPeakBoundaries(peaks, edgeToHeightThreshold, deltaToHeightThreshold);
    return peaks;
}
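A hypothetical call to the method above, assuming peakList already holds resolved features; the 0.3 and 0.2 thresholds are placeholder values for illustration, not module defaults:
List<Peak> adapPeaks = ADAP3DecompositionV1_5Task.getPeaks(peakList, 0.3, 0.2);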
Use of net.sf.mzmine.datamodel.RawDataFile in project mzmine2 by mzmine.
Class ADAP3DecompositionV1_5Task, method decomposePeaks.
private PeakList decomposePeaks(PeakList peakList) throws CloneNotSupportedException, IOException {
    RawDataFile dataFile = peakList.getRawDataFile(0);
    // Create new feature list.
    final PeakList resolvedPeakList = new SimplePeakList(peakList + " " + parameters.getParameter(ADAP3DecompositionV1_5Parameters.SUFFIX).getValue(), dataFile);
    // Load previously applied methods.
    for (final PeakList.PeakListAppliedMethod method : peakList.getAppliedMethods()) {
        resolvedPeakList.addDescriptionOfAppliedTask(method);
    }
    // Add task description to feature list.
    resolvedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Peak deconvolution by ADAP-3", parameters));
    // Collect peak information
    List<Peak> peaks = getPeaks(peakList, this.parameters.getParameter(ADAP3DecompositionV1_5Parameters.EDGE_TO_HEIGHT_RATIO).getValue(), this.parameters.getParameter(ADAP3DecompositionV1_5Parameters.DELTA_TO_HEIGHT_RATIO).getValue());
    // Find components (a.k.a. clusters of peaks with fragmentation spectra)
    List<Component> components = getComponents(peaks);
    // Create a PeakListRow for each component
    List<PeakListRow> newPeakListRows = new ArrayList<>();
    int rowID = 0;
    for (final Component component : components) {
        if (component.getSpectrum().isEmpty())
            continue;
        PeakListRow row = new SimplePeakListRow(++rowID);
        // Add the reference peak
        PeakListRow refPeakRow = originalPeakList.getRow(component.getBestPeak().getInfo().peakID);
        Feature refPeak = new SimpleFeature(refPeakRow.getBestPeak());
        // Add spectrum
        List<DataPoint> dataPoints = new ArrayList<>();
        for (Map.Entry<Double, Double> entry : component.getSpectrum().entrySet()) {
            dataPoints.add(new SimpleDataPoint(entry.getKey(), entry.getValue()));
        }
        refPeak.setIsotopePattern(new SimpleIsotopePattern(dataPoints.toArray(new DataPoint[dataPoints.size()]), IsotopePattern.IsotopePatternStatus.PREDICTED, "Spectrum"));
        row.addPeak(dataFile, refPeak);
        // Add PeakInformation
        if (refPeakRow.getPeakInformation() != null) {
            SimplePeakInformation information = new SimplePeakInformation(new HashMap<>(refPeakRow.getPeakInformation().getAllProperties()));
            row.setPeakInformation(information);
        }
        // Set row properties
        row.setAverageMZ(refPeakRow.getAverageMZ());
        row.setAverageRT(refPeakRow.getAverageRT());
        // resolvedPeakList.addRow(row);
        newPeakListRows.add(row);
    }
    // ------------------------------------
    // Sort new peak rows by retention time
    // ------------------------------------
    Collections.sort(newPeakListRows, new Comparator<PeakListRow>() {

        @Override
        public int compare(PeakListRow row1, PeakListRow row2) {
            double retTime1 = row1.getAverageRT();
            double retTime2 = row2.getAverageRT();
            return Double.compare(retTime1, retTime2);
        }
    });
    for (PeakListRow row : newPeakListRows)
        resolvedPeakList.addRow(row);
    return resolvedPeakList;
}
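As a side note on the sorting step, the anonymous Comparator above can be written more compactly on Java 8 and later; this is an equivalent one-liner, not a change in behavior:
newPeakListRows.sort(Comparator.comparingDouble(PeakListRow::getAverageRT));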