use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
the class LinearNormalizerTask method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Running linear normalizer");
// This hashtable maps rows from original alignment result to rows of
// the normalized alignment
Hashtable<PeakListRow, SimplePeakListRow> rowMap = new Hashtable<PeakListRow, SimplePeakListRow>();
// Create new feature list
normalizedPeakList = new SimplePeakList(originalPeakList + " " + suffix, originalPeakList.getRawDataFiles());
// Loop through all raw data files and find the peak with the greatest height
double maxOriginalHeight = 0.0;
for (RawDataFile file : originalPeakList.getRawDataFiles()) {
for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
Feature p = originalpeakListRow.getPeak(file);
if (p != null) {
if (maxOriginalHeight <= p.getHeight())
maxOriginalHeight = p.getHeight();
}
}
}
// Loop through all raw data files, and normalize peak values
for (RawDataFile file : originalPeakList.getRawDataFiles()) {
// Cancel?
if (isCanceled()) {
return;
}
// Determine normalization type and calculate normalization factor
double normalizationFactor = 1.0;
// - normalization by average peak intensity
if (normalizationType == NormalizationType.AverageIntensity) {
double intensitySum = 0;
int intensityCount = 0;
for (PeakListRow peakListRow : originalPeakList.getRows()) {
Feature p = peakListRow.getPeak(file);
if (p != null) {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
intensitySum += p.getHeight();
} else {
intensitySum += p.getArea();
}
intensityCount++;
}
}
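// If no peaks were found in this file, intensityCount stays 0 and the division below yields NaN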
normalizationFactor = intensitySum / (double) intensityCount;
}
// - normalization by average squared peak intensity
if (normalizationType == NormalizationType.AverageSquaredIntensity) {
double intensitySum = 0.0;
int intensityCount = 0;
for (PeakListRow peakListRow : originalPeakList.getRows()) {
Feature p = peakListRow.getPeak(file);
if (p != null) {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
intensitySum += (p.getHeight() * p.getHeight());
} else {
intensitySum += (p.getArea() * p.getArea());
}
intensityCount++;
}
}
normalizationFactor = intensitySum / (double) intensityCount;
}
// - normalization by maximum peak intensity
if (normalizationType == NormalizationType.MaximumPeakHeight) {
double maximumIntensity = 0.0;
for (PeakListRow peakListRow : originalPeakList.getRows()) {
Feature p = peakListRow.getPeak(file);
if (p != null) {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
if (maximumIntensity < p.getHeight())
maximumIntensity = p.getHeight();
} else {
if (maximumIntensity < p.getArea())
maximumIntensity = p.getArea();
}
}
}
normalizationFactor = maximumIntensity;
}
// - normalization by total raw signal
if (normalizationType == NormalizationType.TotalRawSignal) {
normalizationFactor = 0;
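// Sum the total ion current (TIC) over all MS level 1 scans in this file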
for (int scanNumber : file.getScanNumbers(1)) {
Scan scan = file.getScan(scanNumber);
normalizationFactor += scan.getTIC();
}
}
// Readjust normalization factor so that maximum height will be
// equal to maximumOverallPeakHeightAfterNormalization after
// normalization
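// (Note: algebraically the next two lines reduce to maxOriginalHeight /
// maximumOverallPeakHeightAfterNormalization, so the final factor does not
// depend on the type-specific value computed above.)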
double maxNormalizedHeight = maxOriginalHeight / normalizationFactor;
normalizationFactor = normalizationFactor * maxNormalizedHeight / maximumOverallPeakHeightAfterNormalization;
// Normalize all peak intensities using the normalization factor
for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
// Cancel?
if (isCanceled()) {
return;
}
Feature originalPeak = originalpeakListRow.getPeak(file);
if (originalPeak != null) {
SimpleFeature normalizedPeak = new SimpleFeature(originalPeak);
PeakUtils.copyPeakProperties(originalPeak, normalizedPeak);
double normalizedHeight = originalPeak.getHeight() / normalizationFactor;
double normalizedArea = originalPeak.getArea() / normalizationFactor;
normalizedPeak.setHeight(normalizedHeight);
normalizedPeak.setArea(normalizedArea);
SimplePeakListRow normalizedRow = rowMap.get(originalpeakListRow);
if (normalizedRow == null) {
normalizedRow = new SimplePeakListRow(originalpeakListRow.getID());
PeakUtils.copyPeakListRowProperties(originalpeakListRow, normalizedRow);
rowMap.put(originalpeakListRow, normalizedRow);
}
normalizedRow.addPeak(file, normalizedPeak);
}
}
// Progress
processedDataFiles++;
}
// Finally add all normalized rows to normalized alignment result
for (PeakListRow originalpeakListRow : originalPeakList.getRows()) {
SimplePeakListRow normalizedRow = rowMap.get(originalpeakListRow);
if (normalizedRow == null)
continue;
normalizedPeakList.addRow(normalizedRow);
}
// Add new peaklist to the project
project.addPeakList(normalizedPeakList);
// Copy previously applied methods from the original feature list
for (PeakListAppliedMethod proc : originalPeakList.getAppliedMethods()) {
normalizedPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
normalizedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Linear normalization by " + normalizationType, parameters));
// Remove the original peaklist if requested
if (removeOriginal)
project.removePeakList(originalPeakList);
logger.info("Finished linear normalizer");
setStatus(TaskStatus.FINISHED);
}
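For orientation, here is a condensed, standalone sketch of the per-file factor selection performed above. NormType and the plain intensity array are hypothetical stand-ins for NormalizationType and the MZmine peak accessors, and the readjustment step is omitted.

class NormalizationFactorSketch {
  enum NormType { AVERAGE, AVERAGE_SQUARED, MAXIMUM, TOTAL_RAW_SIGNAL }

  static double normalizationFactor(NormType type, double[] intensities, double totalRawSignal) {
    double factor = 1.0;
    switch (type) {
      case AVERAGE: {
        double sum = 0.0;
        for (double v : intensities) sum += v;
        factor = sum / intensities.length; // NaN for an empty array, as in the task
        break;
      }
      case AVERAGE_SQUARED: {
        double sum = 0.0;
        for (double v : intensities) sum += v * v;
        factor = sum / intensities.length;
        break;
      }
      case MAXIMUM: {
        double max = 0.0;
        for (double v : intensities) max = Math.max(max, v);
        factor = max;
        break;
      }
      case TOTAL_RAW_SIGNAL:
        factor = totalRawSignal; // sum of per-scan TICs, as above
        break;
    }
    return factor;
  }
}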
use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
the class RTCalibrationTask method normalizeRow.
/**
* Normalize retention time of given row using selected standards
*
* @param originalRow Feature list row to be normalized
* @param standards Standard rows in same feature list
* @param normalizedStdRTs Normalized retention times of standard rows
* @return New feature list row with normalized retention time
*/
private PeakListRow normalizeRow(PeakListRow originalRow, PeakListRow[] standards, double[] normalizedStdRTs) {
PeakListRow normalizedRow = new SimplePeakListRow(originalRow.getID());
// Standard rows preceding and following this row
int prevStdIndex = -1, nextStdIndex = -1;
for (int stdIndex = 0; stdIndex < standards.length; stdIndex++) {
// If this standard peak is actually originalRow
if (standards[stdIndex] == originalRow) {
prevStdIndex = stdIndex;
nextStdIndex = stdIndex;
break;
}
// If this standard peak is before our originalRow
if (standards[stdIndex].getAverageRT() < originalRow.getAverageRT()) {
if ((prevStdIndex == -1) || (standards[stdIndex].getAverageRT() > standards[prevStdIndex].getAverageRT()))
prevStdIndex = stdIndex;
}
// If this standard peak is after our originalRow
if (standards[stdIndex].getAverageRT() > originalRow.getAverageRT()) {
if ((nextStdIndex == -1) || (standards[stdIndex].getAverageRT() < standards[nextStdIndex].getAverageRT()))
nextStdIndex = stdIndex;
}
}
// Calculate normalized retention time of this row
double normalizedRT = -1;
if ((prevStdIndex == -1) || (nextStdIndex == -1)) {
normalizedRT = originalRow.getAverageRT();
} else if (prevStdIndex == nextStdIndex) {
normalizedRT = normalizedStdRTs[prevStdIndex];
} else {
double weight = (originalRow.getAverageRT() - standards[prevStdIndex].getAverageRT()) / (standards[nextStdIndex].getAverageRT() - standards[prevStdIndex].getAverageRT());
normalizedRT = normalizedStdRTs[prevStdIndex] + (weight * (normalizedStdRTs[nextStdIndex] - normalizedStdRTs[prevStdIndex]));
}
// Set normalized retention time to all peaks in this row
for (RawDataFile file : originalRow.getRawDataFiles()) {
Feature originalPeak = originalRow.getPeak(file);
if (originalPeak != null) {
SimpleFeature normalizedPeak = new SimpleFeature(originalPeak);
PeakUtils.copyPeakProperties(originalPeak, normalizedPeak);
normalizedPeak.setRT(normalizedRT);
normalizedRow.addPeak(file, normalizedPeak);
}
}
return normalizedRow;
}
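The heart of the method is the piecewise-linear interpolation between the two bracketing standards. A minimal sketch follows; the signature is illustrative rather than part of the MZmine API, and the fallbacks (no bracketing standard, or the row being a standard itself) are handled by the caller as above.

class RTInterpolationSketch {
  static double normalizeRT(double rt, double prevStdRT, double nextStdRT,
      double prevNormRT, double nextNormRT) {
    // Fractional position of rt between the two standards (0 at prev, 1 at next)
    double weight = (rt - prevStdRT) / (nextStdRT - prevStdRT);
    return prevNormRT + weight * (nextNormRT - prevNormRT);
  }
}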
use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
the class StandardCompoundNormalizerTask method run.
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.finest("Starting standard compound normalization of " + originalPeakList + " using " + normalizationType + " (total " + standardRows.length + " standard peaks)");
// Check if we have standards
if (standardRows.length == 0) {
setErrorMessage("No internal standard peaks selected");
setStatus(TaskStatus.ERROR);
return;
}
// Initialize new alignment result for the normalized result
normalizedPeakList = new SimplePeakList(originalPeakList + " " + suffix, originalPeakList.getRawDataFiles());
// Total number of rows, for progress reporting
totalRows = originalPeakList.getNumberOfRows();
// Loop through all rows
rowIteration: for (PeakListRow row : originalPeakList.getRows()) {
// Cancel ?
if (isCanceled()) {
return;
}
// Do not add the standard rows to the new peaklist
for (int i = 0; i < standardRows.length; i++) {
if (row == standardRows[i]) {
processedRows++;
continue rowIteration;
}
}
// Copy comment and identification
SimplePeakListRow normalizedRow = new SimplePeakListRow(row.getID());
PeakUtils.copyPeakListRowProperties(row, normalizedRow);
// Get m/z and RT of the current row
double mz = row.getAverageMZ();
double rt = row.getAverageRT();
// Loop through all raw data files
for (RawDataFile file : originalPeakList.getRawDataFiles()) {
double[] normalizationFactors = null;
double[] normalizationFactorWeights = null;
if (normalizationType == StandardUsageType.Nearest) {
// Search for nearest standard
PeakListRow nearestStandardRow = null;
double nearestStandardRowDistance = Double.MAX_VALUE;
for (int standardRowIndex = 0; standardRowIndex < standardRows.length; standardRowIndex++) {
PeakListRow standardRow = standardRows[standardRowIndex];
double stdMZ = standardRow.getAverageMZ();
double stdRT = standardRow.getAverageRT();
double distance = MZvsRTBalance * Math.abs(mz - stdMZ) + Math.abs(rt - stdRT);
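// MZvsRTBalance scales the m/z difference so it is commensurate with the RT difference in one combined distance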
if (distance <= nearestStandardRowDistance) {
nearestStandardRow = standardRow;
nearestStandardRowDistance = distance;
}
}
assert nearestStandardRow != null;
// Calc and store a single normalization factor
normalizationFactors = new double[1];
normalizationFactorWeights = new double[1];
Feature standardPeak = nearestStandardRow.getPeak(file);
if (standardPeak == null) {
// What to do if standard peak is not available?
normalizationFactors[0] = 1.0;
} else {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
normalizationFactors[0] = standardPeak.getHeight();
} else {
normalizationFactors[0] = standardPeak.getArea();
}
}
logger.finest("Normalizing row #" + row.getID() + " using standard peak " + standardPeak + ", factor " + normalizationFactors[0]);
normalizationFactorWeights[0] = 1.0f;
}
if (normalizationType == StandardUsageType.Weighted) {
// Add all standards as factors, and use distance as weight
normalizationFactors = new double[standardRows.length];
normalizationFactorWeights = new double[standardRows.length];
for (int standardRowIndex = 0; standardRowIndex < standardRows.length; standardRowIndex++) {
PeakListRow standardRow = standardRows[standardRowIndex];
double stdMZ = standardRow.getAverageMZ();
double stdRT = standardRow.getAverageRT();
double distance = MZvsRTBalance * Math.abs(mz - stdMZ) + Math.abs(rt - stdRT);
Feature standardPeak = standardRow.getPeak(file);
if (standardPeak == null) {
// What to do if standard peak is not available?
normalizationFactors[standardRowIndex] = 1.0;
normalizationFactorWeights[standardRowIndex] = 0.0;
} else {
if (peakMeasurementType == PeakMeasurementType.HEIGHT) {
normalizationFactors[standardRowIndex] = standardPeak.getHeight();
} else {
normalizationFactors[standardRowIndex] = standardPeak.getArea();
}
normalizationFactorWeights[standardRowIndex] = 1 / distance;
}
}
}
assert normalizationFactors != null;
assert normalizationFactorWeights != null;
// Calculate a single normalization factor as weighted average
// of all factors
double weightedSum = 0.0f;
double sumOfWeights = 0.0f;
for (int factorIndex = 0; factorIndex < normalizationFactors.length; factorIndex++) {
weightedSum += normalizationFactors[factorIndex] * normalizationFactorWeights[factorIndex];
sumOfWeights += normalizationFactorWeights[factorIndex];
}
double normalizationFactor = weightedSum / sumOfWeights;
// For simple scaling of the normalized values
normalizationFactor = normalizationFactor / 100.0f;
logger.finest("Normalizing row #" + row.getID() + "[" + file + "] using factor " + normalizationFactor);
// How to handle zero normalization factor?
if (normalizationFactor == 0.0)
normalizationFactor = Double.MIN_VALUE;
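// Double.MIN_VALUE is the smallest positive double: typical intensities divided by it
// overflow to Infinity, while 0 / Double.MIN_VALUE stays 0 instead of the NaN from 0/0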
// Normalize peak
Feature originalPeak = row.getPeak(file);
if (originalPeak != null) {
SimpleFeature normalizedPeak = new SimpleFeature(originalPeak);
PeakUtils.copyPeakProperties(originalPeak, normalizedPeak);
double normalizedHeight = originalPeak.getHeight() / normalizationFactor;
double normalizedArea = originalPeak.getArea() / normalizationFactor;
normalizedPeak.setHeight(normalizedHeight);
normalizedPeak.setArea(normalizedArea);
normalizedRow.addPeak(file, normalizedPeak);
}
}
normalizedPeakList.addRow(normalizedRow);
processedRows++;
}
// Add new peaklist to the project
project.addPeakList(normalizedPeakList);
// Copy previously applied methods from the original feature list
for (PeakListAppliedMethod proc : originalPeakList.getAppliedMethods()) {
normalizedPeakList.addDescriptionOfAppliedTask(proc);
}
// Add task description to peakList
normalizedPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Standard compound normalization", parameters));
// Remove the original peaklist if requested
if (removeOriginal)
project.removePeakList(originalPeakList);
logger.info("Finished standard compound normalizer");
setStatus(TaskStatus.FINISHED);
}
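The weighted branch boils down to an inverse-distance weighted mean of the standard intensities. A minimal sketch, with illustrative identifiers:

class WeightedFactorSketch {
  // factors[i] is the standard's height or area; weights[i] is 1/distance,
  // or 0 when the standard peak is missing in the current file
  static double weightedNormalizationFactor(double[] factors, double[] weights) {
    double weightedSum = 0.0;
    double sumOfWeights = 0.0;
    for (int i = 0; i < factors.length; i++) {
      weightedSum += factors[i] * weights[i];
      sumOfWeights += weights[i];
    }
    // NaN if every standard is missing (all weights 0); the task above only
    // guards against an exact 0.0 factor
    return weightedSum / sumOfWeights;
  }
}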
use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
the class ADAP3DTask method run.
/**
* @see Runnable#run()
*/
public void run() {
setStatus(TaskStatus.PROCESSING);
logger.info("Started ADAP3D on " + dataFile);
List<Scan> selectedScans = Arrays.asList(scanSelection.getMatchingScans(dataFile));
// Check if we have any scans
if (selectedScans.size() == 0) {
setStatus(TaskStatus.ERROR);
setErrorMessage("No scans match the selected criteria");
return;
}
// Check if the scans are properly ordered by RT
double prevRT = Double.NEGATIVE_INFINITY;
for (Scan s : selectedScans) {
if (s.getRetentionTime() < prevRT) {
setStatus(TaskStatus.ERROR);
final String msg = "Retention time of scan #" + s.getScanNumber() + " is smaller then the retention time of the previous scan." + " Please make sure you only use scans with increasing retention times." + " You can restrict the scan numbers in the parameters, or you can use the Crop filter module";
setErrorMessage(msg);
return;
}
prevRT = s.getRetentionTime();
}
// Run MSDK module
MZmineToMSDKRawDataFile msdkRawDataFile = new MZmineToMSDKRawDataFile(dataFile);
Predicate<MsScan> scanSelectionPredicate = scan -> selectedScans.contains(((MZmineToMSDKMsScan) scan).getMzmineScan());
msdkADAP3DMethod = new ADAP3DFeatureDetectionMethod(msdkRawDataFile, scanSelectionPredicate, new ADAP3DFeatureDetectionParameters());
List<Feature> features = null;
try {
if (isCanceled())
return;
features = msdkADAP3DMethod.execute();
if (isCanceled())
return;
} catch (Exception e) {
e.printStackTrace();
setStatus(TaskStatus.ERROR);
setErrorMessage("Error in ADAP3D: " + e.getMessage());
// Return here so the FINISHED status set below cannot overwrite the error
return;
}
if (features == null)
features = new ArrayList<>(0);
logger.info("ADAP3D detected " + features.size() + " features in " + dataFile + ", converting to MZmine peaklist");
// Create new MZmine 2 feature list
SimplePeakList newPeakList = new SimplePeakList(dataFile + " " + suffix, dataFile);
int rowId = 1;
for (Feature msdkFeature : features) {
if (isCanceled())
return;
SimpleFeature mzmineFeature = new SimpleFeature(dataFile, FeatureStatus.DETECTED, msdkFeature);
PeakListRow row = new SimplePeakListRow(rowId);
row.addPeak(dataFile, mzmineFeature);
newPeakList.addRow(row);
rowId++;
}
// Add new peaklist to the project
project.addPeakList(newPeakList);
// Add quality parameters to peaks
QualityParameters.calculateQualityParameters(newPeakList);
setStatus(TaskStatus.FINISHED);
logger.info("Finished ADAP3D feature detection on " + dataFile);
}
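The RT-ordering check above can be factored into a small reusable helper; Scan and getRetentionTime() mirror the MZmine API, while the helper itself is only a sketch.

import java.util.List;
import net.sf.mzmine.datamodel.Scan;

class ScanOrderSketch {
  static boolean isSortedByRT(List<Scan> scans) {
    double prevRT = Double.NEGATIVE_INFINITY;
    for (Scan s : scans) {
      if (s.getRetentionTime() < prevRT)
        return false;
      prevRT = s.getRetentionTime();
    }
    return true;
  }
}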
use of net.sf.mzmine.datamodel.impl.SimplePeakListRow in project mzmine2 by mzmine.
the class PeakListTablePopupMenu method actionPerformed.
@Override
public void actionPerformed(final ActionEvent e) {
final Object src = e.getSource();
if (deleteRowsItem.equals(src)) {
final int[] rowsToDelete = table.getSelectedRows();
final int[] unsortedIndexes = new int[rowsToDelete.length];
for (int i = rowsToDelete.length - 1; i >= 0; i--) {
unsortedIndexes[i] = table.convertRowIndexToModel(rowsToDelete[i]);
}
// sort row indexes and start removing from the last
Arrays.sort(unsortedIndexes);
// delete the rows starting from last
for (int i = unsortedIndexes.length - 1; i >= 0; i--) {
peakList.removeRow(unsortedIndexes[i]);
}
// Notify the GUI that peaklist contents have changed
updateTableGUI();
}
if (plotRowsItem.equals(src)) {
final int[] selectedTableRows = table.getSelectedRows();
final PeakListRow[] selectedRows = new PeakListRow[selectedTableRows.length];
for (int i = 0; i < selectedTableRows.length; i++) {
selectedRows[i] = getPeakListRow(table.convertRowIndexToModel(selectedTableRows[i]));
}
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
IntensityPlotModule.showIntensityPlot(MZmineCore.getProjectManager().getCurrentProject(), peakList, selectedRows);
}
});
}
if (showXICItem.equals(src) && allClickedPeakListRows.length != 0) {
// Map peaks to their identity labels.
final Map<Feature, String> labelsMap = new HashMap<Feature, String>(allClickedPeakListRows.length);
final RawDataFile selectedDataFile = clickedDataFile == null ? allClickedPeakListRows[0].getBestPeak().getDataFile() : clickedDataFile;
Range<Double> mzRange = null;
final List<Feature> selectedPeaks = new ArrayList<Feature>(allClickedPeakListRows.length);
for (final PeakListRow row : allClickedPeakListRows) {
for (final Feature peak : row.getPeaks()) {
if (mzRange == null) {
mzRange = peak.getRawDataPointsMZRange();
double upper = mzRange.upperEndpoint();
double lower = mzRange.lowerEndpoint();
if ((upper - lower) < 0.000001) {
// Workaround to widen ultra-narrow mzRanges (e.g. from an imported mzTab
// peaklist) to a more reasonable default for a HRAM instrument (~5 ppm)
double fiveppm = (upper * 5E-6);
mzRange = Range.closed(lower - fiveppm, upper + fiveppm);
}
} else {
mzRange = mzRange.span(peak.getRawDataPointsMZRange());
}
}
final Feature filePeak = row.getPeak(selectedDataFile);
if (filePeak != null) {
selectedPeaks.add(filePeak);
// Label the peak with the row's preferred identity.
final PeakIdentity identity = row.getPreferredPeakIdentity();
if (identity != null) {
labelsMap.put(filePeak, identity.getName());
}
}
}
ScanSelection scanSelection = new ScanSelection(selectedDataFile.getDataRTRange(1), 1);
TICVisualizerModule.showNewTICVisualizerWindow(new RawDataFile[] { selectedDataFile }, selectedPeaks.toArray(new Feature[selectedPeaks.size()]), labelsMap, scanSelection, TICPlotType.BASEPEAK, mzRange);
}
if (showXICSetupItem.equals(src) && allClickedPeakListRows.length != 0) {
// Map peaks to their identity labels.
final Map<Feature, String> labelsMap = new HashMap<Feature, String>(allClickedPeakListRows.length);
final RawDataFile[] selectedDataFiles = clickedDataFile == null ? peakList.getRawDataFiles() : new RawDataFile[] { clickedDataFile };
Range<Double> mzRange = null;
final ArrayList<Feature> allClickedPeaks = new ArrayList<Feature>(allClickedPeakListRows.length);
final ArrayList<Feature> selectedClickedPeaks = new ArrayList<Feature>(allClickedPeakListRows.length);
for (final PeakListRow row : allClickedPeakListRows) {
// Label the peak with the row's preferred identity.
final PeakIdentity identity = row.getPreferredPeakIdentity();
for (final Feature peak : row.getPeaks()) {
allClickedPeaks.add(peak);
if (peak.getDataFile() == clickedDataFile) {
selectedClickedPeaks.add(peak);
}
if (mzRange == null) {
mzRange = peak.getRawDataPointsMZRange();
} else {
mzRange = mzRange.span(peak.getRawDataPointsMZRange());
}
if (identity != null) {
labelsMap.put(peak, identity.getName());
}
}
}
ScanSelection scanSelection = new ScanSelection(selectedDataFiles[0].getDataRTRange(1), 1);
TICVisualizerModule.setupNewTICVisualizer(MZmineCore.getProjectManager().getCurrentProject().getDataFiles(), selectedDataFiles, allClickedPeaks.toArray(new Feature[allClickedPeaks.size()]), selectedClickedPeaks.toArray(new Feature[selectedClickedPeaks.size()]), labelsMap, scanSelection, mzRange);
}
if (show2DItem.equals(src)) {
final Feature showPeak = getSelectedPeak();
if (showPeak != null) {
TwoDVisualizerModule.show2DVisualizerSetupDialog(showPeak.getDataFile(), getPeakMZRange(showPeak), getPeakRTRange(showPeak));
}
}
if (show3DItem.equals(src)) {
final Feature showPeak = getSelectedPeak();
if (showPeak != null) {
Fx3DVisualizerModule.setupNew3DVisualizer(showPeak.getDataFile(), getPeakMZRange(showPeak), getPeakRTRange(showPeak), showPeak);
}
}
if (manuallyDefineItem.equals(src)) {
// ManualPeakPickerModule.runManualDetection(clickedDataFile, clickedPeakListRow, peakList,
// table);
XICManualPickerModule.runManualDetection(clickedDataFile, clickedPeakListRow, peakList, table);
}
if (showSpectrumItem.equals(src)) {
final Feature showPeak = getSelectedPeak();
if (showPeak != null) {
SpectraVisualizerModule.showNewSpectrumWindow(showPeak.getDataFile(), showPeak.getRepresentativeScanNumber(), showPeak);
}
}
if (openCompoundIdUrl.equals(src)) {
if (clickedPeakListRow != null && clickedPeakListRow.getPreferredPeakIdentity() != null) {
String url = clickedPeakListRow.getPreferredPeakIdentity().getPropertyValue(PeakIdentity.PROPERTY_URL);
if (url != null && !url.isEmpty() && Desktop.isDesktopSupported()) {
try {
Desktop.getDesktop().browse(new URI(url));
} catch (IOException | URISyntaxException e1) {
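// Best-effort: failures to open the browser are ignored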
}
}
}
}
if (showMSMSItem.equals(src)) {
if (allClickedPeakListRows != null && allClickedPeakListRows.length > 1) {
// show multi msms window of multiple rows
MultiMSMSWindow multi = new MultiMSMSWindow();
multi.setData(allClickedPeakListRows, peakList.getRawDataFiles(), clickedDataFile, true, SortingProperty.MZ, SortingDirection.Ascending);
multi.setVisible(true);
} else {
Feature showPeak = getSelectedPeakForMSMS();
if (showPeak != null) {
final int scanNumber = showPeak.getMostIntenseFragmentScanNumber();
if (scanNumber > 0) {
SpectraVisualizerModule.showNewSpectrumWindow(showPeak.getDataFile(), scanNumber);
} else {
MZmineCore.getDesktop().displayMessage(window, "There is no fragment for " + MZmineCore.getConfiguration().getMZFormat().format(showPeak.getMZ()) + " m/z in the current raw data.");
}
}
}
}
// mirror of the two best fragment scans
if (showMSMSMirrorItem.equals(src)) {
if (allClickedPeakListRows != null && allClickedPeakListRows.length == 2) {
PeakListRow a = allClickedPeakListRows[0];
PeakListRow b = allClickedPeakListRows[1];
Scan scan = a.getBestFragmentation();
Scan mirror = b.getBestFragmentation();
if (scan != null && mirror != null) {
// show mirror msms window of two rows
MirrorScanWindow mirrorWindow = new MirrorScanWindow();
mirrorWindow.setScans(scan, mirror);
mirrorWindow.setVisible(true);
}
}
}
// show spectral db matches
if (showSpectralDBResults.equals(src)) {
List<SpectralDBPeakIdentity> spectralID = Arrays.stream(clickedPeakListRow.getPeakIdentities()).filter(pi -> pi instanceof SpectralDBPeakIdentity).map(pi -> ((SpectralDBPeakIdentity) pi)).collect(Collectors.toList());
if (!spectralID.isEmpty()) {
SpectraIdentificationResultsWindow window = new SpectraIdentificationResultsWindow();
window.addMatches(spectralID);
window.setTitle("Matched " + spectralID.size() + " compounds for feature list row" + clickedPeakListRow.getID());
window.setVisible(true);
}
}
if (showAllMSMSItem.equals(src)) {
final Feature showPeak = getSelectedPeakForMSMS();
final Scan bestFragmentation = clickedPeakListRow.getBestFragmentation();
if (bestFragmentation != null) {
// Dereference getBestFragmentation() only after the null check to avoid an NPE
RawDataFile raw = bestFragmentation.getDataFile();
if (showPeak != null && showPeak.getMostIntenseFragmentScanNumber() != 0)
raw = showPeak.getDataFile();
MultiSpectraVisualizerWindow multiSpectraWindow = new MultiSpectraVisualizerWindow(clickedPeakListRow, raw);
multiSpectraWindow.setVisible(true);
} else if (showPeak != null) {
MZmineCore.getDesktop().displayMessage(window, "There is no fragment for " + MZmineCore.getConfiguration().getMZFormat().format(showPeak.getMZ()) + " m/z in the current raw data.");
}
}
if (showIsotopePatternItem.equals(src)) {
final Feature showPeak = getSelectedPeak();
if (showPeak != null && showPeak.getIsotopePattern() != null) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
SpectraVisualizerModule.showNewSpectrumWindow(showPeak.getDataFile(), showPeak.getRepresentativeScanNumber(), showPeak.getIsotopePattern());
}
});
}
}
if (formulaItem != null && formulaItem.equals(src)) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
FormulaPredictionModule.showSingleRowIdentificationDialog(clickedPeakListRow);
}
});
}
if (siriusItem != null && siriusItem.equals(src)) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
SiriusProcessingModule.showSingleRowIdentificationDialog(clickedPeakListRow);
}
});
}
if (onlineDbSearchItem != null && onlineDbSearchItem.equals(src)) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
OnlineDBSearchModule.showSingleRowIdentificationDialog(clickedPeakListRow);
}
});
}
if (spectralDbSearchItem != null && spectralDbSearchItem.equals(src)) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
LocalSpectralDBSearchModule.showSelectedRowsIdentificationDialog(allClickedPeakListRows, table);
}
});
}
if (nistSearchItem != null && nistSearchItem.equals(src)) {
NistMsSearchModule.singleRowSearch(peakList, clickedPeakListRow);
}
if (addNewRowItem.equals(src)) {
// find maximum ID and add 1
int newID = 1;
for (final PeakListRow row : peakList.getRows()) {
if (row.getID() >= newID) {
newID = row.getID() + 1;
}
}
// create a new row
final PeakListRow newRow = new SimplePeakListRow(newID);
ManualPeakPickerModule.runManualDetection(peakList.getRawDataFiles(), newRow, peakList, table);
}
if (showPeakRowSummaryItem.equals(src)) {
PeakSummaryVisualizerModule.showNewPeakSummaryWindow(clickedPeakListRow);
}
if (exportIsotopesItem.equals(src)) {
IsotopePatternExportModule.exportIsotopePattern(clickedPeakListRow);
}
if (exportToSirius.equals(src)) {
// export all selected rows
SiriusExportModule.exportSingleRows(allClickedPeakListRows);
}
if (exportMSMSLibrary.equals(src)) {
// open window with all selected rows
MSMSLibrarySubmissionWindow libraryWindow = new MSMSLibrarySubmissionWindow();
libraryWindow.setData(allClickedPeakListRows, SortingProperty.MZ, SortingDirection.Ascending, true);
libraryWindow.setVisible(true);
}
if (exportMS1Library.equals(src)) {
// open window with all selected rows
MSMSLibrarySubmissionWindow libraryWindow = new MSMSLibrarySubmissionWindow();
libraryWindow.setData(allClickedPeakListRows, SortingProperty.MZ, SortingDirection.Ascending, false);
libraryWindow.setVisible(true);
}
if (exportMSMSItem.equals(src)) {
MSMSExportModule.exportMSMS(clickedPeakListRow);
}
if (clearIdsItem.equals(src)) {
// Delete identities of selected rows.
for (final PeakListRow row : allClickedPeakListRows) {
// Remove every identity attached to this row
for (final PeakIdentity id : row.getPeakIdentities()) {
row.removePeakIdentity(id);
}
}
// Update table GUI.
updateTableGUI();
}
if (copyIdsItem.equals(src) && allClickedPeakListRows.length > 0) {
final PeakIdentity id = allClickedPeakListRows[0].getPreferredPeakIdentity();
if (id != null) {
copiedId = (PeakIdentity) id.clone();
}
}
if (pasteIdsItem.equals(src) && copiedId != null) {
// Paste identity into selected rows.
for (final PeakListRow row : allClickedPeakListRows) {
row.setPreferredPeakIdentity((PeakIdentity) copiedId.clone());
}
// Update table GUI.
updateTableGUI();
}
}
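The deleteRowsItem branch illustrates a general Swing idiom: convert view indexes to model indexes, then delete from the highest index down. A self-contained sketch, with removeRow passed in as a hypothetical callback:

import java.util.Arrays;
import java.util.function.IntConsumer;
import javax.swing.JTable;

class RowDeletionSketch {
  static void deleteSelectedRows(JTable table, IntConsumer removeRow) {
    int[] viewRows = table.getSelectedRows();
    int[] modelRows = new int[viewRows.length];
    // The table may be sorted or filtered, so map view indexes to model indexes
    for (int i = 0; i < viewRows.length; i++) {
      modelRows[i] = table.convertRowIndexToModel(viewRows[i]);
    }
    // Delete from the highest model index down so earlier removals do not
    // shift the indexes of rows still pending removal
    Arrays.sort(modelRows);
    for (int i = modelRows.length - 1; i >= 0; i--) {
      removeRow.accept(modelRows[i]);
    }
  }
}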