Use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class PeakComparisonRowFilterTask, method copyPeakRow.
/**
* Create a copy of a feature list row.
*
* @param row the row to copy.
* @return the newly created copy.
*/
/**
 * Create a deep copy of a feature list row.
 *
 * @param row the row to copy.
 * @return the newly created copy.
 */
private static PeakListRow copyPeakRow(final PeakListRow row) {

    // Duplicate the row shell and carry over its properties.
    final PeakListRow copy = new SimplePeakListRow(row.getID());
    PeakUtils.copyPeakListRowProperties(row, copy);

    // Duplicate each peak and re-attach it under its original raw data file.
    for (final Feature original : row.getPeaks()) {
        final Feature duplicated = new SimpleFeature(original);
        PeakUtils.copyPeakProperties(original, duplicated);
        copy.addPeak(original.getDataFile(), duplicated);
    }

    return copy;
}
Use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class PeakComparisonRowFilterTask, method filterPeakListRows.
/**
* Filter the feature list rows by comparing peaks within a row.
*
* @param peakList feature list to filter.
* @return a new feature list with rows of the original feature list that pass the filtering.
*/
/**
 * Filter the feature list rows by comparing peaks within a row.
 *
 * @param peakList feature list to filter.
 * @return a new feature list with rows of the original feature list that pass the filtering,
 *         or {@code null} if the task was cancelled or a column index was out of range.
 */
private PeakList filterPeakListRows(final PeakList peakList) {

    // Create new feature list.
    final PeakList newPeakList = new SimplePeakList(
        peakList.getName() + ' '
            + parameters.getParameter(PeakComparisonRowFilterParameters.SUFFIX).getValue(),
        peakList.getRawDataFiles());

    // Copy previous applied methods.
    for (final PeakListAppliedMethod method : peakList.getAppliedMethods()) {
        newPeakList.addDescriptionOfAppliedTask(method);
    }

    // Add task description to peakList.
    newPeakList.addDescriptionOfAppliedTask(
        new SimplePeakListAppliedMethod(getTaskDescription(), parameters));

    // Get parameters.
    final boolean evaluateFoldChange =
        parameters.getParameter(PeakComparisonRowFilterParameters.FOLD_CHANGE).getValue();
    final boolean evaluatePpmDiff =
        parameters.getParameter(PeakComparisonRowFilterParameters.MZ_PPM_DIFF).getValue();
    final boolean evaluateRtDiff =
        parameters.getParameter(PeakComparisonRowFilterParameters.RT_DIFF).getValue();
    final int columnIndex1 =
        parameters.getParameter(PeakComparisonRowFilterParameters.COLUMN_INDEX_1).getValue();
    final int columnIndex2 =
        parameters.getParameter(PeakComparisonRowFilterParameters.COLUMN_INDEX_2).getValue();
    final Range<Double> foldChangeRange = parameters
        .getParameter(PeakComparisonRowFilterParameters.FOLD_CHANGE).getEmbeddedParameter()
        .getValue();
    // BUG FIX: these two previously read the FOLD_CHANGE embedded parameter (copy-paste
    // error), so the PPM and RT criteria were checked against the wrong range.
    final Range<Double> ppmDiffRange = parameters
        .getParameter(PeakComparisonRowFilterParameters.MZ_PPM_DIFF).getEmbeddedParameter()
        .getValue();
    final Range<Double> rtDiffRange = parameters
        .getParameter(PeakComparisonRowFilterParameters.RT_DIFF).getEmbeddedParameter()
        .getValue();

    // Setup variables.
    final PeakListRow[] rows = peakList.getRows();
    totalRows = rows.length;
    final RawDataFile[] rawDataFiles = peakList.getRawDataFiles();

    // Abort if either column index refers to a raw data file that doesn't exist.
    // BUG FIX: use >= — an index equal to the array length is also out of bounds.
    if (columnIndex1 >= rawDataFiles.length) {
        setErrorMessage("Column 1 set too large.");
        setStatus(TaskStatus.ERROR);
        return null;
    }
    if (columnIndex2 >= rawDataFiles.length) {
        setErrorMessage("Column 2 set too large.");
        setStatus(TaskStatus.ERROR);
        return null;
    }

    // Loop-invariant: the compared columns never change between rows.
    final RawDataFile rawDataFile1 = rawDataFiles[columnIndex1];
    final RawDataFile rawDataFile2 = rawDataFiles[columnIndex2];

    // Loop over the rows & filter.
    for (processedRows = 0; !isCanceled() && processedRows < totalRows; processedRows++) {

        // Default values used when a peak is missing in a column.
        double peak1Area = 1.0;
        double peak2Area = 1.0;
        double peak1MZ = -1.0;
        double peak2MZ = -1.0;
        double peak1RT = -1.0;
        double peak2RT = -1.0;

        final PeakListRow row = rows[processedRows];
        final Feature peak1 = row.getPeak(rawDataFile1);
        final Feature peak2 = row.getPeak(rawDataFile2);
        if (peak1 != null) {
            peak1Area = peak1.getArea();
            peak1MZ = peak1.getMZ();
            peak1RT = peak1.getRT();
        }
        if (peak2 != null) {
            peak2Area = peak2.getArea();
            peak2MZ = peak2.getMZ();
            peak2RT = peak2.getRT();
        }

        boolean allCriteriaMatched = true;

        // Fold change criterion: log2 of the peak area ratio.
        if (evaluateFoldChange) {
            final double foldChange = Math.log(peak1Area / peak2Area) / Math.log(2);
            if (!foldChangeRange.contains(foldChange))
                allCriteriaMatched = false;
        }

        // PPM difference criterion.
        // BUG FIX: this check was previously nested inside the fold-change branch, so it
        // was silently skipped whenever the fold-change criterion was disabled.
        if (evaluatePpmDiff) {
            final double ppmDiff = (peak1MZ - peak2MZ) / peak1MZ * 1E6;
            if (!ppmDiffRange.contains(ppmDiff))
                allCriteriaMatched = false;
        }

        // RT difference criterion (same de-nesting fix as above).
        if (evaluateRtDiff) {
            final double rtDiff = peak1RT - peak2RT;
            if (!rtDiffRange.contains(rtDiff))
                allCriteriaMatched = false;
        }

        // Good row? Copy it into the filtered list.
        if (allCriteriaMatched)
            newPeakList.addRow(copyPeakRow(row));
    }

    return newPeakList;
}
Use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class DuplicateFilterTask, method filterDuplicatePeakListRows.
/**
* Filter our duplicate feature list rows.
*
* @param origPeakList the original feature list.
* @param suffix the suffix to apply to the new feature list name.
* @param mzTolerance m/z tolerance.
* @param rtTolerance RT tolerance.
* @param requireSameId must duplicate peaks have the same identities?
* @return the filtered feature list.
*/
/**
 * Filter out duplicate feature list rows.
 *
 * Duplicates are detected pairwise: for each surviving row, every later row that matches
 * it (by identity and m/z / RT tolerance) is merged into it and removed (set to null in
 * the working array) so it is never visited again as a "first" row.
 *
 * @param origPeakList the original feature list.
 * @param suffix the suffix to apply to the new feature list name.
 * @param mzTolerance m/z tolerance.
 * @param rtTolerance RT tolerance.
 * @param requireSameId must duplicate peaks have the same identities?
 * @param mode filter mode: OLD_AVERAGE keeps the highest-area row of each duplicate group;
 *        the other modes build a consensus row and compare either averages or single features.
 * @return the filtered feature list (partially filled if the task was cancelled).
 */
private PeakList filterDuplicatePeakListRows(final PeakList origPeakList, final String suffix, final MZTolerance mzTolerance, final RTTolerance rtTolerance, final boolean requireSameId, FilterMode mode) {
    final PeakListRow[] peakListRows = origPeakList.getRows();
    final int rowCount = peakListRows.length;
    RawDataFile[] rawFiles = origPeakList.getRawDataFiles();
    // Create the new feature list.
    final PeakList newPeakList = new SimplePeakList(origPeakList + " " + suffix, origPeakList.getRawDataFiles());
    // sort rows: OLD_AVERAGE keeps the largest-area member of each group (descending area);
    // the consensus modes process rows in stable ID order instead.
    if (mode.equals(FilterMode.OLD_AVERAGE))
        Arrays.sort(peakListRows, new PeakListRowSorter(SortingProperty.Area, SortingDirection.Descending));
    else
        Arrays.sort(peakListRows, new PeakListRowSorter(SortingProperty.ID, SortingDirection.Ascending));
    // filter by average mz and rt, unless comparing individual features per raw data file
    boolean filterByAvgRTMZ = !mode.equals(FilterMode.SINGLE_FEATURE);
    // Loop through all feature list rows
    processedRows = 0;
    // n counts the rows removed as duplicates (reported at the end).
    int n = 0;
    totalRows = rowCount;
    for (int firstRowIndex = 0; !isCanceled() && firstRowIndex < rowCount; firstRowIndex++) {
        final PeakListRow mainRow = peakListRows[firstRowIndex];
        // null means this row was already consumed as a duplicate of an earlier row.
        if (mainRow != null) {
            // copy first row so the original list is never mutated by consensus building
            PeakListRow firstRow = copyRow(mainRow);
            for (int secondRowIndex = firstRowIndex + 1; !isCanceled() && secondRowIndex < rowCount; secondRowIndex++) {
                final PeakListRow secondRow = peakListRows[secondRowIndex];
                if (secondRow != null) {
                    // Compare identifications (vacuously true when requireSameId is off)
                    final boolean sameID = !requireSameId || PeakUtils.compareIdentities(firstRow, secondRow);
                    boolean sameMZRT = // average or single feature
                    filterByAvgRTMZ ? checkSameAverageRTMZ(firstRow, secondRow, mzTolerance, rtTolerance) : checkSameSingleFeatureRTMZ(rawFiles, firstRow, secondRow, mzTolerance, rtTolerance);
                    // Duplicate peaks?
                    if (sameID && sameMZRT) {
                        // create consensus row in new filter
                        if (!mode.equals(FilterMode.OLD_AVERAGE)) {
                            // copy all detected features of row2 into row1
                            // to exchange gap-filled against detected features
                            createConsensusFirstRow(rawFiles, firstRow, secondRow);
                        }
                        // second row deleted: mark consumed so later passes skip it
                        n++;
                        peakListRows[secondRowIndex] = null;
                    }
                }
            }
            // add to new list
            newPeakList.addRow(firstRow);
        }
        processedRows++;
    }
    // finalize: only record provenance when the run completed normally
    if (!isCanceled()) {
        // Load previous applied methods.
        for (final PeakListAppliedMethod method : origPeakList.getAppliedMethods()) {
            newPeakList.addDescriptionOfAppliedTask(method);
        }
        // Add task description to peakList
        newPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Duplicate feature list rows filter", parameters));
        LOG.info("Removed " + n + " duplicate rows");
    }
    return newPeakList;
}
Use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class DuplicateFilterTask, method copyRow.
/**
 * Create a deep copy of a feature list row, duplicating every contained peak.
 *
 * @param row the row to duplicate.
 * @return a new row carrying copies of the original row's properties and peaks.
 */
public PeakListRow copyRow(PeakListRow row) {
    // Clone the row shell and carry over its properties.
    final PeakListRow duplicate = new SimplePeakListRow(row.getID());
    PeakUtils.copyPeakListRowProperties(row, duplicate);

    // Re-attach a copy of every peak under its original raw data file.
    for (final Feature original : row.getPeaks()) {
        duplicate.addPeak(original.getDataFile(), copyPeak(original));
    }

    return duplicate;
}
Use of net.sf.mzmine.datamodel.PeakListRow in project mzmine2 (by mzmine):
class CameraSearchTask, method addPseudoSpectraIdentities.
/**
* Add pseudo-spectra identities.
*
* @param peaks peaks to annotate with identities.
* @param spectraExp the pseudo-spectra ids vector.
* @param isotopeExp the isotopes vector.
*/
/**
 * Add pseudo-spectrum identities to the feature list row of each peak, replacing any
 * identities the row already carried.
 *
 * All arrays are indexed in parallel with {@code peaks}.
 *
 * @param peaks peaks to annotate with identities.
 * @param spectra per-peak pseudo-spectrum ids.
 * @param isotopes per-peak isotope annotations, or null if none were produced.
 * @param adducts per-peak adduct annotations, or null if none were produced.
 */
private void addPseudoSpectraIdentities(final Feature[] peaks, final int[] spectra, final String[] isotopes, final String[] adducts) {
    // Add identities for each peak.
    int peakIndex = 0;
    for (final Feature peak : peaks) {
        // Create pseudo-spectrum identity, e.g. "Pseudo-spectrum #007".
        final SimplePeakIdentity identity = new SimplePeakIdentity("Pseudo-spectrum #" + String.format("%03d", spectra[peakIndex]));
        identity.setPropertyValue(PeakIdentity.PROPERTY_METHOD, "Bioconductor CAMERA");
        // Add isotope info, if any.
        if (isotopes != null) {
            final String isotope = isotopes[peakIndex].trim();
            if (isotope.length() > 0) {
                // Parse the isotope pattern; irregular values are logged but not stored.
                final Matcher matcher = ISOTOPE_PATTERN.matcher(isotope);
                if (matcher.matches()) {
                    // identity.setPropertyValue("Isotope", matcher.group(1));
                    identity.setPropertyValue("Isotope", isotope);
                } else {
                    LOG.warning("Irregular isotope value: " + isotope);
                }
            }
        }
        // Add adduct info, if any.
        if (adducts != null) {
            final String adduct = adducts[peakIndex].trim();
            if (adduct.length() > 0)
                identity.setPropertyValue("Adduct", adduct);
        }
        // Add identity to peak's row, clearing existing identities first so the
        // pseudo-spectrum identity becomes the only (preferred) one.
        // NOTE(review): removing while iterating is only safe if getPeakIdentities()
        // returns a copy/array rather than a live collection — confirm in PeakListRow.
        PeakListRow row = peakList.getPeakRow(peak);
        for (PeakIdentity peakIdentity : row.getPeakIdentities()) row.removePeakIdentity(peakIdentity);
        peakList.getPeakRow(peak).addPeakIdentity(identity, true);
        peakIndex++;
    }
}
Aggregations.