Usage example of net.sf.mzmine.datamodel.PeakListRow in the mzmine2 project by mzmine.
From the class RowsFilterTask, method copyPeakRow.
/**
 * Duplicates a feature list row, including its peaks and its peak information.
 *
 * @param row the row to duplicate.
 * @return a new row carrying copies of the row properties, peaks and peak information.
 */
private static PeakListRow copyPeakRow(final PeakListRow row) {
  // Duplicate the row itself, keeping the original ID.
  final PeakListRow copy = new SimplePeakListRow(row.getID());
  PeakUtils.copyPeakListRowProperties(row, copy);

  // Duplicate every peak and attach it to its raw data file.
  for (final Feature sourcePeak : row.getPeaks()) {
    final Feature peakCopy = new SimpleFeature(sourcePeak);
    PeakUtils.copyPeakProperties(sourcePeak, peakCopy);
    copy.addPeak(sourcePeak.getDataFile(), peakCopy);
  }

  // Carry over the peak information map, if present.
  if (row.getPeakInformation() != null) {
    copy.setPeakInformation(new SimplePeakInformation(
        new HashMap<>(row.getPeakInformation().getAllProperties())));
  }
  return copy;
}
Usage example of net.sf.mzmine.datamodel.PeakListRow in the mzmine2 project by mzmine.
From the class PeakFinderTask, method run.
/**
 * Runs the gap filler on the source feature list: builds an empty copy of the list,
 * carries over existing peaks, creates a {@link Gap} for every missing (row, raw file)
 * combination, offers every MS level 1 scan to the gaps of its file, and finally
 * registers the processed list with the project.
 */
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Running gap filler on " + peakList);

  // Total number of MS level 1 scans in all files, used for progress reporting.
  for (RawDataFile dataFile : peakList.getRawDataFiles()) {
    totalScans += dataFile.getNumOfScans(1);
  }
  // Atomic: incremented from parallel stream workers when useParallelStream is set.
  processedScans = new AtomicInteger();

  // Create new feature list.
  processedPeakList = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());

  // Fill new feature list with empty rows (ID, comment and identities only).
  for (int row = 0; row < peakList.getNumberOfRows(); row++) {
    PeakListRow sourceRow = peakList.getRow(row);
    PeakListRow newRow = new SimplePeakListRow(sourceRow.getID());
    newRow.setComment(sourceRow.getComment());
    for (PeakIdentity ident : sourceRow.getPeakIdentities()) {
      newRow.addPeakIdentity(ident, false);
    }
    if (sourceRow.getPreferredPeakIdentity() != null) {
      newRow.setPreferredPeakIdentity(sourceRow.getPreferredPeakIdentity());
    }
    processedPeakList.addRow(newRow);
  }

  if (rtCorrection) {
    // Two passes over the scans, so double the progress denominator.
    totalScans *= 2;
    // Fill the gaps of a randomly chosen sample using all the other samples and take it
    // as master list to fill the gaps of the remaining samples.
    masterSample = (int) Math.floor(Math.random() * peakList.getNumberOfRawDataFiles());
    fillList(MASTERLIST);
    // Process all raw data files.
    fillList(!MASTERLIST);
  } else {
    // Process all raw data files, optionally in parallel.
    IntStream rawStream = IntStream.range(0, peakList.getNumberOfRawDataFiles());
    if (useParallelStream)
      rawStream = rawStream.parallel();
    rawStream.forEach(i -> {
      // Canceled? Inside the stream a return only skips this element; the final
      // isCanceled() check after the stream performs the actual termination.
      if (isCanceled()) {
        return;
      }
      RawDataFile dataFile = peakList.getRawDataFile(i);

      // Collect a gap for every row without a peak in this file; copy existing peaks.
      List<Gap> gaps = new ArrayList<Gap>();
      for (int row = 0; row < peakList.getNumberOfRows(); row++) {
        if (isCanceled()) {
          return; // inside stream - only skips this element
        }
        PeakListRow sourceRow = peakList.getRow(row);
        PeakListRow newRow = processedPeakList.getRow(row);
        Feature sourcePeak = sourceRow.getPeak(dataFile);
        if (sourcePeak == null) {
          // Search window around the row's average m/z and RT, widened by the tolerances.
          Range<Double> mzRange = mzTolerance.getToleranceRange(sourceRow.getAverageMZ());
          Range<Double> rtRange = rtTolerance.getToleranceRange(sourceRow.getAverageRT());
          gaps.add(new Gap(newRow, dataFile, mzRange, rtRange, intTolerance));
        } else {
          newRow.addPeak(dataFile, sourcePeak);
        }
      }

      // Stop processing this file if there are no gaps, but still account for its
      // scans so progress stays consistent.
      if (gaps.isEmpty()) {
        // Fixed: count MS level 1 scans to match the totalScans calculation above
        // (previously getNumOfScans() counted scans of all MS levels).
        processedScans.addAndGet(dataFile.getNumOfScans(1));
        return;
      }

      // Offer every MS level 1 scan of this data file to all gaps.
      int[] scanNumbers = dataFile.getScanNumbers(1);
      for (int scanNumber : scanNumbers) {
        if (isCanceled()) {
          return; // inside stream - only skips this element
        }
        Scan scan = dataFile.getScan(scanNumber);
        for (Gap gap : gaps) {
          gap.offerNextScan(scan);
        }
        processedScans.incrementAndGet();
      }

      // Finalize gaps (build the filled peaks from the offered scans).
      for (Gap gap : gaps) {
        gap.noMoreOffers();
      }
    });
  }

  // Terminate here if canceled - inside the stream only elements were skipped.
  if (isCanceled())
    return;

  // Append processed feature list to the project.
  project.addPeakList(processedPeakList);
  // Add quality parameters to peaks.
  QualityParameters.calculateQualityParameters(processedPeakList);
  // Add task description to the feature list.
  processedPeakList
      .addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod("Gap filling ", parameters));
  // Remove the original peak list if requested.
  if (removeOriginal)
    project.removePeakList(peakList);

  logger.info("Finished gap-filling on " + peakList);
  setStatus(TaskStatus.FINISHED);
}
Usage example of net.sf.mzmine.datamodel.PeakListRow in the mzmine2 project by mzmine.
From the class MultiThreadPeakFinderMainTask, method createResultsPeakList.
/**
 * Creates the results feature list for gap filling: an empty copy of the source list
 * seeded with one row per source row, carrying over ID, comment and identities.
 *
 * @return the new feature list, ready to be filled by the gap-filler sub tasks.
 */
private PeakList createResultsPeakList() {
  // The target list shares the raw data files of the source feature list.
  SimplePeakList result = new SimplePeakList(peakList + " " + suffix, peakList.getRawDataFiles());

  // Seed the list with empty rows that mirror the source rows' metadata.
  for (int i = 0; i < peakList.getNumberOfRows(); i++) {
    PeakListRow source = peakList.getRow(i);
    PeakListRow target = new SimplePeakListRow(source.getID());
    target.setComment(source.getComment());
    for (PeakIdentity identity : source.getPeakIdentities()) {
      target.addPeakIdentity(identity, false);
    }
    PeakIdentity preferred = source.getPreferredPeakIdentity();
    if (preferred != null) {
      target.setPreferredPeakIdentity(preferred);
    }
    result.addRow(target);
  }
  return result;
}
Usage example of net.sf.mzmine.datamodel.PeakListRow in the mzmine2 project by mzmine.
From the class MultiThreadPeakFinderTask, method run.
/**
 * Fills peak gaps in the raw data files of the slice [start, endexcl) assigned to this
 * sub task. For every row lacking a peak in a given file a {@link Gap} is created; every
 * MS level 1 scan of the file is then offered to all gaps before they are finalized.
 * Existing peaks are carried over unchanged. When done, the shared processed feature
 * list is handed to the registered listener.
 */
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Running multithreaded gap filler " + taskIndex + " on raw files " + (start + 1)
      + "-" + endexcl + " of pkl:" + peakList);

  // Total number of MS level 1 scans in the assigned files, used for progress reporting.
  for (int i = start; i < endexcl; i++) {
    RawDataFile dataFile = peakList.getRawDataFile(i);
    totalScans += dataFile.getNumOfScans(1);
  }

  // Process all raw data files of this sub task's slice.
  for (int i = start; i < endexcl; i++) {
    RawDataFile dataFile = peakList.getRawDataFile(i);
    if (isCanceled()) {
      return;
    }

    // Collect a gap for every row without a peak in this file; copy existing peaks.
    List<Gap> gaps = new ArrayList<Gap>();
    for (int row = 0; row < peakList.getNumberOfRows(); row++) {
      PeakListRow sourceRow = peakList.getRow(row);
      PeakListRow newRow = processedPeakList.getRow(row);
      Feature sourcePeak = sourceRow.getPeak(dataFile);
      if (sourcePeak == null) {
        // Search window around the row's average m/z and RT, widened by the tolerances.
        Range<Double> mzRange = mzTolerance.getToleranceRange(sourceRow.getAverageMZ());
        Range<Double> rtRange = rtTolerance.getToleranceRange(sourceRow.getAverageRT());
        gaps.add(new Gap(newRow, dataFile, mzRange, rtRange, intTolerance));
      } else {
        newRow.addPeak(dataFile, sourcePeak);
      }
    }

    // Stop processing this file if there are no gaps, but still account for its scans
    // so progress stays consistent.
    if (gaps.isEmpty()) {
      // Fixed: count MS level 1 scans to match the totalScans calculation above
      // (previously getNumOfScans() counted scans of all MS levels).
      processedScans += dataFile.getNumOfScans(1);
      continue;
    }

    // Offer every MS level 1 scan of this data file to all gaps.
    int[] scanNumbers = dataFile.getScanNumbers(1);
    for (int scanNumber : scanNumbers) {
      if (isCanceled()) {
        return;
      }
      Scan scan = dataFile.getScan(scanNumber);
      for (Gap gap : gaps) {
        gap.offerNextScan(scan);
      }
      processedScans++;
    }

    // Finalize gaps (build the filled peaks from the offered scans).
    for (Gap gap : gaps) {
      gap.noMoreOffers();
    }
  }

  // Notify the listener before flagging this sub task as finished.
  listener.accept(processedPeakList);
  logger.info("Finished sub task: Multithreaded gap filler " + taskIndex + " on raw files "
      + (start + 1) + "-" + endexcl + " of pkl:" + peakList);
  setStatus(TaskStatus.FINISHED);
}
Usage example of net.sf.mzmine.datamodel.PeakListRow in the mzmine2 project by mzmine.
From the class PeakListBlankSubtractionMasterTask, method run.
/**
 * Runs blank subtraction: copies the aligned rows, launches one subtraction sub task per
 * non-blank raw data file, waits for all sub tasks to terminate, removes rows that only
 * remain in blank files, and registers the resulting feature list with the project.
 */
@Override
public void run() {
  if (!checkBlankSelection(alignedFeatureList, blankRaws)) {
    // Fixed typo in message: "does no contain" -> "does not contain".
    setErrorMessage("Peak list " + alignedFeatureList.getName()
        + " does not contain all selected blank raw data files.");
    setStatus(TaskStatus.ERROR);
    return;
  }
  setStatus(TaskStatus.PROCESSING);

  // Work on copies sorted by ascending m/z; sub tasks mutate this shared array.
  PeakListRow[] rows = PeakUtils.copyPeakRows(alignedFeatureList.getRows());
  rows = PeakUtils.sortRowsMzAsc(rows);

  for (RawDataFile raw : alignedFeatureList.getRawDataFiles()) {
    // Only create a task for every file that is not a blank.
    if (Arrays.asList(blankRaws).contains(raw))
      continue;
    // These tasks access the shared array and remove the features that appear in their
    // raw data file and the blanks from these rows.
    AbstractTask task = new PeakListBlankSubtractionSingleTask(parameters, raw, rows);
    MZmineCore.getTaskController().addTask(task);
    subTasks.add(task);
    if (getStatus() == TaskStatus.CANCELED)
      return;
  }

  // Wait for the sub tasks. A sub task counts as terminated once it is FINISHED,
  // CANCELED or in ERROR - previously only FINISHED was accepted, so a failed sub
  // task made this loop spin forever.
  boolean allTasksDone = false;
  while (!allTasksDone) {
    allTasksDone = true;
    for (AbstractTask task : subTasks) {
      TaskStatus status = task.getStatus();
      if (status != TaskStatus.FINISHED && status != TaskStatus.CANCELED
          && status != TaskStatus.ERROR) {
        allTasksDone = false;
      }
    }
    try {
      TimeUnit.MILLISECONDS.sleep(5);
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers can still observe the interruption.
      Thread.currentThread().interrupt();
      setErrorMessage(e.getMessage());
      setStatus(TaskStatus.ERROR);
      return;
    }
    if (getStatus() == TaskStatus.CANCELED)
      return;
  }

  // Remove rows that only contain blank raw data files.
  List<RawDataFile> blankRawsList = Arrays.asList(blankRaws);
  int onlyBlankRows = 0;
  for (int i = 0; i < rows.length; i++) {
    PeakListRow row = rows[i];
    // Guard against slots a sub task may already have cleared.
    if (row != null && blankRawsList.containsAll(Arrays.asList(row.getRawDataFiles()))) {
      onlyBlankRows++;
      rows[i] = null;
    }
    if (getStatus() == TaskStatus.CANCELED)
      return;
  }
  logger.finest("Removed " + onlyBlankRows + " rows that only existed in blankfiles.");

  // Build the result list from the surviving rows.
  PeakList result = new SimplePeakList(alignedFeatureList.getName() + " sbtrctd",
      alignedFeatureList.getRawDataFiles());
  for (PeakListRow row : rows) {
    if (row != null) {
      result.addRow(row);
    }
  }

  PeakListUtils.copyPeakListAppliedMethods(alignedFeatureList, result);
  result.addDescriptionOfAppliedTask(
      new SimplePeakListAppliedMethod(PeakListBlankSubtractionModule.MODULE_NAME, parameters));
  project.addPeakList(result);
  setStatus(TaskStatus.FINISHED);
}
Aggregations (end of collected usage examples).