Use of net.sf.mzmine.datamodel.RawDataFileWriter in project mzmine2 by mzmine:
the run method of the scan-alignment task (which uses the PearsonCorrelation helper class).
/**
 * Aligns consecutive scans: for each scan it searches, over all candidate
 * data-point shifts in [-mzSpan, mzSpan], the shift whose intensities best
 * correlate (Pearson) with the neighboring scans, then writes the shifted
 * scan into a new raw data file. On success the new file is added to the
 * project and the original is optionally removed.
 *
 * @see Runnable#run()
 */
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started Scan Alignment on " + dataFile);
  scanNumbers = dataFile.getScanNumbers(1);
  totalScans = scanNumbers.length;
  RawDataFileWriter newRDFW = null;
  try {
    newRDFW = MZmineCore.createNewFile(dataFile.getName() + ' ' + suffix);
    // mzValues[relative scan within window][data point index]
    DataPoint[][] mzValues = null;
    int i, j, si, sj, ii, k, shift, ks;
    // Histogram of the shift chosen per scan; index = shift + mzSpan
    int[] shiftedScans = new int[mzSpan * 2 + 1];
    for (i = 0; i < totalScans; i++) {
      if (isCanceled())
        return;
      Scan scan = dataFile.getScan(scanNumbers[i]);
      // Window [si, sj] of 2*scanSpan+1 scans centered on i, clamped to file bounds
      si = (int) Math.max(0, i - scanSpan);
      sj = (int) (si + 2 * scanSpan);
      if (sj >= totalScans) {
        si = (int) Math.max(0, si - (sj - totalScans + 1));
        sj = (int) (si + 2 * scanSpan);
      }
      if (scan != null) {
        // Allocate the window buffer (reused across iterations when large enough)
        if (mzValues == null || mzValues.length < sj - si + 1)
          mzValues = new DataPoint[sj - si + 1][];
        // Load the data points of every scan in the window
        for (j = si; j <= sj; j++) {
          Scan xscan = dataFile.getScan(scanNumbers[j]);
          mzValues[j - si] = xscan.getDataPoints();
        }
        // ii = index of the current scan inside the window
        ii = i - si;
        final SimpleScan newScan = new SimpleScan(scan);
        DataPoint[] newDP = new DataPoint[mzValues[ii].length];
        int maxShift = 0;
        double maxCorrelation = 0;
        int ndp = mzValues[ii].length;
        // Try every candidate shift and keep the one with the best correlation
        for (shift = -mzSpan; shift <= mzSpan; shift++) {
          PearsonCorrelation thisShift = new PearsonCorrelation();
          for (k = 0; k < ndp; k++) {
            ks = k + shift;
            if (ks >= 0 && ks < ndp && mzValues[ii][ks].getIntensity() >= minimumHeight) {
              DataPoint dp = mzValues[ii][k];
              double mz = dp.getMZ();
              int f = 0;
              for (j = 0; j < mzValues.length; j++) {
                if (j != ii) {
                  // Fast path: same index already holds the same m/z in the neighbor scan
                  if (mzValues[j].length > k && Math.abs(mzValues[j][k].getMZ() - mz) < 1e-10) {
                    f = k;
                  } else {
                    f = findFirstMass(mz, mzValues[j]);
                    if (Math.abs(mzValues[j][f].getMZ() - mz) > 1e-10) {
                      // NOTE(review): when findFirstMass returns 0 and the m/z does not
                      // match, -f is still 0 and passes the f >= 0 test below — confirm
                      // findFirstMass's contract (possible off-by-one, kept as-is).
                      f = -f;
                    }
                  }
                  if (f >= 0) {
                    if (logScale) {
                      thisShift.enter(Math.log(mzValues[j][f].getIntensity()), Math.log(mzValues[ii][ks].getIntensity()));
                    } else {
                      thisShift.enter(mzValues[j][f].getIntensity(), mzValues[ii][ks].getIntensity());
                    }
                  }
                }
              }
            }
          }
          if (thisShift.correlation() > maxCorrelation) {
            maxShift = shift;
            maxCorrelation = thisShift.correlation();
          }
        }
        // Copy the data points applying the best shift found
        shift = maxShift;
        shiftedScans[maxShift + mzSpan]++;
        for (k = 0; k < ndp; k++) {
          ks = k + shift;
          if (ks >= 0 && ks < ndp) {
            newDP[k] = new SimpleDataPoint(mzValues[ii][k].getMZ(), mzValues[ii][ks].getIntensity());
          } else {
            // Shifted index fell outside the scan: pad with zero intensity
            newDP[k] = new SimpleDataPoint(mzValues[ii][k].getMZ(), 0);
          }
        }
        newScan.setDataPoints(newDP);
        newRDFW.addScan(newScan);
      }
      processedScans++;
    }
    if (!isCanceled()) {
      // Finalize writing
      newRDF = newRDFW.finishWriting();
      // Add the newly created file to the project
      project.addFile(newRDF);
      // Remove the original data file if requested
      if (removeOriginal) {
        project.removeFile(dataFile);
      }
      setStatus(TaskStatus.FINISHED);
      String shifts = "";
      for (i = -mzSpan; i <= mzSpan; i++) {
        shifts = shifts + i + ":" + shiftedScans[i + mzSpan] + " | ";
      }
      logger.info("Finished Scan Alignment on " + dataFile + ". Scans per shift = " + shifts);
    }
  } catch (IOException e) {
    // Previously this only printed the stack trace, leaving the task stuck in
    // PROCESSING forever; report the failure the same way CropFilterTask does.
    e.printStackTrace();
    setErrorMessage(e.toString());
    setStatus(TaskStatus.ERROR);
  }
}
Use of net.sf.mzmine.datamodel.RawDataFileWriter in project mzmine2 by mzmine:
the run method of the ScanSmoothingTask class.
/**
 * Smooths scans in two independent stages: (1) time smoothing — each data point
 * is averaged with matching-m/z points from neighboring scans inside a window
 * defined by timeSpan/scanSpan; (2) m/z smoothing — each data point is averaged
 * with its m/z neighbors within mzTol/mzPoints. The smoothed scans are written
 * to a new raw data file which is added to the project.
 *
 * @see Runnable#run()
 */
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started Scan Smoothing on " + dataFile);
  scanNumbers = dataFile.getScanNumbers(1);
  totalScans = scanNumbers.length;
  RawDataFileWriter newRDFW = null;
  // Counters of how many scans went through each smoothing stage (sanity check below)
  int timepassed = 0;
  int mzpassed = 0;
  try {
    newRDFW = MZmineCore.createNewFile(dataFile.getName() + ' ' + suffix);
    // mzValues[relative scan within window][data point index]
    DataPoint[][] mzValues = null;
    int i, j, si, sj, ii, k, ssi, ssj;
    for (i = 0; i < totalScans; i++) {
      if (isCanceled())
        return;
      // Smoothing in TIME space
      Scan scan = dataFile.getScan(scanNumbers[i]);
      if (scan != null) {
        double rt = scan.getRetentionTime();
        final SimpleScan newScan = new SimpleScan(scan);
        DataPoint[] newDP = null;
        sj = si = i;
        ssi = ssj = i;
        if (timeSpan > 0 || scanSpan > 0) {
          double timeMZtol = Math.max(mzTol, 1e-5);
          // [si, sj]: scans whose retention time lies within timeSpan/2 of this scan
          for (si = i; si > 1; si--) {
            Scan scanS = dataFile.getScan(scanNumbers[si - 1]);
            if (scanS == null || scanS.getRetentionTime() < rt - timeSpan / 2) {
              break;
            }
          }
          for (sj = i; sj < totalScans - 1; sj++) {
            Scan scanS = dataFile.getScan(scanNumbers[sj + 1]);
            if (scanS == null || scanS.getRetentionTime() >= rt + timeSpan / 2) {
              break;
            }
          }
          // [ssi, ssj]: fixed-size window of scanSpan scans, clamped to file bounds
          ssi = i - (scanSpan - 1) / 2;
          ssj = i + (scanSpan - 1) / 2;
          if (ssi < 0) {
            ssj += -ssi;
            ssi = 0;
          }
          if (ssj >= totalScans) {
            ssi -= (ssj - totalScans + 1);
            ssj = totalScans - 1;
          }
          // Use the scan-count window when the time window is narrower
          if (sj - si + 1 < scanSpan) {
            si = ssi;
            sj = ssj;
          }
          if (sj > si) {
            timepassed++;
            // Allocate the window buffer (reused across iterations when large enough)
            if (mzValues == null || mzValues.length < sj - si + 1)
              mzValues = new DataPoint[sj - si + 1][];
            // Load Data Points
            for (j = si; j <= sj; j++) {
              Scan xscan = dataFile.getScan(scanNumbers[j]);
              mzValues[j - si] = xscan.getDataPoints();
            }
            // Estimate Averages; ii = index of the current scan inside the window
            ii = i - si;
            newDP = new DataPoint[mzValues[ii].length];
            for (k = 0; k < mzValues[ii].length; k++) {
              DataPoint dp = mzValues[ii][k];
              double mz = dp.getMZ();
              double intensidad = 0;
              if (dp.getIntensity() > 0) {
                // Only process data points with positive intensity
                double a = 0;
                short c = 0;
                int f = 0;
                for (j = 0; j < mzValues.length; j++) {
                  // Fast path: same index already holds a matching m/z in scan j
                  if (mzValues[j].length > k && Math.abs(mzValues[j][k].getMZ() - mz) < timeMZtol) {
                    f = k;
                  } else {
                    f = findFirstMass(mz, mzValues[j]);
                    if (Math.abs(mzValues[j][f].getMZ() - mz) > timeMZtol) {
                      // Negate to flag "no matching m/z found in this scan".
                      // NOTE(review): if f == 0 the negation is a no-op and the point
                      // still counts as found — confirm findFirstMass's contract.
                      f = -f;
                    }
                  }
                  if (f >= 0 && mzValues[j][f].getIntensity() >= minimumHeight) {
                    a += mzValues[j][f].getIntensity();
                    c++;
                  }
                }
                intensidad = c > 0 ? a / c : 0;
              }
              newDP[k] = new SimpleDataPoint(mz, intensidad);
            }
          }
        } else {
          // No time smoothing requested: start from the scan's own data points
          // (scan != null is guaranteed inside this branch)
          newDP = scan.getDataPoints();
        }
        // Smoothing in M/Z space. Guard against newDP == null, which happens when
        // the time-smoothing window degenerated (sj <= si) — previously an NPE.
        if (newDP != null && (mzTol > 0 || mzPoints > 0)) {
          mzpassed++;
          DataPoint[] updatedDP = new DataPoint[newDP.length];
          for (k = 0; k < newDP.length; k++) {
            double mz = newDP[k].getMZ();
            double intensidad = 0;
            if (newDP[k].getIntensity() > 0) {
              // Expand [si, sj] to every neighbor within mzTol OR within mzPoints indices
              for (si = k; si > 0 && (newDP[si].getMZ() + mzTol >= mz || k - si <= mzPoints); si--) ;
              for (sj = k; sj < newDP.length - 1 && (newDP[sj].getMZ() - mzTol <= mz || sj - k <= mzPoints); sj++) ;
              double sum = 0;
              for (j = si; j <= sj; j++) {
                sum += newDP[j].getIntensity();
              }
              intensidad = sum / (sj - si + 1);
            }
            updatedDP[k] = new SimpleDataPoint(mz, intensidad);
          }
          newDP = updatedDP;
        }
        // Register new smoothing data
        if (scan != null && newDP != null) {
          newScan.setDataPoints(newDP);
          newRDFW.addScan(newScan);
        }
      }
      processedScans++;
    }
    if (!isCanceled()) {
      // Finalize writing
      newRDF = newRDFW.finishWriting();
      // Add the newly created file to the project
      project.addFile(newRDF);
      // Remove the original data file if requested
      if (removeOriginal) {
        project.removeFile(dataFile);
      }
      setStatus(TaskStatus.FINISHED);
      if (mzpassed + timepassed < totalScans / 2) {
        logger.warning("It seems that parameters were not properly set. Scans processed : time=" + timepassed + ", mz=" + mzpassed);
      }
      logger.info("Finished Scan Smoothing on " + dataFile);
    }
  } catch (IOException e) {
    // Previously this only printed the stack trace, leaving the task stuck in
    // PROCESSING forever; report the failure the same way CropFilterTask does.
    e.printStackTrace();
    setErrorMessage(e.toString());
    setStatus(TaskStatus.ERROR);
  }
}
Use of net.sf.mzmine.datamodel.RawDataFileWriter in project mzmine2 by mzmine:
the run method of the CropFilterTask class.
/**
 * Crops every selected scan to the configured m/z range, writes the cropped
 * scans into a new raw data file, adds that file to the project, and optionally
 * removes the original file.
 *
 * @see Runnable#run()
 */
@Override
public void run() {
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started crop filter on " + dataFile);
  scans = scanSelection.getMatchingScans(dataFile);
  totalScans = scans.length;
  // Bail out early when the selection matched nothing
  if (totalScans == 0) {
    setStatus(TaskStatus.ERROR);
    setErrorMessage("No scans match the selected criteria");
    return;
  }
  try {
    final RawDataFileWriter writer = MZmineCore.createNewFile(dataFile.getName() + " " + suffix);
    for (int idx = 0; idx < scans.length; idx++) {
      final Scan original = scans[idx];
      final SimpleScan cropped = new SimpleScan(original);
      // Only replace the data points when the scan actually extends past the range
      final boolean needsCropping = !mzRange.encloses(original.getDataPointMZRange());
      if (needsCropping) {
        cropped.setDataPoints(original.getDataPointsByMass(mzRange));
      }
      writer.addScan(cropped);
      processedScans++;
    }
    final RawDataFile filteredFile = writer.finishWriting();
    project.addFile(filteredFile);
    // Drop the source file when the user asked for in-place replacement
    if (removeOriginal) {
      project.removeFile(dataFile);
    }
    setStatus(TaskStatus.FINISHED);
  } catch (Exception e) {
    setStatus(TaskStatus.ERROR);
    setErrorMessage(e.toString());
    e.printStackTrace();
  }
}
Use of net.sf.mzmine.datamodel.RawDataFileWriter in project mzmine2 by mzmine:
the run method of the ZipReadTask class.
/**
 * Decompresses a .zip or .gz raw data file into a temporary directory, detects
 * the type of the decompressed file, and delegates the actual import to the
 * matching opening task.
 *
 * @see java.lang.Runnable#run()
 */
public void run() {
  // Update task status
  setStatus(TaskStatus.PROCESSING);
  logger.info("Started opening compressed file " + file);
  try {
    // Name of the uncompressed file
    String newName = file.getName();
    if (newName.toLowerCase().endsWith(".zip") || newName.toLowerCase().endsWith(".gz")) {
      newName = FilenameUtils.removeExtension(newName);
    }
    long decompressedSize = 0;
    // try-with-resources so the streams are closed on every exit path
    // (previously fis/is/ous leaked on the early return and on exceptions)
    try (FileInputStream fis = new FileInputStream(file)) {
      InputStream is;
      switch (fileType) {
        case ZIP:
          ZipInputStream zis = new ZipInputStream(fis);
          ZipEntry entry = zis.getNextEntry();
          newName = entry.getName();
          decompressedSize = entry.getSize();
          if (decompressedSize < 0)
            decompressedSize = 0;
          is = zis;
          break;
        case GZIP:
          is = new GZIPInputStream(fis);
          // Ballpark the decompressed size for progress reporting
          decompressedSize = (long) (file.length() * 1.5);
          if (decompressedSize < 0)
            decompressedSize = 0;
          break;
        default:
          setErrorMessage("Cannot decompress file type: " + fileType);
          setStatus(TaskStatus.ERROR);
          return;
      }
      tmpDir = Files.createTempDir();
      tmpFile = new File(tmpDir, newName);
      logger.finest("Decompressing to file " + tmpFile);
      tmpFile.deleteOnExit();
      tmpDir.deleteOnExit();
      // Decompress the contents; inner try-with-resources closes both streams
      copy = new StreamCopy();
      try (InputStream in = is; FileOutputStream ous = new FileOutputStream(tmpFile)) {
        copy.copy(in, ous, decompressedSize);
      }
    }
    if (isCanceled())
      return;
    // Find the type of the decompressed file. Local renamed from "fileType",
    // which shadowed the field of the same name used in the switch above.
    RawDataFileType detectedFileType = RawDataFileTypeDetector.detectDataFileType(tmpFile);
    logger.finest("File " + tmpFile + " type detected as " + detectedFileType);
    if (detectedFileType == null) {
      setErrorMessage("Could not determine the file type of file " + newName);
      setStatus(TaskStatus.ERROR);
      return;
    }
    // Run the import module on the decompressed file
    RawDataFileWriter newMZmineFile = MZmineCore.createNewFile(newName);
    decompressedOpeningTask = RawDataImportModule.createOpeningTask(detectedFileType, project, tmpFile, newMZmineFile);
    if (decompressedOpeningTask == null) {
      setErrorMessage("File type " + detectedFileType + " of file " + newName + " is not supported.");
      setStatus(TaskStatus.ERROR);
      return;
    }
    // Run the underlying task
    decompressedOpeningTask.run();
    // Delete the temporary folder
    tmpFile.delete();
    tmpDir.delete();
    if (isCanceled())
      return;
  } catch (Throwable e) {
    e.printStackTrace();
    logger.log(Level.SEVERE, "Could not open file " + file.getPath(), e);
    setErrorMessage(ExceptionUtils.exceptionToString(e));
    setStatus(TaskStatus.ERROR);
    return;
  }
  logger.info("Finished opening compressed file " + file);
  // Update task status
  setStatus(TaskStatus.FINISHED);
}
Use of net.sf.mzmine.datamodel.RawDataFileWriter in project mzmine2 by mzmine:
the importRawDataFiles method of the MzTabImportTask class.
/**
 * Resolves and (optionally) imports the raw data files referenced by the
 * ms_run entries of an mzTab file, then maps each ms_run index to a
 * RawDataFile in the project. Missing files are searched next to the mzTab
 * file, also with ".gz"/".zip" appended; entries with no importable file get
 * an empty dummy RawDataFile so downstream code always has a target.
 *
 * @param mzTabFile the parsed mzTab file whose metadata lists the ms_runs
 * @return map from ms_run index to the matching (or dummy) RawDataFile,
 *         or null if the task was canceled while waiting for imports
 * @throws Exception if parameter instantiation, import, or file creation fails
 */
private SortedMap<Integer, RawDataFile> importRawDataFiles(MZTabFile mzTabFile) throws Exception {
  SortedMap<Integer, MsRun> msrun = mzTabFile.getMetadata().getMsRunMap();
  SortedMap<Integer, RawDataFile> rawDataFiles = new TreeMap<>();
  // If we are importing files, let's run RawDataImportModule
  if (importRawFiles) {
    List<File> filesToImport = new ArrayList<>();
    for (Entry<Integer, MsRun> entry : msrun.entrySet()) {
      File fileToImport = new File(entry.getValue().getLocation().getPath());
      if (fileToImport.exists() && fileToImport.canRead())
        filesToImport.add(fileToImport);
      else {
        // Check if the raw file exists in the same folder as the mzTab file
        File checkFile = new File(inputFile.getParentFile(), fileToImport.getName());
        if (checkFile.exists() && checkFile.canRead())
          filesToImport.add(checkFile);
        else {
          // Append .gz & check again, as a workaround to .gz not getting
          // preserved when .mzML.gz importing
          checkFile = new File(inputFile.getParentFile(), fileToImport.getName() + ".gz");
          if (checkFile.exists() && checkFile.canRead())
            filesToImport.add(checkFile);
          else {
            // One more level of checking, appending .zip as a workaround
            checkFile = new File(inputFile.getParentFile(), fileToImport.getName() + ".zip");
            if (checkFile.exists() && checkFile.canRead())
              filesToImport.add(checkFile);
          }
        }
      }
    }
    RawDataImportModule RDI = MZmineCore.getModuleInstance(RawDataImportModule.class);
    // getDeclaredConstructor().newInstance() instead of the deprecated
    // Class.newInstance(), which rethrows checked constructor exceptions
    // without wrapping them
    ParameterSet rdiParameters = RDI.getParameterSetClass().getDeclaredConstructor().newInstance();
    rdiParameters.getParameter(RawDataImportParameters.fileNames).setValue(filesToImport.toArray(new File[0]));
    synchronized (underlyingTasks) {
      RDI.runModule(project, rdiParameters, underlyingTasks);
    }
    if (underlyingTasks.size() > 0) {
      MZmineCore.getTaskController().addTasks(underlyingTasks.toArray(new Task[0]));
    }
    // Wait until all raw data file imports have completed, polling once a second
    while (true) {
      if (isCanceled())
        return null;
      boolean tasksFinished = true;
      for (Task task : underlyingTasks) {
        if ((task.getStatus() == TaskStatus.WAITING) || (task.getStatus() == TaskStatus.PROCESSING))
          tasksFinished = false;
      }
      if (tasksFinished)
        break;
      Thread.sleep(1000);
    }
    // (Dead commented-out code that reordered the raw-data tree nodes to match
    // the mzTab ms_run order was removed here.)
  } else {
    finishedPercentage = 0.5;
  }
  // Find a matching RawDataFile for each MsRun entry
  for (Entry<Integer, MsRun> entry : msrun.entrySet()) {
    String rawFileName = new File(entry.getValue().getLocation().getPath()).getName();
    RawDataFile rawDataFile = null;
    // Check if we already have a RawDataFile of that name
    for (RawDataFile f : project.getDataFiles()) {
      if (f.getName().equals(rawFileName)) {
        rawDataFile = f;
        break;
      }
    }
    // If no data file of that name exists, create a dummy one
    if (rawDataFile == null) {
      RawDataFileWriter writer = MZmineCore.createNewFile(rawFileName);
      rawDataFile = writer.finishWriting();
      project.addFile(rawDataFile);
    }
    // Save a reference to the new raw data file
    rawDataFiles.put(entry.getKey(), rawDataFile);
  }
  return rawDataFiles;
}
Aggregations