Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in the mzmine2 project (by mzmine).
From the class GnpsMgfParser, method parse:
/**
 * Parses a GNPS .mgf spectral library file and adds one SpectralDBEntry per
 * BEGIN IONS ... END IONS block via addLibraryEntry(entry).
 *
 * @param mainTask the parent task; may be null. If non-null and cancelled, parsing aborts early.
 * @param dataBaseFile the .mgf spectral library file to read
 * @return true when the whole file was processed, false if mainTask was cancelled mid-parse
 * @throws IOException if the file cannot be opened or read
 */
@Override
public boolean parse(AbstractTask mainTask, File dataBaseFile) throws IOException {
  logger.info("Parsing mgf spectral library " + dataBaseFile.getAbsolutePath());
  // Expected mgf block layout:
  // BEGIN IONS
  // meta data
  // SCANS=1 .... n (the scan ID; could be used to put all spectra of the same entry together)
  // data
  // END IONS
  int correct = 0; // count of entries successfully added (only incremented, never read here)
  State state = State.WAIT_FOR_META;
  Map<DBEntryField, Object> fields = new EnumMap<>(DBEntryField.class);
  List<DataPoint> dps = new ArrayList<>();
  int sep = -1; // index of '=' in the current meta line
  // create db
  // NOTE(review): FileReader uses the platform default charset — presumably library files are
  // ASCII/UTF-8; confirm before relying on non-ASCII metadata.
  try (BufferedReader br = new BufferedReader(new FileReader(dataBaseFile))) {
    for (String l; (l = br.readLine()) != null; ) {
      // main task was canceled?
      if (mainTask != null && mainTask.isCanceled()) {
        return false;
      }
      try {
        // lines of length 0 or 1 are skipped entirely (blank separators)
        if (l.length() > 1) {
          // meta data start?
          if (state.equals(State.WAIT_FOR_META)) {
            if (l.equalsIgnoreCase("BEGIN IONS")) {
              // NOTE(review): copies the PREVIOUS entry's fields into the new map instead of
              // starting empty — presumably so consecutive blocks of the same compound inherit
              // its metadata (see SCANS note above); confirm this carry-over is intended.
              fields = new EnumMap<>(fields);
              dps.clear();
              state = State.META;
            }
          } else {
            if (l.equalsIgnoreCase("END IONS")) {
              // add entry and reset; entries with <=1 field or <=1 data point are discarded
              if (fields.size() > 1 && dps.size() > 1) {
                SpectralDBEntry entry = new SpectralDBEntry(fields, dps.toArray(new DataPoint[dps.size()]));
                // add and push
                addLibraryEntry(entry);
                correct++;
              }
              state = State.WAIT_FOR_META;
            } else if (l.toLowerCase().startsWith("scans")) {
              // belongs to the previously created entry and is another spectrum
              // data starts after the SCANS line
              // NOTE(review): the SCANS value itself is never stored as a field — confirm
              // that is intentional.
              state = State.DATA;
            } else {
              switch(state) {
                case WAIT_FOR_META:
                  // wait for next entry
                  break;
                case DATA:
                  // data line: "mz<TAB>intensity"
                  // NOTE(review): splits on tab only — space-separated peak lists would throw
                  // here and abort the entry via the outer catch; confirm tab is guaranteed.
                  String[] data = l.split("\t");
                  dps.add(new SimpleDataPoint(Double.parseDouble(data[0]), Double.parseDouble(data[1])));
                  break;
                case META:
                  // meta line: "KEY=VALUE"; lines with no '=' or an empty value are ignored
                  sep = l.indexOf('=');
                  if (sep != -1 && sep < l.length() - 1) {
                    DBEntryField field = DBEntryField.forMgfID(l.substring(0, sep));
                    if (field != null) {
                      String content = l.substring(sep + 1, l.length());
                      if (!content.isEmpty()) {
                        try {
                          Object value = field.convertValue(content);
                          // name: try to extract an adduct from the last token of the name
                          if (field.equals(DBEntryField.NAME)) {
                            String name = ((String) value);
                            int lastSpace = name.lastIndexOf(' ');
                            // NOTE(review): "- 2" requires at least two chars after the space;
                            // confirm single-char adduct candidates are meant to be excluded.
                            if (lastSpace != -1 && lastSpace < name.length() - 2) {
                              String adductCandidate = name.substring(lastSpace + 1);
                              // check for valid adduct with the adduct parser from export
                              // use as adduct
                              String adduct = AdductParser.parse(adductCandidate);
                              if (adduct != null && !adduct.isEmpty())
                                fields.put(DBEntryField.ION_TYPE, adduct);
                            }
                          }
                          fields.put(field, value);
                        } catch (Exception e) {
                          logger.log(Level.WARNING, "Cannot convert value type of " + content + " to " + field.getObjectClass().toString(), e);
                        }
                      }
                    }
                  }
                  break;
              }
            }
          }
        }
      } catch (Exception ex) {
        // a malformed line aborts only the current entry; parsing resumes at the next block
        logger.log(Level.WARNING, "Error for entry", ex);
        state = State.WAIT_FOR_META;
      }
    }
    // finish and process all entries
    finish();
    return true;
  }
}
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in the mzmine2 project (by mzmine).
From the class GnpsJsonParser, method getDataPoints:
/**
 * Extracts the "peaks" array of [mz, intensity] pairs from a GNPS JSON spectrum record.
 *
 * @param main the JSON object describing one library spectrum
 * @return the parsed data points, or null when the "peaks" array is missing or any
 *         value cannot be read as a number
 */
public DataPoint[] getDataPoints(JsonObject main) {
  JsonArray peaks = main.getJsonArray("peaks");
  if (peaks == null) {
    return null;
  }
  int count = peaks.size();
  DataPoint[] result = new DataPoint[count];
  try {
    for (int i = 0; i < count; i++) {
      // each element is a two-value array: [mz, intensity]
      JsonArray pair = peaks.getJsonArray(i);
      result[i] = new SimpleDataPoint(pair.getJsonNumber(0).doubleValue(),
          pair.getJsonNumber(1).doubleValue());
    }
    return result;
  } catch (Exception e) {
    logger.log(Level.SEVERE, "Cannot convert DP values to doubles", e);
    return null;
  }
}
Use of net.sf.mzmine.datamodel.impl.SimpleDataPoint in the mzmine2 project (by mzmine).
From the class JdxParser, method parse:
/**
 * Parses a JCAMP-DX (.jdx) spectral library file and adds one SpectralDBEntry per
 * record (terminated by a line containing "END") via addLibraryEntry(entry).
 *
 * @param mainTask the parent task; may be null. If non-null and cancelled, parsing aborts early.
 * @param dataBaseFile the .jdx spectral library file to read
 * @return true when the whole file was processed, false if mainTask was cancelled mid-parse
 * @throws IOException if the file cannot be opened or read
 */
@Override
public boolean parse(AbstractTask mainTask, File dataBaseFile) throws IOException {
  logger.info("Parsing jdx spectral library " + dataBaseFile.getAbsolutePath());
  boolean isData = false; // true once the first data pair of the current record was parsed
  Map<DBEntryField, Object> fields = new EnumMap<>(DBEntryField.class);
  List<DataPoint> dps = new ArrayList<>();
  // create db
  int sep = -1; // index of '=' in the current meta line
  try (BufferedReader br = new BufferedReader(new FileReader(dataBaseFile))) {
    for (String l; (l = br.readLine()) != null; ) {
      // main task was canceled?
      // FIX: null-guard mainTask for consistency with GnpsMgfParser.parse, which
      // explicitly supports a null parent task; previously this line threw an NPE.
      if (mainTask != null && mainTask.isCanceled()) {
        return false;
      }
      try {
        // meta data? ("KEY=VALUE" lines; only before the data section starts)
        sep = isData ? -1 : l.indexOf("=");
        if (sep != -1) {
          DBEntryField field = DBEntryField.forJdxID(l.substring(0, sep));
          if (field != null) {
            String content = l.substring(sep + 1, l.length());
            if (content.length() > 0) {
              try {
                Object value = field.convertValue(content);
                fields.put(field, value);
              } catch (Exception e) {
                logger.log(Level.WARNING, "Cannot convert value type of " + content + " to " + field.getObjectClass().toString(), e);
              }
            }
          }
        } else {
          // data? space-separated "mz,intensity" pairs
          String[] dataPairs = l.split(" ");
          for (String dataPair : dataPairs) {
            String[] data = dataPair.split(",");
            if (data.length == 2) {
              try {
                dps.add(new SimpleDataPoint(Double.parseDouble(data[0]), Double.parseDouble(data[1])));
                isData = true;
              } catch (Exception ignored) {
                // deliberately ignored: non-numeric "a,b" tokens (e.g. in text lines)
                // are simply not data points
              }
            }
          }
        }
        if (l.contains("END")) {
          // row with END
          // add entry and reset
          SpectralDBEntry entry = new SpectralDBEntry(fields, dps.toArray(new DataPoint[dps.size()]));
          // carry the field map forward (matches GnpsMgfParser behavior)
          fields = new EnumMap<>(fields);
          dps.clear();
          addLibraryEntry(entry);
          // reset
          isData = false;
        }
      } catch (Exception ex) {
        // a malformed line aborts only the current record; parsing continues
        logger.log(Level.WARNING, "Error for entry", ex);
      }
    }
  }
  // finish and push last entries
  finish();
  return true;
}
Aggregations