Use of org.eol.globi.domain.Specimen in project eol-globi-data by jhpoelen.
The class StudyImporterForRoopnarine, method importPredatorSpecimen.
private List<Specimen> importPredatorSpecimen(String trophicGuildLookup, Map<Integer, List<String>> trophicGuildNumberToSpeciesMap, LabeledCSVParser parser, List<String> preyTaxonList, Study study, Location location) throws StudyImporterException {
    Integer predatorGuildNumber = parseGuildNumber(trophicGuildLookup, parser);
    List<Specimen> predatorSpecimenList = new ArrayList<Specimen>();
    List<String> predatorTaxaList = trophicGuildNumberToSpeciesMap.get(predatorGuildNumber);
    if (predatorTaxaList == null) {
        throw new StudyImporterException("no species available for guild number [" + predatorGuildNumber + "]");
    }
    for (String predatorTaxa : predatorTaxaList) {
        // TODO - here's where the specimen model doesn't fit nicely - need a way to distinguish inferred relationships from direct observations
        if (StringUtils.isBlank(predatorTaxa)) {
            getLogger().info(study, "found blank predator name on line [" + parser.lastLineNumber() + "]");
        } else {
            Specimen predatorSpecimen = nodeFactory.createSpecimen(study, new TaxonImpl(predatorTaxa, null));
            predatorSpecimen.caughtIn(location);
            predatorSpecimenList.add(predatorSpecimen);
            for (String preyTaxonName : preyTaxonList) {
                if (StringUtils.isBlank(preyTaxonName)) {
                    getLogger().info(study, "found blank prey name for predator [" + predatorTaxa + "] on line [" + parser.lastLineNumber() + "]");
                } else {
                    Specimen preySpecimen = nodeFactory.createSpecimen(study, new TaxonImpl(preyTaxonName, null));
                    preySpecimen.caughtIn(location);
                    predatorSpecimen.ate(preySpecimen);
                }
            }
        }
    }
    return predatorSpecimenList;
}
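The method infers trophic links by pairing every species in the predator guild with every prey taxon. Below is a minimal, self-contained sketch of that expansion in plain Java; the guild number and species names are hypothetical examples, and System.out.println stands in for the GloBI node factory and ate() calls.

import java.util.*;

public class GuildExpansionSketch {
    public static void main(String[] args) {
        // hypothetical guild membership, standing in for trophicGuildNumberToSpeciesMap
        Map<Integer, List<String>> trophicGuildNumberToSpeciesMap = new HashMap<>();
        trophicGuildNumberToSpeciesMap.put(12, Arrays.asList("Lutjanus apodus", "Lutjanus griseus"));

        // hypothetical prey taxa, standing in for preyTaxonList
        List<String> preyTaxonList = Arrays.asList("Penaeus duorarum", "Callinectes sapidus");

        Integer predatorGuildNumber = 12; // stands in for parseGuildNumber(trophicGuildLookup, parser)
        List<String> predatorTaxaList = trophicGuildNumberToSpeciesMap.get(predatorGuildNumber);
        if (predatorTaxaList == null) {
            throw new IllegalStateException("no species available for guild number [" + predatorGuildNumber + "]");
        }
        for (String predatorTaxon : predatorTaxaList) {
            for (String preyTaxonName : preyTaxonList) {
                // in the importer this is predatorSpecimen.ate(preySpecimen)
                System.out.println(predatorTaxon + " -> ate -> " + preyTaxonName);
            }
        }
    }
}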
Use of org.eol.globi.domain.Specimen in project eol-globi-data by jhpoelen.
The class StudyImporterForWrast, method addNextRecordToStudy.
private void addNextRecordToStudy(LabeledCSVParser csvParser, Study study, Map<String, String> columnToNormalizedTermMapper, LengthParser lengthParser) throws StudyImporterException {
    String seasonName = csvParser.getValueByLabel(columnToNormalizedTermMapper.get(SEASON));
    String preyItem = csvParser.getValueByLabel(columnToNormalizedTermMapper.get(PREY_SPECIES));
    if (preyItem == null) {
        getLogger().warn(study, "no prey name for line [" + csvParser.getLastLineNumber() + "]");
    } else {
        Specimen prey = createAndClassifySpecimen(preyItem, study);
        String habitat = csvParser.getValueByLabel(COLUMN_MAPPER.get(HABITAT));
        String site = csvParser.getValueByLabel(COLUMN_MAPPER.get(SITE));
        String region = csvParser.getValueByLabel(COLUMN_MAPPER.get(REGION));
        String sampleLocationId = createLocationId(habitat, region, site);
        Map<String, LatLng> averageLocations = getLocationMap();
        LatLng latLng1 = averageLocations.get(sampleLocationId);
        if (latLng1 == null) {
            throw new StudyImporterException("no location information for [" + sampleLocationId + "] on line [" + csvParser.getLastLineNumber() + "] found in [" + averageLocations + "]");
        }
        if (depthMap == null) {
            depthMap = createDepthMap(study);
        }
        Double depth = depthMap.get(createDepthId(seasonName, region, site, habitat));
        Double altitude = depth == null ? null : -depth;
        if (depth == null) {
            getLogger().warn(study, createMsgPrefix(csvParser) + " failed to find depth for habitat, region, site and season: [" + createDepthId(seasonName, region, site, habitat) + "], skipping entry");
        }
        Location sampleLocation;
        try {
            sampleLocation = nodeFactory.getOrCreateLocation(new LocationImpl(latLng1.getLat(), latLng1.getLng(), altitude, null));
        } catch (NodeFactoryException e) {
            throw new StudyImporterException("failed to create location", e);
        }
        prey.caughtIn(sampleLocation);
        prey.caughtDuring(getOrCreateSeason(seasonName));
        String speciesName = csvParser.getValueByLabel(columnToNormalizedTermMapper.get(PREDATOR_SPECIES));
        String predatorId = csvParser.getValueByLabel(columnToNormalizedTermMapper.get(PREDATOR_SPECIMEN_ID));
        Map<String, Specimen> predatorMap = getPredatorSpecimenMap();
        Specimen predator = predatorMap.get(predatorId);
        if (predator == null) {
            predator = addPredatorSpecimen(csvParser, study, lengthParser, seasonName, sampleLocation, speciesName, predatorId, predatorMap);
        }
        predator.ate(prey);
        Date date = parseCollectionDate(csvParser, study);
        try {
            nodeFactory.setUnixEpochProperty(predator, date);
            nodeFactory.setUnixEpochProperty(prey, date);
        } catch (NodeFactoryException e) {
            throw new StudyImporterException("specimen not associated to study", e);
        }
    }
}
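Each record is anchored to an averaged latitude/longitude keyed on a habitat/region/site id, and a depth, when available, is stored as a negative altitude. A self-contained sketch of that lookup in plain Java follows; the key format and all values are hypothetical, not taken from the Wrast dataset.

import java.util.*;

public class WrastLocationSketch {
    public static void main(String[] args) {
        // hypothetical averaged coordinates keyed on a habitat/region/site id
        Map<String, double[]> averageLocations = new HashMap<>();
        averageLocations.put("reef|region1|site3", new double[]{27.9, -93.6});

        // hypothetical depths in meters keyed on season/region/site/habitat
        Map<String, Double> depthMap = new HashMap<>();
        depthMap.put("fall|region1|site3|reef", 18.5);

        String sampleLocationId = "reef|region1|site3";
        double[] latLng = averageLocations.get(sampleLocationId);
        if (latLng == null) {
            throw new IllegalStateException("no location information for [" + sampleLocationId + "]");
        }

        Double depth = depthMap.get("fall|region1|site3|reef");
        // depth below the surface becomes a negative altitude, or null when no depth is known
        Double altitude = depth == null ? null : -depth;
        System.out.println("lat=" + latLng[0] + ", lng=" + latLng[1] + ", altitude=" + altitude);
    }
}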
Use of org.eol.globi.domain.Specimen in project eol-globi-data by jhpoelen.
The class StudyImporterForStrona, method importStudy.
@Override
public void importStudy() throws StudyImporterException {
    LabeledCSVParser dataParser;
    try {
        dataParser = parserFactory.createParser(RESOURCE_PATH, CharsetConstant.UTF8);
    } catch (IOException e) {
        throw new StudyImporterException("failed to read resource [" + RESOURCE_PATH + "]", e);
    }
    try {
        Study study = nodeFactory.getOrCreateStudy(new StudyImpl("strona2013", SOURCE + " . " + CitationUtil.createLastAccessedString(RESOURCE_PATH), "http://dx.doi.org/10.1890/12-1419.1", SOURCE));
        while (dataParser.getLine() != null) {
            if (importFilter.shouldImportRecord((long) dataParser.getLastLineNumber())) {
                try {
                    String parasiteName = StringUtils.trim(dataParser.getValueByLabel("P_SP"));
                    String hostName = StringUtils.trim(dataParser.getValueByLabel("H_SP"));
                    if (areNamesAvailable(parasiteName, hostName)) {
                        Specimen parasite = nodeFactory.createSpecimen(study, new TaxonImpl(parasiteName, null));
                        Specimen host = nodeFactory.createSpecimen(study, new TaxonImpl(hostName, null));
                        parasite.interactsWith(host, InteractType.PARASITE_OF);
                    }
                } catch (NodeFactoryException | NumberFormatException e) {
                    throw new StudyImporterException("failed to import line [" + (dataParser.lastLineNumber() + 1) + "]", e);
                }
            }
        }
    } catch (IOException | NodeFactoryException e) {
        throw new StudyImporterException("problem importing [" + RESOURCE_PATH + "]", e);
    }
}
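Each accepted record yields a parasite and a host specimen joined by a PARASITE_OF interaction, and an import filter decides per line number whether the record is used. The following is a minimal sketch of that filtering pattern in plain Java; the filter rule, species pairs, and data layout are hypothetical, and System.out.println stands in for interactsWith.

import java.util.*;
import java.util.function.Predicate;

public class ImportFilterSketch {
    public static void main(String[] args) {
        // hypothetical filter: import every other record, e.g. to sample a large archive
        Predicate<Long> shouldImportRecord = lineNumber -> lineNumber % 2 == 0;

        // hypothetical P_SP (parasite) and H_SP (host) pairs
        List<String[]> lines = Arrays.asList(
                new String[]{"Gyrodactylus salaris", "Salmo salar"},
                new String[]{"Lernaea cyprinacea", "Cyprinus carpio"});

        for (int i = 0; i < lines.size(); i++) {
            if (shouldImportRecord.test((long) i)) {
                String parasiteName = lines.get(i)[0].trim();
                String hostName = lines.get(i)[1].trim();
                // in the importer: parasite.interactsWith(host, InteractType.PARASITE_OF)
                System.out.println(parasiteName + " -> PARASITE_OF -> " + hostName);
            }
        }
    }
}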
Use of org.eol.globi.domain.Specimen in project eol-globi-data by jhpoelen.
The class StudyImporterForSimons, method addNextRecordToStudy.
private void addNextRecordToStudy(LabeledCSVParser csvParser, Study study, Map<String, String> columnToNormalizedTermMapper, LengthParser lengthParser) throws StudyImporterException {
    String seasonName = csvParser.getValueByLabel(columnToNormalizedTermMapper.get(SEASON));
    Specimen prey = createAndClassifySpecimen(csvParser.getValueByLabel(columnToNormalizedTermMapper.get(PREY_SPECIES)), study);
    Location sampleLocation = getOrCreateSampleLocation(csvParser, columnToNormalizedTermMapper);
    prey.caughtIn(sampleLocation);
    prey.caughtDuring(getOrCreateSeason(seasonName));
    String speciesName = csvParser.getValueByLabel(columnToNormalizedTermMapper.get(PREDATOR_SPECIES));
    // see https://github.com/jhpoelen/gomexsi/issues/41
    String occurrenceId = csvParser.getValueByLabel("spcode") + csvParser.getValueByLabel("sizecl") + csvParser.getValueByLabel("cruise") + csvParser.getValueByLabel("stcode");
    Map<String, Specimen> predatorMap = getPredatorSpecimenMap();
    Specimen predator = predatorMap.get(occurrenceId);
    if (predator == null) {
        predator = createAndClassifySpecimen(speciesName, study);
        predator.setLengthInMm(lengthParser.parseLengthInMm(csvParser));
        predator.caughtDuring(getOrCreateSeason(seasonName));
        predator.caughtIn(sampleLocation);
        predatorMap.put(occurrenceId, predator);
    }
    predator.ate(prey);
}
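Predator specimens are cached on a composite occurrence id (spcode + sizecl + cruise + stcode) so that multiple prey rows attach to a single predator. A self-contained sketch of that caching in plain Java follows; the row values are hypothetical, and the printed grouping stands in for predator.ate(prey).

import java.util.*;

public class PredatorCacheSketch {
    public static void main(String[] args) {
        // each row: spcode, sizecl, cruise, stcode, prey species (hypothetical values)
        String[][] rows = {
                {"Micropogonias undulatus", "2", "C1", "S7", "Penaeus aztecus"},
                {"Micropogonias undulatus", "2", "C1", "S7", "Squilla empusa"},
                {"Leiostomus xanthurus", "1", "C1", "S7", "Mulinia lateralis"}
        };

        Map<String, List<String>> predatorMap = new LinkedHashMap<>();
        for (String[] row : rows) {
            String occurrenceId = row[0] + row[1] + row[2] + row[3];
            // reuse the same predator entry for every prey row sharing the occurrence id
            predatorMap.computeIfAbsent(occurrenceId, id -> new ArrayList<>()).add(row[4]);
        }

        // the first two rows collapse onto one predator occurrence
        predatorMap.forEach((occurrenceId, preyItems) ->
                System.out.println(occurrenceId + " ate " + preyItems));
    }
}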
Use of org.eol.globi.domain.Specimen in project eol-globi-data by jhpoelen.
The class StudyImporterForWebOfLife, method importNetwork.
public void importNetwork(InteractType interactType1, Location networkLocation, Study study, File file) throws IOException, NodeFactoryException {
    LabeledCSVParser interactions = CSVTSVUtil.createLabeledCSVParser(new FileInputStream(file));
    final String[] targetLabels = interactions.getLabels();
    List<String> targetTaxonNames = new ArrayList<String>();
    List<String> ignoredLabels = Arrays.asList("number of hosts sampled", "");
    for (String targetLabel : targetLabels) {
        String trimmedLabel = StringUtils.trim(targetLabel);
        // keep only labels that are non-blank and not in the ignored list (e.g. "number of hosts sampled")
        if (StringUtils.isNotBlank(targetLabel) && !ignoredLabels.contains(trimmedLabel)) {
            targetTaxonNames.add(targetLabel);
        }
    }
    String[] line;
    while ((line = interactions.getLine()) != null) {
        String sourceTaxonName = line[0];
        final Specimen sourceSpecimen = nodeFactory.createSpecimen(study, new TaxonImpl(sourceTaxonName, null));
        sourceSpecimen.caughtIn(networkLocation);
        for (String targetTaxonName : targetTaxonNames) {
            final String valueByLabel = StringUtils.trim(interactions.getValueByLabel(targetTaxonName));
            if (StringUtils.isNotBlank(valueByLabel) && !StringUtils.equals("0", valueByLabel)) {
                final Specimen targetSpecimen = nodeFactory.createSpecimen(study, new TaxonImpl(targetTaxonName, null));
                targetSpecimen.caughtIn(networkLocation);
                sourceSpecimen.interactsWith(targetSpecimen, interactType1);
            }
        }
    }
}
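The parser walks an interaction matrix: the first column carries source taxa, the remaining column labels carry target taxa, and any non-blank, non-zero cell becomes an interaction. Below is a self-contained sketch of that walk in plain Java; the taxa and cell values are hypothetical, and System.out.println stands in for interactsWith.

import java.util.*;

public class InteractionMatrixSketch {
    public static void main(String[] args) {
        // hypothetical column labels (target taxa) after the unlabeled source-taxon column
        String[] targetTaxonNames = {"Apis mellifera", "Bombus terrestris"};
        // rows: source taxon followed by one cell per target taxon
        String[][] lines = {
                {"Taraxacum officinale", "3", "0"},
                {"Trifolium repens", "0", "1"}
        };

        for (String[] line : lines) {
            String sourceTaxonName = line[0];
            for (int i = 0; i < targetTaxonNames.length; i++) {
                String value = line[i + 1].trim();
                // only non-blank, non-zero cells record an interaction
                if (!value.isEmpty() && !"0".equals(value)) {
                    // in the importer: sourceSpecimen.interactsWith(targetSpecimen, interactType1)
                    System.out.println(sourceTaxonName + " -> " + targetTaxonNames[i]);
                }
            }
        }
    }
}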