Use of org.eol.globi.domain.StudyImpl in the project eol-globi-data by jhpoelen.
Class StudyImporterForJSONLD, method importStudy.
@Override
public void importStudy() throws StudyImporterException {
    // Loads the JSON-LD resource into an RDF model, extracts interaction
    // records with a SPARQL SELECT query, and persists each interaction
    // (study, specimens, location, date) via the node factory.
    Model model;
    try {
        model = buildModel();
    } catch (IOException e) {
        throw new StudyImporterException("failed to import [" + getResourceURI() + "]", e);
    }
    Query query;
    try {
        query = QueryFactory.create(IOUtils.toString(new DatasetLocal().getResource("find-jsonld-interactions.rq"), CharsetConstant.UTF8));
    } catch (IOException e) {
        throw new StudyImporterException("failed to find sparql query", e);
    }
    QueryExecution exec = QueryExecutionFactory.create(query, model);
    try {
        ResultSet results = exec.execSelect();
        while (results.hasNext()) {
            QuerySolution solution = results.nextSolution();
            String subj = solution.get("subj").asResource().getURI();
            String creationDate = solution.get("creationDate").asLiteral().getString();
            String authorURI = solution.get("author").toString();
            String author;
            try {
                author = nodeFactory.getAuthorResolver().findFullName(authorURI);
            } catch (IOException e) {
                // chain the cause: previously the underlying IOException was dropped,
                // hiding why the author lookup failed
                throw new StudyImporterException("failed to resolve author URI [" + authorURI + "]", e);
            }
            final String source1 = author + ". " + new DateTime(parseDate(creationDate)).getYear() + ". " + CitationUtil.createLastAccessedString(getResourceURI().toString());
            Study study = nodeFactory.getOrCreateStudy(new StudyImpl(getResourceURI() + subj, source1, null, subj));
            study.setExternalId(subj);
            Specimen source = createSpecimen(solution, study, "subjTaxon");
            Specimen target = createSpecimen(solution, study, "targetTaxon");
            // interaction predicates arrive as "RO_xxxx" local names; InteractType expects "RO:xxxx"
            String interactType = solution.get("p").asResource().getLocalName();
            InteractType interactType1 = InteractType.typeOf(StringUtils.replace(interactType, "RO_", "RO:"));
            if (interactType1 == null) {
                throw new StudyImporterException("failed to map interaction type [" + interactType + "]");
            }
            String collTime = solution.get("collTime").asLiteral().getString();
            Date date = parseDate(collTime);
            nodeFactory.setUnixEpochProperty(source, date);
            nodeFactory.setUnixEpochProperty(target, date);
            Location loc = nodeFactory.getOrCreateLocation(new LocationImpl(solution.get("collLat").asLiteral().getDouble(), solution.get("collLng").asLiteral().getDouble(), null, null));
            target.caughtIn(loc);
            source.caughtIn(loc);
            source.interactsWith(target, interactType1);
        }
    } catch (NodeFactoryException e) {
        throw new StudyImporterException("failed to import jsonld data in [" + getResourceURI() + "]", e);
    } finally {
        // release query execution resources even when an exception escapes the loop
        exec.close();
    }
}
Use of org.eol.globi.domain.StudyImpl in the project eol-globi-data by jhpoelen.
Class StudyImporterForINaturalist, method importInteraction.
private void importInteraction(Taxon targetTaxon, long observationId, String interactionDataType, InteractType interactionTypeId, JsonNode observation, Taxon sourceTaxon, String interactionTypeName) throws StudyImporterException, NodeFactoryException {
    // Build a stable external identifier and the observation URL up front;
    // both feed into the citation and the study record below.
    String externalId = TaxonomyProvider.ID_PREFIX_INATURALIST + observationId;
    String observationUrl = ExternalIdUtil.urlForExternalId(externalId);
    Date observedOn = getObservationDate(observation);
    StringBuilder citationText = buildCitation(observation, interactionTypeName, targetTaxon.getName(), sourceTaxon.getName(), observedOn);
    citationText.append(CitationUtil.createLastAccessedString(observationUrl));
    // Register (or look up) the study that backs this iNaturalist observation.
    StudyImpl studyTemplate = new StudyImpl(externalId, getSourceString(), null, citationText.toString());
    studyTemplate.setExternalId(observationUrl);
    Study resolvedStudy = nodeFactory.getOrCreateStudy(studyTemplate);
    createAssociation(observationId, interactionDataType, interactionTypeId, observation, targetTaxon, sourceTaxon, resolvedStudy, observedOn);
}
Use of org.eol.globi.domain.StudyImpl in the project eol-globi-data by jhpoelen.
Class StudyImporterForRobledo, method importStudy.
@Override
public void importStudy() throws StudyImporterException {
    // Citation and DOI for the beetle/plant herbivory dataset being imported.
    final String description = "García-Robledo C, Erickson DL, Staines CL, Erwin TL, Kress WJ. Tropical Plant–Herbivore Networks: Reconstructing Species Interactions Using DNA Barcodes Heil M, editor. PLoS ONE [Internet]. 2013 January 8;8(1):e52967. Available from: http://dx.doi.org/10.1371/journal.pone.0052967";
    final String doi = "http://dx.doi.org/10.1371/journal.pone.0052967";
    Study study = nodeFactory.getOrCreateStudy(new StudyImpl("García-Robledo et al 2013", description, doi, description));
    Map<String, String> plantNamesByAbbreviation = buildPlantLookup();
    // spatial location from: http://www.ots.ac.cr/index.php?option=com_content&task=view&id=163&Itemid=348
    Location sampleLocation;
    try {
        sampleLocation = nodeFactory.getOrCreateLocation(new LocationImpl(LocationUtil.parseDegrees("10°26'N"), LocationUtil.parseDegrees("83°59'W"), 35.0, null));
    } catch (NodeFactoryException e) {
        throw new StudyImporterException("failed to create location", e);
    }
    // TODO: need to map date range of collections
    final String studyResource = "robledo/table_s1_extract.csv";
    try {
        LabeledCSVParser parser = parserFactory.createParser(studyResource, CharsetConstant.UTF8);
        // One row per herbivore; columns hold per-plant interaction counts.
        while (parser.getLine() != null) {
            String beetleScientificName = completeBeetleName(parser.getValueByLabel("Herbivore species"));
            Specimen beetle = nodeFactory.createSpecimen(study, new TaxonImpl(beetleScientificName, null));
            beetle.caughtIn(sampleLocation);
            for (Map.Entry<String, String> plantEntry : plantNamesByAbbreviation.entrySet()) {
                String plantAbbreviation = plantEntry.getKey();
                String plantScientificName = plantEntry.getValue();
                String cellValue = parser.getValueByLabel(plantAbbreviation);
                try {
                    // a positive count means the beetle feeds on this plant
                    if (Integer.parseInt(cellValue) > 0) {
                        Specimen plant = nodeFactory.createSpecimen(study, new TaxonImpl(plantScientificName, null));
                        plant.caughtIn(sampleLocation);
                        beetle.ate(plant);
                    }
                } catch (NumberFormatException ex) {
                    getLogger().warn(study, "malformed or no value [" + cellValue + "] found for [" + plantScientificName + "(" + plantAbbreviation + ")] and beetle [" + beetleScientificName + "] could be found in [" + studyResource + ":" + parser.lastLineNumber() + "]");
                }
            }
        }
    } catch (IOException e) {
        throw new StudyImporterException("problem reading [" + studyResource + "]", e);
    } catch (NodeFactoryException e) {
        throw new StudyImporterException("cannot create specimens from [" + studyResource + "]", e);
    }
}
Use of org.eol.globi.domain.StudyImpl in the project eol-globi-data by jhpoelen.
Class StudyImporterForWebOfLife, method importNetworks.
public void importNetworks(String archiveURL, String sourceCitation) throws StudyImporterException {
    // Extracts the web-of-life zip archive: references.csv lists the networks,
    // and every other *.csv entry holds one network's interaction data.
    try {
        File referencesTempFile = null;
        Map<String, File> networkTempFileMap = new HashMap<String, File>();
        ZipInputStream zipInputStream = new ZipInputStream(getDataset().getResource(archiveURL));
        try {
            ZipEntry entry;
            while ((entry = zipInputStream.getNextEntry()) != null) {
                if (entry.getName().matches("(^|(.*/))references\\.csv$")) {
                    referencesTempFile = FileUtils.saveToTmpFile(zipInputStream, entry);
                } else if (entry.getName().matches(".*\\.csv$")) {
                    networkTempFileMap.put(entry.getName().replace(".csv", ""), FileUtils.saveToTmpFile(zipInputStream, entry));
                } else {
                    // drain non-csv entries so the zip stream stays positioned correctly
                    IOUtils.copy(zipInputStream, new NullOutputStream());
                }
            }
        } finally {
            // previously closed only on the happy path; close even if extraction fails
            IOUtils.closeQuietly(zipInputStream);
        }
        if (referencesTempFile == null) {
            throw new StudyImporterException("failed to find expected [references.csv] resource in [" + archiveURL + "]");
        }
        if (networkTempFileMap.size() == 0) {
            throw new StudyImporterException("failed to find expected network csv files");
        }
        BufferedReader assocReader = FileUtils.getUncompressedBufferedReader(new FileInputStream(referencesTempFile), CharsetConstant.UTF8);
        try {
            LabeledCSVParser parser = CSVTSVUtil.createLabeledCSVParser(assocReader);
            // Each reference row names a network id that must have a matching csv entry.
            while (parser.getLine() != null) {
                final String citation = parser.getValueByLabel("Reference");
                if (StringUtils.isBlank(citation)) {
                    throw new StudyImporterException("found missing reference");
                }
                final String networkId = parser.getValueByLabel("ID");
                if (!networkTempFileMap.containsKey(networkId)) {
                    throw new StudyImporterException("found network id [" + networkId + "], but no associated data.");
                }
                final Study study = nodeFactory.getOrCreateStudy(new StudyImpl("bascompte:" + citation, sourceCitation, null, citation));
                importNetwork(parseInteractionType(parser), parseLocation(parser), study, networkTempFileMap.get(networkId));
            }
        } finally {
            // the reader was never closed before; avoid a file-handle leak
            IOUtils.closeQuietly(assocReader);
        }
    } catch (IOException e) {
        throw new StudyImporterException(e);
    } catch (NodeFactoryException e) {
        throw new StudyImporterException(e);
    }
}
Use of org.eol.globi.domain.StudyImpl in the project eol-globi-data by jhpoelen.
Class StudyImporterForRoopnarine, method importStudy.
@Override
public void importStudy() throws StudyImporterException {
    final String suffix = ".csv";
    final String prefix = "roopnarine/857470.item.";
    // item 4 of the dataset maps trophic guild numbers to member species names
    final String trophicGuildLookup = prefix + 4 + suffix;
    final Map<Integer, List<String>> speciesByGuildNumber = buildGuildLookup(trophicGuildLookup);
    Map<String, LatLng> locationsByResource = resourceLocationMap(suffix, prefix);
    Study study = nodeFactory.getOrCreateStudy(new StudyImpl("Roopnarine et al 2013", "Roopnarine, P.D. & Hertog, R., 2013. Detailed Food Web Networks of Three Greater Antillean Coral Reef Systems: The Cayman Islands, Cuba, and Jamaica. DatasetImpl Papers in Ecology, 2013, pp.1–9. Available at: http://dx.doi.org/10.7167/2013/857470.", "http://dx.doi.org/10.7167/2013/857470", null));
    // One interaction resource per reef system, each with its own coordinates.
    for (Map.Entry<String, LatLng> resourceAndLocation : locationsByResource.entrySet()) {
        String interactionResource = resourceAndLocation.getKey();
        LatLng coordinate = resourceAndLocation.getValue();
        Location reefLocation;
        try {
            reefLocation = nodeFactory.getOrCreateLocation(new LocationImpl(coordinate.getLat(), coordinate.getLng(), 0.0, null));
        } catch (NodeFactoryException e) {
            throw new StudyImporterException("failed to create location", e);
        }
        getLogger().info(study, "import of [" + interactionResource + "] started...");
        List<Specimen> predatorSpecimen = importTrophicInteractions(trophicGuildLookup, speciesByGuildNumber, interactionResource, study, reefLocation);
        getLogger().info(study, "import of [" + interactionResource + "] done.");
    }
}
Aggregations