Example usage of org.neo4j.cypher.javacompat.ExecutionEngine in the project eol-globi-data by jhpoelen:
the importAll method of the class StudyImporterForBellTest.
@Test
public void importAll() throws StudyImporterException, NodeFactoryException {
    // Import the Bell (2015) chipmunk parasite dataset and verify the resulting graph.
    StudyImporter bellImporter = new StudyImporterForBell(new ParserFactoryLocal(), nodeFactory);
    importStudy(bellImporter);

    // The dataset is expected to yield exactly six studies.
    List<Study> studies = NodeUtil.findAllStudies(getGraphDb());
    assertThat(studies.size(), is(6));

    // All studies share the same source citation; getCitation() merely starts with it.
    String expectedCitation = "Bell, K. C., Matek, D., Demboski, J. R., & Cook, J. A. (2015). Expanded Host Range of Sucking Lice and Pinworms of Western North American Chipmunks. Comparative Parasitology, 82(2), 312–321. doi:10.1654/4756.1 . Data provided by Kayce C. Bell.";
    for (Study importedStudy : studies) {
        assertThat(importedStudy.getSource(), is(expectedCitation));
        assertThat(importedStudy.getDOI(), is("http://dx.doi.org/10.1654/4756.1"));
        assertThat(importedStudy.getCitation(), startsWith(expectedCitation));
    }

    // Spot-check that taxa were indexed: dump all taxon names via a Cypher index scan.
    String taxonNameDump = new ExecutionEngine(getGraphDb()).execute("START taxon = node:taxons('*:*') RETURN taxon.name").dumpToString();
    assertThat(taxonNameDump, containsString("Tamias"));
    assertThat(taxonIndex.findTaxonByName("Tamias speciosus"), is(notNullValue()));
    assertThat(taxonIndex.findTaxonByName("Hoplopleura arboricola"), is(notNullValue()));
    assertThat(nodeFactory.findStudy("bell-"), is(notNullValue()));
}
Example usage of org.neo4j.cypher.javacompat.ExecutionEngine in the project eol-globi-data by jhpoelen:
the export method of the class ExportNCBIResourceFile.
/**
 * Exports NCBI taxon ids found in the graph as a set of NCBI LinkOut resource files,
 * splitting output into a new file every {@code getLinksPerResourceFile()} links.
 *
 * @param graphService graph database holding the taxon index
 * @param fileFactory  factory producing the output streams for each resource file
 * @throws StudyImporterException if writing a resource file fails
 */
protected void export(GraphDatabaseService graphService, OutputStreamFactory fileFactory) throws StudyImporterException {
    // BUG FIX: the original concatenation produced "...'NCBI:.*'RETURN ..." (no space
    // before RETURN), which is invalid Cypher. A trailing space is now included.
    String query = "START taxon = node:taxons('*:*') "
            + "MATCH taxon-[?:SAME_AS*0..1]->linkedTaxon "
            + "WHERE has(linkedTaxon.externalId) AND linkedTaxon.externalId =~ 'NCBI:.*' "
            + "RETURN distinct(linkedTaxon.externalId) as id";
    ExecutionResult rows = new ExecutionEngine(graphService).execute(query, new HashMap<String, Object>());
    int rowCount = 0;
    OutputStream os = null;
    try {
        List<String> columns = rows.columns();
        for (Map<String, Object> row : rows) {
            // Start a fresh resource file every getLinksPerResourceFile() rows.
            if (rowCount % getLinksPerResourceFile() == 0) {
                close(os);
                os = null;
            }
            for (String column : columns) {
                String taxonId = row.get(column).toString();
                // Strip the "NCBI:" prefix to obtain the bare NCBI taxon id.
                String ncbiTaxonId = StringUtils.replace(taxonId, TaxonomyProvider.ID_PREFIX_NCBI, "");
                String aLink = String.format(" <ObjId>%s</ObjId>\n", ncbiTaxonId);
                // Lazily open the current resource file on first write.
                IOUtils.write(aLink, os == null ? (os = open(fileFactory, rowCount)) : os);
            }
            rowCount++;
        }
        close(os);
        os = null;
    } catch (IOException e) {
        throw new StudyImporterException("failed to export ncbi resources", e);
    } finally {
        // BUG FIX: the original leaked the stream when an exception interrupted the
        // loop; best-effort close here without masking the primary failure.
        if (os != null) {
            try {
                close(os);
            } catch (IOException ignored) {
                // already propagating the original failure
            }
        }
    }
}
Example usage of org.neo4j.cypher.javacompat.ExecutionEngine in the project eol-globi-data by jhpoelen:
the writeResults method of the class ExportUtil.
/**
 * Runs a Cypher query and writes its rows, tab-separated, to the given writer.
 * When requested, a tab-joined header of escaped column names precedes the rows.
 */
public static void writeResults(Writer writer, GraphDatabaseService dbService, String query, HashMap<String, Object> params, boolean includeHeader) throws IOException {
    ExecutionResult queryResult = new ExecutionEngine(dbService).execute(query, params);
    List<String> columnNames = queryResult.columns();
    if (includeHeader) {
        String[] headerValues = columnNames.toArray(new String[columnNames.size()]);
        String escapedHeader = StringUtils.join(CSVTSVUtil.escapeValues(headerValues), '\t');
        writer.write(escapedHeader);
    }
    appendRow(writer, queryResult, columnNames);
}
Example usage of org.neo4j.cypher.javacompat.ExecutionEngine in the project eol-globi-data by jhpoelen:
the importStudy method of the class StudyImporterForHechingerTest.
@Test
public void importStudy() throws StudyImporterException, IOException {
    // Configure the Hechinger et al. 2011 estuary food-web dataset inline.
    JsonNode config = new ObjectMapper().readTree("{ \"citation\": \"Ryan F. Hechinger, Kevin D. Lafferty, John P. McLaughlin, Brian L. Fredensborg, Todd C. Huspeni, Julio Lorda, Parwant K. Sandhu, Jenny C. Shaw, Mark E. Torchin, Kathleen L. Whitney, and Armand M. Kuris 2011. Food webs including parasites, biomass, body sizes, and life stages for three California/Baja California estuaries. Ecology 92:791–791. http://dx.doi.org/10.1890/10-1383.1 .\",\n" + " \"doi\": \"http://dx.doi.org/10.1890/10-1383.1\",\n" + " \"format\": \"hechinger\",\n" + " \"delimiter\": \"\\t\",\n" + " \"resources\": {\n" + " \"nodes\": \"hechinger/Metaweb_Nodes.txt\",\n" + " \"links\": \"hechinger/Metaweb_Links.txt\"\n" + " }\n" + "}");
    DatasetImpl dataset = new DatasetLocal();
    dataset.setConfig(config);
    ParserFactory parserFactory = new ParserFactoryForDataset(dataset);
    StudyImporterForHechinger importer = new StudyImporterForHechinger(parserFactory, nodeFactory);
    importer.setDataset(dataset);
    // Route importer diagnostics to the test log.
    importer.setLogger(new ImportLogger() {
        @Override
        public void warn(LogContext study, String message) {
            LOG.warn(message);
        }

        @Override
        public void info(LogContext study, String message) {
            LOG.info(message);
        }

        @Override
        public void severe(LogContext study, String message) {
            LOG.error(message);
        }
    });
    importStudy(importer);

    Study study = getStudySingleton(getGraphDb());
    assertThat(study, is(notNullValue()));

    // Count all specimen relationships collected by the single imported study.
    Iterable<Relationship> specimens = NodeUtil.getSpecimens(study);
    int count = 0;
    for (Relationship specimen : specimens) {
        count++;
    }
    assertThat(count, is(27932));

    ExecutionEngine engine = new ExecutionEngine(getGraphDb());
    String query = "START resourceTaxon = node:taxons(name='Suaeda spp.')" + " MATCH taxon<-[:CLASSIFIED_AS]-specimen-[r]->resourceSpecimen-[:CLASSIFIED_AS]-resourceTaxon, specimen-[:COLLECTED_AT]->location" + " RETURN taxon.name, specimen.lifeStage?, type(r), resourceTaxon.name, resourceSpecimen.lifeStage?, location.latitude as lat, location.longitude as lng";
    ExecutionResult result = engine.execute(query);
    // BUG FIX: ExecutionResult wraps a one-shot iterator, so dumpToString() consumes
    // it; the original called dumpToString() four times and all but the first call
    // would see an exhausted result. Dump once and assert against the cached string.
    String resultDump = result.dumpToString();
    assertThat(resultDump, containsString("Branta bernicla"));
    assertThat(resultDump, containsString("Athya affinis"));
    assertThat(resultDump, containsString("Anas acuta"));
    assertThat(resultDump, containsString("30.378207 | -115.938835 |"));

    // Collect every taxon name that appears as a parasite and check against a
    // list of taxa that are unlikely to be parasites.
    query = "START taxon = node:taxons('*:*')" + " MATCH taxon<-[:CLASSIFIED_AS]-specimen-[:PARASITE_OF]->resourceSpecimen-[:CLASSIFIED_AS]-resourceTaxon" + " RETURN taxon.name";
    result = engine.execute(query);
    Set<String> actualParasites = new HashSet<String>();
    for (Map<String, Object> row : result) {
        actualParasites.add((String) row.get("taxon.name"));
    }
    assertThat(actualParasites.size() > 0, is(true));
    for (String unlikelyParasite : unlikelyParasites()) {
        assertThat(actualParasites, not(hasItem(unlikelyParasite)));
    }

    // Trypanorhyncha (kind of tapeworms) are typically parasites, not prey
    query = "START resourceTaxon = node:taxons(name='Trypanorhyncha')" + " MATCH taxon<-[:CLASSIFIED_AS]-specimen-[r:PREYS_UPON]->resourceSpecimen-[:CLASSIFIED_AS]-resourceTaxon" + " RETURN specimen.externalId + type(r) + resourceSpecimen.externalId as `resourceExternalId`";
    result = engine.execute(query);
    Set<String> actualPrey = new HashSet<String>();
    for (Map<String, Object> row : result) {
        actualPrey.add((String) row.get("resourceExternalId"));
    }
    assertThat(actualPrey.size(), is(0));
}
Aggregations