Use of org.molgenis.data.support.QueryImpl in project molgenis by molgenis.
In class OntologyScriptInitializerImpl, the method initialize:
@Override
@RunAsSystem
public void initialize() {
  Resource resource = new ClassPathResource("roc-curve.R");
  if (resource.exists()) {
    long count = dataService.count(SCRIPT, new QueryImpl<>().eq(ScriptMetaData.NAME, ROC_CURVE_SCRIPT_NAME));
    if (count == 0) {
      Entity scriptType = dataService.findOne(ScriptTypeMetaData.SCRIPT_TYPE,
          new QueryImpl<>().eq(ScriptTypeMetaData.NAME, "R"));
      if (scriptType == null)
        throw new UnknownEntityException("ScriptType R does not exist!");
      String scriptContent;
      try {
        scriptContent = FileCopyUtils.copyToString(new InputStreamReader(resource.getInputStream(), "UTF-8"));
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }
      if (dataService.count(SCRIPT_PARAMETER,
          new QueryImpl<>().eq(ScriptParameterMetaData.NAME, ROC_CURVE_SCRIPT_PARAMETER)) == 0) {
        dataService.add(SCRIPT_PARAMETER, scriptParameterFactory.create().setName(ROC_CURVE_SCRIPT_PARAMETER));
      }
      Entity scriptParameterEntity = dataService.findOne(SCRIPT_PARAMETER,
          new QueryImpl<>().eq(ScriptParameterMetaData.NAME, ROC_CURVE_SCRIPT_PARAMETER));
      Script script = scriptFactory.create();
      script.setName(ROC_CURVE_SCRIPT_NAME);
      script.setGenerateToken(true);
      script.set(ScriptMetaData.TYPE, scriptType);
      script.setResultFileExtension("png");
      script.setContent(scriptContent);
      script.set(ScriptMetaData.PARAMETERS, Arrays.asList(scriptParameterEntity));
      dataService.add(SCRIPT, script);
      LOG.info("Script entity \"roc\" has been added to the database!");
    } else {
      LOG.info("Script entity \"roc\" already exists in the database!");
    }
  } else {
    LOG.info("R script \"roc-curve.R\" does not exist on classpath!");
  }
}
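The initializer uses QueryImpl's fluent builder for both existence checks: count with an equality filter decides whether the script and its parameter already exist, and findOne with the same kind of filter retrieves a single entity (or null). A minimal sketch of that pattern, assuming an injected dataService; the entity name "MyScript" and attribute "name" are hypothetical placeholders, not MOLGENIS constants:

// Sketch only: "MyScript" and "name" are placeholders for illustration.
Query<Entity> byName = new QueryImpl<>().eq("name", "roc");   // equality filter via the fluent builder
if (dataService.count("MyScript", byName) == 0) {
  // absent: create and add it, mirroring the SCRIPT_PARAMETER check above
}
Entity match = dataService.findOne("MyScript", byName);       // single result, or null when nothing matches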
Use of org.molgenis.data.support.QueryImpl in project molgenis by molgenis.
In class SortaController, the method download:
@GetMapping("/match/download/{sortaJobExecutionId}")
public void download(@PathVariable String sortaJobExecutionId, HttpServletResponse response) throws IOException {
  try (CsvWriter csvWriter = new CsvWriter(response.getOutputStream(), SortaServiceImpl.DEFAULT_SEPARATOR)) {
    SortaJobExecution sortaJobExecution = findSortaJobExecution(sortaJobExecutionId);
    response.setContentType("text/csv");
    response.addHeader("Content-Disposition", "attachment; filename=" + generateCsvFileName());
    List<String> columnHeaders = new ArrayList<>();
    EntityType targetMetadata = entityTypeFactory.create("SortaDownload" + sortaJobExecutionId);
    EntityType sourceMetaData = dataService.getEntityType(sortaJobExecution.getSourceEntityName());
    for (Attribute attribute : sourceMetaData.getAttributes()) {
      if (!attribute.getName().equalsIgnoreCase(SortaCsvRepository.ALLOWED_IDENTIFIER)) {
        columnHeaders.add(attribute.getName());
        targetMetadata.addAttribute(attrMetaFactory.create().setName(attribute.getName()));
      }
    }
    columnHeaders.addAll(Arrays.asList(OntologyTermMetaData.ONTOLOGY_TERM_NAME, OntologyTermMetaData.ONTOLOGY_TERM_IRI,
        MatchingTaskContentMetaData.SCORE, MatchingTaskContentMetaData.VALIDATED));
    targetMetadata.addAttribute(ontologyTermMetaData.getAttribute(OntologyTermMetaData.ONTOLOGY_TERM_NAME));
    targetMetadata.addAttribute(ontologyTermMetaData.getAttribute(OntologyTermMetaData.ONTOLOGY_TERM_IRI));
    targetMetadata.addAttribute(Attribute.newInstance(matchingTaskContentMetaData.getAttribute(MatchingTaskContentMetaData.SCORE),
        EntityType.AttributeCopyMode.SHALLOW_COPY_ATTRS, attrMetaFactory).setDataType(AttributeType.STRING));
    targetMetadata.addAttribute(matchingTaskContentMetaData.getAttribute(MatchingTaskContentMetaData.VALIDATED));
    csvWriter.writeAttributeNames(columnHeaders);
    dataService.findAll(sortaJobExecution.getResultEntityName(), new QueryImpl<>())
        .forEach(resultEntity -> csvWriter.add(toDownloadRow(sortaJobExecution, resultEntity, targetMetadata)));
  }
}
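The export intentionally passes an empty QueryImpl to findAll, so the whole result repository is streamed and written without filtering or paging. A reduced sketch of that pattern, assuming the injected dataService and an open CsvWriter as above; the entity name is a hypothetical placeholder and the rows are written unmapped:

// Sketch: stream every entity of a (hypothetical) result entity type straight into the CSV writer.
Stream<Entity> rows = dataService.findAll("sorta_result_xyz", new QueryImpl<>()); // empty query = no restrictions
rows.forEach(csvWriter::add); // assumes CsvWriter accepts entities, as the forEach call above suggests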
Use of org.molgenis.data.support.QueryImpl in project molgenis by molgenis.
In class SortaController, the method retrieveSortaJobResults:
@PostMapping("/match/retrieve")
@ResponseBody
public EntityCollectionResponse retrieveSortaJobResults(@RequestBody SortaServiceRequest sortaServiceRequest) {
  List<Map<String, Object>> entityMaps = new ArrayList<>();
  String sortaJobExecutionId = sortaServiceRequest.getSortaJobExecutionId();
  String filterQuery = sortaServiceRequest.getFilterQuery();
  String ontologyIri = sortaServiceRequest.getOntologyIri();
  EntityPager entityPager = sortaServiceRequest.getEntityPager();
  SortaJobExecution sortaJobExecution = findSortaJobExecution(sortaJobExecutionId);
  String resultEntityName = sortaJobExecution.getResultEntityName();
  double threshold = sortaJobExecution.getThreshold();
  boolean isMatched = sortaServiceRequest.isMatched();
  QueryRule queryRuleInputEntities = new QueryRule(Arrays.asList(
      new QueryRule(VALIDATED, EQUALS, isMatched),
      new QueryRule(isMatched ? OR : AND),
      new QueryRule(SCORE, isMatched ? GREATER_EQUAL : LESS, threshold)));
  List<QueryRule> queryRuleInputEntitiesInOneMatchingTask = singletonList(queryRuleInputEntities);
  // Add filter to the query if query string is not empty
  if (isNotEmpty(filterQuery)) {
    Iterable<String> filteredInputTermIds = dataService
        .findAll(sortaJobExecution.getSourceEntityName(), new QueryImpl<>().search(filterQuery))
        .map(inputEntity -> inputEntity.getString(SortaServiceImpl.DEFAULT_MATCHING_IDENTIFIER))
        .collect(Collectors.toList());
    QueryRule previousQueryRule = new QueryRule(queryRuleInputEntitiesInOneMatchingTask);
    QueryRule queryRuleFilterInput = new QueryRule(MatchingTaskContentMetaData.INPUT_TERM, Operator.IN, filteredInputTermIds);
    queryRuleInputEntitiesInOneMatchingTask = Arrays.asList(previousQueryRule, new QueryRule(Operator.AND), queryRuleFilterInput);
  }
  Query<Entity> query = new QueryImpl<>(queryRuleInputEntitiesInOneMatchingTask);
  long count = dataService.count(resultEntityName, query);
  int start = entityPager.getStart();
  int num = entityPager.getNum();
  Stream<Entity> findAll = dataService.findAll(sortaJobExecution.getResultEntityName(),
      query.offset(start).pageSize(num).sort(new Sort().on(VALIDATED, DESC).on(SCORE, DESC)));
  findAll.forEach(mappingEntity -> {
    Map<String, Object> outputEntity = new HashMap<>();
    outputEntity.put("inputTerm", getEntityAsMap(mappingEntity.getEntity(INPUT_TERM)));
    outputEntity.put("matchedTerm", getEntityAsMap(mappingEntity));
    Object matchedTerm = mappingEntity.get(MATCHED_TERM);
    if (matchedTerm != null) {
      outputEntity.put("ontologyTerm",
          SortaServiceUtil.getEntityAsMap(sortaService.getOntologyTermEntity(matchedTerm.toString(), ontologyIri)));
    }
    entityMaps.add(outputEntity);
  });
  EntityPager pager = new EntityPager(start, num, count, null);
  return new EntityCollectionResponse(pager, entityMaps, "/match/retrieve", ontologyTermMetaData, permissionService, dataService);
}
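The matched/unmatched filter above is built entirely from nested QueryRules: one grouping rule wraps VALIDATED = isMatched combined (OR for matched, AND for unmatched) with the score comparison, and the optional free-text filter is ANDed on as an IN rule over input-term identifiers before paging and sorting are applied. A condensed sketch of the same composition, assuming the statically imported constants used above; the threshold and identifier values are illustrative only:

// Grouping rule: validated == true OR score >= 0.8 (0.8 is an illustrative threshold)
QueryRule matchedGroup = new QueryRule(Arrays.asList(
    new QueryRule(VALIDATED, EQUALS, true),
    new QueryRule(OR),
    new QueryRule(SCORE, GREATER_EQUAL, 0.8)));
// AND it with an IN filter over (hypothetical) input-term ids, then page and sort
Query<Entity> query = new QueryImpl<>(Arrays.asList(
    matchedGroup,
    new QueryRule(Operator.AND),
    new QueryRule(MatchingTaskContentMetaData.INPUT_TERM, Operator.IN, Arrays.asList("id0", "id1"))));
query.offset(0).pageSize(20).sort(new Sort().on(VALIDATED, DESC).on(SCORE, DESC));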
Use of org.molgenis.data.support.QueryImpl in project molgenis by molgenis.
In class SortaServiceImplTest, the method beforeMethod:
@BeforeMethod
public void beforeMethod() {
  // Mock ontology entity
  Ontology ontology = ontologyFactory.create();
  ontology.setOntologyIri(ONTOLOGY_IRI);
  // define dataService actions for test one
  when(dataService.findOne(ONTOLOGY, new QueryImpl<>().eq(OntologyMetaData.ONTOLOGY_IRI, ONTOLOGY_IRI))).thenReturn(ontology);
  when(dataService.count(ONTOLOGY_TERM, new QueryImpl<>().eq(OntologyTermMetaData.ONTOLOGY, ontology))).thenReturn((long) 100);
  QueryRule queryRule = new QueryRule(singletonList(new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH, "hear")));
  queryRule.setOperator(DIS_MAX);
  when(dataService.count(ONTOLOGY_TERM, new QueryImpl<>(queryRule))).thenReturn((long) 50);
  QueryRule queryRule2 = new QueryRule(singletonList(new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH, "impair")));
  queryRule2.setOperator(DIS_MAX);
  when(dataService.count(ONTOLOGY_TERM, new QueryImpl<>(queryRule2))).thenReturn((long) 50);
  when(dataService.findAll(ONTOLOGY)).thenReturn(Collections.<Entity>singletonList(ontology).stream());
  // ########################### TEST ONE ###########################
  // Mock the first ontology term entity only with name
  OntologyTermSynonym ontologyTermSynonym0 = ontologyTermSynonymFactory.create();
  ontologyTermSynonym0.setOntologyTermSynonym("hearing impairment");
  OntologyTerm ontologyTerm0 = ontologyTermFactory.create();
  ontologyTerm0.setId("1");
  ontologyTerm0.setOntology(ontology);
  ontologyTerm0.setOntologyTermName("hearing impairment");
  ontologyTerm0.setOntologyTermIri(ONTOLOGY_IRI + '1');
  ontologyTerm0.setOntologyTermSynonyms(singletonList(ontologyTermSynonym0));
  ontologyTerm0.setOntologyTermDynamicAnnotations(emptyList());
  // Mock the second ontology term entity only with name
  OntologyTermSynonym ontologyTermSynonym1 = ontologyTermSynonymFactory.create();
  ontologyTermSynonym1.setOntologyTermSynonym("mixed hearing impairment");
  OntologyTerm ontologyTerm1 = ontologyTermFactory.create();
  ontologyTerm1.setId("2");
  ontologyTerm1.setOntology(ontology);
  ontologyTerm1.setOntologyTermName("mixed hearing impairment");
  ontologyTerm1.setOntologyTermIri(ONTOLOGY_IRI + '2');
  ontologyTerm1.setOntologyTermSynonyms(singletonList(ontologyTermSynonym1));
  ontologyTerm1.setOntologyTermDynamicAnnotations(emptyList());
  // DataService action for regular matching ontology term synonyms
  QueryRule disMaxRegularQueryRule = new QueryRule(singletonList(
      new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH, "hear~0.8 impair~0.8")));
  disMaxRegularQueryRule.setOperator(DIS_MAX);
  List<QueryRule> finalQueryRules = asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontology),
      new QueryRule(AND), disMaxRegularQueryRule);
  when(dataService.findAll(ONTOLOGY_TERM, new QueryImpl<>(finalQueryRules).pageSize(50)))
      .thenReturn(Arrays.<Entity>asList(ontologyTerm0, ontologyTerm1).stream());
  // DataService action for n-gram matching ontology term synonyms
  QueryRule disMaxNGramQueryRule = new QueryRule(singletonList(
      new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH_NGRAM, "hear impair")));
  disMaxNGramQueryRule.setOperator(DIS_MAX);
  when(dataService.findAll(ONTOLOGY_TERM,
      new QueryImpl<>(asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontology), new QueryRule(AND),
          disMaxNGramQueryRule)).pageSize(10)))
      .thenReturn(Arrays.<Entity>asList(ontologyTerm0, ontologyTerm1).stream());
  // DataService action for querying specific ontology term based on ontologyIRI and ontologyTermIRI
  when(dataService.findOne(ONTOLOGY_TERM,
      new QueryImpl<>().eq(OntologyTermMetaData.ONTOLOGY_TERM_IRI, ONTOLOGY_IRI + '1')
          .and().eq(OntologyTermMetaData.ONTOLOGY, ontology))).thenReturn(ontologyTerm0);
  when(dataService.findOne(ONTOLOGY_TERM,
      new QueryImpl<>().eq(OntologyTermMetaData.ONTOLOGY_TERM_IRI, ONTOLOGY_IRI + '2')
          .and().eq(OntologyTermMetaData.ONTOLOGY, ontology))).thenReturn(ontologyTerm1);
  // ########################### TEST TWO ###########################
  OntologyTermSynonym ontologyTermSynonym2 = ontologyTermSynonymFactory.create();
  ontologyTermSynonym2.setOntologyTermSynonym("ot_3");
  // Mock ontologyTermDynamicAnnotation entities
  OntologyTermDynamicAnnotation ontologyTermDynamicAnnotation_3_1 = ontologyTermDynamicAnnotationFactory.create();
  ontologyTermDynamicAnnotation_3_1.setName("OMIM");
  ontologyTermDynamicAnnotation_3_1.setValue("123456");
  ontologyTermDynamicAnnotation_3_1.setLabel("OMIM:123456");
  // Mock ontologyTerm entity based on the previous entities defined
  OntologyTerm ontologyTermEntity_3 = ontologyTermFactory.create();
  ontologyTermEntity_3.setId("3");
  ontologyTermEntity_3.setOntology(ontology);
  ontologyTermEntity_3.setOntologyTermName("ot_3");
  ontologyTermEntity_3.setOntologyTermIri(ONTOLOGY_IRI + '3');
  // self reference intended? Arrays.asList(ontologyTermEntity_3)
  ontologyTermEntity_3.setOntologyTermSynonyms(singletonList(ontologyTermSynonym2));
  ontologyTermEntity_3.set(OntologyTermMetaData.ONTOLOGY_TERM_DYNAMIC_ANNOTATION, singletonList(ontologyTermDynamicAnnotation_3_1));
  // DataService action for matching ontology term annotation
  QueryRule annotationQueryRule = new QueryRule(asList(
      new QueryRule(OntologyTermDynamicAnnotationMetaData.NAME, EQUALS, "OMIM"), new QueryRule(AND),
      new QueryRule(OntologyTermDynamicAnnotationMetaData.VALUE, EQUALS, "123456")));
  when(dataService.findAll(ONTOLOGY_TERM_DYNAMIC_ANNOTATION,
      new QueryImpl<>(singletonList(annotationQueryRule)).pageSize(Integer.MAX_VALUE)))
      .thenReturn(Collections.<Entity>singletonList(ontologyTermDynamicAnnotation_3_1).stream());
  when(dataService.findAll(ONTOLOGY_TERM,
      new QueryImpl<>(asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontology), new QueryRule(AND),
          new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_DYNAMIC_ANNOTATION, IN,
              singletonList(ontologyTermDynamicAnnotation_3_1)))).pageSize(Integer.MAX_VALUE)))
      .thenReturn(Collections.<Entity>singletonList(ontologyTermEntity_3).stream());
  // DataService action for elasticsearch regular matching ontology term synonyms
  QueryRule disMaxRegularQueryRule_2 = new QueryRule(singletonList(
      new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH, "input~0.8")));
  disMaxRegularQueryRule_2.setOperator(DIS_MAX);
  when(dataService.findAll(ONTOLOGY_TERM,
      new QueryImpl<>(asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontology), new QueryRule(AND),
          disMaxRegularQueryRule_2)).pageSize(49))).thenReturn(Stream.empty());
  // DataService action for n-gram matching ontology term synonyms
  QueryRule disMaxNGramQueryRule_2 = new QueryRule(singletonList(
      new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH_NGRAM, "input")));
  disMaxNGramQueryRule_2.setOperator(DIS_MAX);
  when(dataService.findAll(ONTOLOGY_TERM,
      new QueryImpl<>(asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontology), new QueryRule(AND),
          disMaxNGramQueryRule_2)).pageSize(10))).thenReturn(Stream.empty());
  // ########################### TEST THREE ###########################
  // Define the input for test three
  OntologyTermSynonym ontologyTermSynonym_4_1 = ontologyTermSynonymFactory.create();
  ontologyTermSynonym_4_1.setOntologyTermSynonym("protruding eye");
  OntologyTermSynonym ontologyTermSynonym_4_2 = ontologyTermSynonymFactory.create();
  ontologyTermSynonym_4_2.setOntologyTermSynonym("proptosis");
  OntologyTermSynonym ontologyTermSynonym_4_3 = ontologyTermSynonymFactory.create();
  ontologyTermSynonym_4_3.setOntologyTermSynonym("Exophthalmos");
  // Mock ontologyTerm entity based on the previous entities defined
  OntologyTerm ontologyTermEntity_4 = ontologyTermFactory.create();
  ontologyTermEntity_4.setId("4");
  ontologyTermEntity_4.setOntology(ontology);
  ontologyTermEntity_4.setOntologyTermName("protruding eye");
  ontologyTermEntity_4.setOntologyTermIri(ONTOLOGY_IRI + '4');
  ontologyTermEntity_4.setOntologyTermSynonyms(asList(ontologyTermSynonym_4_1, ontologyTermSynonym_4_2, ontologyTermSynonym_4_3));
  ontologyTermEntity_4.setOntologyTermDynamicAnnotations(emptyList());
  // DataService action for elasticsearch regular matching ontology term synonyms
  QueryRule disMaxRegularQueryRule_3 = new QueryRule(singletonList(
      new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH, "proptosi~0.8 protrud~0.8 ey~0.8 exophthalmo~0.8")));
  disMaxRegularQueryRule_3.setOperator(DIS_MAX);
  when(dataService.findAll(ONTOLOGY_TERM,
      new QueryImpl<>(asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontology), new QueryRule(AND),
          disMaxRegularQueryRule_3)).pageSize(50)))
      .thenReturn(Collections.<Entity>singletonList(ontologyTermEntity_4).stream());
  // DataService action for elasticsearch ngram matching ontology term synonyms
  QueryRule disMaxNGramQueryRule_3 = new QueryRule(singletonList(
      new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, FUZZY_MATCH_NGRAM, "proptosi protrud ey exophthalmo")));
  disMaxNGramQueryRule_3.setOperator(QueryRule.Operator.DIS_MAX);
  when(dataService.findAll(ONTOLOGY_TERM,
      new QueryImpl<>(asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontology), new QueryRule(AND),
          disMaxNGramQueryRule_3)).pageSize(10)))
      .thenReturn(Collections.<Entity>singletonList(ontologyTermEntity_4).stream());
}
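All of the stubs above pass fully constructed QueryImpl and QueryRule objects to Mockito's when(...), so the stubbing relies on the query types comparing by value: when SortaServiceImpl later builds an equal query, the stubbed answer is returned. A simplified sketch of that mechanism using a plain equality query instead of the DIS_MAX rules above (the synonym value and count are illustrative):

// Stub keyed on query value: an equal query built later by the code under test will hit it.
when(dataService.count(ONTOLOGY_TERM, new QueryImpl<>().eq(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, "hear")))
    .thenReturn(50L);
// Elsewhere, the service constructs its own, structurally equal, query and receives the stubbed count:
long hits = dataService.count(ONTOLOGY_TERM, new QueryImpl<>().eq(OntologyTermMetaData.ONTOLOGY_TERM_SYNONYM, "hear")); // 50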
Use of org.molgenis.data.support.QueryImpl in project molgenis by molgenis.
In class SortaServiceImpl, the method annotationMatchOntologyTerms:
private void annotationMatchOntologyTerms(Entity inputEntity, Entity ontologyEntity, List<Entity> relevantEntities,
    List<QueryRule> rulesForOtherFields) {
  List<Entity> ontologyTermAnnotationEntities = dataService
      .findAll(ONTOLOGY_TERM_DYNAMIC_ANNOTATION, new QueryImpl<>(rulesForOtherFields).pageSize(Integer.MAX_VALUE))
      .collect(Collectors.toList());
  if (!ontologyTermAnnotationEntities.isEmpty()) {
    List<QueryRule> rules = Arrays.asList(new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontologyEntity),
        new QueryRule(AND),
        new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_DYNAMIC_ANNOTATION, IN, ontologyTermAnnotationEntities));
    Stream<Entity> ontologyTermEntities = dataService.findAll(ONTOLOGY_TERM, new QueryImpl<>(rules).pageSize(Integer.MAX_VALUE));
    List<Entity> relevantOntologyTermEntities = ontologyTermEntities
        .map(ontologyTermEntity -> calculateNGromOTAnnotations(inputEntity, ontologyTermEntity))
        .collect(Collectors.toList());
    relevantEntities.addAll(relevantOntologyTermEntities);
  }
}
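The helper resolves annotation-based matches in two passes: it first collects the OntologyTermDynamicAnnotation entities that satisfy the caller's name/value rules, then selects the ontology terms whose annotation reference is IN that collection, with pageSize(Integer.MAX_VALUE) effectively disabling paging on both queries. A condensed sketch of the second pass, assuming an already fetched annotations list; the variable names are illustrative:

// Condensed restatement of the second query: terms of one ontology whose annotations are in `annotations`.
List<QueryRule> termRules = Arrays.asList(
    new QueryRule(OntologyTermMetaData.ONTOLOGY, EQUALS, ontologyEntity),
    new QueryRule(AND),
    new QueryRule(OntologyTermMetaData.ONTOLOGY_TERM_DYNAMIC_ANNOTATION, IN, annotations));
List<Entity> terms = dataService.findAll(ONTOLOGY_TERM, new QueryImpl<>(termRules).pageSize(Integer.MAX_VALUE))
    .collect(Collectors.toList());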