Use of com.b2international.snowowl.snomed.common.SnomedConstants.Concepts in project snow-owl by b2ihealthcare.
From class SnomedRf2ExportRequest, method getIdentifierConcepts:
private List<SnomedConcept> getIdentifierConcepts(final RepositoryContext context, final String currentVersion) {
final Collection<String> refSetsToLoad;
if (refSets == null) {
// Retrieve all reference sets if refSets is null
final Request<BranchContext, SnomedReferenceSets> refSetRequest = SnomedRequests.prepareSearchRefSet().all().build();
final SnomedReferenceSets allReferenceSets = execute(context, currentVersion, refSetRequest);
refSetsToLoad = allReferenceSets.stream().map(r -> r.getId()).collect(Collectors.toSet());
} else {
refSetsToLoad = refSets;
}
final SnomedConceptSearchRequestBuilder refSetRequestBuilder = SnomedRequests.prepareSearchConcept()
		.all()
		.filterByIds(refSetsToLoad)
		.setExpand("pt(),referenceSet()")
		.setLocales(locales());
final Request<BranchContext, SnomedConcepts> request = refSetRequestBuilder.build();
final SnomedConcepts referenceSets = execute(context, currentVersion, request);
// Return only the identifier concepts which have an existing reference set on this branch
return referenceSets.stream().filter(c -> c.getReferenceSet() != null).collect(Collectors.toList());
}
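For illustration, a minimal sketch of how the returned identifier concepts might be consumed when assembling the export, grouping the exportable reference set IDs by type. Only getId() and getReferenceSet() are used by the method above; the getType() accessor on the expanded reference set and the usual java.util imports are assumptions.
// Illustrative sketch (not part of SnomedRf2ExportRequest): group exportable reference set IDs by reference set type.
// Assumes the expanded SnomedReferenceSet exposes getType(); adjust to the actual API if it differs.
final Map<SnomedRefSetType, List<String>> refSetIdsByType = getIdentifierConcepts(context, currentVersion).stream()
		.collect(Collectors.groupingBy(
				concept -> concept.getReferenceSet().getType(),
				Collectors.mapping(SnomedConcept::getId, Collectors.toList())));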
Use of com.b2international.snowowl.snomed.common.SnomedConstants.Concepts in project snow-owl by b2ihealthcare.
From class Taxonomies, method getStatements:
private static Collection<Object[]> getStatements(RevisionSearcher searcher, LongCollection conceptIds, String characteristicTypeId, boolean filterByConceptIds) throws IOException {
// merge stated relationships and OWL axiom relationships into a single array
ImmutableList.Builder<Object[]> isaStatementsBuilder = ImmutableList.builder();
final Set<String> concepts = LongSets.toStringSet(conceptIds);
ExpressionBuilder activeIsaRelationshipQuery = Expressions.builder()
		.filter(active())
		.filter(typeId(Concepts.IS_A))
		.filter(characteristicTypeId(characteristicTypeId));
if (filterByConceptIds) {
activeIsaRelationshipQuery.filter(sourceIds(concepts)).filter(destinationIds(concepts));
}
final Query<String[]> activeStatedISARelationshipsQuery = Query.select(String[].class)
		.from(SnomedRelationshipIndexEntry.class)
		.fields(SnomedRelationshipIndexEntry.Fields.ID, SnomedRelationshipIndexEntry.Fields.SOURCE_ID, SnomedRelationshipIndexEntry.Fields.DESTINATION_ID)
		.where(activeIsaRelationshipQuery.build())
		.limit(Integer.MAX_VALUE)
		.build();
Hits<String[]> activeIsaRelationships = searcher.search(activeStatedISARelationshipsQuery);
activeIsaRelationships.forEach(activeIsaRelationship -> {
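// each IS A statement is recorded as { statement ID, source concept ID (long), destination concept IDs (long[]) }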
isaStatementsBuilder.add(new Object[] { activeIsaRelationship[0], Long.parseLong(activeIsaRelationship[1]), new long[] { Long.parseLong(activeIsaRelationship[2]) } });
});
activeIsaRelationships = null;
if (Concepts.STATED_RELATIONSHIP.equals(characteristicTypeId)) {
// search existing axioms defined for the given set of conceptIds
ExpressionBuilder activeOwlAxiomMemberQuery = Expressions.builder().filter(active());
if (filterByConceptIds) {
activeOwlAxiomMemberQuery
		.filter(SnomedRefSetMemberIndexEntry.Expressions.referencedComponentIds(concepts))
		.filter(Expressions.nestedMatch(SnomedRefSetMemberIndexEntry.Fields.CLASS_AXIOM_RELATIONSHIP, Expressions.builder()
				.filter(typeId(Concepts.IS_A))
				.filter(destinationIds(concepts))
				.build()));
} else {
activeOwlAxiomMemberQuery.filter(Expressions.nestedMatch(SnomedRefSetMemberIndexEntry.Fields.CLASS_AXIOM_RELATIONSHIP, Expressions.builder()
		.filter(typeId(Concepts.IS_A))
		.build()));
}
final Query<SnomedRefSetMemberIndexEntry> activeAxiomISARelationshipsQuery = Query.select(SnomedRefSetMemberIndexEntry.class)
		.where(activeOwlAxiomMemberQuery.build())
		.limit(Integer.MAX_VALUE)
		.build();
Hits<SnomedRefSetMemberIndexEntry> activeAxiomISARelationships = searcher.search(activeAxiomISARelationshipsQuery);
activeAxiomISARelationships.forEach(owlMember -> {
if (!CompareUtils.isEmpty(owlMember.getClassAxiomRelationships())) {
// XXX: breaks with a NumberFormatException if any of the IS A relationships has a value
long[] destinationIds = owlMember.getClassAxiomRelationships().stream()
		.filter(classAxiom -> Concepts.IS_A.equals(classAxiom.getTypeId()))
		.map(SnomedOWLRelationshipDocument::getDestinationId)
		.mapToLong(Long::parseLong)
		.toArray();
isaStatementsBuilder.add(new Object[] { owlMember.getId(), Long.parseLong(owlMember.getReferencedComponentId()), destinationIds });
}
});
activeAxiomISARelationships = null;
}
return isaStatementsBuilder.build();
}
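To make the statement layout concrete, here is a sketch of how a caller inside Taxonomies might fold these triples into a source-to-destinations map. The consumer is illustrative only, assumes the usual java.util imports, and is not part of the class.
// Illustrative consumer of the statements returned above; not part of Taxonomies.
final Map<Long, Set<Long>> destinationsBySource = new HashMap<>();
for (Object[] statement : getStatements(searcher, conceptIds, characteristicTypeId, filterByConceptIds)) {
	final long sourceId = (Long) statement[1];
	for (long destinationId : (long[]) statement[2]) {
		destinationsBySource.computeIfAbsent(sourceId, key -> new HashSet<>()).add(destinationId);
	}
}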
Use of com.b2international.snowowl.snomed.common.SnomedConstants.Concepts in project snow-owl by b2ihealthcare.
From class NormalFormGenerator, method computeChanges:
@Override
public final void computeChanges(final IProgressMonitor monitor,
		final OntologyChangeProcessor<StatementFragment> statementProcessor,
		final OntologyChangeProcessor<ConcreteDomainFragment> concreteDomainProcessor) {
final Stopwatch stopwatch = Stopwatch.createStarted();
LOGGER.info(">>> Distribution normal form generation");
final LongList entries = reasonerTaxonomy.getIterationOrder();
final SubMonitor subMonitor = SubMonitor.convert(monitor, "Generating distribution normal form...", entries.size() * 2);
try {
LongSet previousLayer = null;
LongSet currentLayer = PrimitiveSets.newLongOpenHashSet();
final Set<Long> graphTypeIds = reasonerTaxonomy.getPropertyChains().stream().map(PropertyChain::getDestinationType).collect(Collectors.toSet());
// The first round can be skipped entirely, if no type IDs participate in a property chain
final boolean propertyChainsPresent = !graphTypeIds.isEmpty();
if (propertyChainsPresent) {
// Initialize node graphs for properties we need to traverse
LOGGER.info("--- Initializing node graphs for types: {}", graphTypeIds);
graphTypeIds.forEach(id -> transitiveNodeGraphs.put(id, new NodeGraph()));
// Round 1: build alternative hierarchies
for (final LongIterator itr = entries.iterator(); itr.hasNext(); /* empty */) {
final long conceptId = itr.next();
if (conceptId == ReasonerTaxonomyInferrer.DEPTH_CHANGE) {
if (previousLayer != null) {
invalidate(previousLayer);
}
previousLayer = currentLayer;
currentLayer = PrimitiveSets.newLongOpenHashSet();
continue;
}
precomputeProperties(conceptId, false);
final Collection<StatementFragment> inferredNonIsAFragments = statementCache.get(conceptId);
inferredNonIsAFragments.stream()
		.filter(r -> transitiveNodeGraphs.keySet().contains(r.getTypeId()))
		.filter(StatementFragmentWithDestination.class::isInstance)
		.map(StatementFragmentWithDestination.class::cast)
		.forEachOrdered(r -> transitiveNodeGraphs.get(r.getTypeId()).addParent(conceptId, r.getDestinationId()));
}
// Clear the last layer of concepts
previousLayer = null;
currentLayer = PrimitiveSets.newLongOpenHashSet();
statementCache.clear();
concreteDomainCache.clear();
} else {
LOGGER.info("--- Node graphs computation skipped, no types used for property chaining");
}
LOGGER.info("--- Use node graphs for hierarchy computation");
// Round 2: record changes using the hierarchies
for (final LongIterator itr = entries.iterator(); itr.hasNext(); /* empty */) {
final long conceptId = itr.next();
if (conceptId == ReasonerTaxonomyInferrer.DEPTH_CHANGE) {
if (previousLayer != null) {
invalidate(previousLayer);
}
previousLayer = currentLayer;
currentLayer = PrimitiveSets.newLongOpenHashSet();
continue;
}
// Run costly comparison of property chain hierarchies only if there are any
precomputeProperties(conceptId, propertyChainsPresent);
final Collection<StatementFragment> existingStatements = reasonerTaxonomy.getExistingInferredRelationships().get(conceptId);
final Collection<StatementFragment> targetStatements = getTargetRelationships(conceptId);
statementProcessor.apply(conceptId, existingStatements, targetStatements, StatementFragmentOrdering.INSTANCE, subMonitor.newChild(1));
final Collection<ConcreteDomainFragment> existingMembers = reasonerTaxonomy.getInferredConcreteDomainMembers().get(Long.toString(conceptId));
final Collection<ConcreteDomainFragment> targetMembers = getTargetMembers(conceptId);
concreteDomainProcessor.apply(conceptId, existingMembers, targetMembers, ConcreteDomainChangeOrdering.INSTANCE, subMonitor.newChild(1));
}
} finally {
subMonitor.done();
LOGGER.info("<<< Distribution normal form generation [{}]", stopwatch.toString());
}
}
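Both rounds walk reasonerTaxonomy.getIterationOrder(), which interleaves concept IDs with DEPTH_CHANGE sentinels. Below is a minimal, self-contained sketch of that layer-swapping pattern using plain Java collections in place of the primitive ones above; the recording of each visited concept into the current layer is an assumption, as that bookkeeping is not visible in this excerpt.
// Sketch of the sentinel-driven layering used in both rounds. Invalidation lags one layer behind:
// when a layer completes, the layer before it is invalidated, presumably because its concepts may
// still have been needed while the layer just finished was being processed.
static final long DEPTH_CHANGE = -1L; // stand-in for ReasonerTaxonomyInferrer.DEPTH_CHANGE

static void iterateInLayers(List<Long> iterationOrder, LongConsumer visit, Consumer<Set<Long>> invalidate) {
	Set<Long> previousLayer = null;
	Set<Long> currentLayer = new HashSet<>();
	for (long conceptId : iterationOrder) {
		if (conceptId == DEPTH_CHANGE) {
			if (previousLayer != null) {
				invalidate.accept(previousLayer);
			}
			previousLayer = currentLayer;
			currentLayer = new HashSet<>();
			continue;
		}
		currentLayer.add(conceptId); // assumed bookkeeping; not shown in the excerpt above
		visit.accept(conceptId);
	}
}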
Use of com.b2international.snowowl.snomed.common.SnomedConstants.Concepts in project snow-owl by b2ihealthcare.
From class SnomedEclEvaluationRequest, method eval:
/**
 * Handles (possibly) filtered expression constraints by evaluating them along
 * with the primary ECL expression, and adding the resulting query expressions as
 * extra required clauses.
 *
 * @param context the branch context to evaluate the expressions against
 * @param filtered the filtered expression constraint to evaluate
 * @return a promise resolving to the combined query expression
 */
protected Promise<Expression> eval(BranchContext context, final FilteredExpressionConstraint filtered) {
final ExpressionConstraint constraint = filtered.getConstraint();
final FilterConstraint filterConstraint = filtered.getFilter();
final Domain filterDomain = Ecl.getDomain(filterConstraint);
final Filter filter = filterConstraint.getFilter();
final Promise<Expression> evaluatedConstraint = evaluate(context, constraint);
Promise<Expression> evaluatedFilter = evaluate(context, filter);
if (Domain.DESCRIPTION.equals(filterDomain)) {
// Find concepts that match the description expression, then use the resulting concept IDs as the expression
evaluatedFilter = evaluatedFilter.then(ex -> executeDescriptionSearch(context, ex));
}
if (isAnyExpression(constraint)) {
// No need to combine "match all" with the filter query expression, return it directly
return evaluatedFilter;
}
return Promise.all(evaluatedConstraint, evaluatedFilter).then(results -> {
final Expressions.ExpressionBuilder builder = Expressions.builder();
results.forEach(f -> builder.filter((Expression) f));
return builder.build();
});
}
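The isAnyExpression helper is called above but not shown. A hedged sketch of what such a check plausibly looks like, assuming the ECL model exposes an Any node for the * wildcard; since * already matches every concept, intersecting it with the filter would be a no-op, which is why the filter expression is returned on its own.
// Hypothetical sketch of isAnyExpression; the Any type name and this exact check are assumptions.
protected boolean isAnyExpression(final ExpressionConstraint constraint) {
	return constraint instanceof Any;
}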
Use of com.b2international.snowowl.snomed.common.SnomedConstants.Concepts in project snow-owl by b2ihealthcare.
From class SnomedRepositoryPreCommitHook, method getChangeSetProcessors:
@Override
protected Collection<ChangeSetProcessor> getChangeSetProcessors(StagingArea staging, RevisionSearcher index) throws IOException {
final RepositoryContext context = ClassUtils.checkAndCast(staging.getContext(), RepositoryContext.class);
// initialize OWL Expression converter on the current branch
final SnomedOWLExpressionConverter expressionConverter = new BranchRequest<>(staging.getBranchPath(), branchContext -> {
return new SnomedOWLExpressionConverter(branchContext.inject().bind(RevisionSearcher.class, index).build());
}).execute(context);
final Set<String> statedSourceIds = Sets.newHashSet();
final Set<String> statedDestinationIds = Sets.newHashSet();
final Set<String> inferredSourceIds = Sets.newHashSet();
final Set<String> inferredDestinationIds = Sets.newHashSet();
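// these sets collect the source and destination concept IDs of relationships and OWL axioms touched by this commit; they determine which parts of the stated and inferred taxonomies are reloaded below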
collectIds(statedSourceIds, statedDestinationIds, staging.getNewObjects(SnomedRelationshipIndexEntry.class), Concepts.STATED_RELATIONSHIP);
collectIds(statedSourceIds, statedDestinationIds, staging.getChangedRevisions(SnomedRelationshipIndexEntry.class).map(diff -> (SnomedRelationshipIndexEntry) diff.newRevision), Concepts.STATED_RELATIONSHIP);
collectIds(inferredSourceIds, inferredDestinationIds, staging.getNewObjects(SnomedRelationshipIndexEntry.class), Concepts.INFERRED_RELATIONSHIP);
collectIds(inferredSourceIds, inferredDestinationIds, staging.getChangedRevisions(SnomedRelationshipIndexEntry.class).map(diff -> (SnomedRelationshipIndexEntry) diff.newRevision), Concepts.INFERRED_RELATIONSHIP);
collectIds(statedSourceIds, statedDestinationIds, staging.getNewObjects(SnomedRefSetMemberIndexEntry.class), expressionConverter);
collectIds(statedSourceIds, statedDestinationIds, staging.getChangedRevisions(SnomedRefSetMemberIndexEntry.class).map(diff -> (SnomedRefSetMemberIndexEntry) diff.newRevision), expressionConverter);
staging.getRemovedObjects(SnomedRelationshipIndexEntry.class)
		.filter(detachedRelationship -> Concepts.IS_A.equals(detachedRelationship.getTypeId()))
		.forEach(detachedRelationship -> {
// XXX: IS A relationships are expected to have a destination ID, not a value
checkState(!detachedRelationship.hasValue(), "IS A relationship found with value: %s", detachedRelationship.getId());
if (Concepts.STATED_RELATIONSHIP.equals(detachedRelationship.getCharacteristicTypeId())) {
statedSourceIds.add(detachedRelationship.getSourceId());
statedDestinationIds.add(detachedRelationship.getDestinationId());
} else if (Concepts.INFERRED_RELATIONSHIP.equals(detachedRelationship.getCharacteristicTypeId())) {
inferredSourceIds.add(detachedRelationship.getSourceId());
inferredDestinationIds.add(detachedRelationship.getDestinationId());
}
});
staging.getRemovedObjects(SnomedRefSetMemberIndexEntry.class)
		.filter(detachedMember -> SnomedRefSetType.OWL_AXIOM == detachedMember.getReferenceSetType())
		.forEach(detachedOwlMember -> {
collectIds(statedSourceIds, statedDestinationIds, detachedOwlMember, expressionConverter);
});
final LongSet statedConceptIds = PrimitiveSets.newLongOpenHashSet();
final LongSet inferredConceptIds = PrimitiveSets.newLongOpenHashSet();
if (!statedDestinationIds.isEmpty()) {
for (SnomedConceptDocument statedDestinationConcept : index.get(SnomedConceptDocument.class, statedDestinationIds)) {
statedConceptIds.add(Long.parseLong(statedDestinationConcept.getId()));
if (statedDestinationConcept.getStatedParents() != null) {
statedConceptIds.addAll(statedDestinationConcept.getStatedParents());
}
if (statedDestinationConcept.getStatedAncestors() != null) {
statedConceptIds.addAll(statedDestinationConcept.getStatedAncestors());
}
}
}
if (!inferredDestinationIds.isEmpty()) {
for (SnomedConceptDocument inferredDestinationConcept : index.get(SnomedConceptDocument.class, inferredDestinationIds)) {
inferredConceptIds.add(Long.parseLong(inferredDestinationConcept.getId()));
if (inferredDestinationConcept.getParents() != null) {
inferredConceptIds.addAll(inferredDestinationConcept.getParents());
}
if (inferredDestinationConcept.getAncestors() != null) {
inferredConceptIds.addAll(inferredDestinationConcept.getAncestors());
}
}
}
staging.getRemovedObjects(SnomedDescriptionIndexEntry.class).forEach(removedDescription -> {
if (removedDescription.isFsn() && removedDescription.isActive()) {
statedSourceIds.add(removedDescription.getConceptId());
inferredSourceIds.add(removedDescription.getConceptId());
}
});
staging.getChangedRevisions(SnomedDescriptionIndexEntry.class)
		.filter(diff -> ((SnomedDescriptionIndexEntry) diff.newRevision).isFsn())
		.filter(diff -> diff.hasRevisionPropertyChanges(ACTIVE_AND_TERM_FIELDS))
		.forEach(diff -> {
SnomedDescriptionIndexEntry newRevision = (SnomedDescriptionIndexEntry) diff.newRevision;
statedSourceIds.add(newRevision.getConceptId());
inferredSourceIds.add(newRevision.getConceptId());
});
staging.getNewObjects(SnomedDescriptionIndexEntry.class)
		.filter(newDescription -> newDescription.isFsn() && newDescription.isActive())
		.forEach(newDescription -> {
statedSourceIds.add(newDescription.getConceptId());
inferredSourceIds.add(newDescription.getConceptId());
});
if (!statedSourceIds.isEmpty()) {
final Query<SnomedConceptDocument> statedSourceConceptsQuery = Query.select(SnomedConceptDocument.class)
		.where(Expressions.builder()
				.should(SnomedConceptDocument.Expressions.ids(statedSourceIds))
				.should(SnomedConceptDocument.Expressions.statedParents(statedSourceIds))
				.should(SnomedConceptDocument.Expressions.statedAncestors(statedSourceIds))
				.build())
		.limit(Integer.MAX_VALUE)
		.build();
for (SnomedConceptDocument statedSourceConcept : index.search(statedSourceConceptsQuery)) {
statedConceptIds.add(Long.parseLong(statedSourceConcept.getId()));
if (statedSourceConcept.getStatedParents() != null) {
statedConceptIds.addAll(statedSourceConcept.getStatedParents());
}
if (statedSourceConcept.getStatedAncestors() != null) {
statedConceptIds.addAll(statedSourceConcept.getStatedAncestors());
}
}
}
if (!inferredSourceIds.isEmpty()) {
final Query<SnomedConceptDocument> inferredSourceConceptsQuery = Query.select(SnomedConceptDocument.class)
		.where(Expressions.builder()
				.should(SnomedConceptDocument.Expressions.ids(inferredSourceIds))
				.should(SnomedConceptDocument.Expressions.parents(inferredSourceIds))
				.should(SnomedConceptDocument.Expressions.ancestors(inferredSourceIds))
				.build())
		.limit(Integer.MAX_VALUE)
		.build();
for (SnomedConceptDocument inferredSourceConcept : index.search(inferredSourceConceptsQuery)) {
inferredConceptIds.add(Long.parseLong(inferredSourceConcept.getId()));
if (inferredSourceConcept.getParents() != null) {
inferredConceptIds.addAll(inferredSourceConcept.getParents());
}
if (inferredSourceConcept.getAncestors() != null) {
inferredConceptIds.addAll(inferredSourceConcept.getAncestors());
}
}
}
staging.getNewObjects(SnomedConceptDocument.class).forEach(newConcept -> {
long longId = Long.parseLong(newConcept.getId());
statedConceptIds.add(longId);
inferredConceptIds.add(longId);
});
// collect all reactivated concepts so the taxonomy can properly re-register them in the tree, even if they don't carry stated/inferred information in this commit but already have some in the index
staging.getChangedRevisions(SnomedConceptDocument.class, Set.of(SnomedRf2Headers.FIELD_ACTIVE)).forEach(diff -> {
RevisionPropertyDiff propertyDiff = diff.getRevisionPropertyDiff(SnomedRf2Headers.FIELD_ACTIVE);
if ("false".equals(propertyDiff.getOldValue()) && "true".equals(propertyDiff.getNewValue())) {
long longId = Long.parseLong(diff.newRevision.getId());
statedConceptIds.add(longId);
inferredConceptIds.add(longId);
}
});
log.trace("Retrieving taxonomic information from store...");
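// cycle checking is only enabled for regular commits; commits originating from an RF2 import transaction skip it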
final boolean checkCycles = !(context instanceof Rf2TransactionContext);
final Taxonomy inferredTaxonomy = Taxonomies.inferred(index, expressionConverter, staging, inferredConceptIds, checkCycles);
final Taxonomy statedTaxonomy = Taxonomies.stated(index, expressionConverter, staging, statedConceptIds, checkCycles);
// XXX change processor execution order is important!!!
return List.of(
		// those values will be used in the ConceptChangeProcessor for example to properly compute the preferredDescriptions derived field
		new DescriptionChangeProcessor(),
		new ConceptChangeProcessor(DoiDataProvider.INSTANCE, SnomedIconProvider.INSTANCE.getAvailableIconIds(), statedTaxonomy, inferredTaxonomy),
		new RelationshipChangeProcessor());
}
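The collectIds helpers used throughout the method are not shown here. Below is a hedged sketch of what the relationship-based overload plausibly does, inferred purely from its call sites; the signature and body are assumptions and the real implementation may differ.
// Hypothetical sketch of the relationship overload of collectIds, inferred from its call sites above.
// It gathers the source and destination concept IDs of IS A relationships matching the requested characteristic type.
private void collectIds(Set<String> sourceIds, Set<String> destinationIds, Stream<SnomedRelationshipIndexEntry> relationships, String characteristicTypeId) {
	relationships
			.filter(relationship -> Concepts.IS_A.equals(relationship.getTypeId()))
			.filter(relationship -> characteristicTypeId.equals(relationship.getCharacteristicTypeId()))
			.forEach(relationship -> {
				sourceIds.add(relationship.getSourceId());
				destinationIds.add(relationship.getDestinationId());
			});
}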