Use of com.b2international.index.revision.RevisionSearcher in project snow-owl by b2ihealthcare.
The class SnomedDescendantsExpander, method expand().
@Override
protected void expand(List<SnomedConcept> results, final Set<String> conceptIds, Options descendantExpandOptions, boolean direct) {
    try {
        final int limit = getLimit(descendantExpandOptions);

        final ExpressionBuilder expression = Expressions.builder();
        expression.filter(active());
        final ExpressionBuilder descendantFilter = Expressions.builder();
        if (stated) {
            descendantFilter.should(statedParents(conceptIds));
            if (!direct) {
                descendantFilter.should(statedAncestors(conceptIds));
            }
        } else {
            descendantFilter.should(parents(conceptIds));
            if (!direct) {
                descendantFilter.should(ancestors(conceptIds));
            }
        }
        expression.filter(descendantFilter.build());

        final Query<SnomedConceptDocument> query = Query.select(SnomedConceptDocument.class)
                .where(expression.build())
                .limit((conceptIds.size() == 1 && limit == 0) ? limit : Integer.MAX_VALUE)
                .build();

        final RevisionSearcher searcher = context().service(RevisionSearcher.class);
        final Hits<SnomedConceptDocument> hits = searcher.search(query);
        if (hits.getTotal() < 1) {
            final SnomedConcepts descendants = new SnomedConcepts(0, 0);
            for (SnomedConcept concept : results) {
                if (stated) {
                    concept.setStatedDescendants(descendants);
                } else {
                    concept.setDescendants(descendants);
                }
            }
            return;
        }

        // XXX won't work if number of results is greater than one, either use custom ConceptSearch or figure out how to expand descendants effectively
        if (conceptIds.size() == 1 && limit == 0) {
            for (SnomedConcept concept : results) {
                final SnomedConcepts descendants = new SnomedConcepts(0, hits.getTotal());
                if (stated) {
                    concept.setStatedDescendants(descendants);
                } else {
                    concept.setDescendants(descendants);
                }
            }
            return;
        }

        final Multimap<String, String> descendantsByAncestor = TreeMultimap.create();
        for (SnomedConceptDocument hit : hits) {
            final Set<String> parentsAndAncestors = newHashSet();
            if (stated) {
                parentsAndAncestors.addAll(LongSets.toStringSet(hit.getStatedParents()));
                if (!direct) {
                    parentsAndAncestors.addAll(LongSets.toStringSet(hit.getStatedAncestors()));
                }
            } else {
                parentsAndAncestors.addAll(LongSets.toStringSet(hit.getParents()));
                if (!direct) {
                    parentsAndAncestors.addAll(LongSets.toStringSet(hit.getAncestors()));
                }
            }
            parentsAndAncestors.retainAll(conceptIds);
            for (String ancestor : parentsAndAncestors) {
                descendantsByAncestor.put(ancestor, hit.getId());
            }
        }

        final Collection<String> componentIds = newHashSet(descendantsByAncestor.values());

        if (limit > 0 && !componentIds.isEmpty()) {
            // query descendants again
            final SnomedConcepts descendants = SnomedRequests.prepareSearchConcept()
                    .all()
                    .filterByIds(componentIds)
                    .setLocales(locales())
                    .setExpand(descendantExpandOptions.get("expand", Options.class))
                    .build()
                    .execute(context());

            final Map<String, SnomedConcept> descendantsById = newHashMap();
            descendantsById.putAll(Maps.uniqueIndex(descendants, SnomedConcept::getId));
            for (SnomedConcept concept : results) {
                final Collection<String> descendantIds = descendantsByAncestor.get(concept.getId());
                final List<SnomedConcept> currentDescendants = FluentIterable.from(descendantIds)
                        .limit(limit)
                        .transform(Functions.forMap(descendantsById))
                        .toList();
                final SnomedConcepts descendantConcepts = new SnomedConcepts(currentDescendants, null, limit, descendantIds.size());
                if (stated) {
                    concept.setStatedDescendants(descendantConcepts);
                } else {
                    concept.setDescendants(descendantConcepts);
                }
            }
        } else {
            for (SnomedConcept concept : results) {
                final Collection<String> descendantIds = descendantsByAncestor.get(concept.getId());
                final SnomedConcepts descendants = new SnomedConcepts(limit, descendantIds.size());
                if (stated) {
                    concept.setStatedDescendants(descendants);
                } else {
                    concept.setDescendants(descendants);
                }
            }
        }
    } catch (IOException e) {
        throw SnowowlRuntimeException.wrap(e);
    }
}
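Stripped of the expansion bookkeeping, the RevisionSearcher interaction above reduces to a few calls: obtain the service from the context, compose an Expressions filter, run the Query, and read the Hits total. A minimal sketch of that core pattern, assuming the same imports and statically imported filter helpers (active(), parents()) as the snippet above; countActiveChildren is a hypothetical helper, not snow-owl API:

// Sketch only: counts the active direct children of a single concept,
// mirroring the "total only" branch (limit == 0) of the expander above.
private int countActiveChildren(BranchContext context, String conceptId) throws IOException {
    final RevisionSearcher searcher = context.service(RevisionSearcher.class);
    final ExpressionBuilder expression = Expressions.builder();
    expression.filter(active()); // active concepts only
    expression.filter(parents(Collections.singleton(conceptId))); // direct children of the given concept
    final Query<SnomedConceptDocument> query = Query.select(SnomedConceptDocument.class)
            .where(expression.build())
            .limit(0) // only the total hit count is needed, no documents
            .build();
    return searcher.search(query).getTotal();
}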
Use of com.b2international.index.revision.RevisionSearcher in project snow-owl by b2ihealthcare.
The class EclExpression, method resolve().
public Promise<Set<String>> resolve(final BranchContext context) {
    if (promise == null) {
        RevisionSearcher searcher = context.service(RevisionSearcher.class);
        boolean cached = context.optionalService(PathWithVersion.class).isPresent();
        promise = resolveToExpression(context).then(expression -> {
            // shortcut to extract IDs from the query itself if possible
            if (SnomedEclEvaluationRequest.canExtractIds(expression)) {
                return SnomedEclEvaluationRequest.extractIds(expression);
            }
            try {
                return newHashSet(searcher.search(Query.select(String.class)
                        .from(SnomedConceptDocument.class)
                        .fields(SnomedConceptDocument.Fields.ID)
                        .where(expression)
                        .limit(Integer.MAX_VALUE)
                        .cached(cached)
                        .build()));
            } catch (IOException e) {
                throw new SnowowlRuntimeException(e);
            }
        });
    }
    return promise;
}
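The resolve() implementation above also illustrates the projection form of the query API: selecting String values from SnomedConceptDocument with .fields(SnomedConceptDocument.Fields.ID), so only identifiers are fetched instead of whole documents. A minimal sketch of that ID-projection query outside the Promise plumbing, assuming the same imports as above; evaluateToConceptIds is a hypothetical helper and its Expression argument stands for the already-resolved index expression:

// Sketch only: evaluates an already-built index expression to the set of matching concept IDs,
// using the same ID-only projection as EclExpression.resolve above.
private Set<String> evaluateToConceptIds(BranchContext context, Expression expression) throws IOException {
    final RevisionSearcher searcher = context.service(RevisionSearcher.class);
    final Hits<String> hits = searcher.search(Query.select(String.class)
            .from(SnomedConceptDocument.class)
            .fields(SnomedConceptDocument.Fields.ID) // fetch only the ID field
            .where(expression)
            .limit(Integer.MAX_VALUE) // return every match in a single page
            .build());
    return newHashSet(hits); // Hits is iterable over the projected values
}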
Use of com.b2international.index.revision.RevisionSearcher in project snow-owl by b2ihealthcare.
The class ConceptChangeProcessor, method process().
@Override
public void process(StagingArea staging, RevisionSearcher searcher) throws IOException {
    // collect member changes
    this.referringRefSets = HashMultimap.create(memberChangeProcessor.process(staging, searcher));

    processNewConcepts(staging);

    // collect dirty concepts that require additional properties to be set for index
    final Map<String, RevisionDiff> dirtyConceptDiffsById = Maps.uniqueIndex(
            staging.getChangedRevisions(SnomedConceptDocument.class).iterator(),
            diff -> diff.newRevision.getId());

    final Set<String> dirtyConceptIds = collectDirtyConceptIds(staging);

    // remaining new/dirty/detached descriptions should be properly processed for preferredDescriptions field
    final Map<String, SnomedDescriptionIndexEntry> affectedDescriptionsById = getDescriptionDocuments(staging, searcher);
    final Multimap<String, SnomedDescriptionIndexEntry> affectedDescriptionsByConcept = Multimaps.index(
            affectedDescriptionsById.values(), SnomedDescriptionIndexEntry::getConceptId);
    dirtyConceptIds.addAll(affectedDescriptionsByConcept.keySet());

    // remove all new/detached concept IDs, we've already processed them
    staging.getRemovedObjects(SnomedConceptDocument.class).map(SnomedConceptDocument::getId).forEach(dirtyConceptIds::remove);
    staging.getNewObjects(SnomedConceptDocument.class).map(SnomedConceptDocument::getId).forEach(dirtyConceptIds::remove);

    if (!dirtyConceptIds.isEmpty()) {
        final Map<ObjectId, RevisionDiff> changedRevisions = staging.getChangedRevisions();
        // fetch all dirty concept documents by their ID
        final Set<String> missingCurrentConceptIds = dirtyConceptIds.stream()
                .filter(id -> !changedRevisions.containsKey(ObjectId.of(SnomedConcept.TYPE, id)))
                .collect(Collectors.toSet());
        final Map<String, SnomedConceptDocument> currentConceptDocumentsById = newHashMap(
                Maps.uniqueIndex(searcher.get(SnomedConceptDocument.class, missingCurrentConceptIds), Revision::getId));
        dirtyConceptIds.stream()
                .map(id -> ObjectId.of(SnomedConcept.TYPE, id))
                .filter(changedRevisions::containsKey)
                .map(changedRevisions::get)
                .map(diff -> (SnomedConceptDocument) diff.oldRevision)
                .forEach(doc -> currentConceptDocumentsById.put(doc.getId(), doc));

        // update dirty concepts
        for (final String id : dirtyConceptIds) {
            final SnomedConceptDocument concept = dirtyConceptDiffsById.containsKey(id)
                    ? (SnomedConceptDocument) dirtyConceptDiffsById.get(id).newRevision
                    : null;
            final SnomedConceptDocument currentDoc = currentConceptDocumentsById.get(id);
            if (currentDoc == null) {
                throw new IllegalStateException("Current concept revision should not be null for: " + id);
            }
            final Builder doc = SnomedConceptDocument.builder(currentDoc);
            final Collection<SnomedDescriptionIndexEntry> affectedDescriptions = affectedDescriptionsByConcept.get(id);
            if (!affectedDescriptions.isEmpty()) {
                final Map<String, SnomedDescriptionFragment> updatedPreferredDescriptions = newHashMap(
                        Maps.uniqueIndex(currentDoc.getPreferredDescriptions(), SnomedDescriptionFragment::getId));
                // add new/dirty fragments if they are preferred and active terms
                for (SnomedDescriptionIndexEntry affectedDescription : affectedDescriptions) {
                    if (staging.isNew(affectedDescription) || staging.isChanged(affectedDescription)) {
                        updatedPreferredDescriptions.remove(affectedDescription.getId());
                        if (affectedDescription.isActive() && !getPreferredLanguageMembers(affectedDescription).isEmpty()) {
                            updatedPreferredDescriptions.put(affectedDescription.getId(), toDescriptionFragment(affectedDescription));
                        }
                    }
                }
                // remove deleted descriptions
                for (SnomedDescriptionIndexEntry affectedDescription : affectedDescriptions) {
                    if (staging.isRemoved(affectedDescription)) {
                        updatedPreferredDescriptions.remove(affectedDescription.getId());
                    }
                }
                final List<SnomedDescriptionFragment> preferredDescriptions = updatedPreferredDescriptions.values().stream()
                        .sorted(DESCRIPTION_FRAGMENT_ORDER)
                        .collect(Collectors.toList());
                update(doc, preferredDescriptions, concept, currentDoc);
            } else {
                update(doc, currentDoc.getPreferredDescriptions(), concept, currentDoc);
            }
            stageChange(currentDoc, doc.build());
        }
    }
}
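Besides search(), the change processor above relies on RevisionSearcher.get(type, ids) to bulk-load the current revisions of documents it already knows by ID. A minimal sketch of that lookup in isolation, assuming the same imports as above; loadConceptsById is a hypothetical helper:

// Sketch only: bulk-loads current concept revisions for a set of IDs and keys them by ID,
// mirroring how currentConceptDocumentsById is built in the change processor above.
private Map<String, SnomedConceptDocument> loadConceptsById(RevisionSearcher searcher, Set<String> conceptIds) throws IOException {
    final Iterable<SnomedConceptDocument> docs = searcher.get(SnomedConceptDocument.class, conceptIds);
    return Maps.uniqueIndex(docs, Revision::getId); // one entry per concept document
}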
Use of com.b2international.index.revision.RevisionSearcher in project snow-owl by b2ihealthcare.
The class DescriptionChangeProcessor, method process().
@Override
public void process(StagingArea staging, RevisionSearcher searcher) throws IOException {
    final Map<String, Multimap<Acceptability, RefSetMemberChange>> acceptabilityChangesByDescription =
            new DescriptionAcceptabilityChangeProcessor().process(staging, searcher);
    final Multimap<String, RefSetMemberChange> referringRefSets = HashMultimap.create(memberChangeProcessor.process(staging, searcher));

    // (re)index new and dirty descriptions
    final Map<String, SnomedDescriptionIndexEntry> newDescriptionsById = staging.getNewObjects(SnomedDescriptionIndexEntry.class)
            .collect(Collectors.toMap(description -> description.getId(), description -> description));
    final Map<String, SnomedDescriptionIndexEntry> changedDescriptionsById = staging.getChangedRevisions(SnomedDescriptionIndexEntry.class)
            .collect(Collectors.toMap(diff -> diff.newRevision.getId(), diff -> (SnomedDescriptionIndexEntry) diff.newRevision));

    final Set<String> changedDescriptionIds = newHashSet(changedDescriptionsById.keySet());
    final Set<String> referencedDescriptionIds = newHashSet(referringRefSets.keySet());
    referencedDescriptionIds.removeAll(newDescriptionsById.keySet());
    changedDescriptionIds.addAll(referencedDescriptionIds);

    // load the known descriptions
    final Iterable<SnomedDescriptionIndexEntry> changedDescriptionHits = searcher.get(SnomedDescriptionIndexEntry.class, changedDescriptionIds);
    final Map<String, SnomedDescriptionIndexEntry> changedDescriptionRevisionsById = Maps.uniqueIndex(changedDescriptionHits, Revision::getId);

    // load missing descriptions with only changed acceptability values
    final Set<String> descriptionsToBeLoaded = newHashSet();
    for (String descriptionWithAccepatibilityChange : acceptabilityChangesByDescription.keySet()) {
        if (!newDescriptionsById.containsKey(descriptionWithAccepatibilityChange)
                && !changedDescriptionIds.contains(descriptionWithAccepatibilityChange)) {
            descriptionsToBeLoaded.add(descriptionWithAccepatibilityChange);
        }
    }

    // process changes
    for (final String id : Iterables.concat(newDescriptionsById.keySet(), changedDescriptionIds)) {
        if (newDescriptionsById.containsKey(id)) {
            final SnomedDescriptionIndexEntry description = newDescriptionsById.get(id);
            final Builder doc = SnomedDescriptionIndexEntry.builder(description);
            processChanges(id, doc, null, acceptabilityChangesByDescription.get(id), referringRefSets);
            stageNew(doc.build());
        } else if (changedDescriptionIds.contains(id)) {
            final SnomedDescriptionIndexEntry currentDoc = changedDescriptionRevisionsById.get(id);
            if (currentDoc == null) {
                throw new IllegalStateException(String.format("Current description revision should not be null for: %s", id));
            }
            final SnomedDescriptionIndexEntry description = changedDescriptionsById.get(id);
            final Builder doc;
            if (description != null) {
                doc = SnomedDescriptionIndexEntry.builder(description);
            } else {
                doc = SnomedDescriptionIndexEntry.builder(currentDoc);
            }
            processChanges(id, doc, currentDoc, acceptabilityChangesByDescription.get(id), referringRefSets);
            stageChange(currentDoc, doc.build());
        } else {
            throw new IllegalStateException(String.format("Description %s is missing from new and dirty maps", id));
        }
    }

    // process cascading acceptability changes in unchanged docs
    if (!descriptionsToBeLoaded.isEmpty()) {
        for (SnomedDescriptionIndexEntry unchangedDescription : searcher.get(SnomedDescriptionIndexEntry.class, descriptionsToBeLoaded)) {
            final Builder doc = SnomedDescriptionIndexEntry.builder(unchangedDescription);
            processChanges(unchangedDescription.getId(), doc, unchangedDescription,
                    acceptabilityChangesByDescription.get(unchangedDescription.getId()),
                    HashMultimap.<String, RefSetMemberChange>create());
            stageChange(unchangedDescription, doc.build());
        }
    }
}
Use of com.b2international.index.revision.RevisionSearcher in project snow-owl by b2ihealthcare.
The class ReferringMemberChangeProcessor, method process().
public Multimap<String, RefSetMemberChange> process(StagingArea staging, RevisionSearcher searcher) throws IOException {
    final Multimap<String, RefSetMemberChange> memberChanges = HashMultimap.create();

    // process new members
    staging.getNewObjects(SnomedRefSetMemberIndexEntry.class)
            .filter(this::byReferencedComponentType)
            .forEach((newMember) -> {
                addChange(memberChanges, newMember, MemberChangeKind.ADDED);
            });

    // process dirty members
    staging.getChangedRevisions(SnomedRefSetMemberIndexEntry.class)
            .filter(diff -> byReferencedComponentType((SnomedRefSetMemberIndexEntry) diff.newRevision))
            .forEach((diff) -> {
                RevisionPropertyDiff propChange = diff.getRevisionPropertyDiff(SnomedRefSetMemberIndexEntry.Fields.ACTIVE);
                if (propChange != null) {
                    addChange(memberChanges, (SnomedRefSetMemberIndexEntry) diff.newRevision, MemberChangeKind.CHANGED);
                }
            });

    // process detached members
    staging.getRemovedObjects(SnomedRefSetMemberIndexEntry.class)
            .filter(this::byReferencedComponentType)
            .forEach(doc -> {
                final String uuid = doc.getId();
                final String referencedComponentId = doc.getReferencedComponentId();
                final String refSetId = doc.getRefsetId();
                memberChanges.put(referencedComponentId, new RefSetMemberChange(uuid, refSetId, MemberChangeKind.REMOVED, doc.isActive()));
            });

    return memberChanges;
}
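This last processor never queries the searcher directly; all of its input comes from the StagingArea streams (getNewObjects, getChangedRevisions, getRemovedObjects), which expose the revisions staged in the current commit. A minimal sketch of that stream pattern on its own, assuming the same imports as above; collectNewMemberIds is a hypothetical helper:

// Sketch only: collects the IDs of reference set members added in the current commit,
// using the same StagingArea stream API as the processors above.
private Set<String> collectNewMemberIds(StagingArea staging) {
    return staging.getNewObjects(SnomedRefSetMemberIndexEntry.class)
            .map(SnomedRefSetMemberIndexEntry::getId)
            .collect(Collectors.toSet());
}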