Use of com.b2international.snowowl.snomed.datastore.index.entry.SnomedDescriptionIndexEntry in project snow-owl by b2ihealthcare.
Class ConceptChangeProcessor, method processNewConcepts.
private void processNewConcepts(StagingArea staging) {
    final Multimap<String, SnomedDescriptionFragment> newDescriptionFragmentsByConcept = HashMultimap.create();
    // changed descriptions are coming from potential merges/rebases
    Streams.concat(
            staging.getNewObjects(SnomedDescriptionIndexEntry.class),
            staging.getChangedObjects(SnomedDescriptionIndexEntry.class))
        .filter(SnomedDescriptionIndexEntry::isActive)
        .filter(description -> !Concepts.TEXT_DEFINITION.equals(description.getTypeId()))
        .filter(description -> !getPreferredLanguageMembers(description).isEmpty())
        .forEach(description -> newDescriptionFragmentsByConcept.put(description.getConceptId(), toDescriptionFragment(description)));
    // index new concepts
    staging.getNewObjects(SnomedConceptDocument.class).forEach(concept -> {
        final String id = concept.getId();
        final Builder doc = SnomedConceptDocument.builder().id(id);
        // in case of a new concept, all of its descriptions should be part of the staging area as well
        final List<SnomedDescriptionFragment> preferredDescriptions = newDescriptionFragmentsByConcept.removeAll(id).stream()
            .sorted(DESCRIPTION_FRAGMENT_ORDER)
            .collect(Collectors.toList());
        update(doc, preferredDescriptions, concept, null);
        stageNew(doc.build());
    });
}
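The grouping step above is plain Guava rather than anything Snow Owl-specific: active, preferred descriptions are bucketed into a HashMultimap keyed by concept id, and removeAll(id) both returns and clears the bucket when the owning concept is indexed. A minimal, self-contained sketch of that pattern follows; the Description record, the sample data, and the id-based sort order are made up for illustration.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class GroupByConceptSketch {

    // hypothetical stand-in for SnomedDescriptionIndexEntry / SnomedDescriptionFragment
    record Description(String id, String conceptId, String term, boolean active) {}

    public static void main(String[] args) {
        Multimap<String, Description> fragmentsByConcept = HashMultimap.create();

        // group active descriptions by the concept they belong to
        Stream.of(
                new Description("d1", "c1", "Heart", true),
                new Description("d2", "c1", "Cor", true),
                new Description("d3", "c2", "Lung", false))
            .filter(Description::active)
            .forEach(d -> fragmentsByConcept.put(d.conceptId(), d));

        // removeAll returns the bucket for the concept and removes it from the multimap,
        // mirroring how processNewConcepts consumes each new concept's descriptions exactly once
        List<Description> c1Descriptions = fragmentsByConcept.removeAll("c1").stream()
            .sorted(Comparator.comparing(Description::id))
            .collect(Collectors.toList());

        System.out.println(c1Descriptions);                         // the two c1 descriptions, sorted by id
        System.out.println(fragmentsByConcept.get("c1").isEmpty()); // true — the bucket has been consumed
    }
}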
Use of com.b2international.snowowl.snomed.datastore.index.entry.SnomedDescriptionIndexEntry in project snow-owl by b2ihealthcare.
Class ConceptChangeProcessor, method process.
@Override
public void process(StagingArea staging, RevisionSearcher searcher) throws IOException {
    // collect member changes
    this.referringRefSets = HashMultimap.create(memberChangeProcessor.process(staging, searcher));
    processNewConcepts(staging);
    // collect dirty concepts that require additional properties to be set for index
    final Map<String, RevisionDiff> dirtyConceptDiffsById = Maps.uniqueIndex(
        staging.getChangedRevisions(SnomedConceptDocument.class).iterator(),
        diff -> diff.newRevision.getId());
    final Set<String> dirtyConceptIds = collectDirtyConceptIds(staging);
    // remaining new/dirty/detached descriptions should be properly processed for the preferredDescriptions field
    final Map<String, SnomedDescriptionIndexEntry> affectedDescriptionsById = getDescriptionDocuments(staging, searcher);
    final Multimap<String, SnomedDescriptionIndexEntry> affectedDescriptionsByConcept = Multimaps.index(
        affectedDescriptionsById.values(), SnomedDescriptionIndexEntry::getConceptId);
    dirtyConceptIds.addAll(affectedDescriptionsByConcept.keySet());
    // remove all new/detached concept IDs, we've already processed them
    staging.getRemovedObjects(SnomedConceptDocument.class).map(SnomedConceptDocument::getId).forEach(dirtyConceptIds::remove);
    staging.getNewObjects(SnomedConceptDocument.class).map(SnomedConceptDocument::getId).forEach(dirtyConceptIds::remove);
    if (!dirtyConceptIds.isEmpty()) {
        final Map<ObjectId, RevisionDiff> changedRevisions = staging.getChangedRevisions();
        // fetch all dirty concept documents by their ID
        final Set<String> missingCurrentConceptIds = dirtyConceptIds.stream()
            .filter(id -> !changedRevisions.containsKey(ObjectId.of(SnomedConcept.TYPE, id)))
            .collect(Collectors.toSet());
        final Map<String, SnomedConceptDocument> currentConceptDocumentsById = newHashMap(
            Maps.uniqueIndex(searcher.get(SnomedConceptDocument.class, missingCurrentConceptIds), Revision::getId));
        dirtyConceptIds.stream()
            .map(id -> ObjectId.of(SnomedConcept.TYPE, id))
            .filter(changedRevisions::containsKey)
            .map(changedRevisions::get)
            .map(diff -> (SnomedConceptDocument) diff.oldRevision)
            .forEach(doc -> currentConceptDocumentsById.put(doc.getId(), doc));
        // update dirty concepts
        for (final String id : dirtyConceptIds) {
            final SnomedConceptDocument concept = dirtyConceptDiffsById.containsKey(id)
                ? (SnomedConceptDocument) dirtyConceptDiffsById.get(id).newRevision
                : null;
            final SnomedConceptDocument currentDoc = currentConceptDocumentsById.get(id);
            if (currentDoc == null) {
                throw new IllegalStateException("Current concept revision should not be null for: " + id);
            }
            final Builder doc = SnomedConceptDocument.builder(currentDoc);
            final Collection<SnomedDescriptionIndexEntry> affectedDescriptions = affectedDescriptionsByConcept.get(id);
            if (!affectedDescriptions.isEmpty()) {
                final Map<String, SnomedDescriptionFragment> updatedPreferredDescriptions = newHashMap(
                    Maps.uniqueIndex(currentDoc.getPreferredDescriptions(), SnomedDescriptionFragment::getId));
                // add new/dirty fragments if they are preferred and active terms
                for (SnomedDescriptionIndexEntry affectedDescription : affectedDescriptions) {
                    if (staging.isNew(affectedDescription) || staging.isChanged(affectedDescription)) {
                        updatedPreferredDescriptions.remove(affectedDescription.getId());
                        if (affectedDescription.isActive() && !getPreferredLanguageMembers(affectedDescription).isEmpty()) {
                            updatedPreferredDescriptions.put(affectedDescription.getId(), toDescriptionFragment(affectedDescription));
                        }
                    }
                }
                // remove deleted descriptions
                for (SnomedDescriptionIndexEntry affectedDescription : affectedDescriptions) {
                    if (staging.isRemoved(affectedDescription)) {
                        updatedPreferredDescriptions.remove(affectedDescription.getId());
                    }
                }
                final List<SnomedDescriptionFragment> preferredDescriptions = updatedPreferredDescriptions.values().stream()
                    .sorted(DESCRIPTION_FRAGMENT_ORDER)
                    .collect(Collectors.toList());
                update(doc, preferredDescriptions, concept, currentDoc);
            } else {
                update(doc, currentDoc.getPreferredDescriptions(), concept, currentDoc);
            }
            stageChange(currentDoc, doc.build());
        }
    }
}
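Both ConceptChangeProcessor methods lean on Guava's indexing helpers: Maps.uniqueIndex builds a one-value-per-key map and fails fast if two elements map to the same key, while Multimaps.index groups several values under one key (this is how affectedDescriptionsByConcept is built). A small standalone sketch of the difference; the Doc record and the sample values are hypothetical stand-ins for the Snow Owl document classes.

import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimaps;

import java.util.List;

public class IndexingSketch {

    // hypothetical stand-in for SnomedConceptDocument / SnomedDescriptionIndexEntry
    record Doc(String id, String conceptId) {}

    public static void main(String[] args) {
        List<Doc> docs = List.of(new Doc("d1", "c1"), new Doc("d2", "c1"), new Doc("d3", "c2"));

        // one value per key; throws IllegalArgumentException if two docs share an id
        ImmutableMap<String, Doc> byId = Maps.uniqueIndex(docs, Doc::id);

        // many values per key; the pattern behind affectedDescriptionsByConcept
        ImmutableListMultimap<String, Doc> byConcept = Multimaps.index(docs, Doc::conceptId);

        System.out.println(byId.keySet());       // [d1, d2, d3]
        System.out.println(byConcept.get("c1")); // the two docs that belong to concept c1
    }
}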
Use of com.b2international.snowowl.snomed.datastore.index.entry.SnomedDescriptionIndexEntry in project snow-owl by b2ihealthcare.
Class DescriptionChangeProcessor, method process.
@Override
public void process(StagingArea staging, RevisionSearcher searcher) throws IOException {
    final Map<String, Multimap<Acceptability, RefSetMemberChange>> acceptabilityChangesByDescription =
        new DescriptionAcceptabilityChangeProcessor().process(staging, searcher);
    final Multimap<String, RefSetMemberChange> referringRefSets = HashMultimap.create(memberChangeProcessor.process(staging, searcher));
    // (re)index new and dirty descriptions
    final Map<String, SnomedDescriptionIndexEntry> newDescriptionsById = staging.getNewObjects(SnomedDescriptionIndexEntry.class)
        .collect(Collectors.toMap(description -> description.getId(), description -> description));
    final Map<String, SnomedDescriptionIndexEntry> changedDescriptionsById = staging.getChangedRevisions(SnomedDescriptionIndexEntry.class)
        .collect(Collectors.toMap(diff -> diff.newRevision.getId(), diff -> (SnomedDescriptionIndexEntry) diff.newRevision));
    final Set<String> changedDescriptionIds = newHashSet(changedDescriptionsById.keySet());
    final Set<String> referencedDescriptionIds = newHashSet(referringRefSets.keySet());
    referencedDescriptionIds.removeAll(newDescriptionsById.keySet());
    changedDescriptionIds.addAll(referencedDescriptionIds);
    // load the known descriptions
    final Iterable<SnomedDescriptionIndexEntry> changedDescriptionHits = searcher.get(SnomedDescriptionIndexEntry.class, changedDescriptionIds);
    final Map<String, SnomedDescriptionIndexEntry> changedDescriptionRevisionsById = Maps.uniqueIndex(changedDescriptionHits, Revision::getId);
    // load missing descriptions with only changed acceptability values
    final Set<String> descriptionsToBeLoaded = newHashSet();
    for (String descriptionWithAcceptabilityChange : acceptabilityChangesByDescription.keySet()) {
        if (!newDescriptionsById.containsKey(descriptionWithAcceptabilityChange)
            && !changedDescriptionIds.contains(descriptionWithAcceptabilityChange)) {
            descriptionsToBeLoaded.add(descriptionWithAcceptabilityChange);
        }
    }
    // process changes
    for (final String id : Iterables.concat(newDescriptionsById.keySet(), changedDescriptionIds)) {
        if (newDescriptionsById.containsKey(id)) {
            final SnomedDescriptionIndexEntry description = newDescriptionsById.get(id);
            final Builder doc = SnomedDescriptionIndexEntry.builder(description);
            processChanges(id, doc, null, acceptabilityChangesByDescription.get(id), referringRefSets);
            stageNew(doc.build());
        } else if (changedDescriptionIds.contains(id)) {
            final SnomedDescriptionIndexEntry currentDoc = changedDescriptionRevisionsById.get(id);
            if (currentDoc == null) {
                throw new IllegalStateException(String.format("Current description revision should not be null for: %s", id));
            }
            final SnomedDescriptionIndexEntry description = changedDescriptionsById.get(id);
            final Builder doc;
            if (description != null) {
                doc = SnomedDescriptionIndexEntry.builder(description);
            } else {
                doc = SnomedDescriptionIndexEntry.builder(currentDoc);
            }
            processChanges(id, doc, currentDoc, acceptabilityChangesByDescription.get(id), referringRefSets);
            stageChange(currentDoc, doc.build());
        } else {
            throw new IllegalStateException(String.format("Description %s is missing from new and dirty maps", id));
        }
    }
    // process cascading acceptability changes in unchanged docs
    if (!descriptionsToBeLoaded.isEmpty()) {
        for (SnomedDescriptionIndexEntry unchangedDescription : searcher.get(SnomedDescriptionIndexEntry.class, descriptionsToBeLoaded)) {
            final Builder doc = SnomedDescriptionIndexEntry.builder(unchangedDescription);
            processChanges(unchangedDescription.getId(), doc, unchangedDescription,
                acceptabilityChangesByDescription.get(unchangedDescription.getId()),
                HashMultimap.<String, RefSetMemberChange>create());
            stageChange(unchangedDescription, doc.build());
        }
    }
}
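Stripped of the Snow Owl types, the dispatch loop above follows one pattern: ids from the "new" map are staged as new documents, ids in the "changed" set are rebuilt on top of either the changed revision (when the description itself changed) or the currently indexed revision (when only a referring reference set member changed), and anything else is treated as a programming error. A condensed, framework-free sketch of that branching, assuming hypothetical stageNew/stageChange placeholders and a made-up Description record:

import com.google.common.collect.Iterables;

import java.util.Map;
import java.util.Set;

public class DispatchSketch {

    // hypothetical stand-in for SnomedDescriptionIndexEntry
    record Description(String id, String term) {}

    static void stageNew(Description doc) { System.out.println("new: " + doc.id()); }          // placeholder
    static void stageChange(Description before, Description after) {                           // placeholder
        System.out.println("changed: " + after.id());
    }

    static void process(Map<String, Description> newById,
                        Set<String> changedIds,
                        Map<String, Description> changedById,   // may lack ids that only gained referring members
                        Map<String, Description> currentById) { // what is already in the index
        for (String id : Iterables.concat(newById.keySet(), changedIds)) {
            if (newById.containsKey(id)) {
                stageNew(newById.get(id));
            } else if (changedIds.contains(id)) {
                Description current = currentById.get(id);
                if (current == null) {
                    throw new IllegalStateException("Current revision should not be null for: " + id);
                }
                // prefer the changed revision, fall back to the currently indexed one
                Description base = changedById.getOrDefault(id, current);
                stageChange(current, base);
            } else {
                throw new IllegalStateException("Description " + id + " is missing from new and dirty maps");
            }
        }
    }
}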