Use of com.b2international.collections.longs.LongIterator in project snow-owl by b2ihealthcare.
In the class Taxonomies, the buildTaxonomy method:
private static Taxonomy buildTaxonomy(RevisionSearcher searcher, SnomedOWLExpressionConverter expressionConverter, StagingArea staging, LongCollection conceptIds, String characteristicTypeId, boolean checkCycles) {
	try {
		Collection<Object[]> isaStatements = getStatements(searcher, conceptIds, characteristicTypeId, true);

		final TaxonomyGraph oldTaxonomy = new TaxonomyGraph(conceptIds.size(), isaStatements.size());
		oldTaxonomy.setCheckCycles(checkCycles);
		final TaxonomyGraph newTaxonomy = new TaxonomyGraph(conceptIds.size(), isaStatements.size());
		newTaxonomy.setCheckCycles(checkCycles);

		// populate nodes
		LongIterator conceptIdsIt = conceptIds.iterator();
		while (conceptIdsIt.hasNext()) {
			long nodeId = conceptIdsIt.next();
			if (IComponent.ROOT_IDL == nodeId) {
				continue;
			}
			oldTaxonomy.addNode(nodeId);
			newTaxonomy.addNode(nodeId);
		}

		// populate edges
		for (Object[] isaStatement : isaStatements) {
			oldTaxonomy.addEdge((String) isaStatement[0], (long) isaStatement[1], (long[]) isaStatement[2]);
			newTaxonomy.addEdge((String) isaStatement[0], (long) isaStatement[1], (long[]) isaStatement[2]);
		}

		isaStatements = null;

		oldTaxonomy.update();
		final TaxonomyGraphStatus status = updateTaxonomy(searcher, expressionConverter, staging, newTaxonomy, characteristicTypeId);

		final Set<String> newKeys = newTaxonomy.getEdgeIds();
		final Set<String> oldKeys = oldTaxonomy.getEdgeIds();

		// new edges
		final Set<String> newEdges = Sets.difference(newKeys, oldKeys);

		// changed edges
		final Set<String> changedEdges = Sets.newHashSet();
		for (String nextEdge : Sets.intersection(newKeys, oldKeys)) {
			Edges oldValue = oldTaxonomy.getEdge(nextEdge);
			Edges newValue = newTaxonomy.getEdge(nextEdge);
			if (!oldValue.equals(newValue)) {
				changedEdges.add(nextEdge);
			}
		}

		// detached edges
		final Set<String> detachedEdges = Sets.difference(oldKeys, newKeys);

		return new Taxonomy(newTaxonomy, oldTaxonomy, status, newEdges, changedEdges, detachedEdges);
	} catch (IOException e) {
		throw new SnowowlRuntimeException(e);
	}
}
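The interesting part of this example is the edge diff at the end: Guava's set views derive the new, changed, and detached edge keys from the two graphs. The following minimal, self-contained sketch shows the same diff with hypothetical edge IDs (not Snow Owl's real edge keys, which come from TaxonomyGraph#getEdgeIds()):

import java.util.Set;

import com.google.common.collect.Sets;

public class EdgeDiffSketch {
	public static void main(String[] args) {
		// Hypothetical edge keys standing in for the values returned by getEdgeIds()
		Set<String> oldKeys = Set.of("e1", "e2", "e3");
		Set<String> newKeys = Set.of("e2", "e3", "e4");

		Set<String> newEdges = Sets.difference(newKeys, oldKeys);      // [e4] -> present only in the new graph
		Set<String> detachedEdges = Sets.difference(oldKeys, newKeys); // [e1] -> present only in the old graph
		Set<String> commonEdges = Sets.intersection(newKeys, oldKeys); // [e2, e3] -> compared edge by edge for changes

		System.out.println(newEdges + " " + detachedEdges + " " + commonEdges);
	}
}

In buildTaxonomy the common keys are then compared value by value (Edges#equals) to collect the changed edges.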
Use of com.b2international.collections.longs.LongIterator in project snow-owl by b2ihealthcare.
In the class NormalFormGenerator, the computeChanges method:
@Override
public final void computeChanges(final IProgressMonitor monitor, final OntologyChangeProcessor<StatementFragment> statementProcessor, final OntologyChangeProcessor<ConcreteDomainFragment> concreteDomainProcessor) {
	final Stopwatch stopwatch = Stopwatch.createStarted();
	LOGGER.info(">>> Distribution normal form generation");

	final LongList entries = reasonerTaxonomy.getIterationOrder();
	final SubMonitor subMonitor = SubMonitor.convert(monitor, "Generating distribution normal form...", entries.size() * 2);

	try {
		LongSet previousLayer = null;
		LongSet currentLayer = PrimitiveSets.newLongOpenHashSet();

		final Set<Long> graphTypeIds = reasonerTaxonomy.getPropertyChains().stream()
			.map(PropertyChain::getDestinationType)
			.collect(Collectors.toSet());

		// The first round can be skipped entirely, if no type IDs participate in a property chain
		final boolean propertyChainsPresent = !graphTypeIds.isEmpty();

		if (propertyChainsPresent) {
			// Initialize node graphs for properties we need to traverse
			LOGGER.info("--- Initializing node graphs for types: {}", graphTypeIds);
			graphTypeIds.forEach(id -> transitiveNodeGraphs.put(id, new NodeGraph()));

			// Round 1: build alternative hierarchies
			for (final LongIterator itr = entries.iterator(); itr.hasNext(); /* empty */) {
				final long conceptId = itr.next();

				if (conceptId == ReasonerTaxonomyInferrer.DEPTH_CHANGE) {
					if (previousLayer != null) {
						invalidate(previousLayer);
					}
					previousLayer = currentLayer;
					currentLayer = PrimitiveSets.newLongOpenHashSet();
					continue;
				}

				precomputeProperties(conceptId, false);

				final Collection<StatementFragment> inferredNonIsAFragments = statementCache.get(conceptId);
				inferredNonIsAFragments.stream()
					.filter(r -> transitiveNodeGraphs.keySet().contains(r.getTypeId()))
					.filter(StatementFragmentWithDestination.class::isInstance)
					.map(StatementFragmentWithDestination.class::cast)
					.forEachOrdered(r -> transitiveNodeGraphs.get(r.getTypeId()).addParent(conceptId, r.getDestinationId()));
			}

			// Clear the last layer of concepts
			previousLayer = null;
			currentLayer = PrimitiveSets.newLongOpenHashSet();
			statementCache.clear();
			concreteDomainCache.clear();
		} else {
			LOGGER.info("--- Node graphs computation skipped, no types used for property chaining");
		}

		LOGGER.info("--- Use node graphs for hierarchy computation");

		// Round 2: record changes using the hierarchies
		for (final LongIterator itr = entries.iterator(); itr.hasNext(); /* empty */) {
			final long conceptId = itr.next();

			if (conceptId == ReasonerTaxonomyInferrer.DEPTH_CHANGE) {
				if (previousLayer != null) {
					invalidate(previousLayer);
				}
				previousLayer = currentLayer;
				currentLayer = PrimitiveSets.newLongOpenHashSet();
				continue;
			}

			// Run costly comparison of property chain hierarchies only if there are any
			precomputeProperties(conceptId, propertyChainsPresent);

			final Collection<StatementFragment> existingStatements = reasonerTaxonomy.getExistingInferredRelationships().get(conceptId);
			final Collection<StatementFragment> targetStatements = getTargetRelationships(conceptId);
			statementProcessor.apply(conceptId, existingStatements, targetStatements, StatementFragmentOrdering.INSTANCE, subMonitor.newChild(1));

			final Collection<ConcreteDomainFragment> existingMembers = reasonerTaxonomy.getInferredConcreteDomainMembers().get(Long.toString(conceptId));
			final Collection<ConcreteDomainFragment> targetMembers = getTargetMembers(conceptId);
			concreteDomainProcessor.apply(conceptId, existingMembers, targetMembers, ConcreteDomainChangeOrdering.INSTANCE, subMonitor.newChild(1));
		}
	} finally {
		subMonitor.done();
		LOGGER.info("<<< Distribution normal form generation [{}]", stopwatch.toString());
	}
}
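Both rounds walk the LongList in the reasoner's breadth-first iteration order, in which a sentinel value (ReasonerTaxonomyInferrer.DEPTH_CHANGE) marks a layer boundary so that cached data for the previous layer can be invalidated before the next layer is processed. The sketch below illustrates that sentinel pattern in plain Java; the sentinel constant and the print statements are stand-ins for Snow Owl's real constant and invalidate step:

import java.util.ArrayList;
import java.util.List;

public class LayeredTraversalSketch {
	private static final long DEPTH_CHANGE = -1L; // hypothetical sentinel; Snow Owl defines its own constant

	public static void main(String[] args) {
		// Concept IDs in breadth-first order, with a sentinel between layers
		long[] entries = { 11L, 12L, DEPTH_CHANGE, 21L, 22L, DEPTH_CHANGE, 31L };

		List<Long> previousLayer = null;
		List<Long> currentLayer = new ArrayList<>();

		for (long conceptId : entries) {
			if (conceptId == DEPTH_CHANGE) {
				if (previousLayer != null) {
					// In NormalFormGenerator this evicts cached results that are no longer needed
					System.out.println("invalidate " + previousLayer);
				}
				previousLayer = currentLayer;
				currentLayer = new ArrayList<>();
				continue;
			}
			currentLayer.add(conceptId);
			System.out.println("process " + conceptId);
		}
	}
}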
Use of com.b2international.collections.longs.LongIterator in project snow-owl by b2ihealthcare.
In the class NormalFormGenerator, the precomputeProperties method:
private void precomputeProperties(final long conceptId, final boolean useNodeGraphs) {
	final LongSet parentIds = reasonerTaxonomy.getInferredAncestors().getDestinations(conceptId, true);

	/*
	 * Non IS-A relationships are fetched from ancestors; redundancy must be removed. Since we are working through the list
	 * of concepts in breadth-first order, we only need to look at cached results from the direct parents, and "distill"
	 * a non-redundant set of components out of them.
	 */
	final LongKeyMap<Collection<StatementFragment>> candidateNonIsARelationships = PrimitiveMaps.newLongKeyOpenHashMap();

	for (final LongIterator itr = parentIds.iterator(); itr.hasNext(); /* empty */) {
		final long parentId = itr.next();
		candidateNonIsARelationships.put(parentId, statementCache.get(parentId));
	}

	// Stated axiom fragments are non-IS A, but any stated relationships need to be filtered (if they are still present)
	final Collection<StatementFragment> ownStatedRelationships = reasonerTaxonomy.getStatedRelationships().get(conceptId);
	final Collection<StatementFragment> ownStatedNonIsaRelationships = ownStatedRelationships.stream()
		.filter(r -> r.getTypeId() != IS_A)
		.collect(Collectors.toList());

	candidateNonIsARelationships.put(conceptId, ImmutableList.<StatementFragment>builder()
		.addAll(ownStatedNonIsaRelationships)
		.addAll(reasonerTaxonomy.getAxiomNonIsARelationships().get(conceptId))
		.addAll(reasonerTaxonomy.getAdditionalGroupedRelationships().get(conceptId))
		.build());

	// Collect existing inferred relationships for cross-referencing group numbers
	final Collection<StatementFragment> ownInferredRelationships = reasonerTaxonomy.getExistingInferredRelationships().get(conceptId);
	final Collection<StatementFragment> ownInferredNonIsaRelationships = ownInferredRelationships.stream()
		.filter(r -> r.getTypeId() != IS_A)
		.collect(Collectors.toList());

	/*
	 * Do the same as the above, but for CD members
	 */
	final LongKeyMap<Collection<ConcreteDomainFragment>> candidateMembers = PrimitiveMaps.newLongKeyOpenHashMap();

	for (final LongIterator itr = parentIds.iterator(); itr.hasNext(); /* empty */) {
		final long parentId = itr.next();
		candidateMembers.put(parentId, concreteDomainCache.get(parentId));
	}

	final String referencedComponentId = Long.toString(conceptId);
	final Collection<ConcreteDomainFragment> ownStatedMembers = reasonerTaxonomy.getStatedConcreteDomainMembers().get(referencedComponentId);
	final Collection<ConcreteDomainFragment> ownAdditionalGroupedMembers = reasonerTaxonomy.getAdditionalGroupedConcreteDomainMembers().get(referencedComponentId);

	candidateMembers.put(conceptId, ImmutableList.<ConcreteDomainFragment>builder()
		.addAll(ownStatedMembers)
		.addAll(ownAdditionalGroupedMembers)
		.build());

	final Collection<ConcreteDomainFragment> ownInferredMembers = reasonerTaxonomy.getInferredConcreteDomainMembers().get(referencedComponentId);

	// Remove redundancy
	final NormalFormGroupSet targetGroupSet = getTargetGroupSet(conceptId, parentIds, ownInferredNonIsaRelationships, ownInferredMembers, candidateNonIsARelationships, candidateMembers, useNodeGraphs);

	// Extract results; place them in the cache, so following concepts can re-use it
	statementCache.put(conceptId, ImmutableList.copyOf(relationshipsFromGroupSet(targetGroupSet)));
	concreteDomainCache.put(conceptId, ImmutableList.copyOf(membersFromGroupSet(targetGroupSet)));
}
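Because the iteration order visits parents before their children, each concept's candidate fragments can be assembled from the already-distilled, cached results of its direct parents plus the concept's own stated fragments. The following simplified sketch shows that cache-reuse idea with plain java.util types and hypothetical String fragments in place of StatementFragment; the redundancy removal performed by getTargetGroupSet is elided:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ParentCacheSketch {
	// Plays the role of statementCache in NormalFormGenerator
	private final Map<Long, List<String>> cache = new HashMap<>();

	public List<String> compute(long conceptId, List<Long> parentIds, List<String> ownStatedFragments) {
		List<String> candidates = new ArrayList<>();
		for (long parentId : parentIds) {
			// Parents were processed in an earlier layer, so their results are already cached
			candidates.addAll(cache.get(parentId));
		}
		candidates.addAll(ownStatedFragments);
		// Redundancy removal would happen here before caching
		cache.put(conceptId, candidates);
		return candidates;
	}

	public static void main(String[] args) {
		ParentCacheSketch sketch = new ParentCacheSketch();
		sketch.compute(1L, List.of(), List.of("1: finding site = lung"));
		System.out.println(sketch.compute(2L, List.of(1L), List.of("2: severity = severe")));
	}
}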
Use of com.b2international.collections.longs.LongIterator in project snow-owl by b2ihealthcare.
In the class NormalFormGenerator, the getTargetGroupSet method:
private NormalFormGroupSet getTargetGroupSet(final long conceptId,
		final LongSet parentIds,
		final Collection<StatementFragment> existingInferredNonIsAFragments,
		final Collection<ConcreteDomainFragment> existingInferredMembers,
		final LongKeyMap<Collection<StatementFragment>> candidateNonIsAFragments,
		final LongKeyMap<Collection<ConcreteDomainFragment>> candidateMembers,
		final boolean useNodeGraphs) {

	// Index existing inferred properties into a GroupSet (without redundancy check)
	final NormalFormGroupSet existingGroupSet = new NormalFormGroupSet();
	final Iterable<NormalFormGroup> existingGroups = toGroups(true, existingInferredNonIsAFragments, existingInferredMembers, useNodeGraphs);
	for (final NormalFormGroup ownInferredGroup : existingGroups) {
		existingGroupSet.addUnique(ownInferredGroup);
	}

	// Eliminate redundancy between candidate target properties in another GroupSet
	final NormalFormGroupSet targetGroupSet = new NormalFormGroupSet();
	for (final LongIterator itr = parentIds.iterator(); itr.hasNext(); /* empty */) {
		final long parentId = itr.next();
		final Iterable<NormalFormGroup> otherGroups = toGroups(false, candidateNonIsAFragments.get(parentId), candidateMembers.get(parentId), useNodeGraphs);
		Iterables.addAll(targetGroupSet, otherGroups);
	}

	// Finally, add the (stated) information from the concept itself
	final Iterable<NormalFormGroup> ownGroups = toGroups(false, candidateNonIsAFragments.get(conceptId), candidateMembers.get(conceptId), useNodeGraphs);
	Iterables.addAll(targetGroupSet, ownGroups);

	// Shuffle around group numbers to match existing inferred group numbers as much as possible
	targetGroupSet.adjustOrder(existingGroupSet);

	// Populate the group number for remaining groups
	targetGroupSet.fillNumbers();

	return targetGroupSet;
}
Use of com.b2international.collections.longs.LongIterator in project snow-owl by b2ihealthcare.
In the class NormalFormRelationship, the hasCommonExhaustiveSuperType method:
private boolean hasCommonExhaustiveSuperType(final NormalFormRelationship other) {
	final InternalIdEdges inferredAncestors = reasonerTaxonomy.getInferredAncestors();
	final LongSet valueAncestors = inferredAncestors.getDestinations(getDestinationId(), false);
	final LongSet otherValueAncestors = inferredAncestors.getDestinations(other.getDestinationId(), false);
	final LongSet commonAncestors = LongSets.intersection(valueAncestors, otherValueAncestors);

	for (final LongIterator itr = commonAncestors.iterator(); itr.hasNext(); /* empty */) {
		final long commonAncestor = itr.next();
		if (isExhaustive(commonAncestor)) {
			return true;
		}
	}

	return false;
}
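The method intersects the inferred ancestor sets of the two destination concepts and succeeds if any shared ancestor is exhaustive. Here is a java.util analogue of the same check, with hypothetical concept IDs and an assumed set of "exhaustive" concepts standing in for the isExhaustive lookup:

import java.util.HashSet;
import java.util.Set;

public class CommonExhaustiveSuperTypeSketch {
	public static void main(String[] args) {
		// Hypothetical ancestor sets for the two destination concepts
		Set<Long> valueAncestors = Set.of(100L, 200L, 300L);
		Set<Long> otherValueAncestors = Set.of(200L, 300L, 400L);
		Set<Long> exhaustiveConcepts = Set.of(300L); // stands in for isExhaustive(...)

		Set<Long> commonAncestors = new HashSet<>(valueAncestors);
		commonAncestors.retainAll(otherValueAncestors); // intersection, analogous to LongSets.intersection

		boolean hasCommonExhaustiveSuperType = commonAncestors.stream().anyMatch(exhaustiveConcepts::contains);
		System.out.println(hasCommonExhaustiveSuperType); // true: 300 is a shared, exhaustive ancestor
	}
}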