Use of org.snomed.snowstorm.core.util.TimerUtil in project snowstorm by IHTSDO.
The class MRCMLoader, method loadActiveMRCMFromCache:
/**
 * Retrieve the latest MRCM for the given branch. If the MRCM has already been loaded
 * for the given branch, the data is read from an internal cache. The cache is
 * subsequently cleared whenever the MRCM is updated.
 *
 * @param branchPath The branch to read MRCM data from.
 * @return The MRCM for the given branch.
 * @throws ServiceException When there is a problem reading the MRCM.
 */
public MRCM loadActiveMRCMFromCache(String branchPath) throws ServiceException {
    LOGGER.debug("Checking cache for MRCM.");
    final MRCM cachedMRCM = cache.get(branchPath);
    if (cachedMRCM != null) {
        LOGGER.debug("MRCM present in cache.");
        return cachedMRCM;
    }
    LOGGER.debug("MRCM not present in cache; loading MRCM.");

    final BranchCriteria branchCriteria = versionControlHelper.getBranchCriteria(branchPath);
    final TimerUtil timer = new TimerUtil("MRCM");
    final List<Domain> domains = getDomains(branchPath, branchCriteria, timer);
    final List<AttributeDomain> attributeDomains = getAttributeDomains(branchPath, branchCriteria, timer);
    final List<AttributeRange> attributeRanges = getAttributeRanges(branchPath, branchCriteria, timer);

    final MRCM mrcm = new MRCM(domains, attributeDomains, attributeRanges);
    cache.putIfAbsent(branchPath, mrcm);
    return mrcm;
}
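The timer created above is passed to the getDomains, getAttributeDomains and getAttributeRanges helpers so each load step can be checkpointed. As orientation only, here is a minimal sketch of a timer with the same call pattern seen across these examples (a name in the constructor, checkpoint(String) per step, finish() at the end); it is an illustration, not the actual TimerUtil implementation.

// Illustrative sketch only: a minimal timer with the same call pattern as TimerUtil
// (named timer, per-step checkpoint, overall finish). Not the snowstorm class.
public class SimpleTimer {

    private final String name;
    private final long start = System.currentTimeMillis();
    private long lastCheckpoint = start;

    public SimpleTimer(String name) {
        this.name = name;
    }

    // Logs the time spent since the previous checkpoint (or since construction).
    public void checkpoint(String label) {
        long now = System.currentTimeMillis();
        System.out.printf("Timer %s - %s: %d ms%n", name, label, now - lastCheckpoint);
        lastCheckpoint = now;
    }

    // Logs the total elapsed time for the whole operation.
    public void finish() {
        System.out.printf("Timer %s - total: %d ms%n", name, System.currentTimeMillis() - start);
    }
}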
The class MRCMService, method retrieveConceptModelAttributeHierarchy:
public ConceptMini retrieveConceptModelAttributeHierarchy(String branch, List<LanguageDialect> languageDialects) {
    logger.info("Loading concept model attribute hierarchy.");
    TimerUtil timer = new TimerUtil("attribute-tree", Level.INFO);
    String topId = Concepts.CONCEPT_MODEL_ATTRIBUTE;
    long topIdLong = parseLong(topId);

    // Load all attributes including terms
    List<ConceptMini> allAttributes = ecl("<<" + topId, branch, languageDialects);
    timer.checkpoint("load all with terms");

    Map<Long, ConceptMini> attributeMap = allAttributes.stream()
            .collect(Collectors.toMap(ConceptMini::getConceptIdAsLong, Function.identity()));
    if (!attributeMap.containsKey(topIdLong)) {
        throw new IllegalStateException("Concept not found: " + topId + " | Concept model attribute (attribute) |.");
    }
    Set<Long> remainingAttributes = new HashSet<>(attributeMap.keySet());
    remainingAttributes.remove(topIdLong);

    BranchCriteria branchCriteria = versionControlHelper.getBranchCriteria(branch);
    NativeSearchQueryBuilder queryConceptQuery = new NativeSearchQueryBuilder()
            .withQuery(boolQuery()
                    .must(branchCriteria.getEntityBranchCriteria(QueryConcept.class))
                    .must(termQuery(QueryConcept.Fields.STATED, false))
                    .filter(termsQuery(QueryConcept.Fields.CONCEPT_ID, remainingAttributes)))
            .withFields(QueryConcept.Fields.CONCEPT_ID, QueryConcept.Fields.PARENTS)
            .withPageable(LARGE_PAGE);

    try (SearchHitsIterator<QueryConcept> queryConcepts = elasticsearchTemplate.searchForStream(queryConceptQuery.build(), QueryConcept.class)) {
        queryConcepts.forEachRemaining(hit -> {
            for (Long parent : hit.getContent().getParents()) {
                ConceptMini parentMini = attributeMap.get(parent);
                if (parentMini.getExtraFields() == null || parentMini.getExtraFields().get(CHILDREN) == null) {
                    parentMini.addExtraField(CHILDREN, new ArrayList<>());
                }
                @SuppressWarnings("unchecked")
                List<ConceptMini> children = (List<ConceptMini>) parentMini.getExtraFields().get(CHILDREN);
                children.add(attributeMap.get(hit.getContent().getConceptIdL()));
                children.sort(Comparator.comparing(ConceptMini::getFsnTerm));
            }
        });
    }
    timer.finish();
    return attributeMap.get(topIdLong);
}
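The loop above builds the hierarchy by attaching each concept to its parent under a CHILDREN extra field. A possible way to walk the returned tree is sketched below; it assumes the CHILDREN constant is the string "children" and that ConceptMini exposes getConceptId, getFsnTerm and getExtraFields as used in the method above. Illustration only.

// Hypothetical traversal of the hierarchy returned by retrieveConceptModelAttributeHierarchy.
// Assumes the "children" extra field holds a List<ConceptMini>, as built in the loop above.
@SuppressWarnings("unchecked")
static void printAttributeTree(ConceptMini node, int depth) {
    System.out.println("  ".repeat(depth) + node.getConceptId() + " | " + node.getFsnTerm() + " |");
    Object children = node.getExtraFields() == null ? null : node.getExtraFields().get("children");
    if (children instanceof List) {
        for (ConceptMini child : (List<ConceptMini>) children) {
            printAttributeTree(child, depth + 1);
        }
    }
}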
The class ECLQueryService, method doSelectConceptIds:
public Page<Long> doSelectConceptIds(String ecl, BranchCriteria branchCriteria, String path, boolean stated,
        Collection<Long> conceptIdFilter, PageRequest pageRequest, SExpressionConstraint expressionConstraint) {

    TimerUtil eclSlowQueryTimer = getEclSlowQueryTimer();
    if (expressionConstraint == null) {
        expressionConstraint = (SExpressionConstraint) eclQueryBuilder.createQuery(ecl);
    }

    // - Optimisation idea -
    // Changing something like "(id) AND (<<id OR >>id)" to "(id AND <<id) OR (id AND >>id)"
    // will run in a fraction of the time because there will be no large fetches.

    Optional<Page<Long>> pageOptional;
    if (eclCacheEnabled) {
        BranchVersionECLCache branchVersionCache = resultsCache.getOrCreateBranchVersionCache(path, branchCriteria.getTimepoint());

        PageRequest queryPageRequest = pageRequest;
        LongPredicate filter = null;
        if (conceptIdFilter != null) {
            // Fetch all, without conceptIdFilter or paging. Apply filter and paging afterwards.
            // This may be expensive, but it's the only way to allow the cache to help with this sort of query.
            queryPageRequest = null;
            final LongOpenHashSet fastSet = new LongOpenHashSet(conceptIdFilter);
            filter = fastSet::contains;
        }

        Page<Long> cachedPage = branchVersionCache.get(ecl, stated, queryPageRequest);
        if (cachedPage != null) {
            final int pageNumber = pageRequest != null ? pageRequest.getPageNumber() : 0;
            final int pageSize = pageRequest != null ? pageRequest.getPageSize() : -1;
            logger.debug("ECL cache hit {}@{} \"{}\" {}:{}", path, branchCriteria.getTimepoint().getTime(), ecl, pageNumber, pageSize);
            branchVersionCache.recordHit();
            pageOptional = Optional.of(cachedPage);
        } else {
            pageOptional = expressionConstraint.select(path, branchCriteria, stated, null, queryPageRequest, queryService);
            if (pageOptional.isPresent()) {
                // Cache results
                final Page<Long> page = pageOptional.get();
                branchVersionCache.put(ecl, stated, queryPageRequest, page);
                eclSlowQueryTimer.checkpoint(String.format("ecl:'%s', with %s results in this page, now cached for this branch/commit/page.",
                        ecl, pageOptional.get().getNumberOfElements()));
            }
        }

        if (pageOptional.isPresent()) {
            // Filter results
            if (filter != null) {
                final List<Long> filteredList = pageOptional.get().get().filter(filter::test).collect(Collectors.toList());
                pageOptional = Optional.of(ConceptSelectorHelper.getPage(pageRequest, filteredList));
            }
        }
    } else {
        pageOptional = expressionConstraint.select(path, branchCriteria, stated, conceptIdFilter, pageRequest, queryService);
        pageOptional.ifPresent(conceptIds -> eclSlowQueryTimer.checkpoint(String.format(
                "ecl:'%s', with %s results in this page, cache not enabled.", ecl, conceptIds.getNumberOfElements())));
    }

    if (pageOptional.isEmpty()) {
        return getWildcardPage(branchCriteria, stated, conceptIdFilter, pageRequest);
    }
    return pageOptional.get();
}
The class ExpressionRepositoryService, method parseValidateAndTransformExpression:
public PostCoordinatedExpression parseValidateAndTransformExpression(String branch, String closeToUserForm) throws ServiceException {
    TimerUtil timer = new TimerUtil("exp");
    try {
        // Sort contents of expression
        ComparableExpression expression = expressionParser.parseExpression(closeToUserForm);
        timer.checkpoint("Parse expression");

        ExpressionContext context = new ExpressionContext(branch, versionControlHelper, mrcmService, timer);

        // Validate expression against MRCM
        mrcmAttributeRangeValidation(expression, context);
        timer.checkpoint("MRCM validation");

        final ComparableExpression classifiableFormExpression;
        try {
            classifiableFormExpression = transformationService.transform(expression, context);
            timer.checkpoint("Transformation");
        } catch (TransformationException e) {
            String errorExpression = createHumanReadableExpression(expression.toString(), context);
            throw new TransformationException(String.format("Expression could not be transformed: \"%s\". \n%s", errorExpression, e.getMessage()), e);
        }

        final String classifiableForm = classifiableFormExpression.toString();
        final PostCoordinatedExpression pce = new PostCoordinatedExpression(null, closeToUserForm, classifiableForm);
        pce.setHumanReadableClassifiableForm(createHumanReadableExpression(classifiableForm, context));
        timer.checkpoint("Add human readable");
        timer.finish();
        return pce;
    } catch (SCGException e) {
        throw new IllegalArgumentException("Failed to parse expression: " + e.getMessage(), e);
    }
}
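A hypothetical caller of the method above might look like the following; the branch path, the close-to-user form (based on the standard SNOMED CT compositional grammar example) and the getter names are illustrative assumptions.

// Hypothetical usage; values and accessor names are assumptions for illustration.
PostCoordinatedExpression pce = expressionRepositoryService.parseValidateAndTransformExpression(
        "MAIN",
        "73211009 |Diabetes mellitus| : 363698007 |Finding site| = 113331007 |Endocrine system|");
System.out.println(pce.getClassifiableForm());
System.out.println(pce.getHumanReadableClassifiableForm());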
The class AuthoringStatsService, method getStats:
public AuthoringStatsSummary getStats(String branch) {
    BranchCriteria branchCriteria = versionControlHelper.getBranchCriteria(branch);
    TimerUtil timer = new TimerUtil("Authoring stats", Level.INFO, 5);

    AuthoringStatsSummary authoringStatsSummary = new AuthoringStatsSummary(new Date());
    authoringStatsSummary.setTitle("Authoring changes since last release");

    // New concepts
    PageRequest pageOfOne = PageRequest.of(0, 1);
    SearchHits<Concept> newConceptsPage = elasticsearchOperations.search(withTotalHitsTracking(
            getNewConceptCriteria(branchCriteria).withPageable(pageOfOne).build()), Concept.class);
    timer.checkpoint("new concepts");
    authoringStatsSummary.setNewConceptsCount(newConceptsPage.getTotalHits());

    // Inactivated concepts
    SearchHits<Concept> inactivatedConceptsPage = elasticsearchOperations.search(withTotalHitsTracking(
            getInactivatedConceptsCriteria(branchCriteria).withPageable(pageOfOne).build()), Concept.class);
    timer.checkpoint("inactivated concepts");
    authoringStatsSummary.setInactivatedConceptsCount(inactivatedConceptsPage.getTotalHits());

    // Reactivated concepts
    SearchHits<Concept> reactivatedConceptsPage = elasticsearchOperations.search(withTotalHitsTracking(
            getReactivatedConceptsCriteria(branchCriteria).withPageable(pageOfOne).build()), Concept.class);
    timer.checkpoint("reactivated concepts");
    authoringStatsSummary.setReactivatedConceptsCount(reactivatedConceptsPage.getTotalHits());

    // Changed FSNs
    SearchHits<Description> changedFSNsPage = elasticsearchOperations.search(withTotalHitsTracking(
            getChangedFSNsCriteria(branchCriteria).withPageable(pageOfOne).build()), Description.class);
    timer.checkpoint("changed FSNs");
    authoringStatsSummary.setChangedFsnCount(changedFSNsPage.getTotalHits());

    // Inactivated synonyms
    SearchHits<Description> inactivatedSynonyms = elasticsearchOperations.search(withTotalHitsTracking(
            getInactivatedSynonymCriteria(branchCriteria).withFields(Description.Fields.CONCEPT_ID).withPageable(pageOfOne).build()), Description.class);
    timer.checkpoint("inactivated descriptions");
    authoringStatsSummary.setInactivatedSynonymsCount(inactivatedSynonyms.getTotalHits());

    // New synonyms for existing concepts
    SearchHits<Description> newSynonymsForExistingConcepts = elasticsearchOperations.search(withTotalHitsTracking(
            getNewSynonymsOnExistingConceptsCriteria(branchCriteria, timer)
                    .withFields(Description.Fields.DESCRIPTION_ID, Description.Fields.CONCEPT_ID).withPageable(pageOfOne).build()), Description.class);
    timer.checkpoint("new synonyms for existing concepts");
    authoringStatsSummary.setNewSynonymsForExistingConceptsCount(newSynonymsForExistingConcepts.getTotalHits());

    // Reactivated synonyms
    SearchHits<Description> reactivatedSynonyms = elasticsearchOperations.search(withTotalHitsTracking(
            getReactivatedSynonymsCriteria(branchCriteria).withFields(Description.Fields.CONCEPT_ID).withPageable(pageOfOne).build()), Description.class);
    timer.checkpoint("reactivated descriptions");
    authoringStatsSummary.setReactivatedSynonymsCount(reactivatedSynonyms.getTotalHits());

    return authoringStatsSummary;
}
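Each block above follows the same counting pattern: build the criteria, request a single hit, enable total-hits tracking and read only the total. A condensed sketch of that pattern, assuming a NativeSearchQueryBuilder named criteria like those returned by the get...Criteria helpers above:

// Count-only query sketch: fetch one document, track total hits, read the count.
// withTotalHitsTracking(..) in the method above presumably sets trackTotalHits in a similar way.
NativeSearchQuery countQuery = criteria.withPageable(PageRequest.of(0, 1)).build();
countQuery.setTrackTotalHits(true);
SearchHits<Concept> hits = elasticsearchOperations.search(countQuery, Concept.class);
long total = hits.getTotalHits();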