Use of org.springframework.scheduling.annotation.Scheduled in project oncotree by cBioPortal:
the class MSKConceptCache, method resetCache.
/**
 * Clears the Crosswalk MSKConcept cache and refills it from every known OncoTree version.
 *
 * For each node in each version, the MSKConcept is fetched from Crosswalk (and saved by
 * getFromCrosswalkAndSave, per its name); all OncoTree codes previously recorded for the
 * node's TopBraid URI — from earlier versions, which are iterated in ascending release-date
 * order — are attached to the concept as history, excluding the node's own current code.
 *
 * NOTE(review): topBraidURIsToOncotreeCodes is NOT cleared here, so URI-to-code history
 * accumulates across refreshes — confirm this accumulation is intended.
 */
// call when constructed
@PostConstruct
// call every Sunday at 3am
@Scheduled(cron = "0 0 3 * * SUN")
private void resetCache() {
    logger.info("resetCache() -- clearing Crosswalk MSKConcept cache and refilling");
    oncoTreeCodesToMSKConcepts.clear();
    // versions are ordered in ascending order by release date
    for (Version version : VersionUtil.getVersions()) {
        List<OncoTreeNode> oncoTreeNodes = oncoTreeRepository.getOncoTree(version);
        for (OncoTreeNode node : oncoTreeNodes) {
            MSKConcept mskConcept = getFromCrosswalkAndSave(node.getCode());
            // get all codes defined so far for this topbraid uri and save in history
            if (topBraidURIsToOncotreeCodes.containsKey(node.getURI())) {
                // do not add this code to the history, but add any others
                HashSet<String> allButThisNode = new HashSet<String>(topBraidURIsToOncotreeCodes.get(node.getURI()));
                allButThisNode.remove(node.getCode());
                mskConcept.addHistory(allButThisNode);
            } else {
                // first time this URI is seen: start an empty code set for it
                topBraidURIsToOncotreeCodes.put(node.getURI(), new HashSet<String>());
            }
            // now save this as oncotree code history for this topbraid uri
            topBraidURIsToOncotreeCodes.get(node.getURI()).add(node.getCode());
        }
    }
}
Use of org.springframework.scheduling.annotation.Scheduled in project molgenis by molgenis:
the class GavinController, method cleanUp.
/**
 * Hourly housekeeping task: deletes stale job directories from the gavin
 * working directory in the file store.
 *
 * A directory is considered stale when its last-modified timestamp is more
 * than 24 hours in the past. I/O failures are logged, never propagated.
 */
@Scheduled(cron = "0 0 * * * *")
public void cleanUp() {
    LOG.debug("Clean up old jobs in the file store...");
    try {
        final File workingDirectory = fileStore.getFile(GAVIN_APP);
        final File[] staleDirectories = workingDirectory.listFiles(candidate ->
                candidate.isDirectory()
                        && MILLISECONDS.toSeconds(candidate.lastModified()) < now().minusHours(24).toEpochSecond());
        // listFiles returns null when the path does not exist or is not a directory
        if (staleDirectories != null) {
            for (final File staleDirectory : staleDirectories) {
                LOG.info("Deleting job directory {}", staleDirectory.getName());
                fileStore.deleteDirectory(GAVIN_APP + separator + staleDirectory.getName());
            }
        }
        LOG.debug("Done.");
    } catch (IOException e) {
        LOG.error("Failed to clean up working directory", e);
    }
}
Use of org.springframework.scheduling.annotation.Scheduled in project hello-world by haoziapple:
the class UserService, method removeNotActivatedUsers.
/**
 * Deletes users that were never activated within 3 days of account creation.
 * <p>
 * Runs every day at 01:00 (am).
 */
@Scheduled(cron = "0 0 1 * * ?")
public void removeNotActivatedUsers() {
    // anything created before this instant and still not activated gets removed
    Instant cutoff = Instant.now().minus(3, ChronoUnit.DAYS);
    userRepository
            .findAllByActivatedIsFalseAndCreatedDateBefore(cutoff)
            .forEach(staleUser -> {
                log.debug("Deleting not activated user {}", staleUser.getLogin());
                userRepository.delete(staleUser);
            });
}
Use of org.springframework.scheduling.annotation.Scheduled in project molgenis by molgenis:
the class IndexJobSchedulerImpl, method cleanupJobExecutions.
/**
 * Cleans up successful IndexJobExecutions that finished longer than five minutes ago.
 * delay for a minute to allow the transaction manager to become available
 */
@Scheduled(initialDelay = 1 * 60 * 1000, fixedRate = 5 * 60 * 1000)
public void cleanupJobExecutions() {
    runAsSystem(() -> {
        LOG.trace("Clean up Index job executions...");
        // guard: the repository may not have been bootstrapped yet
        if (!dataService.hasRepository(IndexJobExecutionMeta.INDEX_JOB_EXECUTION)) {
            LOG.warn(IndexJobExecutionMeta.INDEX_JOB_EXECUTION + " does not exist");
            return;
        }
        Instant cutoff = Instant.now().minus(5, ChronoUnit.MINUTES);
        // successful executions that ended before the cutoff are eligible for removal
        Stream<Entity> staleExecutions = dataService
                .getRepository(IndexJobExecutionMeta.INDEX_JOB_EXECUTION)
                .query()
                .lt(END_DATE, cutoff)
                .and()
                .eq(STATUS, SUCCESS.toString())
                .findAll();
        dataService.delete(IndexJobExecutionMeta.INDEX_JOB_EXECUTION, staleExecutions);
        LOG.debug("Cleaned up Index job executions.");
    });
}
Use of org.springframework.scheduling.annotation.Scheduled in project goci by EBISPOT:
the class NewWeeklyReportService, method createWeeklyReports.
/**
 * Builds the weekly publication reports — one over all publications, one restricted
 * to publications containing at least one Open Targets study — and persists them via
 * createSpecificWeeklyReports, keyed by today's date and ISO week-of-year.
 *
 * Scheduled for every Monday at 00:30. On failure, the error is logged and an email
 * containing the stack trace is sent instead of failing the scheduler.
 */
@Scheduled(cron = "0 30 0 * * MON")
public void createWeeklyReports() {
    log.info("Creating weekly reports ...");
    // millisecond timestamps are integral: keep them as long, not double
    long startMillis = System.currentTimeMillis();
    List<String> pmids = publicationRepository.findAllPubmedIds();
    // pmid -> study ids, for every publication
    Map<String, List<String>> allPublications = new HashMap<>();
    // pmid -> study ids, restricted to publications with an Open Targets study
    Map<String, List<String>> openTargetsPublications = new HashMap<>();
    for (String pmid : pmids) {
        boolean isOpenTargets = false;
        // NOTE(review): only the first 10 studies per publication are fetched — confirm the cap is intended
        Collection<Study> studies = studyRepository.findTop10ByPublicationIdPubmedId(pmid);
        List<String> ids = new ArrayList<>();
        for (Study study : studies) {
            // null-safe check of the nullable Boolean flag
            if (Boolean.TRUE.equals(study.getOpenTargets())) {
                isOpenTargets = true;
            }
            ids.add(Long.toString(study.getId()));
        }
        allPublications.put(pmid, ids);
        if (isOpenTargets) {
            openTargetsPublications.put(pmid, ids);
        }
    }
    Calendar todayCalendar = Calendar.getInstance();
    Date today = todayCalendar.getTime();
    int weekCode = todayCalendar.get(Calendar.WEEK_OF_YEAR);
    try {
        createSpecificWeeklyReports(allPublications, ALL, today, weekCode);
        createSpecificWeeklyReports(openTargetsPublications, OPEN_TARGETS, today, weekCode);
    } catch (Exception e) {
        log.error("ERROR: Unable to create weekly reports: {}", e.getMessage(), e);
        mailService.sendWeeklyReportErrorCreationEmail(StringUtils.join(Arrays.asList(e.getStackTrace()), "\n"));
    }
    // fractional seconds, dividing by 1000.0 to preserve the previous log format
    double total = (System.currentTimeMillis() - startMillis) / 1000.0;
    log.info("Done creating weekly reports: {}s", total);
}
Aggregations