Use of org.springframework.scheduling.annotation.Scheduled in project CzechIdMng by bcvsolutions.
The class DefaultAttachmentManager, method purgeTempFiles.
/**
 * Purge old temporary files. Runs with a fixed delay of one hour (3600000 ms);
 * temporary files older than the configured TTL are removed.
 */
@Scheduled(fixedDelay = 3600000)
public void purgeTempFiles() {
    int purgedFiles = 0;
    long ttl = attachmentConfiguration.getTempTtl();
    if (ttl == 0) {
        LOG.warn("Removing old temporary files is disabled. Configure property [{}] - time to live in milliseconds (greater than zero).", AttachmentConfiguration.PROPERTY_TEMP_TTL);
        return;
    }
    //
    // purge temporary files older than the purge time
    long purgeTime = System.currentTimeMillis() - ttl;
    File temp = new File(getTempPath());
    if (temp.isDirectory()) {
        File[] files = temp.listFiles();
        if (files != null) {
            for (File f : files) {
                try {
                    if (f.getName().endsWith("." + DEFAULT_TEMP_FILE_EXTENSION) && f.lastModified() < purgeTime) {
                        f.delete();
                        purgedFiles++;
                    }
                } catch (Exception ex) {
                    LOG.error("Removing old temporary [.{}] file [{}] failed", DEFAULT_TEMP_FILE_EXTENSION, f.getName(), ex);
                }
            }
        }
    }
    LOG.debug("Temporary files were purged [{}]", purgedFiles);
}
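The fixedDelay attribute measures the interval from the end of one execution to the start of the next, so a slow purge run never overlaps the following one. A minimal sketch of the same pattern follows; the class names, temp directory, and TTL are assumptions for illustration, and scheduling must be switched on with @EnableScheduling on some configuration class.

import java.io.File;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Configuration
@EnableScheduling
class SchedulingConfig {
    // enables processing of @Scheduled annotations in this context
}

@Component
class TempFileCleaner {

    // hypothetical temp directory; the real manager resolves its path from configuration
    private final File tempDir = new File(System.getProperty("java.io.tmpdir"));

    // runs one hour after the previous execution finished (fixed delay, not fixed rate)
    @Scheduled(fixedDelay = 3600000)
    public void purge() {
        long purgeTime = System.currentTimeMillis() - 3600000L;
        File[] files = tempDir.listFiles();
        if (files == null) {
            return;
        }
        for (File f : files) {
            if (f.lastModified() < purgeTime) {
                f.delete();
            }
        }
    }
}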
Use of org.springframework.scheduling.annotation.Scheduled in project CzechIdMng by bcvsolutions.
The class DefaultEntityEventManager, method scheduleProcessCreated.
/**
 * Spring schedules the next run only after the previous one has ended, so executions never run concurrently.
 */
@Scheduled(fixedDelayString = "${" + SchedulerConfiguration.PROPERTY_EVENT_QUEUE_PROCESS + ":" + SchedulerConfiguration.DEFAULT_EVENT_QUEUE_PROCESS + "}")
public void scheduleProcessCreated() {
    if (!eventConfiguration.isAsynchronous()) {
        // asynchronous processing is disabled - return early to avoid debug log noise (useful for developers)
        return;
    }
    // run as system - called from the scheduler internally
    securityService.setSystemAuthentication();
    //
    // calculate events to process
    String instanceId = configurationService.getInstanceId();
    List<IdmEntityEventDto> events = getCreatedEvents(instanceId);
    LOG.trace("Events to process [{}] on instance [{}].", events.size(), instanceId);
    for (IdmEntityEventDto event : events) {
        // call through the Spring proxy so the @Transactional advice on executeEvent is applied
        context.getBean(this.getClass()).executeEvent(event);
    }
}
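fixedDelayString accepts a ${...} placeholder resolved from the Spring Environment, and the value after the colon is the fallback used when the property is not set. A minimal sketch with a hypothetical property name:

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component
class EventQueueProcessor {

    // "app.event.queue.delay" is a hypothetical property name; 1000 ms is the
    // fallback applied when the property is missing from the Environment
    @Scheduled(fixedDelayString = "${app.event.queue.delay:1000}")
    public void process() {
        // pick up queued events here
    }
}

Note the context.getBean(this.getClass()) call in the snippet above: it fetches the manager's own Spring proxy, so the @Transactional advice on executeEvent is honored even though the call originates inside the same class, where a plain this.executeEvent(event) would bypass it.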
Use of org.springframework.scheduling.annotation.Scheduled in project vft-capture by videofirst.
The class DefaultUploadService, method purgeFinishedUploads.
@Scheduled(fixedDelayString = "${vft_config.upload.purgeFinishedUploadSchedule:2000}")
public void purgeFinishedUploads() {
    // uploads finished before this cut-off are eligible for removal
    LocalDateTime cutOff = LocalDateTime.now().minusSeconds(uploadConfig.getKeepFinishedUploadsInSecs());
    for (String captureId : uploads.keySet()) {
        Capture capture = uploads.get(captureId);
        if (capture.getUpload() != null && capture.getUpload().getState() == UploadState.finished && capture.getUpload().getFinished().isBefore(cutOff)) {
            log.info("Removing capture " + captureId);
            // removal while iterating assumes uploads is a concurrent map (e.g. ConcurrentHashMap)
            uploads.remove(captureId);
        } else {
            log.debug("Not removing capture " + captureId);
        }
    }
}
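Removing entries while iterating keySet() is only safe when uploads is a concurrent map; a plain HashMap would throw ConcurrentModificationException. A hedged alternative that works for any Map implementation uses removeIf on the entry-set view - the field and type names below are assumptions for illustration, not the project's actual API:

import java.time.LocalDateTime;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class UploadPurger {

    // hypothetical stand-in for the service's uploads map
    private final Map<String, LocalDateTime> finishedAt = new ConcurrentHashMap<>();

    void purgeOlderThan(long keepSeconds) {
        LocalDateTime cutOff = LocalDateTime.now().minusSeconds(keepSeconds);
        // removeIf mutates the map through the entry-set view, so no explicit
        // iterator handling is needed and non-concurrent maps are handled safely too
        finishedAt.entrySet().removeIf(e -> e.getValue().isBefore(cutOff));
    }
}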
Use of org.springframework.scheduling.annotation.Scheduled in project rocketmq-externals by apache.
The class DashboardCollectTask, method collectTopic.
@Scheduled(cron = "30 0/1 * * * ?")
@MultiMQAdminCmdMethod(timeoutMillis = 5000)
public void collectTopic() {
    if (!rmqConfigure.isEnableDashBoardCollect()) {
        return;
    }
    Date date = new Date();
    // created unstarted because start()/stop()/reset() are driven explicitly inside the loop
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    try {
        TopicList topicList = mqAdminExt.fetchAllTopicList();
        Set<String> topicSet = topicList.getTopicList();
        for (String topic : topicSet) {
            if (topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX) || topic.startsWith(MixAll.DLQ_GROUP_TOPIC_PREFIX)) {
                continue;
            }
            TopicRouteData topicRouteData = mqAdminExt.examineTopicRouteInfo(topic);
            GroupList groupList = mqAdminExt.queryTopicConsumeByWho(topic);
            double inTPS = 0;
            long inMsgCntToday = 0;
            double outTPS = 0;
            long outMsgCntToday = 0;
            for (BrokerData bd : topicRouteData.getBrokerDatas()) {
                String masterAddr = bd.getBrokerAddrs().get(MixAll.MASTER_ID);
                if (masterAddr != null) {
                    try {
                        stopwatch.start();
                        log.info("start time: {}", stopwatch.toString());
                        BrokerStatsData bsd = mqAdminExt.viewBrokerStatsData(masterAddr, BrokerStatsManager.TOPIC_PUT_NUMS, topic);
                        stopwatch.stop();
                        log.info("stop time : {}", stopwatch.toString());
                        stopwatch.reset();
                        inTPS += bsd.getStatsMinute().getTps();
                        inMsgCntToday += StatsAllSubCommand.compute24HourSum(bsd);
                    } catch (Exception e) {
                        // produce stats are unavailable on this broker - skip it
                        // throw Throwables.propagate(e);
                    }
                }
            }
            if (groupList != null && !groupList.getGroupList().isEmpty()) {
                for (String group : groupList.getGroupList()) {
                    for (BrokerData bd : topicRouteData.getBrokerDatas()) {
                        String masterAddr = bd.getBrokerAddrs().get(MixAll.MASTER_ID);
                        if (masterAddr != null) {
                            try {
                                String statsKey = String.format("%s@%s", topic, group);
                                BrokerStatsData bsd = mqAdminExt.viewBrokerStatsData(masterAddr, BrokerStatsManager.GROUP_GET_NUMS, statsKey);
                                outTPS += bsd.getStatsMinute().getTps();
                                outMsgCntToday += StatsAllSubCommand.compute24HourSum(bsd);
                            } catch (Exception e) {
                                // consume stats are unavailable on this broker - skip it
                                // throw Throwables.propagate(e);
                            }
                        }
                    }
                }
            }
            List<String> list;
            try {
                list = dashboardCollectService.getTopicMap().get(topic);
            } catch (ExecutionException e) {
                throw Throwables.propagate(e);
            }
            if (null == list) {
                list = Lists.newArrayList();
            }
            list.add(date.getTime() + "," + new BigDecimal(inTPS).setScale(5, BigDecimal.ROUND_HALF_UP) + "," + inMsgCntToday + "," + new BigDecimal(outTPS).setScale(5, BigDecimal.ROUND_HALF_UP) + "," + outMsgCntToday);
            dashboardCollectService.getTopicMap().put(topic, list);
        }
        log.debug("Topic Collected Data in memory = {}", JsonUtil.obj2String(dashboardCollectService.getTopicMap().asMap()));
    } catch (Exception err) {
        throw Throwables.propagate(err);
    }
}
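Spring's cron expressions have six fields - second, minute, hour, day of month, month, day of week - so "30 0/1 * * * ?" fires at second 30 of every minute. A minimal sketch (method names and the second expression are purely illustrative):

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component
class CronExamples {

    // fires at second 30 of every minute of every hour
    @Scheduled(cron = "30 0/1 * * * ?")
    public void everyMinuteAtSecond30() {
    }

    // a hypothetical variant: every day at 02:15:00
    @Scheduled(cron = "0 15 2 * * ?")
    public void nightlyAtTwoFifteen() {
    }
}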
Use of org.springframework.scheduling.annotation.Scheduled in project nixmash-blog by mintster.
The class GithubJobRunner, method runGithubJob.
@Scheduled(fixedRateString = "${github.job.fixed.delay.seconds:60}000")
public void runGithubJob() {
    SimpleDateFormat format = new SimpleDateFormat("M-dd-yy hh:mm:ss");
    String startDateTime = format.format(new Date());
    // a unique "time" parameter makes Spring Batch treat every run as a new job instance
    JobParameters jobParameters = new JobParametersBuilder().addLong("time", System.currentTimeMillis()).toJobParameters();
    try {
        logger.info("");
        logger.info("STARTING GITHUB BATCH JOB : " + startDateTime);
        JobExecution execution = jobLauncher.run(githubJob, jobParameters);
        logger.info("JOB STATUS : " + execution.getStatus());
    } catch (Exception e) {
        logger.error("JOB FAILED!!!", e);
    }
}
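The placeholder here appends a literal "000" to a value given in seconds, so the default of 60 becomes 60000 ms. Note also that fixedRateString schedules runs at a fixed rate from start to start, unlike the fixedDelay examples above, which measure from the end of the previous run. A small sketch of the same trick, using a hypothetical property name:

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component
class BatchJobTrigger {

    // "my.job.interval.seconds" is a hypothetical property; the trailing "000"
    // turns a value in seconds into the milliseconds that fixedRateString expects
    @Scheduled(fixedRateString = "${my.job.interval.seconds:60}000")
    public void trigger() {
        // launch the batch job here
    }
}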