Example use of org.springframework.scheduling.annotation.Scheduled in the NabAlive project by jcheype: the clock method of the ApplicationScheduler class.
/**
 * Hourly tick (top of every hour) that plays the "clock" application on
 * every connected, awake rabbit that has it configured.
 */
@Scheduled(cron = "0 0 * * * *")
public void clock() {
    logger.debug("clock trigger");
    // Resolve the clock application; fail fast if it is not registered.
    Application application = checkNotNull(applicationManager.getApplication(CLOCK_APIKEY));
    // Select every rabbit whose configuration references the clock application.
    Query<Nabaztag> query = nabaztagDAO.createQuery().filter("applicationConfigList.applicationStoreApikey", CLOCK_APIKEY);
    // Idiomatic for-each instead of a manual Iterator loop.
    for (Nabaztag nabaztag : nabaztagDAO.find(query)) {
        Status status = connectionManager.get(nabaztag.getMacAddress());
        // Only trigger rabbits that are currently connected and awake.
        if (status != null && !status.isAsleep()) {
            try {
                application.onStartup(nabaztag, findConfig(CLOCK_APIKEY, nabaztag.getApplicationConfigList()));
            } catch (Exception e) {
                // Best effort: a failure on one rabbit must not stop the others.
                logger.debug("cannot send message", e);
            }
        }
    }
}
Example use of org.springframework.scheduling.annotation.Scheduled in the webapp project by elimu-ai: the executeDataSync method of the MailChimpScheduler class.
/**
 * Sync data registered for existing subscribers.
 * <p>
 * Runs only in production; pushes each contributor's team membership to the
 * MailChimp mailing list when the contributor is already a list member.
 */
// Runs at 20 minutes past every hour.
@Scheduled(cron = "00 20 * * * *")
public synchronized void executeDataSync() {
    logger.info("executeDataSync");
    // Only touch the live mailing list from the production environment.
    if (EnvironmentContextLoaderListener.env != Environment.PROD) {
        return;
    }
    List<Contributor> contributors = contributorDao.readAll();
    logger.info("contributors.size(): " + contributors.size());
    for (Contributor contributor : contributors) {
        try {
            String memberInfo = MailChimpApiHelper.getMemberInfo(contributor.getEmail());
            // A blank response means the contributor is not on the list; skip.
            if (StringUtils.isNotBlank(memberInfo)) {
                // Sync Contributor data with mailing list
                MailChimpApiHelper.updateTeams(contributor.getEmail(), contributor.getTeams());
            }
        } catch (IOException ex) {
            // BUG FIX: the original logged with a null message, losing all
            // context — say which contributor failed.
            logger.error("MailChimp data sync failed for contributor " + contributor.getEmail(), ex);
            // An IOException most likely means the MailChimp API is down, so
            // abort the whole run rather than failing for every contributor.
            break;
        }
    }
    logger.info("executeDataSync complete");
}
Example use of org.springframework.scheduling.annotation.Scheduled in the goci project by EBISPOT: the dailyNcbiExport method of the DailyNcbiExportTask class.
// Runs at 00:15 every Sunday — despite the method name, the cron expression
// is weekly (day-of-week field is SUN), not daily.
@Scheduled(cron = "0 15 0 * * SUN")
public void dailyNcbiExport() throws IOException {
    // Create a date-stamped export file under the system temp directory.
    String uploadDir = System.getProperty("java.io.tmpdir") + File.separator + "gwas_ncbi_export" + File.separator;
    DateFormat df = new SimpleDateFormat("yyyy_MM_dd");
    String dateStamp = df.format(new Date());
    File outputFile = new File(uploadDir + dateStamp + "_gwas.txt");
    // mkdirs() returning false usually just means the directory already
    // exists; real failures surface via the file-creation check below.
    outputFile.getParentFile().mkdirs();
    // If at this stage we haven't got a file, create one. BUG FIX: the
    // original ignored createNewFile()'s boolean result, silently continuing
    // with no file; fail loudly instead (IOException is already declared).
    if (!outputFile.exists() && !outputFile.createNewFile()) {
        throw new IOException("Could not create NCBI export file: " + outputFile);
    }
    getLog().info("Created file: " + outputFile);
    // Build the NCBI spreadsheet and write it to the export file.
    String[][] data = catalogExportRepository.getNCBISpreadsheet();
    catalogSpreadsheetExporter.writeToFile(data, outputFile);
    // Only upload a non-empty export.
    if (outputFile.length() != 0) {
        getLog().info("Begin file upload to FTP...");
        ftpFileService.ftpFileUpload(outputFile);
    } else {
        getLog().error("File is empty");
    }
}
Example use of org.springframework.scheduling.annotation.Scheduled in the Protocol-Adapter-IEC61850 project by OSGP: the generateData method of the RtuSimulator class.
/**
 * Every 60 seconds, collects fresh attribute values from all logical devices
 * and pushes them to the IEC 61850 server, unless value generation has been
 * stopped.
 */
@Scheduled(fixedDelay = 60000)
public void generateData() {
    // Lock on the stop flag so stopping and generating cannot interleave.
    synchronized (this.stopGeneratingValues) {
        if (this.stopGeneratingValues.get()) {
            return;
        }
        final Date now = new Date();
        final List<BasicDataAttribute> attributes = new ArrayList<>();
        // Gather one timestamped snapshot across all logical devices.
        for (final LogicalDevice logicalDevice : this.logicalDevices) {
            attributes.addAll(logicalDevice.getAttributesAndSetValues(now));
        }
        this.server.setValues(attributes);
        LOGGER.info("Generated values");
    }
}
Example use of org.springframework.scheduling.annotation.Scheduled in the rocketmq-externals project by apache: the collectBroker method of the DashboardCollectTask class.
/**
 * Every minute, samples each broker's total TPS from its runtime stats and
 * appends a "timestamp,averageTps" data point to the in-memory dashboard map,
 * keyed by "clusterName:brokerId".
 */
@Scheduled(cron = "0 0/1 * * * ?")
public void collectBroker() {
    // Collection can be disabled via configuration.
    if (!rmqConfigure.isEnableDashBoardCollect()) {
        return;
    }
    try {
        Date date = new Date();
        ClusterInfo clusterInfo = mqAdminExt.examineBrokerClusterInfo();
        // Map broker address -> "clusterName:brokerId" for every broker instance.
        Map<String, String> addresses = Maps.newHashMap();
        for (Map.Entry<String, BrokerData> clusterEntry : clusterInfo.getBrokerAddrTable().entrySet()) {
            HashMap<Long, String> addrs = clusterEntry.getValue().getBrokerAddrs();
            for (Map.Entry<Long, String> addrEntry : addrs.entrySet()) {
                addresses.put(addrEntry.getValue(), clusterEntry.getKey() + ":" + addrEntry.getKey());
            }
        }
        for (Map.Entry<String, String> entry : addresses.entrySet()) {
            List<String> list = dashboardCollectService.getBrokerMap().get(entry.getValue());
            if (null == list) {
                list = Lists.newArrayList();
            }
            // Fetch runtime stats (up to 3 attempts); skip unreachable brokers.
            KVTable kvTable = fetchBrokerRuntimeStats(entry.getKey(), 3);
            if (kvTable == null) {
                continue;
            }
            // "getTotalTps" is a space-separated list of samples; average them.
            String[] tpsArray = kvTable.getTable().get("getTotalTps").split(" ");
            BigDecimal totalTps = new BigDecimal(0);
            for (String tps : tpsArray) {
                totalTps = totalTps.add(new BigDecimal(tps));
            }
            BigDecimal averageTps = totalTps.divide(new BigDecimal(tpsArray.length), 5, BigDecimal.ROUND_HALF_UP);
            list.add(date.getTime() + "," + averageTps.toString());
            dashboardCollectService.getBrokerMap().put(entry.getValue(), list);
        }
        // BUG FIX: the original concatenated the JSON onto the pattern string
        // ("... = {}" + json), which left the {} placeholder unfilled and
        // serialized the map eagerly even when debug logging is off. Pass the
        // value as a logging argument so it is both substituted and lazy.
        log.debug("Broker Collected Data in memory = {}", JsonUtil.obj2String(dashboardCollectService.getBrokerMap().asMap()));
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Aggregations