Use of org.apache.syncope.core.persistence.api.entity.Report in the Apache Syncope project.
From class ReportTest, method saveWithExistingName:
@Test
public void saveWithExistingName() {
    assertThrows(EntityExistsException.class, () -> {
        // Look up an existing report and remember its name
        Report existing = reportDAO.find("0062ea9c-924d-4ecf-9961-4492a8cc6d1b");
        assertNotNull(existing);

        // Build a brand-new report that reuses the same name: saving and
        // flushing must trip the unique-name constraint
        Report duplicate = entityFactory.newEntity(Report.class);
        duplicate.setName(existing.getName());
        duplicate.setActive(true);
        duplicate.setTemplate(reportTemplateDAO.find("sample"));
        reportDAO.save(duplicate);
        reportDAO.flush();
    });
}
Use of org.apache.syncope.core.persistence.api.entity.Report in the Apache Syncope project.
From class JobManagerImpl, method load:
/**
 * Loads and registers all Quartz jobs at startup: scheduled/pull/push task jobs and
 * report jobs for every domain, plus the NotificationJob and SystemLoadReporterJob.
 * If {@code disableQuartzInstance} is set, puts the local Quartz scheduler in standby
 * instead and registers nothing.
 */
@Transactional
@Override
public void load() {
// Quartz disabled for this node: park the scheduler in standby and bail out
if (disableQuartzInstance) {
String instanceId = "AUTO";
try {
instanceId = scheduler.getScheduler().getSchedulerInstanceId();
scheduler.getScheduler().standby();
LOG.info("Successfully put Quartz instance {} in standby", instanceId);
} catch (SchedulerException e) {
LOG.error("Could not put Quartz instance {} in standby", instanceId, e);
}
return;
}
// Read global configuration from the Master domain:
// left = NotificationJob cron expression, right = tasks.interruptMaxRetries
final Pair<String, Long> conf = AuthContextUtils.execWithAuthContext(SyncopeConstants.MASTER_DOMAIN, () -> {
String notificationJobCronExpression = StringUtils.EMPTY;
Optional<? extends CPlainAttr> notificationJobCronExp = confDAO.find("notificationjob.cronExpression");
if (!notificationJobCronExp.isPresent()) {
// attribute missing entirely: fall back to the built-in default cron
notificationJobCronExpression = NotificationJob.DEFAULT_CRON_EXP;
} else if (!notificationJobCronExp.get().getValuesAsStrings().isEmpty()) {
// attribute present with a value: use it (present-but-empty leaves EMPTY,
// which later suppresses NotificationJob registration)
notificationJobCronExpression = notificationJobCronExp.get().getValuesAsStrings().get(0);
}
long interruptMaxRetries = confDAO.find("tasks.interruptMaxRetries", 1L);
return Pair.of(notificationJobCronExpression, interruptMaxRetries);
});
// Register jobs per domain, under that domain's auth context
for (String domain : domainsHolder.getDomains().keySet()) {
AuthContextUtils.execWithAuthContext(domain, () -> {
// 1. jobs for SchedTasks
Set<SchedTask> tasks = new HashSet<>(taskDAO.<SchedTask>findAll(TaskType.SCHEDULED));
tasks.addAll(taskDAO.<PullTask>findAll(TaskType.PULL));
tasks.addAll(taskDAO.<PushTask>findAll(TaskType.PUSH));
// first registration failure aborts the remaining registrations for this domain
boolean loadException = false;
for (Iterator<SchedTask> it = tasks.iterator(); it.hasNext() && !loadException; ) {
SchedTask task = it.next();
try {
register(task, task.getStartAt(), conf.getRight());
} catch (Exception e) {
LOG.error("While loading job instance for task " + task.getKey(), e);
loadException = true;
}
}
if (loadException) {
LOG.debug("Errors while loading job instances for tasks, aborting");
} else {
// 2. jobs for Reports (only attempted when all task jobs registered cleanly)
for (Iterator<Report> it = reportDAO.findAll().iterator(); it.hasNext() && !loadException; ) {
Report report = it.next();
try {
register(report, null, conf.getRight());
} catch (Exception e) {
LOG.error("While loading job instance for report " + report.getName(), e);
loadException = true;
}
}
if (loadException) {
LOG.debug("Errors while loading job instances for reports, aborting");
}
}
return null;
});
}
// NOTE(review): getDomain() here reads the caller's current auth context, not any of
// the domains iterated above — confirm this is the intended domain for these two jobs
Map<String, Object> jobMap = new HashMap<>();
jobMap.put(JobManager.DOMAIN_KEY, AuthContextUtils.getDomain());
// 3. NotificationJob — skipped when the configured cron expression is blank
if (StringUtils.isBlank(conf.getLeft())) {
LOG.debug("Empty value provided for {}'s cron, not registering anything on Quartz", NotificationJob.class.getSimpleName());
} else {
LOG.debug("{}'s cron expression: {} - registering Quartz job and trigger", NotificationJob.class.getSimpleName(), conf.getLeft());
try {
NotificationJob job = createSpringBean(NotificationJob.class);
registerJob(NOTIFICATION_JOB.getName(), job, conf.getLeft(), null, jobMap);
} catch (Exception e) {
LOG.error("While loading {} instance", NotificationJob.class.getSimpleName(), e);
}
}
// 4. SystemLoadReporterJob (fixed schedule, every minute)
LOG.debug("Registering {}", SystemLoadReporterJob.class);
try {
SystemLoadReporterJob job = createSpringBean(SystemLoadReporterJob.class);
registerJob("systemLoadReporterJob", job, "0 * * * * ?", null, jobMap);
} catch (Exception e) {
LOG.error("While loading {} instance", SystemLoadReporterJob.class.getSimpleName(), e);
}
}
Use of org.apache.syncope.core.persistence.api.entity.Report in the Apache Syncope project.
From class DefaultReportJobDelegate, method execute:
/**
 * Runs the report identified by {@code reportKey}: creates a ReportExec row, streams
 * each reportlet's XML output through a SAX handler into a single compressed zip entry,
 * and always persists the execution result (status, message, raw bytes, end time) in
 * the finally block — even on failure or interruption.
 *
 * @param reportKey key of the Report to execute
 * @throws JobExecutionException if the report is missing, SAX setup fails, or a
 *         non-reportlet error occurs during generation
 */
@Transactional
@Override
public void execute(final String reportKey) throws JobExecutionException {
Report report = reportDAO.find(reportKey);
if (report == null) {
throw new JobExecutionException("Report " + reportKey + " not found");
}
// inactive reports are skipped silently (no execution row is created)
if (!report.isActive()) {
LOG.info("Report {} not active, aborting...", reportKey);
return;
}
// 1. create execution, persist it, and link it to the report
ReportExec execution = entityFactory.newEntity(ReportExec.class);
execution.setStatus(ReportExecStatus.STARTED);
execution.setStart(new Date());
execution.setReport(report);
execution = reportExecDAO.save(execution);
report.add(execution);
report = reportDAO.save(report);
// 2. define a SAX handler for generating result as XML, writing through a
// zip stream (best compression) into an in-memory byte buffer
TransformerHandler handler;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(baos);
zos.setLevel(Deflater.BEST_COMPRESSION);
try {
handler = TRANSFORMER_FACTORY.newTransformerHandler();
Transformer serializer = handler.getTransformer();
serializer.setOutputProperty(OutputKeys.ENCODING, StandardCharsets.UTF_8.name());
serializer.setOutputProperty(OutputKeys.INDENT, "yes");
// a single ZipEntry in the ZipOutputStream, named after the report
zos.putNextEntry(new ZipEntry(report.getName()));
// streaming SAX handler in a compressed byte array stream
handler.setResult(new StreamResult(zos));
} catch (Exception e) {
// refire-immediately flag set to true on the JobExecutionException
throw new JobExecutionException("While configuring for SAX generation", e, true);
}
execution.setStatus(ReportExecStatus.RUNNING);
execution = reportExecDAO.save(execution);
status.set("Starting");
// 3. actual report execution; accumulates stack traces of failed reportlets
StringBuilder reportExecutionMessage = new StringBuilder();
try {
// report header
handler.startDocument();
AttributesImpl atts = new AttributesImpl();
atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, report.getName());
handler.startElement("", "", ReportXMLConst.ELEMENT_REPORT, atts);
status.set("Generating report header");
// iterate over reportlet instances defined for this report; the `interrupt`
// flag (set externally) stops the loop between reportlets
for (int i = 0; i < report.getReportlets().size() && !interrupt; i++) {
Optional<Reportlet> reportlet = ImplementationManager.buildReportlet(report.getReportlets().get(i));
if (reportlet.isPresent()) {
try {
status.set("Invoking reportlet " + report.getReportlets().get(i).getKey());
reportlet.get().extract(handler, status);
} catch (Throwable t) {
// a failing reportlet marks the execution FAILURE but does not stop
// the remaining reportlets; its stack trace is appended to the message
// NOTE(review): this logs the Optional wrapper, not the reportlet key — confirm intended
LOG.error("While executing reportlet {} for report {}", reportlet, reportKey, t);
execution.setStatus(ReportExecStatus.FAILURE);
// ReportException is treated as a wrapper: report its cause instead
Throwable effective = t instanceof ReportException ? t.getCause() : t;
reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(effective)).append("\n==================\n");
}
}
}
if (interrupt) {
LOG.debug("Report job {} interrupted", reportKey);
interrupted = true;
}
// report footer
status.set("Generating report footer");
handler.endElement("", "", ReportXMLConst.ELEMENT_REPORT);
handler.endDocument();
// only promote to SUCCESS if no reportlet flagged FAILURE above
// (assumes getStatus() returns the status enum's name as a String — TODO confirm)
if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {
execution.setStatus(ReportExecStatus.SUCCESS);
}
} catch (Exception e) {
execution.setStatus(ReportExecStatus.FAILURE);
reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(e));
throw new JobExecutionException(e, true);
} finally {
// always close the streams and persist the execution outcome, even when
// rethrowing above; close failures are logged, not propagated
status.set("Completed");
try {
zos.closeEntry();
zos.close();
baos.close();
} catch (IOException e) {
LOG.error("While closing StreamResult's backend", e);
}
execution.setExecResult(baos.toByteArray());
execution.setMessage(reportExecutionMessage.toString());
execution.setEnd(new Date());
reportExecDAO.save(execution);
}
}
Use of org.apache.syncope.core.persistence.api.entity.Report in the Apache Syncope project.
From class ReportTemplateLogic, method delete:
/**
 * Deletes the report template identified by the given key, refusing when any
 * report still references it.
 *
 * @param key report template key
 * @return the TO of the template as it was just before deletion
 * @throws NotFoundException if no template exists for the key
 * @throws SyncopeClientException (InUse) carrying the keys of referencing reports
 */
@PreAuthorize("hasRole('" + StandardEntitlement.REPORT_TEMPLATE_DELETE + "')")
public ReportTemplateTO delete(final String key) {
    // the template must exist before anything else happens
    ReportTemplate template = reportTemplateDAO.find(key);
    if (template == null) {
        LOG.error("Could not find report template '" + key + "'");
        throw new NotFoundException(key);
    }

    // a template still referenced by reports cannot be removed:
    // surface the offending report keys to the caller
    List<Report> referencing = reportDAO.findByTemplate(template);
    if (!referencing.isEmpty()) {
        SyncopeClientException inUse = SyncopeClientException.build(ClientExceptionType.InUse);
        for (Report report : referencing) {
            inUse.getElements().add(report.getKey());
        }
        throw inUse;
    }

    // snapshot the TO first so it can be returned after the delete
    ReportTemplateTO deleted = getReportTemplateTO(key);
    reportTemplateDAO.delete(key);
    return deleted;
}
Use of org.apache.syncope.core.persistence.api.entity.Report in the Apache Syncope project.
From class ReportLogic, method deleteExecutions:
/**
 * Deletes all executions of the given report whose start/end timestamps match the
 * provided (nullable) bounds, recording a per-execution SUCCESS/FAILURE outcome.
 *
 * @param key report key
 * @param startedBefore only executions started before this instant
 * @param startedAfter only executions started after this instant
 * @param endedBefore only executions ended before this instant
 * @param endedAfter only executions ended after this instant
 * @return bulk result keyed by execution key
 * @throws NotFoundException if no report exists for the key
 */
@PreAuthorize("hasRole('" + StandardEntitlement.REPORT_DELETE + "')")
@Override
public BulkActionResult deleteExecutions(final String key, final Date startedBefore, final Date startedAfter, final Date endedBefore, final Date endedAfter) {
    Report report = reportDAO.find(key);
    if (report == null) {
        throw new NotFoundException("Report " + key);
    }

    BulkActionResult result = new BulkActionResult();
    reportExecDAO.findAll(report, startedBefore, startedAfter, endedBefore, endedAfter).forEach(execution -> {
        // one failing delete does not stop the remaining ones
        String executionKey = String.valueOf(execution.getKey());
        try {
            reportExecDAO.delete(execution);
            result.getResults().put(executionKey, BulkActionResult.Status.SUCCESS);
        } catch (Exception e) {
            LOG.error("Error deleting execution {} of report {}", execution.getKey(), key, e);
            result.getResults().put(executionKey, BulkActionResult.Status.FAILURE);
        }
    });
    return result;
}
Aggregations