Use of org.quartz.JobExecutionException in project Syncope by Apache.
The class PushJobDelegate, method doExecuteProvisioning:
@Override
protected String doExecuteProvisioning(
        final PushTask pushTask, final Connector connector, final boolean dryRun)
        throws JobExecutionException {

    LOG.debug("Executing push on {}", pushTask.getResource());

    List<PushActions> actions = new ArrayList<>();
    pushTask.getActions().forEach(impl -> {
        try {
            actions.add(ImplementationManager.build(impl));
        } catch (Exception e) {
            LOG.warn("While building {}", impl, e);
        }
    });

    profile = new ProvisioningProfile<>(connector, pushTask);
    profile.getActions().addAll(actions);
    profile.setDryRun(dryRun);
    profile.setResAct(null);

    if (!profile.isDryRun()) {
        for (PushActions action : actions) {
            action.beforeAll(profile);
        }
    }
    status.set("Initialization completed");

    // First realms...
    if (pushTask.getResource().getOrgUnit() != null) {
        status.set("Pushing realms");

        rhandler = buildRealmHandler();

        for (Realm realm : realmDAO.findDescendants(profile.getTask().getSourceRealm())) {
            // Never push the root realm
            if (realm.getParent() != null) {
                try {
                    rhandler.handle(realm.getKey());
                    reportHandled(SyncopeConstants.REALM_ANYTYPE, realm.getName());
                } catch (Exception e) {
                    LOG.warn("Failure pushing '{}' on '{}'", realm, pushTask.getResource(), e);
                    throw new JobExecutionException(
                            "While pushing " + realm + " on " + pushTask.getResource(), e);
                }
            }
        }
    }

    // ...then provisions for any types
    ahandler = buildAnyObjectHandler();
    uhandler = buildUserHandler();
    ghandler = buildGroupHandler();

    for (Provision provision : pushTask.getResource().getProvisions()) {
        if (provision.getMapping() != null) {
            status.set("Pushing " + provision.getAnyType().getKey());

            AnyDAO<?> anyDAO = getAnyDAO(provision.getAnyType().getKind());

            SyncopePushResultHandler handler;
            switch (provision.getAnyType().getKind()) {
                case USER:
                    handler = uhandler;
                    break;

                case GROUP:
                    handler = ghandler;
                    break;

                case ANY_OBJECT:
                default:
                    handler = ahandler;
            }

            Optional<? extends PushTaskAnyFilter> anyFilter = pushTask.getFilter(provision.getAnyType());
            String filter = anyFilter.isPresent() ? anyFilter.get().getFIQLCond() : null;
            SearchCond cond = StringUtils.isBlank(filter)
                    ? anyDAO.getAllMatchingCond()
                    : SearchCondConverter.convert(filter);
            int count = searchDAO.count(
                    Collections.singleton(profile.getTask().getSourceRealm().getFullPath()),
                    cond,
                    provision.getAnyType().getKind());

            for (int page = 1; page <= (count / AnyDAO.DEFAULT_PAGE_SIZE) + 1 && !interrupt; page++) {
                List<? extends Any<?>> anys = searchDAO.search(
                        Collections.singleton(profile.getTask().getSourceRealm().getFullPath()),
                        cond,
                        page,
                        AnyDAO.DEFAULT_PAGE_SIZE,
                        Collections.<OrderByClause>emptyList(),
                        provision.getAnyType().getKind());
                doHandle(anys, handler, pushTask.getResource());
            }
        }
    }

    if (!profile.isDryRun() && !interrupt) {
        for (PushActions action : actions) {
            action.afterAll(profile);
        }
    }

    if (interrupt) {
        interrupted = true;
    }

    status.set("Push done");

    String result = createReport(profile.getResults(), pushTask.getResource(), dryRun);
    LOG.debug("Push result: {}", result);
    return result;
}
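
The page arithmetic above, (count / AnyDAO.DEFAULT_PAGE_SIZE) + 1, is worth pausing on: it always covers a trailing partial page, at the cost of one extra, empty query when count is an exact multiple of the page size. A minimal stand-alone sketch of the same loop, with hypothetical names and a placeholder page size (not Syncope API):

import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.IntFunction;

public class PagedPushSketch {

    private static final int PAGE_SIZE = 500; // placeholder for AnyDAO.DEFAULT_PAGE_SIZE

    // fetchPage returns the entity keys on a given 1-based page;
    // interrupt mirrors the delegate's flag checked in the loop condition.
    static void pushAll(final int count, final IntFunction<List<String>> fetchPage, final AtomicBoolean interrupt) {
        // (count / PAGE_SIZE) + 1 pages cover every row, including a partial last page;
        // when count is an exact multiple of PAGE_SIZE the final page comes back empty.
        for (int page = 1; page <= (count / PAGE_SIZE) + 1 && !interrupt.get(); page++) {
            for (String key : fetchPage.apply(page)) {
                System.out.println("pushing " + key); // stand-in for handler.handle(key)
            }
        }
    }
}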
Use of org.quartz.JobExecutionException in project Syncope by Apache.
The class PushJobDelegate, method doHandle:
protected void doHandle(
        final List<? extends Any<?>> anys,
        final SyncopePushResultHandler handler,
        final ExternalResource resource)
        throws JobExecutionException {

    for (int i = 0; i < anys.size() && !interrupt; i++) {
        try {
            handler.handle(anys.get(i).getKey());
            reportHandled(
                    anys.get(i).getType().getKey(),
                    anys.get(i) instanceof User
                            ? ((User) anys.get(i)).getUsername()
                            : anys.get(i) instanceof Group
                                    ? ((Group) anys.get(i)).getName()
                                    : ((AnyObject) anys.get(i)).getName());
        } catch (Exception e) {
            LOG.warn("Failure pushing '{}' on '{}'", anys.get(i), resource, e);
            throw new JobExecutionException("While pushing " + anys.get(i) + " on " + resource, e);
        }
    }
}
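
The nested ternary passed to reportHandled(...) packs three instanceof checks into a single expression; factored into a helper it reads more plainly. A sketch against the same Syncope entity types used in the excerpt (the helper name displayName is ours, not Syncope API):

// Hypothetical helper equivalent to the nested ternary above; Any, User,
// Group and AnyObject are the entity interfaces from Syncope's persistence API.
// Users are identified by username, groups and any objects by name.
private static String displayName(final Any<?> any) {
    if (any instanceof User) {
        return ((User) any).getUsername();
    }
    if (any instanceof Group) {
        return ((Group) any).getName();
    }
    return ((AnyObject) any).getName();
}

With this in place, doHandle(...) could report each result as reportHandled(anys.get(i).getType().getKey(), displayName(anys.get(i))).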
Use of org.quartz.JobExecutionException in project Syncope by Apache.
The class SetUMembershipsJob, method execute:
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
    try {
        AuthContextUtils.execWithAuthContext(
                context.getMergedJobDataMap().getString(JobManager.DOMAIN_KEY), () -> {

            @SuppressWarnings("unchecked")
            Map<String, Set<String>> memberships =
                    (Map<String, Set<String>>) context.getMergedJobDataMap().get(MEMBERSHIPS_KEY);

            LOG.debug("About to set memberships (User -> Groups) {}", memberships);

            memberships.entrySet().stream().map(membership -> {
                UserPatch userPatch = new UserPatch();
                userPatch.setKey(membership.getKey());
                membership.getValue().forEach(groupKey -> {
                    userPatch.getMemberships().add(new MembershipPatch.Builder().
                            operation(PatchOperation.ADD_REPLACE).
                            group(groupKey).
                            build());
                });
                return userPatch;
            }).filter(userPatch -> !userPatch.isEmpty()).map(userPatch -> {
                LOG.debug("About to update User {}", userPatch.getKey());
                return userPatch;
            }).forEachOrdered(userPatch -> {
                userProvisioningManager.update(userPatch, true);
            });

            return null;
        });
    } catch (RuntimeException e) {
        LOG.error("While setting memberships", e);
        throw new JobExecutionException("While executing memberships", e);
    }
}
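
Syncope enqueues this job through its JobManager, but the JobDataMap contract it reads is plain Quartz: the domain name under JobManager.DOMAIN_KEY and a Map<String, Set<String>> of user key to group keys under MEMBERSHIPS_KEY. A sketch with stock Quartz API (scheduler setup elided; assumes MEMBERSHIPS_KEY is accessible as a constant and that "Master", Syncope's default domain, is the target):

JobDetail job = JobBuilder.newJob(SetUMembershipsJob.class)
        .withIdentity("setUMemberships")
        .build();
job.getJobDataMap().put(JobManager.DOMAIN_KEY, "Master");
job.getJobDataMap().put(SetUMembershipsJob.MEMBERSHIPS_KEY, memberships); // Map<String, Set<String>>

// fire once, immediately; 'scheduler' is an already-started org.quartz.Scheduler
scheduler.scheduleJob(job, TriggerBuilder.newTrigger().startNow().build());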
Use of org.quartz.JobExecutionException in project Syncope by Apache.
The class NotificationJob, method execute:
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
    LOG.debug("Waking up...");

    for (String domain : domainsHolder.getDomains().keySet()) {
        try {
            AuthContextUtils.execWithAuthContext(domain, () -> {
                try {
                    delegate.execute();
                } catch (Exception e) {
                    LOG.error("While sending out notifications", e);
                    throw new RuntimeException(e);
                }
                return null;
            });
        } catch (RuntimeException e) {
            LOG.error("While sending out notifications", e);
            throw new JobExecutionException("While sending out notifications", e);
        }
    }

    LOG.debug("Sleeping again...");
}
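
The try/catch layering here is a standard trick for carrying a checked exception through a callable that only permits unchecked ones: wrap it in a RuntimeException inside execWithAuthContext, then unwrap it on the outside into the checked JobExecutionException that Quartz's Job contract declares. Reduced to a self-contained skeleton (all names other than JobExecutionException are ours):

import org.quartz.JobExecutionException;

public class WrapUnwrapSketch {

    interface Work {
        void run() throws Exception; // may throw a checked exception
    }

    // stand-in for AuthContextUtils.execWithAuthContext: accepts only unchecked throws
    static void runInContext(final Runnable runnable) {
        runnable.run();
    }

    static void execute(final Work work) throws JobExecutionException {
        try {
            runInContext(() -> {
                try {
                    work.run();
                } catch (Exception e) {
                    throw new RuntimeException(e); // smuggle the checked exception out
                }
            });
        } catch (RuntimeException e) {
            // unwrap into the checked exception Quartz expects
            throw new JobExecutionException("While sending out notifications", e);
        }
    }
}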
Use of org.quartz.JobExecutionException in project Syncope by Apache.
The class DefaultReportJobDelegate, method execute:
@Transactional
@Override
public void execute(final String reportKey) throws JobExecutionException {
    Report report = reportDAO.find(reportKey);
    if (report == null) {
        throw new JobExecutionException("Report " + reportKey + " not found");
    }
    if (!report.isActive()) {
        LOG.info("Report {} not active, aborting...", reportKey);
        return;
    }

    // 1. create execution
    ReportExec execution = entityFactory.newEntity(ReportExec.class);
    execution.setStatus(ReportExecStatus.STARTED);
    execution.setStart(new Date());
    execution.setReport(report);
    execution = reportExecDAO.save(execution);
    report.add(execution);
    report = reportDAO.save(report);

    // 2. define a SAX handler for generating result as XML
    TransformerHandler handler;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    zos.setLevel(Deflater.BEST_COMPRESSION);
    try {
        handler = TRANSFORMER_FACTORY.newTransformerHandler();
        Transformer serializer = handler.getTransformer();
        serializer.setOutputProperty(OutputKeys.ENCODING, StandardCharsets.UTF_8.name());
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");

        // a single ZipEntry in the ZipOutputStream
        zos.putNextEntry(new ZipEntry(report.getName()));

        // streaming SAX handler in a compressed byte array stream
        handler.setResult(new StreamResult(zos));
    } catch (Exception e) {
        throw new JobExecutionException("While configuring for SAX generation", e, true);
    }

    execution.setStatus(ReportExecStatus.RUNNING);
    execution = reportExecDAO.save(execution);

    status.set("Starting");

    // 3. actual report execution
    StringBuilder reportExecutionMessage = new StringBuilder();
    try {
        // report header
        handler.startDocument();
        AttributesImpl atts = new AttributesImpl();
        atts.addAttribute("", "", ReportXMLConst.ATTR_NAME, ReportXMLConst.XSD_STRING, report.getName());
        handler.startElement("", "", ReportXMLConst.ELEMENT_REPORT, atts);

        status.set("Generating report header");

        // iterate over reportlet instances defined for this report
        for (int i = 0; i < report.getReportlets().size() && !interrupt; i++) {
            Optional<Reportlet> reportlet = ImplementationManager.buildReportlet(report.getReportlets().get(i));
            if (reportlet.isPresent()) {
                try {
                    status.set("Invoking reportlet " + report.getReportlets().get(i).getKey());
                    reportlet.get().extract(handler, status);
                } catch (Throwable t) {
                    LOG.error("While executing reportlet {} for report {}", reportlet, reportKey, t);

                    execution.setStatus(ReportExecStatus.FAILURE);

                    Throwable effective = t instanceof ReportException ? t.getCause() : t;
                    reportExecutionMessage.
                            append(ExceptionUtils2.getFullStackTrace(effective)).
                            append("\n==================\n");
                }
            }
        }
        if (interrupt) {
            LOG.debug("Report job {} interrupted", reportKey);
            interrupted = true;
        }

        // report footer
        status.set("Generating report footer");

        handler.endElement("", "", ReportXMLConst.ELEMENT_REPORT);
        handler.endDocument();

        if (!ReportExecStatus.FAILURE.name().equals(execution.getStatus())) {
            execution.setStatus(ReportExecStatus.SUCCESS);
        }
    } catch (Exception e) {
        execution.setStatus(ReportExecStatus.FAILURE);
        reportExecutionMessage.append(ExceptionUtils2.getFullStackTrace(e));

        throw new JobExecutionException(e, true);
    } finally {
        status.set("Completed");

        try {
            zos.closeEntry();
            zos.close();
            baos.close();
        } catch (IOException e) {
            LOG.error("While closing StreamResult's backend", e);
        }

        execution.setExecResult(baos.toByteArray());
        execution.setMessage(reportExecutionMessage.toString());
        execution.setEnd(new Date());
        reportExecDAO.save(execution);
    }
}
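
Note the boolean passed to JobExecutionException(e, true): in Quartz that flag asks the scheduler to refire the job immediately. On the consumer side, the stored result must be unwrapped from the single-entry ZIP produced in step 2; a minimal sketch using only the standard library (the class and helper name are ours):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipInputStream;

public class ReportResultReader {

    // Unwraps the single-entry ZIP written above back into the XML report text.
    static String readReportXml(final byte[] execResult) throws IOException {
        try (ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(execResult))) {
            zis.getNextEntry(); // the one entry, named after the report
            return new String(zis.readAllBytes(), StandardCharsets.UTF_8);
        }
    }
}

Calling readReportXml(execution.getExecResult()) would yield the XML document assembled by the reportlets.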