Use of org.apache.syncope.core.persistence.api.entity.task.SchedTask in project Syncope by Apache.
The class TaskDataBinderImpl, method getTaskTO.
@Override
public <T extends TaskTO> T getTaskTO(final Task task, final TaskUtils taskUtils, final boolean details) {
    T taskTO = taskUtils.newTaskTO();
    BeanUtils.copyProperties(task, taskTO, IGNORE_TASK_PROPERTIES);

    TaskExec latestExec = taskExecDAO.findLatestStarted(task);
    if (latestExec == null) {
        taskTO.setLatestExecStatus(StringUtils.EMPTY);
    } else {
        taskTO.setLatestExecStatus(latestExec.getStatus());
        taskTO.setStart(latestExec.getStart());
        taskTO.setEnd(latestExec.getEnd());
    }

    if (details) {
        task.getExecs().stream().
                filter(execution -> execution != null).
                forEachOrdered(execution -> taskTO.getExecutions().add(getExecTO(execution)));
    }

    switch (taskUtils.getType()) {
        case PROPAGATION:
            PropagationTask propagationTask = (PropagationTask) task;
            PropagationTaskTO propagationTaskTO = (PropagationTaskTO) taskTO;

            propagationTaskTO.setAnyTypeKind(propagationTask.getAnyTypeKind());
            propagationTaskTO.setEntityKey(propagationTask.getEntityKey());
            propagationTaskTO.setResource(propagationTask.getResource().getKey());
            propagationTaskTO.setAttributes(propagationTask.getSerializedAttributes());
            break;

        case SCHEDULED:
            SchedTask schedTask = (SchedTask) task;
            SchedTaskTO schedTaskTO = (SchedTaskTO) taskTO;

            setExecTime(schedTaskTO, task);
            if (schedTask.getJobDelegate() != null) {
                schedTaskTO.setJobDelegate(schedTask.getJobDelegate().getKey());
            }
            break;

        case PULL:
            PullTask pullTask = (PullTask) task;
            PullTaskTO pullTaskTO = (PullTaskTO) taskTO;

            setExecTime(pullTaskTO, task);
            pullTaskTO.setDestinationRealm(pullTask.getDestinatioRealm().getFullPath());
            pullTaskTO.setResource(pullTask.getResource().getKey());
            pullTaskTO.setMatchingRule(pullTask.getMatchingRule() == null
                    ? MatchingRule.UPDATE : pullTask.getMatchingRule());
            pullTaskTO.setUnmatchingRule(pullTask.getUnmatchingRule() == null
                    ? UnmatchingRule.PROVISION : pullTask.getUnmatchingRule());

            if (pullTask.getReconFilterBuilder() != null) {
                pullTaskTO.setReconFilterBuilder(pullTask.getReconFilterBuilder().getKey());
            }

            pullTaskTO.getActions().addAll(
                    pullTask.getActions().stream().map(Entity::getKey).collect(Collectors.toList()));

            pullTask.getTemplates().forEach(template -> {
                pullTaskTO.getTemplates().put(template.getAnyType().getKey(), template.get());
            });

            pullTaskTO.setRemediation(pullTask.isRemediation());
            break;

        case PUSH:
            PushTask pushTask = (PushTask) task;
            PushTaskTO pushTaskTO = (PushTaskTO) taskTO;

            setExecTime(pushTaskTO, task);
            pushTaskTO.setSourceRealm(pushTask.getSourceRealm().getFullPath());
            pushTaskTO.setResource(pushTask.getResource().getKey());
            pushTaskTO.setMatchingRule(pushTask.getMatchingRule() == null
                    ? MatchingRule.LINK : pushTask.getMatchingRule());
            pushTaskTO.setUnmatchingRule(pushTask.getUnmatchingRule() == null
                    ? UnmatchingRule.ASSIGN : pushTask.getUnmatchingRule());

            pushTaskTO.getActions().addAll(
                    pushTask.getActions().stream().map(Entity::getKey).collect(Collectors.toList()));

            pushTask.getFilters().forEach(filter -> {
                pushTaskTO.getFilters().put(filter.getAnyType().getKey(), filter.getFIQLCond());
            });
            break;

        case NOTIFICATION:
            NotificationTask notificationTask = (NotificationTask) task;
            NotificationTaskTO notificationTaskTO = (NotificationTaskTO) taskTO;

            notificationTaskTO.setNotification(notificationTask.getNotification().getKey());
            notificationTaskTO.setAnyTypeKind(notificationTask.getAnyTypeKind());
            notificationTaskTO.setEntityKey(notificationTask.getEntityKey());

            if (notificationTask.isExecuted() && StringUtils.isBlank(taskTO.getLatestExecStatus())) {
                taskTO.setLatestExecStatus("[EXECUTED]");
            }
            break;

        default:
    }

    return taskTO;
}
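For context, a caller first obtains the TaskUtils matching the concrete task and then delegates to getTaskTO(). The following is a minimal sketch of such a caller, assuming the taskDAO, taskUtilsFactory and binder fields are injected as in the surrounding Syncope code; the read() method itself is illustrative, not quoted from the project.

// Illustrative caller: resolves the task, picks the matching TaskUtils, delegates to the binder.
public <T extends TaskTO> T read(final String key, final boolean details) {
    Task task = taskDAO.find(key);
    if (task == null) {
        throw new NotFoundException("Task " + key);
    }
    TaskUtils taskUtils = taskUtilsFactory.getInstance(task);  // drives which TaskTO subclass is created
    return binder.getTaskTO(task, taskUtils, details);
}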
Use of org.apache.syncope.core.persistence.api.entity.task.SchedTask in project Syncope by Apache.
The class TaskLogic, method getReference.
@Override
protected Triple<JobType, String, String> getReference(final JobKey jobKey) {
    String key = JobNamer.getTaskKeyFromJobName(jobKey.getName());

    Task task = taskDAO.find(key);
    return task == null || !(task instanceof SchedTask)
            ? null
            : Triple.of(JobType.TASK, key, binder.buildRefDesc(task));
}
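getReference() only resolves Quartz job keys that map back to a SchedTask; any other key yields null. A minimal sketch of the naming round trip it depends on, assuming the JobNamer conventions used elsewhere in Syncope:

// Illustrative only: a SchedTask's key survives the trip through JobNamer and back.
JobKey jobKey = JobNamer.getJobKey(schedTask);
String taskKey = JobNamer.getTaskKeyFromJobName(jobKey.getName());
// taskKey is expected to equal schedTask.getKey(); keys of non-SchedTask jobs are rejected by getReference()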
Use of org.apache.syncope.core.persistence.api.entity.task.SchedTask in project Syncope by Apache.
The class JobManagerImpl, method register.
@Override
public Map<String, Object> register(final SchedTask task, final Date startAt, final long interruptMaxRetries)
        throws SchedulerException {
    TaskJob job = createSpringBean(TaskJob.class);
    job.setTaskKey(task.getKey());
    Implementation jobDelegate = task.getJobDelegate() == null
            ? task instanceof PullTask
                    ? implementationDAO.find(ImplementationType.TASKJOB_DELEGATE).stream().
                            filter(impl -> PullJobDelegate.class.getName().equals(impl.getBody())).
                            findFirst().orElse(null)
                    : task instanceof PushTask
                            ? implementationDAO.find(ImplementationType.TASKJOB_DELEGATE).stream().
                                    filter(impl -> PushJobDelegate.class.getName().equals(impl.getBody())).
                                    findFirst().orElse(null)
                            : null
            : task.getJobDelegate();
    if (jobDelegate == null) {
        throw new IllegalArgumentException(
                "Task " + task + " does not provide any " + SchedTaskJobDelegate.class.getSimpleName());
    }
    Map<String, Object> jobMap = new HashMap<>();
    jobMap.put(JobManager.DOMAIN_KEY, AuthContextUtils.getDomain());
    jobMap.put(TaskJob.DELEGATE_IMPLEMENTATION, jobDelegate.getKey());
    registerJob(JobNamer.getJobKey(task).getName(), job, task.getCronExpression(), startAt, jobMap);
    return jobMap;
}
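The delegate resolution above is a single nested ternary: an explicitly configured delegate wins, otherwise pull and push tasks fall back to the default PullJobDelegate / PushJobDelegate implementations, and anything else resolves to null and fails. An equivalent, more readable helper could be sketched as follows; the resolveJobDelegate name is an assumption, not part of Syncope.

// Hypothetical helper, equivalent to the nested ternary in register().
private Implementation resolveJobDelegate(final SchedTask task) {
    if (task.getJobDelegate() != null) {
        return task.getJobDelegate();
    }
    String fallback = task instanceof PullTask
            ? PullJobDelegate.class.getName()
            : task instanceof PushTask
                    ? PushJobDelegate.class.getName()
                    : null;
    if (fallback == null) {
        return null;
    }
    return implementationDAO.find(ImplementationType.TASKJOB_DELEGATE).stream().
            filter(impl -> fallback.equals(impl.getBody())).
            findFirst().orElse(null);
}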
Use of org.apache.syncope.core.persistence.api.entity.task.SchedTask in project Syncope by Apache.
The class JobManagerImpl, method load.
@Transactional
@Override
public void load() {
    if (disableQuartzInstance) {
        String instanceId = "AUTO";
        try {
            instanceId = scheduler.getScheduler().getSchedulerInstanceId();
            scheduler.getScheduler().standby();
            LOG.info("Successfully put Quartz instance {} in standby", instanceId);
        } catch (SchedulerException e) {
            LOG.error("Could not put Quartz instance {} in standby", instanceId, e);
        }
        return;
    }

    final Pair<String, Long> conf = AuthContextUtils.execWithAuthContext(SyncopeConstants.MASTER_DOMAIN, () -> {
        String notificationJobCronExpression = StringUtils.EMPTY;
        Optional<? extends CPlainAttr> notificationJobCronExp = confDAO.find("notificationjob.cronExpression");
        if (!notificationJobCronExp.isPresent()) {
            notificationJobCronExpression = NotificationJob.DEFAULT_CRON_EXP;
        } else if (!notificationJobCronExp.get().getValuesAsStrings().isEmpty()) {
            notificationJobCronExpression = notificationJobCronExp.get().getValuesAsStrings().get(0);
        }

        long interruptMaxRetries = confDAO.find("tasks.interruptMaxRetries", 1L);

        return Pair.of(notificationJobCronExpression, interruptMaxRetries);
    });

    for (String domain : domainsHolder.getDomains().keySet()) {
        AuthContextUtils.execWithAuthContext(domain, () -> {
            // 1. jobs for SchedTasks
            Set<SchedTask> tasks = new HashSet<>(taskDAO.<SchedTask>findAll(TaskType.SCHEDULED));
            tasks.addAll(taskDAO.<PullTask>findAll(TaskType.PULL));
            tasks.addAll(taskDAO.<PushTask>findAll(TaskType.PUSH));

            boolean loadException = false;
            for (Iterator<SchedTask> it = tasks.iterator(); it.hasNext() && !loadException; ) {
                SchedTask task = it.next();
                try {
                    register(task, task.getStartAt(), conf.getRight());
                } catch (Exception e) {
                    LOG.error("While loading job instance for task " + task.getKey(), e);
                    loadException = true;
                }
            }

            if (loadException) {
                LOG.debug("Errors while loading job instances for tasks, aborting");
            } else {
                // 2. jobs for Reports
                for (Iterator<Report> it = reportDAO.findAll().iterator(); it.hasNext() && !loadException; ) {
                    Report report = it.next();
                    try {
                        register(report, null, conf.getRight());
                    } catch (Exception e) {
                        LOG.error("While loading job instance for report " + report.getName(), e);
                        loadException = true;
                    }
                }

                if (loadException) {
                    LOG.debug("Errors while loading job instances for reports, aborting");
                }
            }

            return null;
        });
    }

    Map<String, Object> jobMap = new HashMap<>();
    jobMap.put(JobManager.DOMAIN_KEY, AuthContextUtils.getDomain());

    // 3. NotificationJob
    if (StringUtils.isBlank(conf.getLeft())) {
        LOG.debug("Empty value provided for {}'s cron, not registering anything on Quartz",
                NotificationJob.class.getSimpleName());
    } else {
        LOG.debug("{}'s cron expression: {} - registering Quartz job and trigger",
                NotificationJob.class.getSimpleName(), conf.getLeft());

        try {
            NotificationJob job = createSpringBean(NotificationJob.class);
            registerJob(NOTIFICATION_JOB.getName(), job, conf.getLeft(), null, jobMap);
        } catch (Exception e) {
            LOG.error("While loading {} instance", NotificationJob.class.getSimpleName(), e);
        }
    }

    // 4. SystemLoadReporterJob (fixed schedule, every minute)
    LOG.debug("Registering {}", SystemLoadReporterJob.class);
    try {
        SystemLoadReporterJob job = createSpringBean(SystemLoadReporterJob.class);
        registerJob("systemLoadReporterJob", job, "0 * * * * ?", null, jobMap);
    } catch (Exception e) {
        LOG.error("While loading {} instance", SystemLoadReporterJob.class.getSimpleName(), e);
    }
}
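The fixed "0 * * * * ?" expression is a Quartz cron that fires at second 0 of every minute. As a rough illustration of what that registration amounts to with plain Quartz, without Syncope's registerJob() helper, a sketch might look like the following; the method name and wiring here are assumptions, not Syncope code.

// Hypothetical, plain-Quartz equivalent of scheduling SystemLoadReporterJob every minute.
private void scheduleEveryMinute(final Scheduler quartzScheduler, final Map<String, Object> jobMap)
        throws SchedulerException {
    JobDetail jobDetail = JobBuilder.newJob(SystemLoadReporterJob.class).
            withIdentity("systemLoadReporterJob").
            usingJobData(new JobDataMap(jobMap)).
            build();
    Trigger trigger = TriggerBuilder.newTrigger().
            withIdentity("systemLoadReporterJobTrigger").
            withSchedule(CronScheduleBuilder.cronSchedule("0 * * * * ?")).
            build();
    quartzScheduler.scheduleJob(jobDetail, trigger);
}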
Use of org.apache.syncope.core.persistence.api.entity.task.SchedTask in project Syncope by Apache.
The class TaskDataBinderImpl, method createSchedTask.
@Override
public SchedTask createSchedTask(final SchedTaskTO taskTO, final TaskUtils taskUtils) {
    Class<? extends TaskTO> taskTOClass = taskUtils.taskTOClass();
    if (taskTOClass == null || !taskTOClass.equals(taskTO.getClass())) {
        throw new IllegalArgumentException(String.format("Expected %s, found %s", taskTOClass, taskTO.getClass()));
    }

    SchedTask task = taskUtils.newTask();
    task.setStartAt(taskTO.getStartAt());
    task.setCronExpression(taskTO.getCronExpression());
    task.setName(taskTO.getName());
    task.setDescription(taskTO.getDescription());
    task.setActive(taskTO.isActive());

    if (taskUtils.getType() == TaskType.SCHEDULED) {
        Implementation implementation = implementationDAO.find(taskTO.getJobDelegate());
        if (implementation == null) {
            throw new NotFoundException("Implementation " + taskTO.getJobDelegate());
        }
        task.setJobDelegate(implementation);
    } else if (taskTO instanceof ProvisioningTaskTO) {
        ProvisioningTaskTO provisioningTaskTO = (ProvisioningTaskTO) taskTO;

        ExternalResource resource = resourceDAO.find(provisioningTaskTO.getResource());
        if (resource == null) {
            throw new NotFoundException("Resource " + provisioningTaskTO.getResource());
        }
        ((ProvisioningTask) task).setResource(resource);

        fill((ProvisioningTask) task, provisioningTaskTO);
    }

    return task;
}
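As a usage sketch, a SchedTaskTO carrying at least a name, a cron expression and a job delegate key can be turned into a persisted task along these lines. The taskUtilsFactory, binder and taskDAO fields are assumed to be injected; the values below are hypothetical and the snippet is illustrative, not quoted from Syncope.

// Illustrative caller: builds a transfer object and lets the binder create the entity.
SchedTaskTO taskTO = new SchedTaskTO();
taskTO.setName("sampleTask");                   // hypothetical name
taskTO.setCronExpression("0 0/5 * * * ?");      // every five minutes (Quartz cron)
taskTO.setJobDelegate("sampleJobDelegateKey");  // key of an existing Implementation, assumed to exist
taskTO.setActive(true);

TaskUtils taskUtils = taskUtilsFactory.getInstance(taskTO);
SchedTask task = binder.createSchedTask(taskTO, taskUtils);
task = taskDAO.save(task);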