Use of org.apache.syncope.core.persistence.api.entity.Implementation in the Apache Syncope project.
From the class ITImplementationLookup, method load().
@Override
public void load() {
    // Only relevant when the Elasticsearch extension is deployed: the AOP target class
    // of the injected AnySearchDAO then carries "Elasticsearch" in its name.
    if (!AopUtils.getTargetClass(anySearchDAO).getName().contains("Elasticsearch")) {
        return;
    }

    // Rebuild a clean Elasticsearch index for each configured domain.
    domainsHolder.getDomains().forEach((domain, dataSource) -> AuthContextUtils.execWithAuthContext(domain, () -> {
        // Reuse the reindex job delegate if already registered, otherwise create and persist it.
        Implementation reindex = implementationDAO.find(ImplementationType.TASKJOB_DELEGATE).stream().
                filter(impl -> impl.getEngine() == ImplementationEngine.JAVA && ES_REINDEX.equals(impl.getBody())).
                findAny().
                orElseGet(() -> {
                    Implementation impl = entityFactory.newEntity(Implementation.class);
                    impl.setEngine(ImplementationEngine.JAVA);
                    impl.setType(ImplementationType.TASKJOB_DELEGATE);
                    impl.setBody(ES_REINDEX);
                    return implementationDAO.save(impl);
                });

        // Schedule and immediately fire the reindex task.
        SchedTaskTO reindexTask = new SchedTaskTO();
        reindexTask.setJobDelegate(reindex.getKey());
        reindexTask.setName("Elasticsearch Reindex");
        reindexTask = taskLogic.createSchedTask(TaskType.SCHEDULED, reindexTask);
        taskLogic.execute(reindexTask.getKey(), null, false);

        return null;
    }));
}
Use of org.apache.syncope.core.persistence.api.entity.Implementation in the Apache Syncope project.
From the class JobManagerImpl, method register().
@Override
public Map<String, Object> register(final SchedTask task, final Date startAt, final long interruptMaxRetries)
        throws SchedulerException {

    TaskJob job = createSpringBean(TaskJob.class);
    job.setTaskKey(task.getKey());

    // Resolve the job delegate: either explicitly set on the task, or - for pull / push
    // tasks - the default delegate implementation registered for that task flavor.
    // (Refactored from a triple-nested ternary for readability.)
    Implementation jobDelegate = task.getJobDelegate();
    if (jobDelegate == null) {
        if (task instanceof PullTask) {
            jobDelegate = findDefaultJobDelegate(PullJobDelegate.class.getName());
        } else if (task instanceof PushTask) {
            jobDelegate = findDefaultJobDelegate(PushJobDelegate.class.getName());
        }
    }
    if (jobDelegate == null) {
        throw new IllegalArgumentException(
                "Task " + task + " does not provide any " + SchedTaskJobDelegate.class.getSimpleName());
    }

    Map<String, Object> jobMap = new HashMap<>();
    jobMap.put(JobManager.DOMAIN_KEY, AuthContextUtils.getDomain());
    jobMap.put(TaskJob.DELEGATE_IMPLEMENTATION, jobDelegate.getKey());

    registerJob(JobNamer.getJobKey(task).getName(), job, task.getCronExpression(), startAt, jobMap);
    return jobMap;
}

/**
 * Finds the registered TASKJOB_DELEGATE implementation whose body matches the given
 * delegate class name, or {@code null} if none is registered.
 */
private Implementation findDefaultJobDelegate(final String delegateClassName) {
    return implementationDAO.find(ImplementationType.TASKJOB_DELEGATE).stream().
            filter(impl -> delegateClassName.equals(impl.getBody())).
            findFirst().orElse(null);
}
Use of org.apache.syncope.core.persistence.api.entity.Implementation in the Apache Syncope project.
From the class TaskDataBinderImpl, method createSchedTask().
@Override
public SchedTask createSchedTask(final SchedTaskTO taskTO, final TaskUtils taskUtils) {
    // Reject a TO whose concrete class does not match what the task utils expect.
    Class<? extends TaskTO> expected = taskUtils.taskTOClass();
    if (expected == null || !expected.equals(taskTO.getClass())) {
        throw new IllegalArgumentException(String.format("Expected %s, found %s", expected, taskTO.getClass()));
    }

    // Copy the common scheduling attributes onto a fresh task entity.
    SchedTask task = taskUtils.newTask();
    task.setStartAt(taskTO.getStartAt());
    task.setCronExpression(taskTO.getCronExpression());
    task.setName(taskTO.getName());
    task.setDescription(taskTO.getDescription());
    task.setActive(taskTO.isActive());

    if (taskUtils.getType() == TaskType.SCHEDULED) {
        // Plain scheduled tasks must reference an existing job delegate implementation.
        Implementation jobDelegate = implementationDAO.find(taskTO.getJobDelegate());
        if (jobDelegate == null) {
            throw new NotFoundException("Implementation " + taskTO.getJobDelegate());
        }
        task.setJobDelegate(jobDelegate);
    } else if (taskTO instanceof ProvisioningTaskTO) {
        // Provisioning tasks must reference an existing external resource; the rest of
        // their attributes are handled by fill().
        ProvisioningTaskTO provisioningTaskTO = (ProvisioningTaskTO) taskTO;

        ExternalResource resource = resourceDAO.find(provisioningTaskTO.getResource());
        if (resource == null) {
            throw new NotFoundException("Resource " + provisioningTaskTO.getResource());
        }
        ((ProvisioningTask) task).setResource(resource);

        fill((ProvisioningTask) task, provisioningTaskTO);
    }

    return task;
}
Use of org.apache.syncope.core.persistence.api.entity.Implementation in the Apache Syncope project.
From the class TaskDataBinderImpl, method fill().
/**
 * Finds the job delegate {@link Implementation} for a provisioning task: either the one
 * explicitly referenced by key, or - when no key is given - the registered implementation
 * whose body matches the given delegate class. If none exists, a new implementation is
 * created and persisted. Extracted to remove the push/pull copy-paste duplication.
 */
private Implementation jobDelegate(final String jobDelegateKey, final Class<?> delegateClass) {
    Implementation jobDelegate = jobDelegateKey == null
            ? implementationDAO.find(ImplementationType.TASKJOB_DELEGATE).stream().
                    filter(impl -> delegateClass.getName().equals(impl.getBody())).
                    findFirst().orElse(null)
            : implementationDAO.find(jobDelegateKey);
    if (jobDelegate == null) {
        jobDelegate = entityFactory.newEntity(Implementation.class);
        jobDelegate.setKey(delegateClass.getSimpleName());
        jobDelegate.setEngine(ImplementationEngine.JAVA);
        jobDelegate.setType(ImplementationType.TASKJOB_DELEGATE);
        jobDelegate.setBody(delegateClass.getName());
        jobDelegate = implementationDAO.save(jobDelegate);
    }
    return jobDelegate;
}

/**
 * Copies push/pull-specific and common provisioning attributes from the given TO onto the
 * given task entity, creating the job delegate implementation on demand.
 */
private void fill(final ProvisioningTask task, final ProvisioningTaskTO taskTO) {
    if (task instanceof PushTask && taskTO instanceof PushTaskTO) {
        PushTask pushTask = (PushTask) task;
        PushTaskTO pushTaskTO = (PushTaskTO) taskTO;

        pushTask.setJobDelegate(jobDelegate(pushTaskTO.getJobDelegate(), PushJobDelegate.class));

        pushTask.setSourceRealm(realmDAO.findByFullPath(pushTaskTO.getSourceRealm()));

        // apply defaults when no rules are provided
        pushTask.setMatchingRule(pushTaskTO.getMatchingRule() == null
                ? MatchingRule.LINK : pushTaskTO.getMatchingRule());
        pushTask.setUnmatchingRule(pushTaskTO.getUnmatchingRule() == null
                ? UnmatchingRule.ASSIGN : pushTaskTO.getUnmatchingRule());

        // upsert the FIQL filter for each valid any type; unknown types are skipped
        pushTaskTO.getFilters().forEach((type, fiql) -> {
            AnyType anyType = anyTypeDAO.find(type);
            if (anyType == null) {
                LOG.debug("Invalid AnyType {} specified, ignoring...", type);
            } else {
                PushTaskAnyFilter filter = pushTask.getFilter(anyType).orElse(null);
                if (filter == null) {
                    filter = entityFactory.newEntity(PushTaskAnyFilter.class);
                    filter.setAnyType(anyType);
                    filter.setPushTask(pushTask);
                    pushTask.add(filter);
                }
                filter.setFIQLCond(fiql);
            }
        });
        // remove all filters not contained in the TO
        pushTask.getFilters().
                removeIf(anyFilter -> !pushTaskTO.getFilters().containsKey(anyFilter.getAnyType().getKey()));
    } else if (task instanceof PullTask && taskTO instanceof PullTaskTO) {
        PullTask pullTask = (PullTask) task;
        PullTaskTO pullTaskTO = (PullTaskTO) taskTO;

        pullTask.setJobDelegate(jobDelegate(pullTaskTO.getJobDelegate(), PullJobDelegate.class));

        pullTask.setPullMode(pullTaskTO.getPullMode());

        if (pullTaskTO.getReconFilterBuilder() == null) {
            pullTask.setReconFilterBuilder(null);
        } else {
            Implementation reconFilterBuilder = implementationDAO.find(pullTaskTO.getReconFilterBuilder());
            if (reconFilterBuilder == null) {
                LOG.debug("Invalid " + Implementation.class.getSimpleName() + " {}, ignoring...",
                        pullTaskTO.getReconFilterBuilder());
            } else {
                pullTask.setReconFilterBuilder(reconFilterBuilder);
            }
        }

        pullTask.setDestinationRealm(realmDAO.findByFullPath(pullTaskTO.getDestinationRealm()));

        // apply defaults when no rules are provided
        pullTask.setMatchingRule(pullTaskTO.getMatchingRule() == null
                ? MatchingRule.UPDATE : pullTaskTO.getMatchingRule());
        pullTask.setUnmatchingRule(pullTaskTO.getUnmatchingRule() == null
                ? UnmatchingRule.PROVISION : pullTaskTO.getUnmatchingRule());

        // validate JEXL expressions from templates and proceed if fine
        templateUtils.check(pullTaskTO.getTemplates(), ClientExceptionType.InvalidPullTask);

        // upsert the template for each valid any type; unknown types are skipped
        pullTaskTO.getTemplates().forEach((type, template) -> {
            AnyType anyType = anyTypeDAO.find(type);
            if (anyType == null) {
                LOG.debug("Invalid AnyType {} specified, ignoring...", type);
            } else {
                AnyTemplatePullTask anyTemplate = pullTask.getTemplate(anyType).orElse(null);
                if (anyTemplate == null) {
                    anyTemplate = entityFactory.newEntity(AnyTemplatePullTask.class);
                    anyTemplate.setAnyType(anyType);
                    anyTemplate.setPullTask(pullTask);
                    pullTask.add(anyTemplate);
                }
                anyTemplate.set(template);
            }
        });
        // remove all templates not contained in the TO
        pullTask.getTemplates().
                removeIf(anyTemplate -> !pullTaskTO.getTemplates().containsKey(anyTemplate.getAnyType().getKey()));

        pullTask.setRemediation(pullTaskTO.isRemediation());
    }

    // 3. fill the remaining fields
    task.setPerformCreate(taskTO.isPerformCreate());
    task.setPerformUpdate(taskTO.isPerformUpdate());
    task.setPerformDelete(taskTO.isPerformDelete());
    task.setSyncStatus(taskTO.isSyncStatus());

    // upsert actions referencing existing implementations; unknown keys are skipped
    taskTO.getActions().forEach(action -> {
        Implementation implementation = implementationDAO.find(action);
        if (implementation == null) {
            LOG.debug("Invalid " + Implementation.class.getSimpleName() + " {}, ignoring...", action);
        } else {
            task.add(implementation);
        }
    });
    // remove all implementations not contained in the TO
    task.getActions().removeIf(implementation -> !taskTO.getActions().contains(implementation.getKey()));
}
Use of org.apache.syncope.core.persistence.api.entity.Implementation in the Apache Syncope project.
From the class GroupLogic, method bulkMembersAction().
@PreAuthorize("hasRole('" + StandardEntitlement.TASK_CREATE + "') "
        + "and hasRole('" + StandardEntitlement.TASK_EXECUTE + "')")
@Transactional
public ExecTO bulkMembersAction(final String key, final BulkMembersActionType actionType) {
    Group group = groupDAO.find(key);
    if (group == null) {
        throw new NotFoundException("Group " + key);
    }

    // Look up the member-provision job delegate implementation; create and persist it on
    // first use.
    Implementation jobDelegate = implementationDAO.find(ImplementationType.TASKJOB_DELEGATE).stream().
            filter(impl -> GroupMemberProvisionTaskJobDelegate.class.getName().equals(impl.getBody())).
            findFirst().orElse(null);
    if (jobDelegate == null) {
        Implementation created = entityFactory.newEntity(Implementation.class);
        created.setKey(GroupMemberProvisionTaskJobDelegate.class.getSimpleName());
        created.setEngine(ImplementationEngine.JAVA);
        created.setType(ImplementationType.TASKJOB_DELEGATE);
        created.setBody(GroupMemberProvisionTaskJobDelegate.class.getName());
        jobDelegate = implementationDAO.save(created);
    }

    // Persist an ad-hoc scheduled task for this bulk action.
    SchedTask task = entityFactory.newEntity(SchedTask.class);
    task.setName("Bulk member provision for group " + group.getName());
    task.setActive(true);
    task.setJobDelegate(jobDelegate);
    task = taskDAO.save(task);

    try {
        // Register with the scheduler and fire immediately, passing the group key and
        // action type via the job data map.
        Map<String, Object> jobDataMap =
                jobManager.register(task, null, confDAO.find("tasks.interruptMaxRetries", 1L));
        jobDataMap.put(TaskJob.DRY_RUN_JOBDETAIL_KEY, false);
        jobDataMap.put(GroupMemberProvisionTaskJobDelegate.GROUP_KEY_JOBDETAIL_KEY, key);
        jobDataMap.put(GroupMemberProvisionTaskJobDelegate.ACTION_TYPE_JOBDETAIL_KEY, actionType);

        scheduler.getScheduler().triggerJob(JobNamer.getJobKey(task), new JobDataMap(jobDataMap));
    } catch (Exception e) {
        LOG.error("While executing task {}", task, e);

        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
        sce.getElements().add(e.getMessage());
        throw sce;
    }

    // Report the fired job; actual results are collected asynchronously.
    ExecTO result = new ExecTO();
    result.setJobType(JobType.TASK);
    result.setRefKey(task.getKey());
    result.setRefDesc(taskDataBinder.buildRefDesc(task));
    result.setStart(new Date());
    result.setStatus("JOB_FIRED");
    result.setMessage("Job fired; waiting for results...");
    return result;
}
Aggregations