use of org.opencastproject.security.api.AccessControlList in project opencast by opencast.
the class SchedulerServiceImpl method addMultipleEventInternal.
private Map<String, Period> addMultipleEventInternal(List<Period> periods, String captureAgentId, Set<String> userIds, MediaPackage templateMp, Map<String, String> wfProperties, Map<String, String> caMetadata, String modificationOrigin, Opt<Boolean> optOutStatus, Opt<String> schedulingSource, Opt<String> trxId) throws SchedulerException {
  notNull(periods, "periods");
  requireTrue(periods.size() > 0, "periods");
  notEmpty(captureAgentId, "captureAgentId");
  notNull(userIds, "userIds");
  notNull(templateMp, "mediaPackages");
  notNull(wfProperties, "wfProperties");
  notNull(caMetadata, "caMetadata");
  notEmpty(modificationOrigin, "modificationOrigin");
  notNull(optOutStatus, "optOutStatus");
  notNull(schedulingSource, "schedulingSource");
  notNull(trxId, "trxId");
  Map<String, Period> scheduledEvents = new LinkedHashMap<>();
  try {
    LinkedList<Id> ids = new LinkedList<>();
    AQueryBuilder qb = assetManager.createQuery();
    Predicate p = null;
    // While we don't have a list of IDs equal to the number of periods
    while (ids.size() <= periods.size()) {
      // Create a list of IDs equal to the number of periods, along with a set of AM predicates
      while (ids.size() <= periods.size()) {
        Id id = new IdImpl(UUID.randomUUID().toString());
        ids.add(id);
        Predicate np = qb.mediaPackageId(id.compact());
        // Haha, p = np jokes with the AM query language. Ha. Haha. Ha. (Sob...)
        if (null == p) {
          p = np;
        } else {
          p = p.or(np);
        }
      }
      // Select the list of ids which already exist. Hint: this needs to be zero
      AResult result = qb.select(qb.nothing()).where(withOrganization(qb).and(p).and(qb.version().isLatest())).run();
      // If there is a conflict, clear the list and start over
      if (result.getTotalSize() > 0) {
        ids.clear();
      }
    }
    Opt<String> seriesId = Opt.nul(StringUtils.trimToNull(templateMp.getSeries()));
    // Get opt out status
    boolean optOut = getOptOutStatus(seriesId, optOutStatus);
    if (trxId.isNone()) {
      // Check for locked transactions
      if (schedulingSource.isSome() && persistence.hasTransaction(schedulingSource.get())) {
        logger.warn("Unable to add events, source '{}' is currently locked due to an active transaction!", schedulingSource.get());
        throw new SchedulerTransactionLockException("Unable to add event, locked source " + schedulingSource.get());
      }
      // Check for conflicting events if not opted out
      if (!optOut) {
        List<MediaPackage> conflictingEvents = findConflictingEvents(periods, captureAgentId, TimeZone.getDefault());
        if (conflictingEvents.size() > 0) {
          logger.info("Unable to add events, conflicting events found: {}", conflictingEvents);
          throw new SchedulerConflictException("Unable to add event, conflicting events found");
        }
      }
    }
    // counter for index into the list of mediapackages
    int counter = 0;
    for (Period event : periods) {
      MediaPackage mediaPackage = (MediaPackage) templateMp.clone();
      Date startDate = new Date(event.getStart().getTime());
      Date endDate = new Date(event.getEnd().getTime());
      Id id = ids.get(counter);
      // Get, or make, the DC catalog
      DublinCoreCatalog dc;
      Opt<DublinCoreCatalog> dcOpt = DublinCoreUtil.loadEpisodeDublinCore(workspace, templateMp);
      if (dcOpt.isSome()) {
        dc = dcOpt.get();
        dc = (DublinCoreCatalog) dc.clone();
        // make sure to bind the OC_PROPERTY namespace
        dc.addBindings(XmlNamespaceContext.mk(XmlNamespaceBinding.mk(DublinCores.OC_PROPERTY_NS_PREFIX, DublinCores.OC_PROPERTY_NS_URI)));
      } else {
        dc = DublinCores.mkOpencastEpisode().getCatalog();
      }
      // Set the new media package identifier
      mediaPackage.setIdentifier(id);
      // Update dublincore title and temporal
      String newTitle = dc.getFirst(DublinCore.PROPERTY_TITLE) + String.format(" %0" + Integer.toString(periods.size()).length() + "d", ++counter);
      dc.set(DublinCore.PROPERTY_TITLE, newTitle);
      DublinCoreValue eventTime = EncodingSchemeUtils.encodePeriod(new DCMIPeriod(startDate, endDate), Precision.Second);
      dc.set(DublinCore.PROPERTY_TEMPORAL, eventTime);
      mediaPackage = updateDublincCoreCatalog(mediaPackage, dc);
      mediaPackage.setTitle(newTitle);
      String mediaPackageId = mediaPackage.getIdentifier().compact();
      // Converting from iCal4j DateTime objects to plain Date objects to prevent AMQ issues below
      Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
      cal.setTime(event.getStart());
      Date startDateTime = cal.getTime();
      cal.setTime(event.getEnd());
      Date endDateTime = cal.getTime();
      // Load dublincore and acl for update
      Opt<DublinCoreCatalog> dublinCore = DublinCoreUtil.loadEpisodeDublinCore(workspace, mediaPackage);
      Option<AccessControlList> acl = authorizationService.getAcl(mediaPackage, AclScope.Episode);
      // Get updated agent properties
      Map<String, String> finalCaProperties = getFinalAgentProperties(caMetadata, wfProperties, captureAgentId, seriesId, dublinCore);
      // Persist asset
      String checksum = calculateChecksum(workspace, getEventCatalogUIAdapterFlavors(), startDateTime, endDateTime, captureAgentId, userIds, mediaPackage, dublinCore, wfProperties, finalCaProperties, optOut, acl.toOpt().getOr(new AccessControlList()));
      persistEvent(mediaPackageId, modificationOrigin, checksum, Opt.some(startDateTime), Opt.some(endDateTime), Opt.some(captureAgentId), Opt.some(userIds), Opt.some(mediaPackage), Opt.some(wfProperties), Opt.some(finalCaProperties), Opt.some(optOut), schedulingSource, trxId);
      if (trxId.isNone()) {
        // Send updates
        sendUpdateAddEvent(mediaPackageId, acl.toOpt(), dublinCore, Opt.some(startDateTime), Opt.some(endDateTime), Opt.some(userIds), Opt.some(captureAgentId), Opt.some(finalCaProperties), Opt.some(optOut));
        // Update last modified
        touchLastEntry(captureAgentId);
      }
      scheduledEvents.put(mediaPackageId, event);
      for (MediaPackageElement mediaPackageElement : mediaPackage.getElements()) {
        try {
          workspace.delete(mediaPackage.getIdentifier().toString(), mediaPackageElement.getIdentifier());
        } catch (NotFoundException | IOException e) {
          logger.warn("Failed to delete media package element", e);
        }
      }
    }
    return scheduledEvents;
  } catch (SchedulerException e) {
    throw e;
  } catch (Exception e) {
    logger.error("Failed to create events: {}", getStackTrace(e));
    throw new SchedulerException(e);
  }
}
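For context, the periods argument is a list of iCal4j Period objects. A minimal sketch of building it for three consecutive one-hour capture slots (the slot count and start time are hypothetical, not taken from the Opencast sources):
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import net.fortuna.ical4j.model.DateTime;
import net.fortuna.ical4j.model.Period;
// Hypothetical example: three back-to-back one-hour recordings starting now
List<Period> periods = new ArrayList<>();
long start = new Date().getTime();
long oneHour = 60 * 60 * 1000L;
for (int i = 0; i < 3; i++) {
  periods.add(new Period(new DateTime(start + i * oneHour), new DateTime(start + (i + 1) * oneHour)));
}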
use of org.opencastproject.security.api.AccessControlList in project opencast by opencast.
the class SchedulerServiceImpl method getAccessControlList.
@Override
public AccessControlList getAccessControlList(String mediaPackageId) throws NotFoundException, SchedulerException {
  notEmpty(mediaPackageId, "mediaPackageId");
  try {
    AQueryBuilder query = assetManager.createQuery();
    Props p = new Props(query);
    AResult result = query.select(query.snapshot()).where(withOrganization(query).and(query.mediaPackageId(mediaPackageId)).and(withVersion(query)).and(query.hasPropertiesOf(p.namespace()))).run();
    Opt<ARecord> record = result.getRecords().head();
    if (record.isNone())
      throw new NotFoundException();
    Opt<AccessControlList> acl = loadEpisodeAclFromAsset(record.get().getSnapshot().get());
    if (acl.isNone())
      return null;
    return acl.get();
  } catch (NotFoundException e) {
    throw e;
  } catch (Exception e) {
    logger.error("Failed to get access control list of event '{}': {}", mediaPackageId, getStackTrace(e));
    throw new SchedulerException(e);
  }
}
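A caller-side sketch, assuming a schedulerService reference and a known event id (both hypothetical here); note that the method returns null when the episode has no ACL attached, so the result needs a null check:
// Hypothetical caller: inspect the episode ACL of a scheduled event
AccessControlList acl = schedulerService.getAccessControlList(mediaPackageId);
if (acl == null) {
  logger.info("Event {} has no episode ACL", mediaPackageId);
} else {
  for (AccessControlEntry entry : acl.getEntries()) {
    logger.info("{} -> {}: {}", entry.getRole(), entry.getAction(), entry.isAllow() ? "allow" : "deny");
  }
}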
use of org.opencastproject.security.api.AccessControlList in project opencast by opencast.
the class OaiPmhRepositoryPersistenceTest method oaiPmhDatabase.
private static AbstractOaiPmhDatabase oaiPmhDatabase(MediaPackage... mps) {
  try {
    final Organization org = new DefaultOrganization();
    final SecurityService secSvc = EasyMock.createNiceMock(SecurityService.class);
    // security service
    final User user = createSystemUser("admin", org);
    expect(secSvc.getOrganization()).andReturn(org).anyTimes();
    expect(secSvc.getUser()).andReturn(user).anyTimes();
    EasyMock.replay(secSvc);
    // series service
    final SeriesService seriesService = EasyMock.createNiceMock(SeriesService.class);
    final String xacml = IOUtils.toString(OaiPmhRepositoryPersistenceTest.class.getResource("/xacml.xml").toURI());
    final AccessControlList securityACL = AccessControlParser.parseAcl(xacml);
    EasyMock.expect(seriesService.getSeriesAccessControl("10.0000/1")).andReturn(securityACL).anyTimes();
    EasyMock.replay(seriesService);
    // workspace
    final Workspace workspace = EasyMock.createNiceMock(Workspace.class);
    final File episodeDublinCore = new File(OaiPmhRepositoryPersistenceTest.class.getResource("/episode-dublincore.xml").toURI());
    final File seriesDublinCore = new File(OaiPmhRepositoryPersistenceTest.class.getResource("/series-dublincore.xml").toURI());
    expect(workspace.read(EasyMock.anyObject())).andAnswer(() -> {
      final String uri = getCurrentArguments()[0].toString();
      if ("dublincore.xml".equals(uri))
        return new FileInputStream(episodeDublinCore);
      if ("series-dublincore.xml".equals(uri))
        return new FileInputStream(seriesDublinCore);
      throw new Error("Workspace mock does not know about file " + uri);
    }).anyTimes();
    EasyMock.replay(workspace);
    // oai-pmh database
    final EntityManagerFactory emf = PersistenceUtil.newTestEntityManagerFactory(OaiPmhDatabaseImpl.PERSISTENCE_UNIT_NAME);
    final AbstractOaiPmhDatabase db = new AbstractOaiPmhDatabase() {
      @Override
      public EntityManagerFactory getEmf() {
        return emf;
      }
      @Override
      public SecurityService getSecurityService() {
        return secSvc;
      }
      @Override
      public Workspace getWorkspace() {
        return workspace;
      }
      @Override
      public Date currentDate() {
        return new Date();
      }
    };
    for (MediaPackage mp : mps)
      db.store(mp, REPOSITORY_ID);
    return db;
  } catch (Exception e) {
    return chuck(e);
  }
}
use of org.opencastproject.security.api.AccessControlList in project opencast by opencast.
the class WorkflowServiceImpl method update.
/**
 * {@inheritDoc}
 *
 * @see org.opencastproject.workflow.api.WorkflowService#update(org.opencastproject.workflow.api.WorkflowInstance)
 */
@Override
public void update(final WorkflowInstance workflowInstance) throws WorkflowException, UnauthorizedException {
  final Lock lock = updateLock.get(workflowInstance.getId());
  lock.lock();
  try {
    WorkflowInstance originalWorkflowInstance = null;
    try {
      originalWorkflowInstance = getWorkflowById(workflowInstance.getId());
    } catch (NotFoundException e) {
      // That's fine, it's a new workflow instance
    }
    if (originalWorkflowInstance != null) {
      try {
        assertPermission(originalWorkflowInstance, Permissions.Action.WRITE.toString());
      } catch (MediaPackageException e) {
        throw new WorkflowParsingException(e);
      }
    }
    MediaPackage updatedMediaPackage = null;
    try {
      // Before we persist this, extract the metadata
      updatedMediaPackage = workflowInstance.getMediaPackage();
      populateMediaPackageMetadata(updatedMediaPackage);
      String seriesId = updatedMediaPackage.getSeries();
      if (seriesId != null && workflowInstance.getCurrentOperation() != null) {
        // If the mediapackage contains a series, find the series ACLs and add the security information to the
        // mediapackage
        AccessControlList acl = seriesService.getSeriesAccessControl(seriesId);
        Option<AccessControlList> activeSeriesAcl = authorizationService.getAcl(updatedMediaPackage, AclScope.Series);
        if (activeSeriesAcl.isNone() || !AccessControlUtil.equals(activeSeriesAcl.get(), acl))
          authorizationService.setAcl(updatedMediaPackage, AclScope.Series, acl);
      }
    } catch (SeriesException e) {
      throw new WorkflowDatabaseException(e);
    } catch (NotFoundException e) {
      logger.warn("Metadata for mediapackage {} could not be updated because it wasn't found", updatedMediaPackage, e);
    } catch (Exception e) {
      logger.error("Metadata for mediapackage {} could not be updated", updatedMediaPackage, e);
    }
    // Synchronize the job status with the workflow
    WorkflowState workflowState = workflowInstance.getState();
    String xml;
    try {
      xml = WorkflowParser.toXml(workflowInstance);
    } catch (Exception e) {
      // Can't happen, since we are converting from an in-memory object
      throw new IllegalStateException("In-memory workflow instance could not be serialized", e);
    }
    Job job = null;
    try {
      job = serviceRegistry.getJob(workflowInstance.getId());
      job.setPayload(xml);
      // Synchronize workflow and job state
      switch (workflowState) {
        case FAILED:
          job.setStatus(Status.FAILED);
          break;
        case FAILING:
          break;
        case INSTANTIATED:
          job.setDispatchable(true);
          job.setStatus(Status.QUEUED);
          break;
        case PAUSED:
          job.setStatus(Status.PAUSED);
          break;
        case RUNNING:
          job.setStatus(Status.RUNNING);
          break;
        case STOPPED:
          job.setStatus(Status.CANCELED);
          break;
        case SUCCEEDED:
          job.setStatus(Status.FINISHED);
          break;
        default:
          throw new IllegalStateException("Found a workflow state that is not handled");
      }
    } catch (ServiceRegistryException e) {
      logger.error(e, "Unable to read workflow job %s from service registry", workflowInstance.getId());
      throw new WorkflowDatabaseException(e);
    } catch (NotFoundException e) {
      logger.error("Job for workflow %s not found in service registry", workflowInstance.getId());
      throw new WorkflowDatabaseException(e);
    }
    // Update both workflow and workflow job
    try {
      job = serviceRegistry.updateJob(job);
      messageSender.sendObjectMessage(WorkflowItem.WORKFLOW_QUEUE, MessageSender.DestinationType.Queue, WorkflowItem.updateInstance(workflowInstance));
      index(workflowInstance);
    } catch (ServiceRegistryException e) {
      logger.error("Update of workflow job %s in the service registry failed, service registry and workflow index may be out of sync", workflowInstance.getId());
      throw new WorkflowDatabaseException(e);
    } catch (NotFoundException e) {
      logger.error("Job for workflow %s not found in service registry", workflowInstance.getId());
      throw new WorkflowDatabaseException(e);
    } catch (Exception e) {
      logger.error("Update of workflow job %s in the service registry failed, service registry and workflow index may be out of sync", job.getId());
      throw new WorkflowException(e);
    }
    if (workflowStatsCollect) {
      workflowsStatistics.updateWorkflow(getBeanStatistics(), getHoldWorkflows());
    }
    try {
      WorkflowInstance clone = WorkflowParser.parseWorkflowInstance(WorkflowParser.toXml(workflowInstance));
      fireListeners(originalWorkflowInstance, clone);
    } catch (Exception e) {
      // Can't happen, since we are converting from an in-memory object
      throw new IllegalStateException("In-memory workflow instance could not be serialized", e);
    }
  } finally {
    lock.unlock();
  }
}
use of org.opencastproject.security.api.AccessControlList in project opencast by opencast.
the class WorkflowServiceSolrIndex method addAuthorization.
/**
 * Adds authorization fields to the solr document.
 *
 * @param doc
 *          the solr document
 * @param acl
 *          the access control list
 */
protected void addAuthorization(SolrInputDocument doc, AccessControlList acl) {
  Map<String, List<String>> permissions = new HashMap<String, List<String>>();
  // Define containers for common permissions
  List<String> reads = new ArrayList<String>();
  permissions.put(Permissions.Action.READ.toString(), reads);
  List<String> writes = new ArrayList<String>();
  permissions.put(Permissions.Action.WRITE.toString(), writes);
  String adminRole = securityService.getOrganization().getAdminRole();
  // The admin user can read and write
  if (adminRole != null) {
    reads.add(adminRole);
    writes.add(adminRole);
  }
  for (AccessControlEntry entry : acl.getEntries()) {
    if (!entry.isAllow()) {
      logger.warn("Workflow service does not support denial via ACL, ignoring {}", entry);
      continue;
    }
    List<String> actionPermissions = permissions.get(entry.getAction());
    if (actionPermissions == null) {
      actionPermissions = new ArrayList<String>();
      permissions.put(entry.getAction(), actionPermissions);
    }
    actionPermissions.add(entry.getRole());
  }
  // Write the permissions to the solr document
  for (Map.Entry<String, List<String>> entry : permissions.entrySet()) {
    String fieldName = ACL_KEY_PREFIX + entry.getKey();
    doc.setField(fieldName, entry.getValue());
  }
}
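A minimal call-site sketch, assuming Opencast's AccessControlEntry(role, action, allow) and varargs AccessControlList(AccessControlEntry...) constructors; the role names are made up for illustration:
// Hypothetical usage from within the index (addAuthorization is protected)
SolrInputDocument doc = new SolrInputDocument();
AccessControlList acl = new AccessControlList(
    new AccessControlEntry("ROLE_COURSE_STUDENT", Permissions.Action.READ.toString(), true),
    new AccessControlEntry("ROLE_COURSE_ADMIN", Permissions.Action.WRITE.toString(), true),
    new AccessControlEntry("ROLE_ANONYMOUS", Permissions.Action.READ.toString(), false)); // denied entries are skipped with a warning
addAuthorization(doc, acl);
// doc now carries one multi-valued field per action (ACL_KEY_PREFIX + action),
// with the organization's admin role always granted both read and write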