Use of org.opencastproject.metadata.dublincore.DublinCoreValue in project opencast by opencast.
From the class IngestServiceImpl, method schedule:
@Override
public void schedule(MediaPackage mediaPackage, String workflowDefinitionID, Map<String, String> properties)
        throws IllegalStateException, IngestException, NotFoundException, UnauthorizedException, SchedulerException {
  MediaPackageElement[] mediaPackageElements = mediaPackage.getElementsByFlavor(MediaPackageElements.EPISODE);
  if (mediaPackageElements.length != 1) {
    logger.debug("There can be only one (and exactly one) episode dublin core catalog: https://youtu.be/_J3VeogFUOs");
    throw new IngestException("There can be only one (and exactly one) episode dublin core catalog");
  }
  InputStream inputStream;
  DublinCoreCatalog dublinCoreCatalog;
  try {
    inputStream = workingFileRepository.get(mediaPackage.getIdentifier().toString(), mediaPackageElements[0].getIdentifier());
    dublinCoreCatalog = dublinCoreService.load(inputStream);
  } catch (IOException e) {
    throw new IngestException(e);
  }
  EName temporal = new EName(DublinCore.TERMS_NS_URI, "temporal");
  List<DublinCoreValue> periods = dublinCoreCatalog.get(temporal);
  if (periods.size() != 1) {
    logger.debug("There can be only one (and exactly one) period");
    throw new IngestException("There can be only one (and exactly one) period");
  }
  DCMIPeriod period = EncodingSchemeUtils.decodeMandatoryPeriod(periods.get(0));
  if (!period.hasStart() || !period.hasEnd()) {
    logger.debug("A scheduled recording needs to have a start and end.");
    throw new IngestException("A scheduled recording needs to have a start and end.");
  }
  EName createdEName = new EName(DublinCore.TERMS_NS_URI, "created");
  List<DublinCoreValue> created = dublinCoreCatalog.get(createdEName);
  if (created.size() == 0) {
    logger.debug("Created not set");
  } else if (created.size() == 1) {
    Date date = EncodingSchemeUtils.decodeMandatoryDate(created.get(0));
    if (date.getTime() != period.getStart().getTime()) {
      logger.debug("start and created date differ ({} vs {})", date.getTime(), period.getStart().getTime());
      throw new IngestException("Temporal start and created date differ");
    }
  } else {
    logger.debug("There can be only one created date");
    throw new IngestException("There can be only one created date");
  }
  // spatial
  EName spatial = new EName(DublinCore.TERMS_NS_URI, "spatial");
  List<DublinCoreValue> captureAgents = dublinCoreCatalog.get(spatial);
  if (captureAgents.size() != 1) {
    logger.debug("Exactly one capture agent needs to be set");
    throw new IngestException("Exactly one capture agent needs to be set");
  }
  String captureAgent = captureAgents.get(0).getValue();
  // Go through properties
  Map<String, String> agentProperties = new HashMap<>();
  Map<String, String> workflowProperties = new HashMap<>();
  for (String key : properties.keySet()) {
    if (key.startsWith("org.opencastproject.workflow.config.")) {
      workflowProperties.put(key, properties.get(key));
    } else {
      agentProperties.put(key, properties.get(key));
    }
  }
  try {
    schedulerService.addEvent(period.getStart(), period.getEnd(), captureAgent, new HashSet<>(), mediaPackage,
            workflowProperties, agentProperties, Opt.none(), Opt.none(), "ingest-service");
  } finally {
    for (MediaPackageElement mediaPackageElement : mediaPackage.getElements()) {
      try {
        workingFileRepository.delete(mediaPackage.getIdentifier().toString(), mediaPackageElement.getIdentifier());
      } catch (IOException e) {
        logger.warn("Failed to delete media package element", e);
      }
    }
  }
}
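
For orientation, here is a minimal sketch of an episode catalog that would satisfy the checks in schedule above. It reuses only the calls already visible in the snippet (EncodingSchemeUtils, DublinCores, EName, catalog set); the concrete dates and the agent name "my-capture-agent" are invented for illustration, and the usual imports are assumed.

// Hypothetical example: build an episode Dublin Core catalog that passes the validation above.
DublinCoreCatalog dc = DublinCores.mkOpencastEpisode().getCatalog();
Date start = new Date();
Date end = new Date(start.getTime() + 60 * 60 * 1000L);
// dcterms:temporal must decode to a DCMIPeriod with both a start and an end.
DublinCoreValue temporal = EncodingSchemeUtils.encodePeriod(new DCMIPeriod(start, end), Precision.Second);
dc.set(DublinCore.PROPERTY_TEMPORAL, temporal);
// dcterms:created is optional, but if present it must match the temporal start.
dc.set(new EName(DublinCore.TERMS_NS_URI, "created"), EncodingSchemeUtils.encodeDate(start, Precision.Second));
// dcterms:spatial carries the capture agent name and must have exactly one value.
dc.set(new EName(DublinCore.TERMS_NS_URI, "spatial"), "my-capture-agent");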
Use of org.opencastproject.metadata.dublincore.DublinCoreValue in project opencast by opencast.
From the class SeriesWorkflowOperationHandlerTest, method testExtraMetadata:
@Test
public void testExtraMetadata() throws WorkflowOperationException {
  final EName otherProperty = new EName(DublinCore.TERMS_NS_URI, "my-custom-property");
  final String otherValue = "foobar";
  // Add extra metadata to the series catalog.
  seriesCatalog.set(DublinCore.PROPERTY_LANGUAGE, "Opencastian");
  seriesCatalog.set(otherProperty, otherValue);
  seriesCatalog.set(DublinCore.PROPERTY_CONTRIBUTOR, Arrays.asList(
          new DublinCoreValue[] { DublinCoreValue.mk("Mr. Contry Bute"), DublinCoreValue.mk("Mrs. Jane Doe") }));
  // Prepare "copy metadata" property
  String[] extraMetadata = {
      // Append a full metadata field, with NS
      DublinCore.PROPERTY_LANGUAGE.toString(),
      // Field without namespace
      DublinCore.PROPERTY_CONTRIBUTOR.getLocalName(),
      // Field with a namespace different than the default
      otherProperty.toString(),
      // Field that does not exist in the series catalog
      "does-not-exist" };
  WorkflowInstanceImpl instance = new WorkflowInstanceImpl();
  List<WorkflowOperationInstance> ops = new ArrayList<WorkflowOperationInstance>();
  WorkflowOperationInstanceImpl operation = new WorkflowOperationInstanceImpl("test", OperationState.INSTANTIATED);
  ops.add(operation);
  instance.setOperations(ops);
  instance.setMediaPackage(mp);
  MediaPackage clone = (MediaPackage) mp.clone();
  operation.setConfiguration(SeriesWorkflowOperationHandler.SERIES_PROPERTY, "series1");
  operation.setConfiguration(SeriesWorkflowOperationHandler.ATTACH_PROPERTY, "*");
  operation.setConfiguration(SeriesWorkflowOperationHandler.APPLY_ACL_PROPERTY, "false");
  operation.setConfiguration(SeriesWorkflowOperationHandler.COPY_METADATA_PROPERTY, StringUtils.join(extraMetadata, ", "));
  WorkflowOperationResult result = operationHandler.start(instance, null);
  Assert.assertEquals(Action.CONTINUE, result.getAction());
  MediaPackage resultingMediapackage = result.getMediaPackage();
  // Get episode DublinCore
  DublinCoreCatalog episodeCatalog = DublinCores.read(capturedStream.getValue());
  Assert.assertEquals("series1", resultingMediapackage.getSeries());
  Assert.assertEquals("Series 1", resultingMediapackage.getSeriesTitle());
  Assert.assertEquals(clone.getElements().length + 1, resultingMediapackage.getElements().length);
  // Check the extra metadata were copied into the dublincore (only those present in the series catalog)
  Assert.assertTrue(episodeCatalog.hasValue(DublinCore.PROPERTY_CONTRIBUTOR));
  Assert.assertEquals(seriesCatalog.get(DublinCore.PROPERTY_CONTRIBUTOR), episodeCatalog.get(DublinCore.PROPERTY_CONTRIBUTOR));
  Assert.assertTrue(episodeCatalog.hasValue(DublinCore.PROPERTY_LANGUAGE));
  Assert.assertEquals(seriesCatalog.get(DublinCore.PROPERTY_LANGUAGE), episodeCatalog.get(DublinCore.PROPERTY_LANGUAGE));
  Assert.assertTrue(episodeCatalog.hasValue(otherProperty));
  Assert.assertEquals(seriesCatalog.get(otherProperty), episodeCatalog.get(otherProperty));
  Assert.assertFalse(episodeCatalog.hasValue(new EName(DublinCore.TERMS_NS_URI, "does-not-exist")));
}
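
Since the assertions above revolve around multi-valued properties, a short sketch of how DublinCoreValue lists behave may help; it assumes a catalog like the seriesCatalog from the test and the same imports, and the contributor names are taken from the test data above.

// set(...) with a list of DublinCoreValue entries replaces all values of the property.
seriesCatalog.set(DublinCore.PROPERTY_CONTRIBUTOR,
        Arrays.asList(DublinCoreValue.mk("Mr. Contry Bute"), DublinCoreValue.mk("Mrs. Jane Doe")));
// get(...) returns every value of the property.
for (DublinCoreValue contributor : seriesCatalog.get(DublinCore.PROPERTY_CONTRIBUTOR)) {
  System.out.println(contributor.getValue());
}
// hasValue(...) is the existence check used by the assertions above.
boolean hasContributors = seriesCatalog.hasValue(DublinCore.PROPERTY_CONTRIBUTOR);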
Use of org.opencastproject.metadata.dublincore.DublinCoreValue in project opencast by opencast.
From the class IndexServiceImpl, method createEvent:
@Override
public String createEvent(EventHttpServletRequest eventHttpServletRequest) throws ParseException, IOException,
        MediaPackageException, IngestException, NotFoundException, SchedulerException, UnauthorizedException {
  // Preconditions
  if (eventHttpServletRequest.getAcl().isNone()) {
    throw new IllegalArgumentException("No access control list available to create new event.");
  }
  if (eventHttpServletRequest.getMediaPackage().isNone()) {
    throw new IllegalArgumentException("No mediapackage available to create new event.");
  }
  if (eventHttpServletRequest.getMetadataList().isNone()) {
    throw new IllegalArgumentException("No metadata list available to create new event.");
  }
  if (eventHttpServletRequest.getProcessing().isNone()) {
    throw new IllegalArgumentException("No processing metadata available to create new event.");
  }
  if (eventHttpServletRequest.getSource().isNone()) {
    throw new IllegalArgumentException("No source field metadata available to create new event.");
  }
  // Get Workflow
  String workflowTemplate = (String) eventHttpServletRequest.getProcessing().get().get("workflow");
  if (workflowTemplate == null)
    throw new IllegalArgumentException("No workflow template in metadata");
  // Get Type of Source
  SourceType type = getSourceType(eventHttpServletRequest.getSource().get());
  MetadataCollection eventMetadata = eventHttpServletRequest.getMetadataList().get()
          .getMetadataByAdapter(eventCatalogUIAdapter).get();
  JSONObject sourceMetadata = (JSONObject) eventHttpServletRequest.getSource().get().get("metadata");
  if (sourceMetadata != null && (type.equals(SourceType.SCHEDULE_SINGLE) || type.equals(SourceType.SCHEDULE_MULTIPLE))) {
    try {
      MetadataField<?> current = eventMetadata.getOutputFields().get("location");
      eventMetadata.updateStringField(current, (String) sourceMetadata.get("device"));
    } catch (Exception e) {
      logger.warn("Unable to parse device {}", sourceMetadata.get("device"));
      throw new IllegalArgumentException("Unable to parse device");
    }
  }
  Date currentStartDate = null;
  MetadataField<?> starttime = eventMetadata.getOutputFields().get(DublinCore.PROPERTY_TEMPORAL.getLocalName());
  if (starttime != null && starttime.isUpdated() && starttime.getValue().isSome()) {
    DCMIPeriod period = EncodingSchemeUtils.decodeMandatoryPeriod((DublinCoreValue) starttime.getValue().get());
    currentStartDate = period.getStart();
  }
  MetadataField<?> created = eventMetadata.getOutputFields().get(DublinCore.PROPERTY_CREATED.getLocalName());
  if (created == null || !created.isUpdated() || created.getValue().isNone()) {
    eventMetadata.removeField(created);
    MetadataField<String> newCreated = MetadataUtils.copyMetadataField(created);
    if (currentStartDate != null) {
      newCreated.setValue(EncodingSchemeUtils.encodeDate(currentStartDate, Precision.Second).getValue());
    } else {
      newCreated.setValue(EncodingSchemeUtils.encodeDate(new Date(), Precision.Second).getValue());
    }
    eventMetadata.addField(newCreated);
  }
  // Get presenter usernames for use as technical presenters
  Set<String> presenterUsernames = new HashSet<>();
  Opt<Set<String>> technicalPresenters = updatePresenters(eventMetadata);
  if (technicalPresenters.isSome()) {
    presenterUsernames = technicalPresenters.get();
  }
  eventHttpServletRequest.getMetadataList().get().add(eventCatalogUIAdapter, eventMetadata);
  updateMediaPackageMetadata(eventHttpServletRequest.getMediaPackage().get(), eventHttpServletRequest.getMetadataList().get());
  DublinCoreCatalog dc = getDublinCoreCatalog(eventHttpServletRequest);
  String captureAgentId = null;
  TimeZone tz = null;
  org.joda.time.DateTime start = null;
  org.joda.time.DateTime end = null;
  long duration = 0L;
  Properties caProperties = new Properties();
  RRule rRule = null;
  if (sourceMetadata != null && (type.equals(SourceType.SCHEDULE_SINGLE) || type.equals(SourceType.SCHEDULE_MULTIPLE))) {
    Properties configuration;
    try {
      captureAgentId = (String) sourceMetadata.get("device");
      configuration = captureAgentStateService.getAgentConfiguration((String) sourceMetadata.get("device"));
    } catch (Exception e) {
      logger.warn("Unable to parse device {}: because: {}", sourceMetadata.get("device"), getStackTrace(e));
      throw new IllegalArgumentException("Unable to parse device");
    }
    String durationString = (String) sourceMetadata.get("duration");
    if (StringUtils.isBlank(durationString))
      throw new IllegalArgumentException("No duration in source metadata");
    // Create timezone based on CA's reported TZ.
    String agentTimeZone = configuration.getProperty("capture.device.timezone");
    if (StringUtils.isNotBlank(agentTimeZone)) {
      tz = TimeZone.getTimeZone(agentTimeZone);
      dc.set(DublinCores.OC_PROPERTY_AGENT_TIMEZONE, tz.getID());
    } else {
      // No timezone was present, assume the server's local timezone.
      tz = TimeZone.getDefault();
      logger.debug("The field 'capture.device.timezone' has not been set in the agent configuration. The default server timezone will be used.");
    }
    org.joda.time.DateTime now = new org.joda.time.DateTime(DateTimeZone.UTC);
    start = now.withMillis(DateTimeSupport.fromUTC((String) sourceMetadata.get("start")));
    end = now.withMillis(DateTimeSupport.fromUTC((String) sourceMetadata.get("end")));
    duration = Long.parseLong(durationString);
    DublinCoreValue period = EncodingSchemeUtils.encodePeriod(
            new DCMIPeriod(start.toDate(), start.plus(duration).toDate()), Precision.Second);
    String inputs = (String) sourceMetadata.get("inputs");
    caProperties.putAll(configuration);
    dc.set(DublinCore.PROPERTY_TEMPORAL, period);
    caProperties.put(CaptureParameters.CAPTURE_DEVICE_NAMES, inputs);
  }
  if (type.equals(SourceType.SCHEDULE_MULTIPLE)) {
    rRule = new RRule((String) sourceMetadata.get("rrule"));
  }
  Map<String, String> configuration = new HashMap<>();
  if (eventHttpServletRequest.getProcessing().get().get("configuration") != null) {
    configuration = new HashMap<>((JSONObject) eventHttpServletRequest.getProcessing().get().get("configuration"));
  }
  for (Entry<String, String> entry : configuration.entrySet()) {
    caProperties.put(WORKFLOW_CONFIG_PREFIX.concat(entry.getKey()), entry.getValue());
  }
  caProperties.put(CaptureParameters.INGEST_WORKFLOW_DEFINITION, workflowTemplate);
  eventHttpServletRequest.setMediaPackage(authorizationService.setAcl(eventHttpServletRequest.getMediaPackage().get(),
          AclScope.Episode, eventHttpServletRequest.getAcl().get()).getA());
  MediaPackage mediaPackage;
  switch (type) {
    case UPLOAD:
    case UPLOAD_LATER:
      eventHttpServletRequest
              .setMediaPackage(updateDublincCoreCatalog(eventHttpServletRequest.getMediaPackage().get(), dc));
      configuration.put("workflowDefinitionId", workflowTemplate);
      WorkflowInstance ingest = ingestService.ingest(eventHttpServletRequest.getMediaPackage().get(), workflowTemplate,
              configuration);
      return eventHttpServletRequest.getMediaPackage().get().getIdentifier().compact();
    case SCHEDULE_SINGLE:
      mediaPackage = updateDublincCoreCatalog(eventHttpServletRequest.getMediaPackage().get(), dc);
      eventHttpServletRequest.setMediaPackage(mediaPackage);
      try {
        schedulerService.addEvent(start.toDate(), start.plus(duration).toDate(), captureAgentId, presenterUsernames,
                mediaPackage, configuration, (Map) caProperties, Opt.<Boolean>none(), Opt.<String>none(),
                SchedulerService.ORIGIN);
      } finally {
        for (MediaPackageElement mediaPackageElement : mediaPackage.getElements()) {
          try {
            workspace.delete(mediaPackage.getIdentifier().toString(), mediaPackageElement.getIdentifier());
          } catch (NotFoundException | IOException e) {
            logger.warn("Failed to delete media package element", e);
          }
        }
      }
      return mediaPackage.getIdentifier().compact();
    case SCHEDULE_MULTIPLE:
      List<Period> periods = schedulerService.calculatePeriods(rRule, start.toDate(), end.toDate(), duration, tz);
      Map<String, Period> scheduled = new LinkedHashMap<>();
      scheduled = schedulerService.addMultipleEvents(rRule, start.toDate(), end.toDate(), duration, tz, captureAgentId,
              presenterUsernames, eventHttpServletRequest.getMediaPackage().get(), configuration, (Map) caProperties,
              Opt.none(), Opt.none(), SchedulerService.ORIGIN);
      return StringUtils.join(scheduled.keySet(), ",");
    default:
      logger.warn("Unknown source type {}", type);
      throw new IllegalArgumentException("Unknown source type");
  }
}
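
One detail in createEvent that is easy to miss: the created field is stored as the string form of an encoded date, taken from DublinCoreValue.getValue(). A small sketch of that round trip, with an arbitrary date and the same imports assumed:

// Encode a date into a DublinCoreValue at second precision, as done for the created field above.
Date now = new Date();
DublinCoreValue encoded = EncodingSchemeUtils.encodeDate(now, Precision.Second);
// getValue() yields the encoded string that is written into the metadata field.
String createdString = encoded.getValue();
// The same string can be decoded back into a Date (milliseconds are dropped at second precision).
Date roundTripped = EncodingSchemeUtils.decodeMandatoryDate(DublinCoreValue.mk(createdString));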
Use of org.opencastproject.metadata.dublincore.DublinCoreValue in project opencast by opencast.
From the class ConfigurableSeriesDCCatalogUIAdapter, method getFieldValuesFromDublinCoreCatalog:
private void getFieldValuesFromDublinCoreCatalog(DublinCoreMetadataCollection dublinCoreMetadata,
        Set<String> emptyFields, DublinCoreCatalog dc) {
  for (EName propertyKey : dc.getValues().keySet()) {
    for (String metdataFieldKey : dublinCoreProperties.keySet()) {
      MetadataField<?> metadataField = dublinCoreProperties.get(metdataFieldKey);
      String namespace = DublinCore.TERMS_NS_URI;
      if (metadataField.getNamespace().isSome()) {
        namespace = metadataField.getNamespace().get();
      }
      if (namespace.equalsIgnoreCase(propertyKey.getNamespaceURI())
              && metadataField.getInputID().equalsIgnoreCase(propertyKey.getLocalName())) {
        for (DublinCoreValue dublinCoreValue : dc.get(propertyKey)) {
          emptyFields.remove(metdataFieldKey);
          try {
            dublinCoreMetadata.addField(metadataField, dublinCoreValue.getValue(), listProvidersService);
          } catch (IllegalArgumentException e) {
            logger.error("Skipping metadata field '{}' because of error: {}", metadataField.getInputID(),
                    ExceptionUtils.getStackTrace(e));
          }
        }
      }
    }
  }
}
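
The adapter above matches catalog properties by namespace URI plus local name. A minimal sketch of that matching in isolation; dc and logger come from the surrounding class, and dcterms:license is just a sample property chosen for illustration.

// An EName pairs a namespace URI with a local name; equal pairs address the same property.
EName license = new EName(DublinCore.TERMS_NS_URI, "license");
for (EName propertyKey : dc.getValues().keySet()) {
  if (license.getNamespaceURI().equalsIgnoreCase(propertyKey.getNamespaceURI())
          && license.getLocalName().equalsIgnoreCase(propertyKey.getLocalName())) {
    // A matching property may carry several values.
    for (DublinCoreValue value : dc.get(propertyKey)) {
      logger.info("Found value '{}' for {}", value.getValue(), propertyKey);
    }
  }
}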
Use of org.opencastproject.metadata.dublincore.DublinCoreValue in project opencast by opencast.
From the class SchedulerServiceImpl, method addMultipleEventInternal:
private Map<String, Period> addMultipleEventInternal(List<Period> periods, String captureAgentId, Set<String> userIds,
        MediaPackage templateMp, Map<String, String> wfProperties, Map<String, String> caMetadata,
        String modificationOrigin, Opt<Boolean> optOutStatus, Opt<String> schedulingSource, Opt<String> trxId)
        throws SchedulerException {
  notNull(periods, "periods");
  requireTrue(periods.size() > 0, "periods");
  notEmpty(captureAgentId, "captureAgentId");
  notNull(userIds, "userIds");
  notNull(templateMp, "mediaPackages");
  notNull(wfProperties, "wfProperties");
  notNull(caMetadata, "caMetadata");
  notEmpty(modificationOrigin, "modificationOrigin");
  notNull(optOutStatus, "optOutStatus");
  notNull(schedulingSource, "schedulingSource");
  notNull(trxId, "trxId");
  Map<String, Period> scheduledEvents = new LinkedHashMap<>();
  try {
    LinkedList<Id> ids = new LinkedList<>();
    AQueryBuilder qb = assetManager.createQuery();
    Predicate p = null;
    // While we don't have a list of IDs equal to the number of periods
    while (ids.size() <= periods.size()) {
      // Create a list of IDs equal to the number of periods, along with a set of AM predicates
      while (ids.size() <= periods.size()) {
        Id id = new IdImpl(UUID.randomUUID().toString());
        ids.add(id);
        Predicate np = qb.mediaPackageId(id.compact());
        // Haha, p = np jokes with the AM query language. Ha. Haha. Ha. (Sob...)
        if (null == p) {
          p = np;
        } else {
          p = p.or(np);
        }
      }
      // Select the list of ids which already exist. Hint: this needs to be zero
      AResult result = qb.select(qb.nothing()).where(withOrganization(qb).and(p).and(qb.version().isLatest())).run();
      // If there is a conflict, clear the list and start over
      if (result.getTotalSize() > 0) {
        ids.clear();
      }
    }
    Opt<String> seriesId = Opt.nul(StringUtils.trimToNull(templateMp.getSeries()));
    // Get opt out status
    boolean optOut = getOptOutStatus(seriesId, optOutStatus);
    if (trxId.isNone()) {
      // Check for locked transactions
      if (schedulingSource.isSome() && persistence.hasTransaction(schedulingSource.get())) {
        logger.warn("Unable to add events, source '{}' is currently locked due to an active transaction!",
                schedulingSource.get());
        throw new SchedulerTransactionLockException("Unable to add event, locked source " + schedulingSource.get());
      }
      // Check for conflicting events if not opted out
      if (!optOut) {
        List<MediaPackage> conflictingEvents = findConflictingEvents(periods, captureAgentId, TimeZone.getDefault());
        if (conflictingEvents.size() > 0) {
          logger.info("Unable to add events, conflicting events found: {}", conflictingEvents);
          throw new SchedulerConflictException("Unable to add event, conflicting events found");
        }
      }
    }
    // counter for index into the list of mediapackages
    int counter = 0;
    for (Period event : periods) {
      MediaPackage mediaPackage = (MediaPackage) templateMp.clone();
      Date startDate = new Date(event.getStart().getTime());
      Date endDate = new Date(event.getEnd().getTime());
      Id id = ids.get(counter);
      // Get, or make, the DC catalog
      DublinCoreCatalog dc;
      Opt<DublinCoreCatalog> dcOpt = DublinCoreUtil.loadEpisodeDublinCore(workspace, templateMp);
      if (dcOpt.isSome()) {
        dc = dcOpt.get();
        dc = (DublinCoreCatalog) dc.clone();
        // make sure to bind the OC_PROPERTY namespace
        dc.addBindings(XmlNamespaceContext
                .mk(XmlNamespaceBinding.mk(DublinCores.OC_PROPERTY_NS_PREFIX, DublinCores.OC_PROPERTY_NS_URI)));
      } else {
        dc = DublinCores.mkOpencastEpisode().getCatalog();
      }
      // Set the new media package identifier
      mediaPackage.setIdentifier(id);
      // Update dublincore title and temporal
      String newTitle = dc.getFirst(DublinCore.PROPERTY_TITLE)
              + String.format(" %0" + Integer.toString(periods.size()).length() + "d", ++counter);
      dc.set(DublinCore.PROPERTY_TITLE, newTitle);
      DublinCoreValue eventTime = EncodingSchemeUtils.encodePeriod(new DCMIPeriod(startDate, endDate), Precision.Second);
      dc.set(DublinCore.PROPERTY_TEMPORAL, eventTime);
      mediaPackage = updateDublincCoreCatalog(mediaPackage, dc);
      mediaPackage.setTitle(newTitle);
      String mediaPackageId = mediaPackage.getIdentifier().compact();
      // Converting from iCal4j DateTime objects to plain Date objects to prevent AMQ issues below
      Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
      cal.setTime(event.getStart());
      Date startDateTime = cal.getTime();
      cal.setTime(event.getEnd());
      Date endDateTime = cal.getTime();
      // Load dublincore and acl for update
      Opt<DublinCoreCatalog> dublinCore = DublinCoreUtil.loadEpisodeDublinCore(workspace, mediaPackage);
      Option<AccessControlList> acl = authorizationService.getAcl(mediaPackage, AclScope.Episode);
      // Get updated agent properties
      Map<String, String> finalCaProperties = getFinalAgentProperties(caMetadata, wfProperties, captureAgentId,
              seriesId, dublinCore);
      // Persist asset
      String checksum = calculateChecksum(workspace, getEventCatalogUIAdapterFlavors(), startDateTime, endDateTime,
              captureAgentId, userIds, mediaPackage, dublinCore, wfProperties, finalCaProperties, optOut,
              acl.toOpt().getOr(new AccessControlList()));
      persistEvent(mediaPackageId, modificationOrigin, checksum, Opt.some(startDateTime), Opt.some(endDateTime),
              Opt.some(captureAgentId), Opt.some(userIds), Opt.some(mediaPackage), Opt.some(wfProperties),
              Opt.some(finalCaProperties), Opt.some(optOut), schedulingSource, trxId);
      if (trxId.isNone()) {
        // Send updates
        sendUpdateAddEvent(mediaPackageId, acl.toOpt(), dublinCore, Opt.some(startDateTime), Opt.some(endDateTime),
                Opt.some(userIds), Opt.some(captureAgentId), Opt.some(finalCaProperties), Opt.some(optOut));
        // Update last modified
        touchLastEntry(captureAgentId);
      }
      scheduledEvents.put(mediaPackageId, event);
      for (MediaPackageElement mediaPackageElement : mediaPackage.getElements()) {
        try {
          workspace.delete(mediaPackage.getIdentifier().toString(), mediaPackageElement.getIdentifier());
        } catch (NotFoundException | IOException e) {
          logger.warn("Failed to delete media package element", e);
        }
      }
    }
    return scheduledEvents;
  } catch (SchedulerException e) {
    throw e;
  } catch (Exception e) {
    logger.error("Failed to create events: {}", getStackTrace(e));
    throw new SchedulerException(e);
  }
}
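
To make the per-period loop above easier to follow, here is a sketch of what each iteration writes into the cloned episode catalog; the title "Lecture", the period count and the dates are illustrative, and dc stands for the catalog handled in the loop.

// With 12 periods the counter is zero-padded to two digits: "Lecture 01", "Lecture 02", ...
int periodCount = 12;
int counter = 0;
String newTitle = "Lecture" + String.format(" %0" + Integer.toString(periodCount).length() + "d", ++counter);
dc.set(DublinCore.PROPERTY_TITLE, newTitle);
// The temporal extent of the single event is stored as one encoded DublinCoreValue.
Date startDate = new Date();
Date endDate = new Date(startDate.getTime() + 90 * 60 * 1000L);
DublinCoreValue eventTime = EncodingSchemeUtils.encodePeriod(new DCMIPeriod(startDate, endDate), Precision.Second);
dc.set(DublinCore.PROPERTY_TEMPORAL, eventTime);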