Use of org.opencastproject.workflow.api.WorkflowOperationException in project opencast by opencast.
The class SeriesWorkflowOperationHandler, method addDublinCoreCatalog.
private MediaPackage addDublinCoreCatalog(DublinCoreCatalog catalog, MediaPackageElementFlavor flavor, MediaPackage mediaPackage) throws WorkflowOperationException {
try (InputStream in = IOUtils.toInputStream(catalog.toXmlString(), "UTF-8")) {
String elementId = UUID.randomUUID().toString();
URI catalogUrl = workspace.put(mediaPackage.getIdentifier().compact(), elementId, "dublincore.xml", in);
logger.info("Adding catalog with flavor {} to mediapackage {}", flavor, mediaPackage);
MediaPackageElement mpe = mediaPackage.add(catalogUrl, MediaPackageElement.Type.Catalog, flavor);
mpe.setIdentifier(elementId);
mpe.setChecksum(Checksum.create(ChecksumType.DEFAULT_TYPE, workspace.get(catalogUrl)));
return mediaPackage;
} catch (IOException | NotFoundException e) {
throw new WorkflowOperationException(e);
}
}
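For context, this is the wrap-and-rethrow pattern used throughout these handlers: checked exceptions from workspace I/O are converted into WorkflowOperationException so the workflow engine can mark the operation as failed. A minimal sketch of the same pattern, assuming the handler's workspace field; the helper name storeCatalog is hypothetical.
private URI storeCatalog(MediaPackage mediaPackage, String filename, InputStream in) throws WorkflowOperationException {
  try {
    // Store the stream under a fresh element id and return the workspace URI
    return workspace.put(mediaPackage.getIdentifier().toString(), UUID.randomUUID().toString(), filename, in);
  } catch (IOException e) {
    // Wrap the checked exception so the workflow operation fails cleanly
    throw new WorkflowOperationException("Unable to store " + filename + " in the workspace", e);
  }
}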
Use of org.opencastproject.workflow.api.WorkflowOperationException in project opencast by opencast.
The class SeriesWorkflowOperationHandler, method start.
/**
* {@inheritDoc}
*
* @see org.opencastproject.workflow.api.WorkflowOperationHandler#start(org.opencastproject.workflow.api.WorkflowInstance,
* JobContext)
*/
@Override
public WorkflowOperationResult start(final WorkflowInstance workflowInstance, JobContext context) throws WorkflowOperationException {
logger.debug("Running series workflow operation");
MediaPackage mediaPackage = workflowInstance.getMediaPackage();
Opt<String> optSeries = getOptConfig(workflowInstance.getCurrentOperation(), SERIES_PROPERTY);
Opt<String> optAttachFlavors = getOptConfig(workflowInstance.getCurrentOperation(), ATTACH_PROPERTY);
Boolean applyAcl = getOptConfig(workflowInstance.getCurrentOperation(), APPLY_ACL_PROPERTY).map(toBoolean).getOr(false);
Opt<String> optCopyMetadata = getOptConfig(workflowInstance.getCurrentOperation(), COPY_METADATA_PROPERTY);
String defaultNamespace = getOptConfig(workflowInstance.getCurrentOperation(), DEFAULT_NS_PROPERTY).getOr(DublinCore.TERMS_NS_URI);
logger.debug("Using default namespace: '{}'", defaultNamespace);
if (optSeries.isSome() && !optSeries.get().equals(mediaPackage.getSeries())) {
logger.info("Changing series id from '{}' to '{}'", StringUtils.trimToEmpty(mediaPackage.getSeries()), optSeries.get());
mediaPackage.setSeries(optSeries.get());
}
String seriesId = mediaPackage.getSeries();
if (seriesId == null) {
logger.info("No series set, skip operation");
return createResult(mediaPackage, Action.SKIP);
}
DublinCoreCatalog series;
try {
series = seriesService.getSeries(seriesId);
} catch (NotFoundException e) {
logger.info("No series with the identifier '{}' found, skip operation", seriesId);
return createResult(mediaPackage, Action.SKIP);
} catch (UnauthorizedException e) {
logger.warn("Not authorized to get series with identifier '{}' found, skip operation", seriesId);
return createResult(mediaPackage, Action.SKIP);
} catch (SeriesException e) {
logger.error("Unable to get series with identifier '{}', skip operation: {}", seriesId, ExceptionUtils.getStackTrace(e));
throw new WorkflowOperationException(e);
}
mediaPackage.setSeriesTitle(series.getFirst(DublinCore.PROPERTY_TITLE));
// Process extra metadata
HashSet<EName> extraMetadata = new HashSet<>();
if (optCopyMetadata.isSome()) {
for (String strEName : optCopyMetadata.get().split(",+\\s*")) try {
if (!strEName.isEmpty()) {
extraMetadata.add(EName.fromString(strEName, defaultNamespace));
}
} catch (IllegalArgumentException iae) {
logger.warn("Ignoring incorrect dublincore metadata property: '{}'", strEName);
}
}
// Update the episode catalog
for (Catalog episodeCatalog : mediaPackage.getCatalogs(MediaPackageElements.EPISODE)) {
DublinCoreCatalog episodeDublinCore = DublinCoreUtil.loadDublinCore(workspace, episodeCatalog);
// Make sure the MP catalog has bindings defined
episodeDublinCore.addBindings(XmlNamespaceContext.mk(XmlNamespaceBinding.mk(DublinCore.TERMS_NS_PREFIX, DublinCore.TERMS_NS_URI)));
episodeDublinCore.addBindings(XmlNamespaceContext.mk(XmlNamespaceBinding.mk(DublinCore.ELEMENTS_1_1_NS_PREFIX, DublinCore.ELEMENTS_1_1_NS_URI)));
episodeDublinCore.addBindings(XmlNamespaceContext.mk(XmlNamespaceBinding.mk(DublinCores.OC_PROPERTY_NS_PREFIX, DublinCores.OC_PROPERTY_NS_URI)));
episodeDublinCore.set(DublinCore.PROPERTY_IS_PART_OF, seriesId);
for (EName property : extraMetadata) {
if (!episodeDublinCore.hasValue(property) && series.hasValue(property)) {
episodeDublinCore.set(property, series.get(property));
}
}
try (InputStream in = IOUtils.toInputStream(episodeDublinCore.toXmlString(), "UTF-8")) {
String filename = FilenameUtils.getName(episodeCatalog.getURI().toString());
URI uri = workspace.put(mediaPackage.getIdentifier().toString(), episodeCatalog.getIdentifier(), filename, in);
episodeCatalog.setURI(uri);
// the URI now points to a new source, so the previous checksum is most likely invalid
episodeCatalog.setChecksum(null);
} catch (Exception e) {
logger.error("Unable to update episode catalog isPartOf field: {}", ExceptionUtils.getStackTrace(e));
throw new WorkflowOperationException(e);
}
}
// Attach series catalogs
if (optAttachFlavors.isSome()) {
// Remove existing series catalogs
AbstractMediaPackageElementSelector<Catalog> catalogSelector = new CatalogSelector();
String[] seriesFlavors = StringUtils.split(optAttachFlavors.get(), ",");
for (String flavor : seriesFlavors) {
if ("*".equals(flavor)) {
catalogSelector.addFlavor("*/*");
} else {
catalogSelector.addFlavor(flavor);
}
}
for (Catalog c : catalogSelector.select(mediaPackage, false)) {
if (MediaPackageElements.SERIES.equals(c.getFlavor()) || "series".equals(c.getFlavor().getSubtype())) {
mediaPackage.remove(c);
}
}
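// For each configured flavor, attach a matching series catalog: the main series flavor comes
// from the series catalog itself, extended flavors from the series element data of a matching adapter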
List<SeriesCatalogUIAdapter> adapters = getSeriesCatalogUIAdapters();
for (String flavorString : seriesFlavors) {
MediaPackageElementFlavor flavor;
if ("*".equals(flavorString)) {
flavor = MediaPackageElementFlavor.parseFlavor("*/*");
} else {
flavor = MediaPackageElementFlavor.parseFlavor(flavorString);
}
for (SeriesCatalogUIAdapter a : adapters) {
MediaPackageElementFlavor adapterFlavor = MediaPackageElementFlavor.parseFlavor(a.getFlavor());
if (flavor.matches(adapterFlavor)) {
if (MediaPackageElements.SERIES.eq(a.getFlavor())) {
addDublinCoreCatalog(series, MediaPackageElements.SERIES, mediaPackage);
} else {
try {
Opt<byte[]> seriesElementData = seriesService.getSeriesElementData(seriesId, adapterFlavor.getType());
if (seriesElementData.isSome()) {
DublinCoreCatalog catalog = DublinCores.read(new ByteArrayInputStream(seriesElementData.get()));
addDublinCoreCatalog(catalog, adapterFlavor, mediaPackage);
} else {
logger.warn("No extended series catalog found for flavor '{}' and series '{}', skip adding catalog", adapterFlavor.getType(), seriesId);
}
} catch (SeriesException e) {
logger.error("Unable to load extended series metadata for flavor {}", adapterFlavor.getType());
throw new WorkflowOperationException(e);
}
}
}
}
}
}
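// Optionally inherit the access control list defined on the series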
if (applyAcl) {
try {
AccessControlList acl = seriesService.getSeriesAccessControl(seriesId);
if (acl != null)
authorizationService.setAcl(mediaPackage, AclScope.Series, acl);
} catch (Exception e) {
logger.error("Unable to update series ACL: {}", ExceptionUtils.getStackTrace(e));
throw new WorkflowOperationException(e);
}
}
return createResult(mediaPackage, Action.CONTINUE);
}
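The copy-metadata handling above splits the configured value on commas, resolves each entry against the default namespace, and silently skips invalid property names. That parsing can be exercised in isolation; a minimal sketch under those assumptions, with the hypothetical helper name parseExtraMetadata:
static Set<EName> parseExtraMetadata(String copyMetadata, String defaultNamespace) {
  Set<EName> extra = new HashSet<>();
  if (StringUtils.isBlank(copyMetadata)) {
    return extra;
  }
  for (String name : copyMetadata.split(",+\\s*")) {
    if (name.isEmpty()) {
      continue;
    }
    try {
      // Resolve unprefixed names against the configured default namespace
      extra.add(EName.fromString(name, defaultNamespace));
    } catch (IllegalArgumentException e) {
      // Skip malformed Dublin Core property names, as the handler does
    }
  }
  return extra;
}
For example, parseExtraMetadata("creator,license", DublinCore.TERMS_NS_URI) would resolve both entries in the dcterms namespace.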
Use of org.opencastproject.workflow.api.WorkflowOperationException in project opencast by opencast.
The class AnalyzeTracksWorkflowOperationHandlerTest, method testStart.
@Test
public void testStart() throws MediaPackageException, WorkflowOperationException {
MediaPackage mediaPackage = MediaPackageBuilderFactory.newInstance().newMediaPackageBuilder().createNew();
VideoStreamImpl videoStream = new VideoStreamImpl("234");
videoStream.setFrameWidth(1280);
videoStream.setFrameHeight(720);
videoStream.setFrameRate(30.0f);
TrackImpl track = new TrackImpl();
track.setFlavor(MediaPackageElementFlavor.parseFlavor("presenter/source"));
track.addStream(videoStream);
JobContext jobContext = EasyMock.createMock(JobContext.class);
EasyMock.replay(jobContext);
WorkflowOperationInstance operationInstance = EasyMock.createMock(WorkflowOperationInstance.class);
String[][] config = { { AnalyzeTracksWorkflowOperationHandler.OPT_SOURCE_FLAVOR, "*/source" }, { AnalyzeTracksWorkflowOperationHandler.OPT_VIDEO_ASPECT, "4/3,16/9" } };
for (String[] cfg : config) {
EasyMock.expect(operationInstance.getConfiguration(cfg[0])).andReturn(cfg[1]).anyTimes();
}
EasyMock.expect(operationInstance.getConfiguration(AnalyzeTracksWorkflowOperationHandler.OPT_FAIL_NO_TRACK)).andReturn("true");
EasyMock.expect(operationInstance.getConfiguration(AnalyzeTracksWorkflowOperationHandler.OPT_FAIL_NO_TRACK)).andReturn("false").anyTimes();
EasyMock.replay(operationInstance);
WorkflowInstance workflowInstance = EasyMock.createMock(WorkflowInstance.class);
EasyMock.expect(workflowInstance.getMediaPackage()).andReturn(mediaPackage).anyTimes();
EasyMock.expect(workflowInstance.getId()).andReturn(0L).anyTimes();
EasyMock.expect(workflowInstance.getCurrentOperation()).andReturn(operationInstance).anyTimes();
EasyMock.replay(workflowInstance);
// With no matching track (should fail)
try {
operationHandler.start(workflowInstance, jobContext);
fail();
} catch (WorkflowOperationException e) {
logger.info("Fail on no tracks works");
}
WorkflowOperationResult workflowOperationResult = operationHandler.start(workflowInstance, jobContext);
Map<String, String> properties = workflowOperationResult.getProperties();
assertTrue(properties.isEmpty());
// With matching track
mediaPackage.add(track);
workflowOperationResult = operationHandler.start(workflowInstance, jobContext);
properties = workflowOperationResult.getProperties();
String[][] props = { { "presenter_source_media", "true" }, { "presenter_source_audio", "false" }, { "presenter_source_aspect", "16/9" }, { "presenter_source_resolution_y", "720" }, { "presenter_source_resolution_x", "1280" }, { "presenter_source_aspect_snap", "16/9" }, { "presenter_source_video", "true" }, { "presenter_source_framerate", "30.0" } };
for (String[] prop : props) {
assertEquals(prop[1], properties.get(prop[0]));
}
}
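The try/fail/catch block above checks that the first start() call throws a WorkflowOperationException (fail-on-no-track is answered with "true" only for the first invocation). With JUnit 4.13+ or JUnit 5 on the classpath, the same check can be written more compactly; a sketch:
// Equivalent assertion using assertThrows instead of try/fail/catch
assertThrows(WorkflowOperationException.class,
    () -> operationHandler.start(workflowInstance, jobContext));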
Use of org.opencastproject.workflow.api.WorkflowOperationException in project opencast by opencast.
The class ZipWorkflowOperationHandler, method start.
/**
* {@inheritDoc}
*
* @see org.opencastproject.workflow.api.AbstractWorkflowOperationHandler#start(org.opencastproject.workflow.api.WorkflowInstance,
* JobContext)
*/
@Override
public WorkflowOperationResult start(final WorkflowInstance workflowInstance, JobContext context) throws WorkflowOperationException {
if (workflowInstance == null) {
throw new WorkflowOperationException("Invalid workflow instance");
}
final MediaPackage mediaPackage = workflowInstance.getMediaPackage();
final WorkflowOperationInstance currentOperation = workflowInstance.getCurrentOperation();
if (currentOperation == null) {
throw new WorkflowOperationException("Cannot get current workflow operation");
}
String flavors = currentOperation.getConfiguration(INCLUDE_FLAVORS_PROPERTY);
final List<MediaPackageElementFlavor> flavorsToZip = new ArrayList<MediaPackageElementFlavor>();
MediaPackageElementFlavor targetFlavor = DEFAULT_ARCHIVE_FLAVOR;
// Read the target flavor
String targetFlavorOption = currentOperation.getConfiguration(TARGET_FLAVOR_PROPERTY);
try {
targetFlavor = targetFlavorOption == null ? DEFAULT_ARCHIVE_FLAVOR : MediaPackageElementFlavor.parseFlavor(targetFlavorOption);
logger.trace("Using '{}' as the target flavor for the zip archive of recording {}", targetFlavor, mediaPackage);
} catch (IllegalArgumentException e) {
throw new WorkflowOperationException("Flavor '" + targetFlavorOption + "' is not valid", e);
}
// Read the target tags
String targetTagsOption = StringUtils.trimToEmpty(currentOperation.getConfiguration(TARGET_TAGS_PROPERTY));
String[] targetTags = StringUtils.split(targetTagsOption, ",");
// If the configuration does not specify flavors, just zip them all
if (flavors == null) {
flavorsToZip.add(MediaPackageElementFlavor.parseFlavor("*/*"));
} else {
for (String flavor : asList(flavors)) {
flavorsToZip.add(MediaPackageElementFlavor.parseFlavor(flavor));
}
}
logger.info("Archiving mediapackage {} in workflow {}", mediaPackage, workflowInstance);
String compressProperty = currentOperation.getConfiguration(COMPRESS_PROPERTY);
boolean compress = compressProperty == null ? false : Boolean.valueOf(compressProperty);
// Zip the contents of the mediapackage
File zip = null;
try {
logger.info("Creating zipped archive of recording {}", mediaPackage);
zip = zip(mediaPackage, flavorsToZip, compress);
} catch (Exception e) {
throw new WorkflowOperationException("Unable to create a zip archive from mediapackage " + mediaPackage, e);
}
// Get the collection for storing the archived mediapackage
String configuredCollectionId = currentOperation.getConfiguration(ZIP_COLLECTION_PROPERTY);
String collectionId = configuredCollectionId == null ? DEFAULT_ZIP_COLLECTION : configuredCollectionId;
// Add the zip as an attachment to the mediapackage
logger.info("Moving zipped archive of recording {} to the working file repository collection '{}'", mediaPackage, collectionId);
InputStream in = null;
URI uri = null;
try {
in = new FileInputStream(zip);
uri = workspace.putInCollection(collectionId, mediaPackage.getIdentifier().compact() + ".zip", in);
logger.info("Zipped archive of recording {} is available from {}", mediaPackage, uri);
} catch (FileNotFoundException e) {
throw new WorkflowOperationException("zip file " + zip + " not found", e);
} catch (IOException e) {
throw new WorkflowOperationException(e);
} finally {
IOUtils.closeQuietly(in);
}
Attachment attachment = (Attachment) MediaPackageElementBuilderFactory.newInstance().newElementBuilder().elementFromURI(uri, Type.Attachment, targetFlavor);
try {
attachment.setChecksum(Checksum.create(ChecksumType.DEFAULT_TYPE, zip));
} catch (IOException e) {
throw new WorkflowOperationException(e);
}
attachment.setMimeType(MimeTypes.ZIP);
// Apply the target tags
for (String tag : targetTags) {
attachment.addTag(tag);
logger.trace("Tagging the archive of recording '{}' with '{}'", mediaPackage, tag);
}
// The zip file is safely stored in the working file repository, so it's now safe to remove the local zip file
try {
FileUtils.forceDelete(zip);
} catch (Exception e) {
throw new WorkflowOperationException(e);
}
mediaPackage.add(attachment);
return createResult(mediaPackage, Action.CONTINUE);
}
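The stream handling around putInCollection can also be written with try-with-resources, which closes the stream even when the repository call fails; a sketch of that variant, with the two original catch blocks collapsed into a single IOException handler (FileNotFoundException is a subclass of IOException):
URI uri;
try (InputStream in = new FileInputStream(zip)) {
  // Store the zip in the configured working file repository collection
  uri = workspace.putInCollection(collectionId, mediaPackage.getIdentifier().compact() + ".zip", in);
} catch (IOException e) {
  throw new WorkflowOperationException("Unable to store " + zip + " in collection " + collectionId, e);
}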
Use of org.opencastproject.workflow.api.WorkflowOperationException in project opencast by opencast.
The class IncludeWorkflowOperationHandler, method insertWorkflow.
/**
* Adds the operations found in the workflow defined by <code>workflowDefinitionId</code> to the workflow instance.
*
* @param wi
* the instance to insert the workflow identified by <code>workflowDefinitionId</code> into
* @param workflowDefinitionId
* id of the workflow definition to insert
* @throws WorkflowOperationException
* in case of any error
*/
public void insertWorkflow(final WorkflowInstance wi, final String workflowDefinitionId) throws WorkflowOperationException {
try {
final WorkflowDefinition definition = workflowService.getWorkflowDefinitionById(workflowDefinitionId);
if (definition != null) {
logger.info(format("Insert workflow %s into the current workflow instance", workflowDefinitionId));
wi.insert(definition, wi.getCurrentOperation());
} else {
logger.warn(format("Workflow definition %s cannot be found", workflowDefinitionId));
}
} catch (Exception e) {
throw new WorkflowOperationException("Error inserting workflow " + workflowDefinitionId, e);
}
}
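For context, insertWorkflow is typically invoked from the handler's start() with a workflow id read from the operation configuration. A minimal sketch of such a caller; the configuration key "workflow-id" follows the include operation's documented option, and the rest is illustrative rather than the exact Opencast implementation:
@Override
public WorkflowOperationResult start(WorkflowInstance workflowInstance, JobContext context)
    throws WorkflowOperationException {
  // Read the id of the workflow definition whose operations should be spliced in
  String workflowId = StringUtils.trimToNull(
      workflowInstance.getCurrentOperation().getConfiguration("workflow-id"));
  if (workflowId == null) {
    throw new WorkflowOperationException("No workflow id configured for the include operation");
  }
  insertWorkflow(workflowInstance, workflowId);
  return createResult(workflowInstance.getMediaPackage(), Action.CONTINUE);
}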