Use of org.opencastproject.mediapackage.MediaPackageElement in project opencast by opencast.
The class IngestServiceImpl, method addZippedMediaPackage.
/**
* {@inheritDoc}
*
* @see org.opencastproject.ingest.api.IngestService#addZippedMediaPackage(java.io.InputStream, java.lang.String,
* java.util.Map, java.lang.Long)
*/
@Override
public WorkflowInstance addZippedMediaPackage(InputStream zipStream, String workflowDefinitionId, Map<String, String> workflowConfig, Long workflowInstanceId) throws MediaPackageException, IOException, IngestException, NotFoundException, UnauthorizedException {
// Start a job synchronously. We can't keep the open input stream waiting around.
Job job = null;
if (StringUtils.isNotBlank(workflowDefinitionId)) {
try {
workflowService.getWorkflowDefinitionById(workflowDefinitionId);
} catch (WorkflowDatabaseException e) {
throw new IngestException(e);
} catch (NotFoundException nfe) {
logger.warn("Workflow definition {} not found, using default workflow {} instead", workflowDefinitionId, defaultWorkflowDefinionId);
workflowDefinitionId = defaultWorkflowDefinionId;
}
}
if (workflowInstanceId != null) {
logger.warn("Deprecated method! Ingesting zipped mediapackage with workflow {}", workflowInstanceId);
} else {
logger.info("Ingesting zipped mediapackage");
}
ZipArchiveInputStream zis = null;
Set<String> collectionFilenames = new HashSet<>();
try {
// We don't need anybody to do the dispatching for us. Therefore we need to make sure that the job is never in
// QUEUED state but set it to INSTANTIATED in the beginning and then manually switch it to RUNNING.
job = serviceRegistry.createJob(JOB_TYPE, INGEST_ZIP, null, null, false, ingestZipJobLoad);
job.setStatus(Status.RUNNING);
job = serviceRegistry.updateJob(job);
// Create the working file target collection for this ingest operation
String wfrCollectionId = Long.toString(job.getId());
zis = new ZipArchiveInputStream(zipStream);
ZipArchiveEntry entry;
MediaPackage mp = null;
Map<String, URI> uris = new HashMap<>();
// Sequential number to append to file names so that, if two files have the same
// name, one does not overwrite the other (see MH-9688)
int seq = 1;
// Folder name to compare with next one to figure out if there's a root folder
String folderName = null;
// Indicates if zip has a root folder or not, initialized as true
boolean hasRootFolder = true;
// While there are entries, write them to a collection
while ((entry = zis.getNextZipEntry()) != null) {
try {
if (entry.isDirectory() || entry.getName().contains("__MACOSX"))
continue;
if (entry.getName().endsWith("manifest.xml") || entry.getName().endsWith("index.xml")) {
// Build the mediapackage
mp = loadMediaPackageFromManifest(new ZipEntryInputStream(zis, entry.getSize()));
} else {
logger.info("Storing zip entry {}/{} in working file repository collection '{}'", job.getId(), entry.getName(), wfrCollectionId);
// Since the directory structure is not being mirrored, make sure the file
// name is different from the previous one(s) by adding a sequential number
String fileName = FilenameUtils.getBaseName(entry.getName()) + "_" + seq++ + "." + FilenameUtils.getExtension(entry.getName());
URI contentUri = workingFileRepository.putInCollection(wfrCollectionId, fileName, new ZipEntryInputStream(zis, entry.getSize()));
collectionFilenames.add(fileName);
// Key is the zip entry name as it is
String key = entry.getName();
uris.put(key, contentUri);
ingestStatistics.add(entry.getSize());
logger.info("Zip entry {}/{} stored at {}", job.getId(), entry.getName(), contentUri);
// Figure out if there's a root folder. Does the entry name start with a folder?
int pos = entry.getName().indexOf('/');
if (pos == -1) {
// No, we can conclude there's no root folder
hasRootFolder = false;
} else if (hasRootFolder && folderName != null && !folderName.equals(entry.getName().substring(0, pos))) {
// Folder name different from previous so there's no root folder
hasRootFolder = false;
} else if (folderName == null) {
// Just initialize folder name
folderName = entry.getName().substring(0, pos);
}
}
} catch (IOException e) {
logger.warn("Unable to process zip entry {}: {}", entry.getName(), e);
throw e;
}
}
if (mp == null)
throw new MediaPackageException("No manifest found in this zip");
// Determine the mediapackage identifier
if (mp.getIdentifier() == null || isBlank(mp.getIdentifier().toString()))
mp.setIdentifier(new UUIDIdBuilderImpl().createNew());
String mediaPackageId = mp.getIdentifier().toString();
logger.info("Ingesting mediapackage {} is named '{}'", mediaPackageId, mp.getTitle());
// Make sure there are tracks in the mediapackage
if (mp.getTracks().length == 0) {
logger.warn("Mediapackage {} has no media tracks", mediaPackageId);
}
// Update the element uris to point to their working file repository location
for (MediaPackageElement element : mp.elements()) {
// Key has root folder name if there is one
URI uri = uris.get((hasRootFolder ? folderName + "/" : "") + element.getURI().toString());
if (uri == null)
throw new MediaPackageException("Unable to map element name '" + element.getURI() + "' to workspace uri");
logger.info("Ingested mediapackage element {}/{} located at {}", mediaPackageId, element.getIdentifier(), uri);
URI dest = workingFileRepository.moveTo(wfrCollectionId, FilenameUtils.getName(uri.toString()), mediaPackageId, element.getIdentifier(), FilenameUtils.getName(element.getURI().toString()));
element.setURI(dest);
// TODO: This should be triggered somehow instead of being handled here
if (MediaPackageElements.SERIES.equals(element.getFlavor())) {
logger.info("Ingested mediapackage {} contains updated series information", mediaPackageId);
updateSeries(element.getURI());
}
}
// Now that all elements are in place, start with ingest
logger.info("Initiating processing of ingested mediapackage {}", mediaPackageId);
WorkflowInstance workflowInstance = ingest(mp, workflowDefinitionId, workflowConfig, workflowInstanceId);
logger.info("Ingest of mediapackage {} done", mediaPackageId);
job.setStatus(Job.Status.FINISHED);
return workflowInstance;
} catch (ServiceRegistryException e) {
throw new IngestException(e);
} catch (MediaPackageException e) {
job.setStatus(Job.Status.FAILED, Job.FailureReason.DATA);
throw e;
} catch (Exception e) {
if (e instanceof IngestException)
throw (IngestException) e;
throw new IngestException(e);
} finally {
IOUtils.closeQuietly(zis);
finallyUpdateJob(job);
for (String filename : collectionFilenames) {
workingFileRepository.deleteFromCollection(Long.toString(job.getId()), filename, true);
}
}
}
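For context, a minimal caller sketch follows. It assumes an IngestService reference obtained elsewhere (for example via OSGi injection) and a zip file on local disk; the workflow definition id "schedule-and-upload" and the "flagForCutting" property are illustrative placeholders, not values taken from the method above.

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.opencastproject.ingest.api.IngestService;
import org.opencastproject.workflow.api.WorkflowInstance;

public class ZipIngestCaller {
  // Assumed to be injected, e.g. via OSGi declarative services
  private IngestService ingestService;

  public WorkflowInstance ingestZip(String zipPath) throws Exception {
    Map<String, String> workflowConfig = new HashMap<>();
    workflowConfig.put("flagForCutting", "false"); // hypothetical workflow property
    try (InputStream zipStream = new FileInputStream(zipPath)) {
      // The last argument (workflowInstanceId) is deprecated, so null is passed
      return ingestService.addZippedMediaPackage(zipStream, "schedule-and-upload", workflowConfig, null);
    }
  }
}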
Use of org.opencastproject.mediapackage.MediaPackageElement in project opencast by opencast.
The class IndexServiceImplTest, method testCreateEventInputNormalExpectsCreatedEvent.
@Test
public void testCreateEventInputNormalExpectsCreatedEvent() throws Exception {
String expectedTitle = "Test Event Creation";
String username = "akm220";
String org = "mh_default_org";
String[] creators = new String[] {};
Id mpId = new IdImpl("mp-id");
String testResourceLocation = "/events/create-event.json";
JSONObject metadataJson = (JSONObject) parser.parse(IOUtils.toString(IndexServiceImplTest.class.getResourceAsStream(testResourceLocation)));
Capture<Catalog> result = EasyMock.newCapture();
Capture<String> mediapackageIdResult = EasyMock.newCapture();
Capture<String> catalogIdResult = EasyMock.newCapture();
Capture<String> filenameResult = EasyMock.newCapture();
Capture<InputStream> catalogResult = EasyMock.newCapture();
Capture<String> mediapackageTitleResult = EasyMock.newCapture();
SecurityService securityService = setupSecurityService(username, org);
Workspace workspace = EasyMock.createMock(Workspace.class);
EasyMock.expect(workspace.put(EasyMock.capture(mediapackageIdResult), EasyMock.capture(catalogIdResult), EasyMock.capture(filenameResult), EasyMock.capture(catalogResult))).andReturn(new URI("catalog.xml"));
EasyMock.replay(workspace);
// Create Common Event Catalog UI Adapter
CommonEventCatalogUIAdapter commonEventCatalogUIAdapter = setupCommonCatalogUIAdapter(workspace).getA();
// Setup mediapackage.
MediaPackage mediapackage = EasyMock.createMock(MediaPackage.class);
mediapackage.add(EasyMock.capture(result));
EasyMock.expectLastCall();
EasyMock.expect(mediapackage.getCatalogs(EasyMock.anyObject(MediaPackageElementFlavor.class))).andReturn(new Catalog[] {});
EasyMock.expect(mediapackage.getIdentifier()).andReturn(mpId).anyTimes();
EasyMock.expect(mediapackage.getCreators()).andReturn(creators);
mediapackage.addCreator("");
EasyMock.expectLastCall();
mediapackage.setTitle(EasyMock.capture(mediapackageTitleResult));
EasyMock.expectLastCall();
EasyMock.expect(mediapackage.getElements()).andReturn(new MediaPackageElement[] {}).anyTimes();
EasyMock.expect(mediapackage.getCatalogs(EasyMock.anyObject(MediaPackageElementFlavor.class))).andReturn(new Catalog[] {}).anyTimes();
EasyMock.expect(mediapackage.getSeries()).andReturn(null).anyTimes();
mediapackage.setSeries(EasyMock.anyString());
mediapackage.setSeriesTitle(EasyMock.anyString());
EasyMock.expectLastCall();
EasyMock.replay(mediapackage);
IngestService ingestService = setupIngestService(mediapackage, Capture.<InputStream>newInstance());
// Setup Authorization Service
Tuple<MediaPackage, Attachment> returnValue = new Tuple<MediaPackage, Attachment>(mediapackage, null);
AuthorizationService authorizationService = EasyMock.createMock(AuthorizationService.class);
EasyMock.expect(authorizationService.setAcl(EasyMock.anyObject(MediaPackage.class), EasyMock.anyObject(AclScope.class), EasyMock.anyObject(AccessControlList.class))).andReturn(returnValue);
EasyMock.replay(authorizationService);
// Run Test
IndexServiceImpl indexServiceImpl = new IndexServiceImpl();
indexServiceImpl.setAuthorizationService(setupAuthorizationService(mediapackage));
indexServiceImpl.setIngestService(ingestService);
indexServiceImpl.setCommonEventCatalogUIAdapter(commonEventCatalogUIAdapter);
indexServiceImpl.addCatalogUIAdapter(commonEventCatalogUIAdapter);
indexServiceImpl.setUserDirectoryService(noUsersUserDirectoryService);
indexServiceImpl.setSecurityService(securityService);
indexServiceImpl.setWorkspace(workspace);
indexServiceImpl.createEvent(metadataJson, mediapackage);
assertTrue("The catalog must be added to the mediapackage", result.hasCaptured());
assertEquals("The catalog should have been added to the correct mediapackage", mpId.toString(), mediapackageIdResult.getValue());
assertTrue("The catalog should have a new id", catalogIdResult.hasCaptured());
assertTrue("The catalog should have a new filename", filenameResult.hasCaptured());
assertTrue("The catalog should have been added to the input stream", catalogResult.hasCaptured());
assertTrue("The mediapackage should have had its title updated", catalogResult.hasCaptured());
assertEquals("The mediapackage title should have been updated.", expectedTitle, mediapackageTitleResult.getValue());
assertTrue("The catalog should have been created", catalogResult.hasCaptured());
}
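The test above relies throughout on EasyMock's Capture API to inspect the arguments that the service under test hands to its mocked collaborators. The standalone sketch below isolates that idiom; the TitleStore interface and all names in it are invented purely for illustration.

import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Test;

public class CaptureIdiomSketchTest {

  // Hypothetical collaborator, only here to illustrate the capture idiom
  interface TitleStore {
    void storeTitle(String title);
  }

  @Test
  public void capturesArgumentPassedToMock() {
    Capture<String> titleResult = EasyMock.newCapture();
    TitleStore store = EasyMock.createMock(TitleStore.class);
    // Record the expected call and capture whatever argument is passed to it
    store.storeTitle(EasyMock.capture(titleResult));
    EasyMock.expectLastCall();
    EasyMock.replay(store);

    // This is what the code under test would do
    store.storeTitle("Test Event Creation");

    EasyMock.verify(store);
    Assert.assertTrue(titleResult.hasCaptured());
    Assert.assertEquals("Test Event Creation", titleResult.getValue());
  }
}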
Use of org.opencastproject.mediapackage.MediaPackageElement in project opencast by opencast.
The class IngestDownloadWorkflowOperationHandler, method start.
/**
* {@inheritDoc}
*
* @see org.opencastproject.workflow.api.AbstractWorkflowOperationHandler#start(org.opencastproject.workflow.api.WorkflowInstance,
* JobContext)
*/
@Override
public WorkflowOperationResult start(WorkflowInstance workflowInstance, JobContext context) throws WorkflowOperationException {
MediaPackage mediaPackage = workflowInstance.getMediaPackage();
WorkflowOperationInstance currentOperation = workflowInstance.getCurrentOperation();
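// Operation configuration controlling whether the external copies are deleted after download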
boolean deleteExternal = BooleanUtils.toBoolean(currentOperation.getConfiguration(DELETE_EXTERNAL));
String baseUrl = workspace.getBaseUri().toString();
List<URI> externalUris = new ArrayList<URI>();
for (MediaPackageElement element : mediaPackage.getElements()) {
if (element.getURI() == null)
continue;
if (element.getElementType() == MediaPackageElement.Type.Publication) {
logger.debug("Skipping downloading media package element {} from media package {} " + "because it is a publication: {}", element.getIdentifier(), mediaPackage.getIdentifier().compact(), element.getURI());
continue;
}
URI originalElementUri = element.getURI();
if (originalElementUri.toString().startsWith(baseUrl)) {
logger.info("Skipping downloading already existing element {}", originalElementUri);
continue;
}
// Download the external URI
File file;
try {
file = workspace.get(element.getURI());
} catch (Exception e) {
logger.warn("Unable to download the external element {}", element.getURI());
throw new WorkflowOperationException("Unable to download the external element " + element.getURI(), e);
}
// Put to working file repository and rewrite URI on element
InputStream in = null;
try {
in = new FileInputStream(file);
URI uri = workspace.put(mediaPackage.getIdentifier().compact(), element.getIdentifier(), FilenameUtils.getName(element.getURI().getPath()), in);
element.setURI(uri);
} catch (Exception e) {
logger.warn("Unable to store downloaded element '{}': {}", element.getURI(), e.getMessage());
throw new WorkflowOperationException("Unable to store downloaded element " + element.getURI(), e);
} finally {
IOUtils.closeQuietly(in);
try {
workspace.delete(originalElementUri);
} catch (Exception e) {
logger.warn("Unable to delete ingest-downloaded element {}: {}", element.getURI(), e);
}
}
logger.info("Downloaded the external element {}", originalElementUri);
// Store original URI for deletion
externalUris.add(originalElementUri);
}
if (!deleteExternal || externalUris.size() == 0)
return createResult(mediaPackage, Action.CONTINUE);
// Find all external working file repository base URLs
logger.debug("Assembling list of external working file repositories");
List<String> externalWfrBaseUrls = new ArrayList<String>();
try {
for (ServiceRegistration reg : serviceRegistry.getServiceRegistrationsByType(WorkingFileRepository.SERVICE_TYPE)) {
if (baseUrl.startsWith(reg.getHost())) {
logger.trace("Skpping local working file repository");
continue;
}
externalWfrBaseUrls.add(UrlSupport.concat(reg.getHost(), reg.getPath()));
}
logger.debug("{} external working file repositories found", externalWfrBaseUrls.size());
} catch (ServiceRegistryException e) {
logger.error("Unable to load WFR services from service registry: {}", e.getMessage());
throw new WorkflowOperationException(e);
}
for (URI uri : externalUris) {
String elementUri = uri.toString();
// Delete external working file repository URIs
String wfrBaseUrl = null;
for (String url : externalWfrBaseUrls) {
if (elementUri.startsWith(url)) {
wfrBaseUrl = url;
break;
}
}
if (wfrBaseUrl == null) {
logger.info("Unable to delete external URI {}, no working file repository found", elementUri);
continue;
}
HttpDelete delete;
if (elementUri.startsWith(UrlSupport.concat(wfrBaseUrl, WorkingFileRepository.MEDIAPACKAGE_PATH_PREFIX))) {
String wfrDeleteUrl = elementUri.substring(0, elementUri.lastIndexOf("/"));
delete = new HttpDelete(wfrDeleteUrl);
} else if (elementUri.startsWith(UrlSupport.concat(wfrBaseUrl, WorkingFileRepository.COLLECTION_PATH_PREFIX))) {
delete = new HttpDelete(elementUri);
} else {
logger.info("Unable to handle working file repository URI {}", elementUri);
continue;
}
HttpResponse response = null;
try {
response = client.execute(delete);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_NO_CONTENT || statusCode == HttpStatus.SC_OK) {
logger.info("Sucessfully deleted external URI {}", delete.getURI());
} else if (statusCode == HttpStatus.SC_NOT_FOUND) {
logger.info("External URI {} has already been deleted", delete.getURI());
} else {
logger.info("Unable to delete external URI {}, status code '{}' returned", delete.getURI(), statusCode);
}
} catch (TrustedHttpClientException e) {
logger.warn("Unable to execute DELETE request on external URI {}", delete.getURI());
throw new WorkflowOperationException(e);
} finally {
client.close(response);
}
}
return createResult(mediaPackage, Action.CONTINUE);
}
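The per-URI clean-up above mixes URL matching with HTTP execution. The helper below restates just the URL-derivation step as a standalone sketch, reusing the UrlSupport and WorkingFileRepository constants referenced above (their package names are assumed here); execution and error handling are deliberately omitted.

import org.apache.http.client.methods.HttpDelete;
import org.opencastproject.util.UrlSupport;
import org.opencastproject.workingfilerepository.api.WorkingFileRepository;

public final class WfrDeleteRequestSketch {

  /** Derives the DELETE request for an external element URI, or returns null if the URI layout is not recognized. */
  public static HttpDelete deleteRequestFor(String elementUri, String wfrBaseUrl) {
    if (elementUri.startsWith(UrlSupport.concat(wfrBaseUrl, WorkingFileRepository.MEDIAPACKAGE_PATH_PREFIX))) {
      // Mediapackage-style URIs end in a filename segment; as in the operation above,
      // the DELETE is issued against the URI with that last segment stripped
      return new HttpDelete(elementUri.substring(0, elementUri.lastIndexOf('/')));
    } else if (elementUri.startsWith(UrlSupport.concat(wfrBaseUrl, WorkingFileRepository.COLLECTION_PATH_PREFIX))) {
      // Collection items are deleted via their own URI
      return new HttpDelete(elementUri);
    }
    return null; // unknown layout; the operation above logs this case and skips the URI
  }
}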
Use of org.opencastproject.mediapackage.MediaPackageElement in project opencast by opencast.
The class MediaInspectionServiceImpl, method process.
/**
* {@inheritDoc}
*
* @see org.opencastproject.job.api.AbstractJobProducer#process(org.opencastproject.job.api.Job)
*/
@Override
protected String process(Job job) throws Exception {
Operation op = null;
String operation = job.getOperation();
List<String> arguments = job.getArguments();
try {
op = Operation.valueOf(operation);
MediaPackageElement inspectedElement = null;
Map<String, String> options = null;
switch(op) {
case Inspect:
URI uri = URI.create(arguments.get(0));
options = Options.fromJson(arguments.get(1));
inspectedElement = inspector.inspectTrack(uri, options);
break;
case Enrich:
MediaPackageElement element = MediaPackageElementParser.getFromXml(arguments.get(0));
boolean overwrite = Boolean.parseBoolean(arguments.get(1));
options = Options.fromJson(arguments.get(2));
inspectedElement = inspector.enrich(element, overwrite, options);
break;
default:
throw new IllegalStateException("Don't know how to handle operation '" + operation + "'");
}
return MediaPackageElementParser.getAsXml(inspectedElement);
} catch (IllegalArgumentException e) {
throw new ServiceRegistryException("This service can't handle operations of type '" + op + "'", e);
} catch (IndexOutOfBoundsException e) {
throw new ServiceRegistryException("This argument list for operation '" + op + "' does not meet expectations", e);
} catch (Exception e) {
throw new ServiceRegistryException("Error handling operation '" + op + "'", e);
}
}
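Seen from the submitting side, the argument layout that this process() method reads back could be assembled as sketched below. The helper is hypothetical and only mirrors the indices used in the switch above; how the arguments are actually handed to the service registry is not shown in this snippet.

import java.net.URI;
import java.util.Arrays;
import java.util.List;
import org.opencastproject.mediapackage.MediaPackageElement;
import org.opencastproject.mediapackage.MediaPackageElementParser;

public final class InspectionJobArguments {

  // Inspect: [0] = URI of the track to inspect, [1] = options serialized as JSON
  public static List<String> inspectArgs(URI trackUri, String optionsJson) {
    return Arrays.asList(trackUri.toString(), optionsJson);
  }

  // Enrich: [0] = element serialized to XML, [1] = overwrite flag, [2] = options serialized as JSON
  public static List<String> enrichArgs(MediaPackageElement element, boolean overwrite, String optionsJson) throws Exception {
    return Arrays.asList(MediaPackageElementParser.getAsXml(element), Boolean.toString(overwrite), optionsJson);
  }
}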
Use of org.opencastproject.mediapackage.MediaPackageElement in project opencast by opencast.
The class DuplicateEventWorkflowOperationHandlerTest, method mockDependencies.
private void mockDependencies(int numberOfCopies) throws Exception {
clonedMediaPackages = Capture.newInstance(CaptureType.ALL);
reset(workspace, assetManager, distributionService);
URI uriDc = getClass().getResource("/dublincore.xml").toURI();
for (int i = 0; i < numberOfCopies; i++) {
expect(workspace.read(eq(URI.create("dublincore.xml")))).andReturn(new FileInputStream(new File(uriDc))).times(1);
}
expect(workspace.get(anyObject())).andReturn(new File(getClass().getResource("/av.mov").toURI())).anyTimes();
expect(workspace.put(anyString(), anyString(), eq("dublincore.xml"), anyObject())).andReturn(uriDc).times(numberOfCopies);
replay(workspace);
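// Asset manager query stubs: pretend that no prior records exist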
final AResult qResult = createNiceMock(AResult.class);
expect(qResult.getRecords()).andReturn(Stream.empty()).anyTimes();
replay(qResult);
final ASelectQuery qSelect = createNiceMock(ASelectQuery.class);
expect(qSelect.where(anyObject())).andReturn(qSelect).anyTimes();
expect(qSelect.run()).andReturn(qResult).anyTimes();
replay(qSelect);
final AQueryBuilder qBuilder = createNiceMock(AQueryBuilder.class);
expect(qBuilder.select(anyObject())).andReturn(qSelect).anyTimes();
replay(qBuilder);
expect(assetManager.createQuery()).andReturn(qBuilder).anyTimes();
expect(assetManager.takeSnapshot(eq(AssetManager.DEFAULT_OWNER), capture(clonedMediaPackages))).andReturn(createNiceMock(Snapshot.class)).times(numberOfCopies);
replay(assetManager);
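// Distribution job mock: reports FINISHED and returns each internal publication element as its payload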
final Job distributionJob = createNiceMock(Job.class);
final Publication internalPub = (Publication) mp.getElementById("pub-int");
final List<MediaPackageElement> internalPubElements = new ArrayList<>();
Collections.addAll(internalPubElements, (internalPub.getAttachments()));
Collections.addAll(internalPubElements, (internalPub.getCatalogs()));
Collections.addAll(internalPubElements, (internalPub.getTracks()));
expect(distributionJob.getStatus()).andReturn(Job.Status.FINISHED).anyTimes();
for (MediaPackageElement e : internalPubElements) {
expect(distributionJob.getPayload()).andReturn(MediaPackageElementParser.getAsXml(e)).times(numberOfCopies);
}
replay(distributionJob);
expect(distributionService.distribute(eq(InternalPublicationChannel.CHANNEL_ID), anyObject(), anyString())).andReturn(distributionJob).anyTimes();
replay(distributionService);
}
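The distribution mock above hands elements across the job boundary as XML payloads, so it depends on elements surviving a serialize/parse round trip. A minimal sketch of that round trip, using only the parser calls already present in these snippets:

import org.opencastproject.mediapackage.MediaPackageElement;
import org.opencastproject.mediapackage.MediaPackageElementParser;

public final class ElementXmlRoundTrip {

  // Serialize an element to XML (as the mocked job payload does above) and parse it back
  public static MediaPackageElement roundTrip(MediaPackageElement element) throws Exception {
    String xml = MediaPackageElementParser.getAsXml(element);
    return MediaPackageElementParser.getFromXml(xml);
  }
}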