Search in sources :

Example 6 with Attachment

use of org.opencastproject.mediapackage.Attachment in project opencast by opencast.

the class SolrIndexManager method addMpeg7Metadata.

/**
 * Add the mpeg 7 catalog data to the solr document.
 *
 * @param doc
 *          the input document to the solr index
 * @param mediaPackage
 *          the media package whose preview attachments are matched against the segments
 * @param mpeg7
 *          the mpeg7 catalog
 */
@SuppressWarnings("unchecked")
static void addMpeg7Metadata(SolrInputDocument doc, MediaPackage mediaPackage, Mpeg7Catalog mpeg7) {
    // Check for multimedia content
    if (!mpeg7.multimediaContent().hasNext()) {
        logger.warn("Mpeg-7 doesn't contain  multimedia content");
        return;
    }
    // Get the content duration by looking at the first content track. This
    // of course assumes that all tracks are equally long.
    MultimediaContent<? extends MultimediaContentType> mc = mpeg7.multimediaContent().next();
    MultimediaContentType mct = mc.elements().next();
    MediaTime mediaTime = mct.getMediaTime();
    Schema.setDcExtent(doc, mediaTime.getMediaDuration().getDurationInMilliseconds());
    // Check if the keywords have been filled by (manually) added dublin
    // core data. If not, look for the most relevant fields in mpeg-7.
    // BUGFIX: the condition was inverted (!"".equals(...)) and built the keyword
    // ranking only when keywords were already present, contradicting the comment above.
    SortedSet<TextAnnotation> sortedAnnotations = null;
    if ("".equals(Schema.getOcKeywords(doc))) {
        // Order annotations by a combined relevance/confidence score, best first.
        sortedAnnotations = new TreeSet<TextAnnotation>(new Comparator<TextAnnotation>() {

            @Override
            public int compare(TextAnnotation a1, TextAnnotation a2) {
                double score1 = RELEVANCE_BOOST * a1.getRelevance() + a1.getConfidence();
                double score2 = RELEVANCE_BOOST * a2.getRelevance() + a2.getConfidence();
                // Descending order: higher scores sort first
                return Double.compare(score2, score1);
            }
        });
    }
    // Iterate over the tracks and extract keywords and hints
    Iterator<MultimediaContent<? extends MultimediaContentType>> mmIter = mpeg7.multimediaContent();
    int segmentCount = 0;
    while (mmIter.hasNext()) {
        MultimediaContent<?> multimediaContent = mmIter.next();
        // We need to process visual segments first, due to the way they are handled in the ui.
        for (Iterator<?> iterator = multimediaContent.elements(); iterator.hasNext(); ) {
            MultimediaContentType type = (MultimediaContentType) iterator.next();
            if (!(type instanceof Video) && !(type instanceof AudioVisual))
                continue;
            // for every segment in the current multimedia content track
            Video video = (Video) type;
            Iterator<VideoSegment> vsegments = (Iterator<VideoSegment>) video.getTemporalDecomposition().segments();
            while (vsegments.hasNext()) {
                VideoSegment segment = vsegments.next();
                // StringBuilder suffices here: both builders are method-local, no synchronization needed
                StringBuilder segmentText = new StringBuilder();
                StringBuilder hintField = new StringBuilder();
                // Collect the video text elements to a segment text
                SpatioTemporalDecomposition spt = segment.getSpatioTemporalDecomposition();
                if (spt != null) {
                    for (VideoText videoText : spt.getVideoText()) {
                        if (segmentText.length() > 0)
                            segmentText.append(" ");
                        segmentText.append(videoText.getText().getText());
                    // TODO: Add hint on bounding box
                    }
                }
                // Add keyword annotations
                Iterator<TextAnnotation> textAnnotations = segment.textAnnotations();
                while (textAnnotations.hasNext()) {
                    TextAnnotation textAnnotation = textAnnotations.next();
                    // BUGFIX: collect the annotation for the keyword ranking; previously the
                    // sorted set was created but never populated, so the ocKeywords field
                    // written at the end of this method was always empty.
                    if (sortedAnnotations != null)
                        sortedAnnotations.add(textAnnotation);
                    Iterator<?> kwIter = textAnnotation.keywordAnnotations();
                    while (kwIter.hasNext()) {
                        KeywordAnnotation keywordAnnotation = (KeywordAnnotation) kwIter.next();
                        if (segmentText.length() > 0)
                            segmentText.append(" ");
                        segmentText.append(keywordAnnotation.getKeyword());
                    }
                }
                // Add free text annotations
                Iterator<TextAnnotation> freeIter = segment.textAnnotations();
                if (freeIter.hasNext()) {
                    Iterator<FreeTextAnnotation> freeTextIter = freeIter.next().freeTextAnnotations();
                    while (freeTextIter.hasNext()) {
                        FreeTextAnnotation freeTextAnnotation = freeTextIter.next();
                        if (segmentText.length() > 0)
                            segmentText.append(" ");
                        segmentText.append(freeTextAnnotation.getText());
                    }
                }
                // add segment text to solr document
                Schema.setSegmentText(doc, new DField<String>(segmentText.toString(), Integer.toString(segmentCount)));
                // get the segments time properties
                MediaTimePoint timepoint = segment.getMediaTime().getMediaTimePoint();
                MediaDuration duration = segment.getMediaTime().getMediaDuration();
                // TODO: define a class with hint field constants
                hintField.append("time=" + timepoint.getTimeInMilliseconds() + "\n");
                hintField.append("duration=" + duration.getDurationInMilliseconds() + "\n");
                // Look for preview images. Their characteristics are that they are
                // attached as attachments with a flavor of preview/<something>.
                String time = timepoint.toString();
                for (Attachment slide : mediaPackage.getAttachments(MediaPackageElements.PRESENTATION_SEGMENT_PREVIEW)) {
                    MediaPackageReference ref = slide.getReference();
                    if (ref != null && time.equals(ref.getProperty("time"))) {
                        hintField.append("preview");
                        hintField.append(".");
                        hintField.append(ref.getIdentifier());
                        hintField.append("=");
                        hintField.append(slide.getURI().toString());
                        hintField.append("\n");
                    }
                }
                logger.trace("Adding segment: " + timepoint.toString());
                Schema.setSegmentHint(doc, new DField<String>(hintField.toString(), Integer.toString(segmentCount)));
                // increase segment counter
                segmentCount++;
            }
        }
    }
    // Put the most important keywords into a special solr field
    if (sortedAnnotations != null) {
        Schema.setOcKeywords(doc, importantKeywordsString(sortedAnnotations).toString());
    }
}
Also used : Attachment(org.opencastproject.mediapackage.Attachment) VideoText(org.opencastproject.metadata.mpeg7.VideoText) FreeTextAnnotation(org.opencastproject.metadata.mpeg7.FreeTextAnnotation) Comparator(java.util.Comparator) Iterator(java.util.Iterator) KeywordAnnotation(org.opencastproject.metadata.mpeg7.KeywordAnnotation) TextAnnotation(org.opencastproject.metadata.mpeg7.TextAnnotation) FreeTextAnnotation(org.opencastproject.metadata.mpeg7.FreeTextAnnotation) AudioVisual(org.opencastproject.metadata.mpeg7.AudioVisual) MultimediaContentType(org.opencastproject.metadata.mpeg7.MultimediaContentType) SpatioTemporalDecomposition(org.opencastproject.metadata.mpeg7.SpatioTemporalDecomposition) MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) MediaPackageReference(org.opencastproject.mediapackage.MediaPackageReference) VideoSegment(org.opencastproject.metadata.mpeg7.VideoSegment) Video(org.opencastproject.metadata.mpeg7.Video) MediaTime(org.opencastproject.metadata.mpeg7.MediaTime) MediaDuration(org.opencastproject.metadata.mpeg7.MediaDuration) MultimediaContent(org.opencastproject.metadata.mpeg7.MultimediaContent)

Example 7 with Attachment

use of org.opencastproject.mediapackage.Attachment in project opencast by opencast.

the class PartialImportWorkflowOperationHandler method extractImage.

/**
 * Extracts a single still image from the given presentation track at the given position.
 *
 * @param presentationTrack
 *          the track to grab the frame from
 * @param time
 *          the position within the track to extract the frame at
 * @param elementsToClean
 *          collector for intermediate media package elements that must be removed later
 * @return the extracted frame as an attachment
 */
private Attachment extractImage(Track presentationTrack, double time, List<MediaPackageElement> elementsToClean) throws EncoderException, MediaPackageException, WorkflowOperationException, NotFoundException {
    // Kick off the frame extraction on the composer service and block until it finishes
    Job job = composerService.image(presentationTrack, PREVIEW_PROFILE, time);
    if (!waitForStatus(job).isSuccess()) {
        throw new WorkflowOperationException("Extract image frame video job did not complete successfully");
    }
    // Re-read the job from the service registry so we see the freshest payload
    try {
        job = serviceRegistry.getJob(job.getId());
    } catch (ServiceRegistryException e) {
        throw new WorkflowOperationException(e);
    }
    // The payload is a serialized element list; the composed frame is its first entry
    Attachment image = (Attachment) MediaPackageElementParser.getArrayFromXml(job.getPayload()).get(0);
    // Remember the intermediate attachment so it gets cleaned up afterwards
    elementsToClean.add(image);
    return image;
}
Also used : WorkflowOperationException(org.opencastproject.workflow.api.WorkflowOperationException) Attachment(org.opencastproject.mediapackage.Attachment) Job(org.opencastproject.job.api.Job) ServiceRegistryException(org.opencastproject.serviceregistry.api.ServiceRegistryException)

Example 8 with Attachment

use of org.opencastproject.mediapackage.Attachment in project opencast by opencast.

the class PartialImportWorkflowOperationHandler method concat.

/**
 * Concatenates the partial presenter and presentation tracks described by the SMIL catalog into
 * continuous tracks, extending gaps with silence or frozen frames, then trimming and muxing as
 * needed.
 *
 * @param src
 *          the media package to operate on; a clone is modified, the original is left untouched
 * @param operation
 *          the workflow operation instance carrying the configuration
 * @param elementsToClean
 *          collector for intermediate media package elements to be removed afterwards
 * @return the operation result holding the updated media package and the accumulated queue time,
 *         or a SKIP result when neither a presenter nor a presentation flavor is configured
 */
private WorkflowOperationResult concat(MediaPackage src, WorkflowOperationInstance operation, List<MediaPackageElement> elementsToClean) throws EncoderException, IOException, NotFoundException, MediaPackageException, WorkflowOperationException, ServiceRegistryException {
    final MediaPackage mediaPackage = (MediaPackage) src.clone();
    final Long operationId = operation.getId();
    // 
    // read config options
    final Opt<String> presenterFlavor = getOptConfig(operation, SOURCE_PRESENTER_FLAVOR);
    final Opt<String> presentationFlavor = getOptConfig(operation, SOURCE_PRESENTATION_FLAVOR);
    final String smilFlavor = getConfig(operation, SOURCE_SMIL_FLAVOR);
    final String concatEncodingProfile = getConfig(operation, CONCAT_ENCODING_PROFILE);
    final Opt<String> concatOutputFramerate = getOptConfig(operation, CONCAT_OUTPUT_FRAMERATE);
    final String trimEncodingProfile = getConfig(operation, TRIM_ENCODING_PROFILE);
    final MediaPackageElementFlavor targetPresenterFlavor = parseTargetFlavor(getConfig(operation, TARGET_PRESENTER_FLAVOR), "presenter");
    final MediaPackageElementFlavor targetPresentationFlavor = parseTargetFlavor(getConfig(operation, TARGET_PRESENTATION_FLAVOR), "presentation");
    final Opt<EncodingProfile> forceProfile = getForceEncodingProfile(operation);
    final boolean forceEncoding = BooleanUtils.toBoolean(getOptConfig(operation, FORCE_ENCODING).getOr("false"));
    final boolean forceDivisible = BooleanUtils.toBoolean(getOptConfig(operation, ENFORCE_DIVISIBLE_BY_TWO).getOr("false"));
    final List<String> requiredExtensions = getRequiredExtensions(operation);
    // Skip the workflow if no presenter and presentation flavor has been configured
    if (presenterFlavor.isNone() && presentationFlavor.isNone()) {
        logger.warn("No presenter and presentation flavor has been set.");
        return createResult(mediaPackage, Action.SKIP);
    }
    final EncodingProfile concatProfile = composerService.getProfile(concatEncodingProfile);
    if (concatProfile == null) {
        throw new WorkflowOperationException("Concat encoding profile '" + concatEncodingProfile + "' was not found");
    }
    // -1 means "keep source framerate"; only overridden by a valid configured value
    float outputFramerate = -1.0f;
    if (concatOutputFramerate.isSome()) {
        if (NumberUtils.isNumber(concatOutputFramerate.get())) {
            logger.info("Using concat output framerate");
            outputFramerate = NumberUtils.toFloat(concatOutputFramerate.get());
        } else {
            throw new WorkflowOperationException("Unable to parse concat output frame rate!");
        }
    }
    final EncodingProfile trimProfile = composerService.getProfile(trimEncodingProfile);
    if (trimProfile == null) {
        throw new WorkflowOperationException("Trim encoding profile '" + trimEncodingProfile + "' was not found");
    }
    // 
    // get tracks
    final TrackSelector presenterTrackSelector = mkTrackSelector(presenterFlavor);
    final TrackSelector presentationTrackSelector = mkTrackSelector(presentationFlavor);
    final List<Track> originalTracks = new ArrayList<Track>();
    final List<Track> presenterTracks = new ArrayList<Track>();
    final List<Track> presentationTracks = new ArrayList<Track>();
    // Collecting presenter tracks
    for (Track t : presenterTrackSelector.select(mediaPackage, false)) {
        logger.info("Found partial presenter track {}", t);
        originalTracks.add(t);
        presenterTracks.add(t);
    }
    // Collecting presentation tracks
    for (Track t : presentationTrackSelector.select(mediaPackage, false)) {
        logger.info("Found partial presentation track {}", t);
        originalTracks.add(t);
        presentationTracks.add(t);
    }
    // flavor_type -> job
    final Map<String, Job> jobs = new HashMap<String, Job>();
    // get SMIL catalog; the first child of the body is expected to be the parallel element
    final SMILDocument smilDocument = getSmilDocumentFromMediaPackage(mediaPackage, smilFlavor);
    final SMILParElement parallel = (SMILParElement) smilDocument.getBody().getChildNodes().item(0);
    final NodeList sequences = parallel.getTimeChildren();
    final float trackDurationInSeconds = parallel.getDur();
    final long trackDurationInMs = Math.round(trackDurationInSeconds * 1000f);
    // Process each SMIL sequence once for audio and once for video nodes
    for (int i = 0; i < sequences.getLength(); i++) {
        final SMILElement item = (SMILElement) sequences.item(i);
        for (final String mediaType : new String[] { NODE_TYPE_AUDIO, NODE_TYPE_VIDEO }) {
            final List<Track> tracks = new ArrayList<Track>();
            final VCell<String> sourceType = VCell.cell(EMPTY_VALUE);
            // processChildren fills 'tracks' and 'sourceType' and returns the end position in ms
            final long position = processChildren(0, tracks, item.getChildNodes(), originalTracks, sourceType, mediaType, elementsToClean, operationId);
            if (tracks.isEmpty()) {
                logger.debug("The tracks list was empty.");
                continue;
            }
            final Track lastTrack = tracks.get(tracks.size() - 1);
            // Pad the sequence to the full catalog duration: silence for audio-only,
            // a frozen last frame for video tracks
            if (position < trackDurationInMs) {
                final double extendingTime = (trackDurationInMs - position) / 1000d;
                if (extendingTime > 0) {
                    if (!lastTrack.hasVideo()) {
                        logger.info("Extending {} audio track end by {} seconds with silent audio", sourceType.get(), extendingTime);
                        tracks.add(getSilentAudio(extendingTime, elementsToClean, operationId));
                    } else {
                        logger.info("Extending {} track end with last image frame by {} seconds", sourceType.get(), extendingTime);
                        Attachment tempLastImageFrame = extractLastImageFrame(lastTrack, elementsToClean);
                        tracks.add(createVideoFromImage(tempLastImageFrame, extendingTime, elementsToClean));
                    }
                }
            }
            // A single track needs no concatenation; just copy it to the target flavor
            if (tracks.size() < 2) {
                logger.debug("There were less than 2 tracks, copying track...");
                if (sourceType.get().startsWith(PRESENTER_KEY)) {
                    createCopyOfTrack(mediaPackage, tracks.get(0), targetPresenterFlavor);
                } else if (sourceType.get().startsWith(PRESENTATION_KEY)) {
                    createCopyOfTrack(mediaPackage, tracks.get(0), targetPresentationFlavor);
                } else {
                    logger.warn("Can't handle unkown source type '{}' for unprocessed track", sourceType.get());
                }
                continue;
            }
            // Sanity check: every partial track must carry at least one stream
            for (final Track t : tracks) {
                if (!t.hasVideo() && !t.hasAudio()) {
                    logger.error("No audio or video stream available in the track with flavor {}! {}", t.getFlavor(), t);
                    throw new WorkflowOperationException("No audio or video stream available in the track " + t.toString());
                }
            }
            // Start one asynchronous concat job per source type; results are collected below
            if (sourceType.get().startsWith(PRESENTER_KEY)) {
                logger.info("Concatenating {} track", PRESENTER_KEY);
                jobs.put(sourceType.get(), startConcatJob(concatProfile, tracks, outputFramerate, forceDivisible));
            } else if (sourceType.get().startsWith(PRESENTATION_KEY)) {
                logger.info("Concatenating {} track", PRESENTATION_KEY);
                jobs.put(sourceType.get(), startConcatJob(concatProfile, tracks, outputFramerate, forceDivisible));
            } else {
                logger.warn("Can't handle unknown source type '{}'!", sourceType.get());
            }
        }
    }
    // Wait for the jobs to return
    if (jobs.size() > 0) {
        if (!JobUtil.waitForJobs(serviceRegistry, jobs.values()).isSuccess()) {
            throw new WorkflowOperationException("One of the concat jobs did not complete successfully");
        }
    } else {
        logger.info("No concatenating needed for presenter and presentation tracks, took partial source elements");
    }
    // All the jobs have passed, let's update the media package
    long queueTime = 0L;
    MediaPackageElementFlavor adjustedTargetPresenterFlavor = targetPresenterFlavor;
    MediaPackageElementFlavor adjustedTargetPresentationFlavor = targetPresentationFlavor;
    for (final Entry<String, Job> job : jobs.entrySet()) {
        final Opt<Job> concatJob = JobUtil.update(serviceRegistry, job.getValue());
        if (concatJob.isSome()) {
            final String concatPayload = concatJob.get().getPayload();
            if (concatPayload != null) {
                final Track concatTrack;
                try {
                    concatTrack = (Track) MediaPackageElementParser.getFromXml(concatPayload);
                } catch (MediaPackageException e) {
                    throw new WorkflowOperationException(e);
                }
                final String fileName;
                // Adjust the target flavor: audio-only results get the derived audio flavor
                // and an audio suffix on the file name.
                if (job.getKey().startsWith(PRESENTER_KEY)) {
                    if (!concatTrack.hasVideo()) {
                        fileName = PRESENTER_KEY.concat(FLAVOR_AUDIO_SUFFIX);
                        adjustedTargetPresenterFlavor = deriveAudioFlavor(targetPresenterFlavor);
                    } else {
                        fileName = PRESENTER_KEY;
                        adjustedTargetPresenterFlavor = targetPresenterFlavor;
                    }
                    concatTrack.setFlavor(adjustedTargetPresenterFlavor);
                } else if (job.getKey().startsWith(PRESENTATION_KEY)) {
                    if (!concatTrack.hasVideo()) {
                        fileName = PRESENTATION_KEY.concat(FLAVOR_AUDIO_SUFFIX);
                        adjustedTargetPresentationFlavor = deriveAudioFlavor(targetPresentationFlavor);
                    } else {
                        fileName = PRESENTATION_KEY;
                        adjustedTargetPresentationFlavor = targetPresentationFlavor;
                    }
                    concatTrack.setFlavor(adjustedTargetPresentationFlavor);
                } else {
                    fileName = UNKNOWN_KEY;
                }
                concatTrack.setURI(workspace.moveTo(concatTrack.getURI(), mediaPackage.getIdentifier().toString(), concatTrack.getIdentifier(), fileName + "." + FilenameUtils.getExtension(concatTrack.getURI().toString())));
                logger.info("Concatenated track {} got flavor '{}'", concatTrack, concatTrack.getFlavor());
                mediaPackage.add(concatTrack);
                queueTime += concatJob.get().getQueueTime();
            } else {
                // If there is no payload, then the item has not been distributed.
                logger.warn("Concat job {} does not contain a payload", concatJob);
            }
        } else {
            logger.warn("Concat job {} could not be updated since it cannot be found", job.getValue());
        }
    }
    // Trim presenter and presentation source track if longer than the duration from the SMIL catalog
    queueTime += checkForTrimming(mediaPackage, trimProfile, targetPresentationFlavor, trackDurationInSeconds, elementsToClean);
    queueTime += checkForTrimming(mediaPackage, trimProfile, deriveAudioFlavor(targetPresentationFlavor), trackDurationInSeconds, elementsToClean);
    queueTime += checkForTrimming(mediaPackage, trimProfile, targetPresenterFlavor, trackDurationInSeconds, elementsToClean);
    queueTime += checkForTrimming(mediaPackage, trimProfile, deriveAudioFlavor(targetPresenterFlavor), trackDurationInSeconds, elementsToClean);
    adjustAudioTrackTargetFlavor(mediaPackage, targetPresenterFlavor);
    adjustAudioTrackTargetFlavor(mediaPackage, targetPresentationFlavor);
    // Finally mux audio/video pairs and optionally force-encode to the standard profile
    queueTime += checkForMuxing(mediaPackage, targetPresenterFlavor, targetPresentationFlavor, false, elementsToClean);
    queueTime += checkForEncodeToStandard(mediaPackage, forceEncoding, forceProfile, requiredExtensions, targetPresenterFlavor, targetPresentationFlavor, elementsToClean);
    final WorkflowOperationResult result = createResult(mediaPackage, Action.CONTINUE, queueTime);
    logger.debug("Partial import operation completed");
    return result;
}
Also used : HashMap(java.util.HashMap) TrackSelector(org.opencastproject.mediapackage.selector.TrackSelector) ArrayList(java.util.ArrayList) SMILElement(org.w3c.dom.smil.SMILElement) Attachment(org.opencastproject.mediapackage.Attachment) WorkflowOperationResult(org.opencastproject.workflow.api.WorkflowOperationResult) WorkflowOperationException(org.opencastproject.workflow.api.WorkflowOperationException) SMILParElement(org.w3c.dom.smil.SMILParElement) Job(org.opencastproject.job.api.Job) MediaPackageException(org.opencastproject.mediapackage.MediaPackageException) SMILDocument(org.w3c.dom.smil.SMILDocument) NodeList(org.w3c.dom.NodeList) EncodingProfile(org.opencastproject.composer.api.EncodingProfile) MediaPackageElementFlavor(org.opencastproject.mediapackage.MediaPackageElementFlavor) MediaPackage(org.opencastproject.mediapackage.MediaPackage) Track(org.opencastproject.mediapackage.Track)

Example 9 with Attachment

use of org.opencastproject.mediapackage.Attachment in project opencast by opencast.

the class SegmentPreviewsWorkflowOperationHandler method createPreviews.

/**
 * Creates segment preview images for every suitable video track of the media package, based on the
 * segment time points found in the track's MPEG-7 segments catalog, and adds the resulting
 * attachments (flavored, tagged and referencing the original track with a "time" property) to the
 * media package.
 *
 * @param mediaPackage
 *          the media package to create previews for (modified in place)
 * @param operation
 *          the workflow operation instance carrying the configuration
 * @return the operation result containing the updated mediapackage and the accumulated queue time
 * @throws EncoderException
 * @throws ExecutionException
 * @throws InterruptedException
 * @throws IOException
 * @throws NotFoundException
 * @throws WorkflowOperationException
 */
private WorkflowOperationResult createPreviews(final MediaPackage mediaPackage, WorkflowOperationInstance operation) throws EncoderException, InterruptedException, ExecutionException, NotFoundException, MediaPackageException, IOException, WorkflowOperationException {
    long totalTimeInQueue = 0;
    // Read the configuration properties (all optional; trimmed to null when absent)
    String sourceVideoFlavor = StringUtils.trimToNull(operation.getConfiguration("source-flavor"));
    String sourceTags = StringUtils.trimToNull(operation.getConfiguration("source-tags"));
    String targetImageTags = StringUtils.trimToNull(operation.getConfiguration("target-tags"));
    String targetImageFlavor = StringUtils.trimToNull(operation.getConfiguration("target-flavor"));
    String encodingProfileName = StringUtils.trimToNull(operation.getConfiguration("encoding-profile"));
    String referenceFlavor = StringUtils.trimToNull(operation.getConfiguration("reference-flavor"));
    String referenceTags = StringUtils.trimToNull(operation.getConfiguration("reference-tags"));
    // Find the encoding profile
    EncodingProfile profile = composerService.getProfile(encodingProfileName);
    if (profile == null)
        throw new IllegalStateException("Encoding profile '" + encodingProfileName + "' was not found");
    List<String> sourceTagSet = asList(sourceTags);
    // Select the tracks based on the tags and flavors; only tracks with video qualify
    Set<Track> videoTrackSet = new HashSet<>();
    for (Track track : mediaPackage.getTracksByTags(sourceTagSet)) {
        if (sourceVideoFlavor == null || (track.getFlavor() != null && sourceVideoFlavor.equals(track.getFlavor().toString()))) {
            if (!track.hasVideo())
                continue;
            videoTrackSet.add(track);
        }
    }
    if (videoTrackSet.size() == 0) {
        logger.debug("Mediapackage {} has no suitable tracks to extract images based on tags {} and flavor {}", mediaPackage, sourceTags, sourceVideoFlavor);
        return createResult(mediaPackage, Action.CONTINUE);
    } else {
        // Determine the tagset for the reference
        List<String> referenceTagSet = asList(referenceTags);
        // Determine the reference master
        for (Track t : videoTrackSet) {
            // Try to load the segments catalog attached to this track
            MediaPackageReference trackReference = new MediaPackageReferenceImpl(t);
            Catalog[] segmentCatalogs = mediaPackage.getCatalogs(MediaPackageElements.SEGMENTS, trackReference);
            Mpeg7Catalog mpeg7 = null;
            if (segmentCatalogs.length > 0) {
                mpeg7 = loadMpeg7Catalog(segmentCatalogs[0]);
                if (segmentCatalogs.length > 1)
                    logger.warn("More than one segments catalog found for track {}. Resuming with the first one ({})", t, mpeg7);
            } else {
                logger.debug("No segments catalog found for track {}", t);
                continue;
            }
            // Check the catalog's consistency
            if (mpeg7.videoContent() == null || mpeg7.videoContent().next() == null) {
                logger.info("Segments catalog {} contains no video content", mpeg7);
                continue;
            }
            Video videoContent = mpeg7.videoContent().next();
            TemporalDecomposition<? extends Segment> decomposition = videoContent.getTemporalDecomposition();
            // Are there any segments?
            if (decomposition == null || !decomposition.hasSegments()) {
                logger.info("Segments catalog {} contains no video content", mpeg7);
                continue;
            }
            // Is a derived track with the configured reference flavor available?
            MediaPackageElement referenceMaster = getReferenceMaster(mediaPackage, t, referenceFlavor, referenceTagSet);
            // Create the preview images according to the mpeg7 segments
            if (t.hasVideo() && mpeg7 != null) {
                // Collect the start time point of every segment
                Iterator<? extends Segment> segmentIterator = decomposition.segments();
                List<MediaTimePoint> timePointList = new LinkedList<>();
                while (segmentIterator.hasNext()) {
                    Segment segment = segmentIterator.next();
                    MediaTimePoint tp = segment.getMediaTime().getMediaTimePoint();
                    timePointList.add(tp);
                }
                // convert to time array (seconds, as expected by the composer service)
                double[] timeArray = new double[timePointList.size()];
                for (int i = 0; i < timePointList.size(); i++) timeArray[i] = (double) timePointList.get(i).getTimeInMilliseconds() / 1000;
                // Extract all preview images in a single composer job and wait for it
                Job job = composerService.image(t, profile.getIdentifier(), timeArray);
                if (!waitForStatus(job).isSuccess()) {
                    throw new WorkflowOperationException("Extracting preview image from " + t + " failed");
                }
                // Get the latest copy
                try {
                    job = serviceRegistry.getJob(job.getId());
                } catch (ServiceRegistryException e) {
                    throw new WorkflowOperationException(e);
                }
                // add this receipt's queue time to the total
                totalTimeInQueue += job.getQueueTime();
                // The job payload holds one image per requested time point, in order
                List<? extends MediaPackageElement> composedImages = MediaPackageElementParser.getArrayFromXml(job.getPayload());
                Iterator<MediaTimePoint> it = timePointList.iterator();
                for (MediaPackageElement element : composedImages) {
                    Attachment composedImage = (Attachment) element;
                    if (composedImage == null)
                        throw new IllegalStateException("Unable to compose image");
                    // Add the flavor, either from the operation configuration or from the composer
                    if (targetImageFlavor != null) {
                        composedImage.setFlavor(MediaPackageElementFlavor.parseFlavor(targetImageFlavor));
                        logger.debug("Preview image has flavor '{}'", composedImage.getFlavor());
                    }
                    // Set the mimetype
                    if (profile.getMimeType() != null)
                        composedImage.setMimeType(MimeTypes.parseMimeType(profile.getMimeType()));
                    // Add tags
                    for (String tag : asList(targetImageTags)) {
                        logger.trace("Tagging image with '{}'", tag);
                        composedImage.addTag(tag);
                    }
                    // Refer to the original track including a timestamp
                    MediaPackageReferenceImpl ref = new MediaPackageReferenceImpl(referenceMaster);
                    ref.setProperty("time", it.next().toString());
                    composedImage.setReference(ref);
                    // store new image in the mediaPackage
                    mediaPackage.add(composedImage);
                    String fileName = getFileNameFromElements(t, composedImage);
                    composedImage.setURI(workspace.moveTo(composedImage.getURI(), mediaPackage.getIdentifier().toString(), composedImage.getIdentifier(), fileName));
                }
            }
        }
    }
    return createResult(mediaPackage, Action.CONTINUE, totalTimeInQueue);
}
Also used : Attachment(org.opencastproject.mediapackage.Attachment) Segment(org.opencastproject.metadata.mpeg7.Segment) MediaPackageElement(org.opencastproject.mediapackage.MediaPackageElement) WorkflowOperationException(org.opencastproject.workflow.api.WorkflowOperationException) Job(org.opencastproject.job.api.Job) HashSet(java.util.HashSet) MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) EncodingProfile(org.opencastproject.composer.api.EncodingProfile) Catalog(org.opencastproject.mediapackage.Catalog) Mpeg7Catalog(org.opencastproject.metadata.mpeg7.Mpeg7Catalog) LinkedList(java.util.LinkedList) MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) ServiceRegistryException(org.opencastproject.serviceregistry.api.ServiceRegistryException) Mpeg7Catalog(org.opencastproject.metadata.mpeg7.Mpeg7Catalog) MediaPackageReference(org.opencastproject.mediapackage.MediaPackageReference) Video(org.opencastproject.metadata.mpeg7.Video) MediaPackageReferenceImpl(org.opencastproject.mediapackage.MediaPackageReferenceImpl) Track(org.opencastproject.mediapackage.Track)

Example 10 with Attachment

use of org.opencastproject.mediapackage.Attachment in project opencast by opencast.

the class ComposerServiceImpl method convertImage.

/**
 * Converts an image from <code>sourceImage</code> to a new format.
 *
 * @param job
 *          the associated job
 * @param sourceImage
 *          the source image
 * @param profileId
 *          the identifier of the encoding profile to use
 * @return the image as an attachment or none if the operation does not return an image. This may happen for example
 *         when doing two pass encodings where the first pass only creates metadata for the second one
 * @throws EncoderException
 *           if converting the image fails
 */
private Option<Attachment> convertImage(Job job, Attachment sourceImage, String profileId) throws EncoderException, MediaPackageException {
    logger.info("Converting {}", sourceImage);
    // Get the encoding profile
    final EncodingProfile profile = getProfile(job, profileId);
    // Create the encoding engine
    final EncoderEngine encoderEngine = getEncoderEngine();
    // Finally get the file that needs to be encoded
    File imageFile;
    try {
        imageFile = workspace.get(sourceImage.getURI());
    } catch (NotFoundException e) {
        incident().recordFailure(job, WORKSPACE_GET_NOT_FOUND, e, getWorkspaceMediapackageParams("source image", sourceImage), NO_DETAILS);
        // Fixed copy-paste error: this method handles images, not video tracks
        throw new EncoderException("Requested image " + sourceImage + " was not found", e);
    } catch (IOException e) {
        incident().recordFailure(job, WORKSPACE_GET_IO_EXCEPTION, e, getWorkspaceMediapackageParams("source image", sourceImage), NO_DETAILS);
        throw new EncoderException("Error accessing image " + sourceImage, e);
    }
    // Do the work
    File output;
    try {
        output = encoderEngine.encode(imageFile, profile, null);
    } catch (EncoderException e) {
        Map<String, String> params = new HashMap<>();
        params.put("image", sourceImage.getURI().toString());
        params.put("profile", profile.getIdentifier());
        incident().recordFailure(job, CONVERT_IMAGE_FAILED, e, params, detailsFor(e, encoderEngine));
        throw e;
    } finally {
        // Always release the encoder engine, even on failure
        activeEncoder.remove(encoderEngine);
    }
    // encoding did not return a file
    if (!output.exists() || output.length() == 0)
        return none();
    // Put the file in the workspace
    URI workspaceURI = putToCollection(job, output, "converted image file");
    MediaPackageElementBuilder builder = MediaPackageElementBuilderFactory.newInstance().newElementBuilder();
    Attachment attachment = (Attachment) builder.elementFromURI(workspaceURI, Attachment.TYPE, null);
    return some(attachment);
}
Also used : EncoderException(org.opencastproject.composer.api.EncoderException) MediaPackageElementBuilder(org.opencastproject.mediapackage.MediaPackageElementBuilder) EncodingProfile(org.opencastproject.composer.api.EncodingProfile) NotFoundException(org.opencastproject.util.NotFoundException) Attachment(org.opencastproject.mediapackage.Attachment) IOException(java.io.IOException) File(java.io.File) Map(java.util.Map) HashMap(java.util.HashMap) URI(java.net.URI)

Aggregations

Attachment (org.opencastproject.mediapackage.Attachment)64 MediaPackage (org.opencastproject.mediapackage.MediaPackage)28 URI (java.net.URI)24 IOException (java.io.IOException)20 Job (org.opencastproject.job.api.Job)20 NotFoundException (org.opencastproject.util.NotFoundException)19 MediaPackageException (org.opencastproject.mediapackage.MediaPackageException)18 Track (org.opencastproject.mediapackage.Track)16 ServiceRegistryException (org.opencastproject.serviceregistry.api.ServiceRegistryException)16 Test (org.junit.Test)15 WorkflowOperationException (org.opencastproject.workflow.api.WorkflowOperationException)15 Catalog (org.opencastproject.mediapackage.Catalog)14 MediaPackageElement (org.opencastproject.mediapackage.MediaPackageElement)12 InputStream (java.io.InputStream)11 MediaPackageElementFlavor (org.opencastproject.mediapackage.MediaPackageElementFlavor)10 File (java.io.File)9 FileNotFoundException (java.io.FileNotFoundException)8 ArrayList (java.util.ArrayList)8 AttachmentImpl (org.opencastproject.mediapackage.attachment.AttachmentImpl)8 Workspace (org.opencastproject.workspace.api.Workspace)8