Use of org.opencastproject.mediapackage.Track in the Opencast project (opencast/opencast).
From class ImageWorkflowOperationHandler, method configure.
/**
 * Get and parse the configuration options.
 *
 * @param mp
 *          the media package whose tracks are matched against the configured source flavors/tags
 * @param woi
 *          the workflow operation instance holding the raw option strings
 * @return the parsed, typed configuration bundle
 * @throws WorkflowOperationException
 *           if a selected source track cannot report its duration
 */
private Cfg configure(MediaPackage mp, WorkflowOperationInstance woi) throws WorkflowOperationException {
  // Resolve each configured encoding profile name via the composer service.
  final List<EncodingProfile> profiles = getOptConfig(woi, OPT_PROFILES).toStream().bind(asList.toFn()).map(fetchProfile(composerService)).toList();
  // Tags to apply to the generated images (may be empty).
  final List<String> targetImageTags = getOptConfig(woi, OPT_TARGET_TAGS).toStream().bind(asList.toFn()).toList();
  // Optional flavor for the generated images.
  final Opt<MediaPackageElementFlavor> targetImageFlavor = getOptConfig(woi, OPT_TARGET_FLAVOR).map(MediaPackageElementFlavor.parseFlavor.toFn());
  final List<Track> sourceTracks;
  {
    // get the source flavors
    // Both the plural (CSV) and singular option keys are honored and merged into one stream.
    final Stream<MediaPackageElementFlavor> sourceFlavors = getOptConfig(woi, OPT_SOURCE_FLAVORS).toStream().bind(Strings.splitCsv).append(getOptConfig(woi, OPT_SOURCE_FLAVOR)).map(MediaPackageElementFlavor.parseFlavor.toFn());
    // get the source tags
    final Stream<String> sourceTags = getOptConfig(woi, OPT_SOURCE_TAGS).toStream().bind(Strings.splitCsv);
    // fold both into a selector
    final TrackSelector trackSelector = sourceTags.apply(tagFold(sourceFlavors.apply(flavorFold(new TrackSelector()))));
    // select the tracks based on source flavors and tags and skip those that don't have video
    // NOTE: a track without a known duration is a hard error since positions cannot be resolved against it.
    sourceTracks = $(trackSelector.select(mp, true)).filter(Filters.hasVideo.toFn()).each(new Fx<Track>() {
      @Override
      public void apply(Track track) {
        if (track.getDuration() == null) {
          // chuck rethrows the checked exception from inside the non-throwing Fx callback
          chuck(new WorkflowOperationException(format("Track %s cannot tell its duration", track)));
        }
      }
    }).toList();
  }
  // Positions (seconds or percent) at which images are extracted; OPT_POSITIONS is mandatory.
  final List<MediaPosition> positions = parsePositions(getConfig(woi, OPT_POSITIONS));
  // Safety margin (ms) kept from the end of the track; falls back to the default when absent or unparsable.
  final long endMargin = getOptConfig(woi, OPT_END_MARGIN).bind(Strings.toLong).getOr(END_MARGIN_DEFAULT);
  //
  return new Cfg(sourceTracks, positions, profiles, targetImageFlavor, targetImageTags, getTargetBaseNameFormat(woi, OPT_TARGET_BASE_NAME_FORMAT_SECOND), getTargetBaseNameFormat(woi, OPT_TARGET_BASE_NAME_FORMAT_PERCENT), endMargin);
}
Use of org.opencastproject.mediapackage.Track in the Opencast project (opencast/opencast).
From class PartialImportWorkflowOperationHandler, method createVideoFromImage.
/**
 * Render a still image into a video track of the given duration via the composer service.
 *
 * @param image
 *          the source image attachment
 * @param time
 *          the duration of the resulting video in seconds
 * @param elementsToClean
 *          collector for intermediate elements to be removed later; the new track is added to it
 * @return the rendered video track
 * @throws WorkflowOperationException
 *           if the composer job fails or the job cannot be re-read from the service registry
 */
private Track createVideoFromImage(Attachment image, double time, List<MediaPackageElement> elementsToClean)
        throws EncoderException, MediaPackageException, WorkflowOperationException, NotFoundException {
  final Job job = composerService.imageToVideo(image, IMAGE_MOVIE_PROFILE, time);
  if (!waitForStatus(job).isSuccess()) {
    throw new WorkflowOperationException("Image to video job did not complete successfully");
  }
  // Re-fetch the job so we see the payload written on completion.
  final Job finishedJob;
  try {
    finishedJob = serviceRegistry.getJob(job.getId());
  } catch (ServiceRegistryException e) {
    throw new WorkflowOperationException(e);
  }
  final Track rendered = (Track) MediaPackageElementParser.getFromXml(finishedJob.getPayload());
  elementsToClean.add(rendered);
  return rendered;
}
Use of org.opencastproject.mediapackage.Track in the Opencast project (opencast/opencast).
From class PartialImportWorkflowOperationHandler, method checkForMuxing.
/**
 * Look for a single video-only track and a single audio-only track among the presentation and
 * presenter target flavors and, when both are found, mux them into one audio/video track.
 *
 * @param mediaPackage
 *          the media package to scan and to attach the muxed track to
 * @param targetPresentationFlavor
 *          the presentation flavor to scan (checked first)
 * @param targetPresenterFlavor
 *          the presenter flavor to scan (checked second; its matches take precedence)
 * @param useSuffix
 *          if true, audio candidates are looked up under the derived audio flavor instead of the
 *          target flavor itself
 * @param elementsToClean
 *          collector for intermediate elements to be removed later
 * @return the mux job's queue time, or 0 if nothing was muxed
 */
protected long checkForMuxing(MediaPackage mediaPackage, MediaPackageElementFlavor targetPresentationFlavor,
        MediaPackageElementFlavor targetPresenterFlavor, boolean useSuffix, List<MediaPackageElement> elementsToClean)
        throws EncoderException, MediaPackageException, WorkflowOperationException, NotFoundException,
        ServiceRegistryException, IOException {
  Track videoTrack = null;
  Track audioTrack = null;
  // Scan both target flavors in order; a match on the presenter flavor overrides one on the
  // presentation flavor, mirroring the original two-pass assignment.
  for (MediaPackageElementFlavor flavor : new MediaPackageElementFlavor[] { targetPresentationFlavor,
          targetPresenterFlavor }) {
    List<Track> videoElements = getPureVideoTracks(mediaPackage, flavor);
    // With useSuffix, audio-only tracks carry a derived audio flavor rather than the target flavor.
    List<Track> audioElements = getPureAudioTracks(mediaPackage, useSuffix ? deriveAudioFlavor(flavor) : flavor);
    if (videoElements.size() == 1 && audioElements.size() == 0) {
      videoTrack = videoElements.get(0);
    } else if (videoElements.size() == 0 && audioElements.size() == 1) {
      audioTrack = audioElements.get(0);
    }
  }
  logger.debug("Check for mux between '{}' and '{}' flavors and found video track '{}' and audio track '{}'",
          targetPresentationFlavor, targetPresenterFlavor, videoTrack, audioTrack);
  // Mux only when exactly one candidate of each kind was identified.
  if (videoTrack != null && audioTrack != null) {
    return mux(mediaPackage, videoTrack, audioTrack, elementsToClean);
  }
  return 0L;
}
Use of org.opencastproject.mediapackage.Track in the Opencast project (opencast/opencast).
From class PartialImportWorkflowOperationHandler, method getLargestTrack.
/**
 * Returns the track with the largest resolution from the list of tracks.
 *
 * @param tracks
 *          the list of tracks
 * @return a {@link Tuple} with the largest track and its dimension, or <code>null</code> if no
 *         track exposes video stream metadata
 */
private Tuple<Track, Dimension> getLargestTrack(List<Track> tracks) {
  Track track = null;
  Dimension dimension = null;
  for (Track t : tracks) {
    if (!t.hasVideo()) {
      continue;
    }
    VideoStream[] videoStreams = TrackSupport.byType(t.getStreams(), VideoStream.class);
    // Guard against tracks that report video but carry no video stream metadata; the
    // original unguarded videoStreams[0] would throw ArrayIndexOutOfBoundsException here.
    if (videoStreams.length == 0) {
      continue;
    }
    int frameWidth = videoStreams[0].getFrameWidth();
    int frameHeight = videoStreams[0].getFrameHeight();
    // Compare pixel counts in long arithmetic to avoid int overflow on very large resolutions.
    if (dimension == null
            || ((long) frameWidth * frameHeight) > ((long) dimension.getWidth() * dimension.getHeight())) {
      dimension = Dimension.dimension(frameWidth, frameHeight);
      track = t;
    }
  }
  if (track == null || dimension == null) {
    return null;
  }
  return Tuple.tuple(track, dimension);
}
Use of org.opencastproject.mediapackage.Track in the Opencast project (opencast/opencast).
From class PartialImportWorkflowOperationHandler, method mux.
/**
 * Mux a video and an audio track. Add the result to media package <code>mediaPackage</code> with the same flavor as
 * the <code>video</code>.
 *
 * @param mediaPackage
 *          the media package the muxed track is added to
 * @param video
 *          the video-only source track (its flavor and file name are carried over to the result)
 * @param audio
 *          the audio-only source track
 * @param elementsToClean
 *          collector for the superseded source tracks
 * @return the mux job's queue time
 */
protected long mux(MediaPackage mediaPackage, Track video, Track audio, List<MediaPackageElement> elementsToClean)
        throws EncoderException, MediaPackageException, WorkflowOperationException, NotFoundException,
        ServiceRegistryException, IOException {
  logger.debug("Muxing video {} and audio {}", video.getURI(), audio.getURI());
  Job job = composerService.mux(video, audio, PrepareAVWorkflowOperationHandler.MUX_AV_PROFILE);
  if (!waitForStatus(job).isSuccess()) {
    throw new WorkflowOperationException("Muxing of audio " + audio + " and video " + video + " failed");
  }
  // Re-read the job so the payload written on completion is visible.
  job = serviceRegistry.getJob(job.getId());
  final Track combined = (Track) MediaPackageElementParser.getFromXml(job.getPayload());
  if (combined == null) {
    throw new WorkflowOperationException("Muxed job " + job + " returned no payload!");
  }
  // The muxed track replaces the video, so it inherits the video's flavor and file name.
  combined.setFlavor(video.getFlavor());
  combined.setURI(workspace.moveTo(combined.getURI(), mediaPackage.getIdentifier().toString(),
          combined.getIdentifier(), FilenameUtils.getName(video.getURI().toString())));
  // Both sources are superseded: schedule them for cleanup and drop them from the package.
  for (Track source : new Track[] { audio, video }) {
    elementsToClean.add(source);
    mediaPackage.remove(source);
  }
  mediaPackage.add(combined);
  return job.getQueueTime();
}
Aggregations