Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
The class AbstractEventEndpoint, method streamsToJSON.
private List<Field> streamsToJSON(org.opencastproject.mediapackage.Stream[] streams) {
  List<Field> fields = new ArrayList<>();
  List<JValue> audioList = new ArrayList<>();
  List<JValue> videoList = new ArrayList<>();
  for (org.opencastproject.mediapackage.Stream stream : streams) {
    // TODO There is a bug with the stream ids, see MH-10325
    if (stream instanceof AudioStreamImpl) {
      List<Field> audio = new ArrayList<>();
      AudioStream audioStream = (AudioStream) stream;
      audio.add(f("id", v(audioStream.getIdentifier(), BLANK)));
      audio.add(f("type", v(audioStream.getFormat(), BLANK)));
      audio.add(f("channels", v(audioStream.getChannels(), BLANK)));
      audio.add(f("bitrate", v(audioStream.getBitRate(), BLANK)));
      audio.add(f("bitdepth", v(audioStream.getBitDepth(), BLANK)));
      audio.add(f("samplingrate", v(audioStream.getSamplingRate(), BLANK)));
      audio.add(f("framecount", v(audioStream.getFrameCount(), BLANK)));
      audio.add(f("peakleveldb", v(audioStream.getPkLevDb(), BLANK)));
      audio.add(f("rmsleveldb", v(audioStream.getRmsLevDb(), BLANK)));
      audio.add(f("rmspeakdb", v(audioStream.getRmsPkDb(), BLANK)));
      audioList.add(obj(audio));
    } else if (stream instanceof VideoStreamImpl) {
      List<Field> video = new ArrayList<>();
      VideoStream videoStream = (VideoStream) stream;
      video.add(f("id", v(videoStream.getIdentifier(), BLANK)));
      video.add(f("type", v(videoStream.getFormat(), BLANK)));
      video.add(f("bitrate", v(videoStream.getBitRate(), BLANK)));
      video.add(f("framerate", v(videoStream.getFrameRate(), BLANK)));
      video.add(f("resolution", v(videoStream.getFrameWidth() + "x" + videoStream.getFrameHeight(), BLANK)));
      video.add(f("framecount", v(videoStream.getFrameCount(), BLANK)));
      video.add(f("scantype", v(videoStream.getScanType(), BLANK)));
      video.add(f("scanorder", v(videoStream.getScanOrder(), BLANK)));
      videoList.add(obj(video));
    } else {
      throw new IllegalArgumentException("Stream must be either audio or video");
    }
  }
  fields.add(f("audio", arr(audioList)));
  fields.add(f("video", arr(videoList)));
  return fields;
}
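A minimal usage sketch, not taken from the Opencast source: it assumes the same com.entwinemedia.fn.data.json helpers (f, v, obj, arr) used by the method itself, plus a Track instance named track. Wrapping the returned fields in a JSON object yields a structure of the form {"audio": [...], "video": [...]}, with one entry per stream carrying the keys listed above.

// Hypothetical caller: serialize the streams of a track for the admin UI.
JValue streamsJson = obj(streamsToJSON(track.getStreams()));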
Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
The class AbstractFeedGenerator, method populateFeedEntry.
/**
 * Populates the feed entry with metadata and the enclosures.
 *
 * @param entry
 *          the entry to enrich
 * @param metadata
 *          the metadata
 * @param enclosures
 *          the media enclosures
 * @return the enriched item
 */
private FeedEntry populateFeedEntry(FeedEntry entry, SearchResultItem metadata, List<MediaPackageElement> enclosures) {
  Date d = metadata.getDcCreated();
  Date updatedDate = metadata.getModified();
  String title = metadata.getDcTitle();
  // Configure the iTunes extension
  ITunesFeedEntryExtension iTunesEntry = new ITunesFeedEntryExtension();
  iTunesEntry.setDuration(metadata.getDcExtent());
  iTunesEntry.setBlocked(false);
  iTunesEntry.setExplicit(false);
  if (StringUtils.isNotBlank(metadata.getDcCreator()))
    iTunesEntry.setAuthor(metadata.getDcCreator());
  // TODO: Add iTunes keywords and subtitles
  // iTunesEntry.setKeywords(keywords);
  // iTunesEntry.setSubtitle(subtitle);
  // Configure the DC extension
  DublinCoreExtension dcExtension = new DublinCoreExtension();
  dcExtension.setTitle(title);
  dcExtension.setIdentifier(metadata.getId());
  // Set contributor
  if (!StringUtils.isEmpty(metadata.getDcContributor())) {
    for (String contributor : metadata.getDcContributor().split(";;")) {
      entry.addContributor(new PersonImpl(contributor));
      dcExtension.addContributor(contributor);
    }
  }
  // Set creator
  if (!StringUtils.isEmpty(metadata.getDcCreator())) {
    for (String creator : metadata.getDcCreator().split(";;")) {
      if (iTunesEntry.getAuthor() == null)
        iTunesEntry.setAuthor(creator);
      entry.addAuthor(new PersonImpl(creator));
      dcExtension.addCreator(creator);
    }
  }
  // Set publisher
  if (!StringUtils.isEmpty(metadata.getDcPublisher())) {
    dcExtension.addPublisher(metadata.getDcPublisher());
  }
  // Set rights
  if (!StringUtils.isEmpty(metadata.getDcAccessRights())) {
    dcExtension.setRights(metadata.getDcAccessRights());
  }
  // Set description
  if (!StringUtils.isEmpty(metadata.getDcDescription())) {
    String summary = metadata.getDcDescription();
    entry.setDescription(new ContentImpl(summary));
    iTunesEntry.setSummary(summary);
    dcExtension.setDescription(summary);
  }
  // Set the language
  if (!StringUtils.isEmpty(metadata.getDcLanguage())) {
    dcExtension.setLanguage(metadata.getDcLanguage());
  }
  // Set the publication date
  if (d != null) {
    entry.setPublishedDate(d);
    dcExtension.setDate(d);
  } else if (metadata.getModified() != null) {
    entry.setPublishedDate(metadata.getModified());
    dcExtension.setDate(metadata.getModified());
  }
  // Set the updated date
  if (updatedDate == null)
    updatedDate = d;
  entry.setUpdatedDate(updatedDate);
  // TODO: Finish dc support
  // Set format
  // if (!StringUtils.isEmpty(resultItem.getMediaType())) {
  //   dcExtension.setFormat(resultItem.getMediaType());
  // }
  // dcEntry.setCoverage(arg0);
  // dcEntry.setRelation(arg0);
  // dcEntry.setSource(arg0);
  // dcEntry.setSubject(arg0);
  // Set the cover image
  String coverUrl = null;
  if (!StringUtils.isEmpty(metadata.getCover())) {
    coverUrl = metadata.getCover();
    setImage(entry, coverUrl);
  }
  entry.addExtension(iTunesEntry);
  entry.addExtension(dcExtension);
  // Add the enclosures
  for (MediaPackageElement element : enclosures) {
    String trackMimeType = element.getMimeType().toString();
    long trackLength = element.getSize();
    if (trackLength <= 0 && element instanceof Track) {
      // file size unset, so estimate it from duration and bitrate
      trackLength = 0;
      if (((TrackImpl) element).hasVideo()) {
        List<VideoStream> video = ((TrackImpl) element).getVideo();
        if (video.get(0).getBitRate() != null) {
          trackLength += metadata.getDcExtent() / 1000 * video.get(0).getBitRate() / 8;
        }
      }
      if (((TrackImpl) element).hasAudio()) {
        List<AudioStream> audio = ((TrackImpl) element).getAudio();
        if (audio.get(0).getBitRate() != null) {
          trackLength += metadata.getDcExtent() / 1000 * audio.get(0).getBitRate() / 8;
        }
      }
    }
    // fall back to the duration in milliseconds, which is at least the right order of magnitude
    if (trackLength <= 0) {
      trackLength = metadata.getDcExtent();
    }
    String trackFlavor = element.getFlavor().toString();
    String trackUrl = null;
    try {
      trackUrl = element.getURI().toURL().toExternalForm();
    } catch (MalformedURLException e) {
      // Can't happen
    }
    Enclosure enclosure = new EnclosureImpl(trackUrl, trackMimeType, trackFlavor, trackLength);
    entry.addEnclosure(enclosure);
  }
  return entry;
}
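The enclosure block above estimates a missing file size as duration times bitrate. As a quick worked example, here is a small self-contained sketch with purely illustrative values (none of them come from the source): a 10-minute track with a 1 Mbit/s video stream and a 128 kbit/s audio stream works out to roughly 85 MB.

public class EnclosureSizeEstimateSketch {
  public static void main(String[] args) {
    long dcExtentMs = 600_000L;     // hypothetical duration in ms (10 minutes), as getDcExtent() would return
    long videoBitRate = 1_000_000L; // hypothetical video bitrate in bit/s
    long audioBitRate = 128_000L;   // hypothetical audio bitrate in bit/s
    // Same arithmetic as the enclosure loop above: seconds * bitrate / 8, summed per stream
    long estimatedBytes = dcExtentMs / 1000 * videoBitRate / 8
        + dcExtentMs / 1000 * audioBitRate / 8;
    System.out.println(estimatedBytes); // prints 84600000, i.e. about 84.6 MB
  }
}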
Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
The class ProbeResolutionWorkflowOperationHandler, method start.
@Override
public WorkflowOperationResult start(WorkflowInstance workflowInstance, JobContext context) throws WorkflowOperationException {
  logger.info("Running probe-resolution workflow operation");
  final MediaPackage mediaPackage = workflowInstance.getMediaPackage();
  final String sourceFlavorName = getConfig(workflowInstance, OPT_SOURCE_FLAVOR);
  final MediaPackageElementFlavor sourceFlavor = MediaPackageElementFlavor.parseFlavor(sourceFlavorName);
  // Ensure we have a matching track
  final Track[] tracks = mediaPackage.getTracks(sourceFlavor);
  if (tracks.length <= 0) {
    logger.info("No tracks with specified flavor ({}).", sourceFlavorName);
    return createResult(mediaPackage, Action.CONTINUE);
  }
  // Create mapping: resolution -> [varNames]
  Map<Fraction, Set<String>> resolutionMapping = new HashMap<>();
  for (String key : workflowInstance.getCurrentOperation().getConfigurationKeys()) {
    if (key.startsWith(OPT_VAR_PREFIX)) {
      String varName = key.substring(OPT_VAR_PREFIX.length());
      for (Fraction resolution : getResolutions(getConfig(workflowInstance, key))) {
        if (!resolutionMapping.containsKey(resolution)) {
          resolutionMapping.put(resolution, new HashSet<String>());
        }
        resolutionMapping.get(resolution).add(varName);
      }
    }
  }
  // Create mapping: varName -> value
  Map<String, String> valueMapping = new HashMap<>();
  for (String key : workflowInstance.getCurrentOperation().getConfigurationKeys()) {
    if (key.startsWith(OPT_VAL_PREFIX)) {
      String varName = key.substring(OPT_VAL_PREFIX.length());
      valueMapping.put(varName, getConfig(workflowInstance, key));
    }
  }
  Map<String, String> properties = new HashMap<String, String>();
  for (Track track : tracks) {
    final String flavor = toVariableName(track.getFlavor());
    // Check if resolution fits
    if (track.hasVideo()) {
      for (VideoStream video : ((TrackImpl) track).getVideo()) {
        Fraction resolution = Fraction.getFraction(video.getFrameWidth(), video.getFrameHeight());
        if (resolutionMapping.containsKey(resolution)) {
          for (String varName : resolutionMapping.get(resolution)) {
            String value = valueMapping.containsKey(varName) ? valueMapping.get(varName) : "true";
            properties.put(flavor + varName, value);
          }
        }
      }
    }
  }
  logger.info("Finished workflow operation adding the properties: {}", properties);
  return createResult(mediaPackage, properties, Action.CONTINUE, 0);
}
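One detail worth making explicit: the configured resolutions are matched against the probed video streams by exact width and height, not by aspect ratio, assuming getResolutions() (whose body is not shown in this excerpt) builds its fractions with Fraction.getFraction(width, height) the same way the probing code does. commons-lang3's Fraction.getFraction(int, int) does not reduce the fraction, and its equals() compares numerator and denominator directly, as the following standalone sketch (not part of the handler) illustrates.

import org.apache.commons.lang3.math.Fraction;

public class ResolutionMatchSketch {
  public static void main(String[] args) {
    Fraction configured = Fraction.getFraction(1280, 720); // e.g. a configured resolution
    Fraction probedHd = Fraction.getFraction(1920, 1080);  // a probed 1920x1080 stream
    Fraction probedSd = Fraction.getFraction(1280, 720);   // a probed 1280x720 stream
    System.out.println(configured.equals(probedHd)); // false: same 16:9 aspect ratio, different resolution
    System.out.println(configured.equals(probedSd)); // true: exact width/height match
  }
}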
Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
The class MediaInspectionServiceImplTest, method testEnrichment.
@Test
public void testEnrichment() throws Exception {
  final URI trackUri = getResource("/test.mp4");
  for (MediaInspector mi : init(trackUri)) {
    Track track = mi.inspectTrack(trackUri, Options.NO_OPTION);
    // make changes to metadata
    Checksum cs = track.getChecksum();
    track.setChecksum(null);
    MimeType mt = mimeType("video", "flash");
    track.setMimeType(mt);
    // test the enrich scenario
    Track newTrack = (Track) mi.enrich(track, false, Options.NO_OPTION);
    VideoStream[] videoStreams = TrackSupport.byType(newTrack.getStreams(), VideoStream.class);
    assertTrue(videoStreams[0].getFrameCount().longValue() > 0);
    AudioStream[] audioStreams = TrackSupport.byType(newTrack.getStreams(), AudioStream.class);
    assertTrue(audioStreams[0].getFrameCount().longValue() > 0);
    assertEquals(newTrack.getChecksum(), cs);
    assertEquals(newTrack.getMimeType(), mt);
    assertNotNull(newTrack.getDuration());
    assertTrue(newTrack.getDuration() > 0);
    // test the override scenario
    newTrack = (Track) mi.enrich(track, true, Options.NO_OPTION);
    assertEquals(newTrack.getChecksum(), cs);
    assertNotSame(newTrack.getMimeType(), mt);
    assertTrue(newTrack.getDuration() > 0);
  }
}
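The paired assertions capture the two enrichment modes: with override set to false the inspector only fills in missing metadata, so the nulled checksum is recomputed (and matches the original) while the manually set video/flash MIME type is preserved; with override set to true existing values are replaced by freshly inspected ones, which is why the MIME type no longer matches the one set in the test.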
Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
The class WowzaAdaptiveStreamingDistributionService, method addElementToSmil.
private void addElementToSmil(Document doc, String channelId, MediaPackage mediapackage, MediaPackageElement element)
    throws DOMException, URISyntaxException {
  if (!(element instanceof TrackImpl))
    return;
  TrackImpl track = (TrackImpl) element;
  NodeList switchElementsList = doc.getElementsByTagName("switch");
  Node switchElement = null;
  // If there is no switch element, we need to create the XML structure first.
  if (switchElementsList.getLength() > 0) {
    switchElement = switchElementsList.item(0);
  } else {
    if (doc.getElementsByTagName("head").getLength() < 1)
      doc.appendChild(doc.createElement("head"));
    if (doc.getElementsByTagName("body").getLength() < 1)
      doc.appendChild(doc.createElement("body"));
    switchElement = doc.createElement("switch");
    doc.getElementsByTagName("body").item(0).appendChild(switchElement);
  }
  Element video = doc.createElement("video");
  video.setAttribute("src", getAdaptiveDistributionName(channelId, mediapackage, element));
  float bitrate = 0;
  // Add bitrate corresponding to the audio streams
  for (AudioStream stream : track.getAudio()) {
    bitrate += stream.getBitRate();
  }
  // Add bitrate corresponding to the video streams
  // Also, set the video width and height values:
  // In the rare case where there is more than one video stream, the values of the first stream
  // have priority, but always prefer the first stream with both "frameWidth" and "frameHeight"
  // parameters defined
  Integer width = null;
  Integer height = null;
  for (VideoStream stream : track.getVideo()) {
    bitrate += stream.getBitRate();
    // Update if both width and height are defined for a stream or if we have no values at all
    if (((stream.getFrameWidth() != null) && (stream.getFrameHeight() != null))
        || ((width == null) && (height == null))) {
      width = stream.getFrameWidth();
      height = stream.getFrameHeight();
    }
  }
  video.setAttribute(SMIL_ATTR_VIDEO_BITRATE, Integer.toString((int) bitrate));
  if (width != null) {
    video.setAttribute(SMIL_ATTR_VIDEO_WIDTH, Integer.toString(width));
  } else {
    logger.debug("Could not set video width in the SMIL file for element {} of mediapackage {}. The value was null",
        element.getIdentifier(), mediapackage.getIdentifier());
  }
  if (height != null) {
    video.setAttribute(SMIL_ATTR_VIDEO_HEIGHT, Integer.toString(height));
  } else {
    logger.debug("Could not set video height in the SMIL file for element {} of mediapackage {}. The value was null",
        element.getIdentifier(), mediapackage.getIdentifier());
  }
  NodeList currentVideos = switchElement.getChildNodes();
  for (int i = 0; i < currentVideos.getLength(); i++) {
    Node current = currentVideos.item(i);
    if ("video".equals(current.getNodeName())) {
      Float currentBitrate = Float.parseFloat(current.getAttributes().getNamedItem(SMIL_ATTR_VIDEO_BITRATE).getTextContent());
      if ((isSmilOrderDescending && (currentBitrate < bitrate)) || (!isSmilOrderDescending && (currentBitrate > bitrate))) {
        switchElement.insertBefore(video, current);
        return;
      }
    }
  }
  // If we get here, the video could not be inserted before an existing entry, so append it at the end
  switchElement.appendChild(video);
}
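The net effect is a SMIL document whose body holds a single switch element, with one video child per distributed track carrying the source URL, the summed bitrate and, when available, the frame width and height (the attribute names are the SMIL_ATTR_* constants, whose literal values are not part of this excerpt). The insertion loop keeps those children ordered by bitrate, descending or ascending depending on isSmilOrderDescending: a new entry is inserted before the first existing entry with a lower (respectively higher) bitrate, and appended at the end otherwise.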