Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
In class PartialImportWorkflowOperationHandler, method extractLastImageFrame:
private Attachment extractLastImageFrame(Track presentationTrack, List<MediaPackageElement> elementsToClean)
        throws EncoderException, MediaPackageException, WorkflowOperationException, NotFoundException {
  VideoStream[] videoStreams = TrackSupport.byType(presentationTrack.getStreams(), VideoStream.class);
  Map<String, String> properties = new HashMap<String, String>();
  properties.put("frame", Long.toString(videoStreams[0].getFrameCount() - 1));
  Job extractImageJob = composerService.image(presentationTrack, IMAGE_FRAME_PROFILE, properties);
  if (!waitForStatus(extractImageJob).isSuccess())
    throw new WorkflowOperationException("Extract image frame video job did not complete successfully");
  // Get the latest copy
  try {
    extractImageJob = serviceRegistry.getJob(extractImageJob.getId());
  } catch (ServiceRegistryException e) {
    throw new WorkflowOperationException(e);
  }
  Attachment composedImages = (Attachment) MediaPackageElementParser.getArrayFromXml(extractImageJob.getPayload()).get(0);
  elementsToClean.add(composedImages);
  return composedImages;
}
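Note that the method above indexes videoStreams[0] without checking that the track actually carries video. The following is a minimal sketch of a defensive variant, not Opencast source: only TrackSupport.byType and VideoStream.getFrameCount are taken from the snippet above, while the empty-array guard, the helper name lastFrameProperties and its error message are assumptions added for illustration.

// Sketch (not Opencast source): build the composer properties that select the last
// frame of a track's first video stream, with a guard for tracks without video.
static Map<String, String> lastFrameProperties(Track track) throws WorkflowOperationException {
  VideoStream[] streams = TrackSupport.byType(track.getStreams(), VideoStream.class);
  if (streams.length == 0) {
    // guard is an assumption; the original indexes streams[0] directly
    throw new WorkflowOperationException("Track has no video stream: " + track);
  }
  Map<String, String> properties = new HashMap<>();
  // getFrameCount() is the total number of frames, so the last frame has index count - 1
  properties.put("frame", Long.toString(streams[0].getFrameCount() - 1));
  return properties;
}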
Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
In class PartialImportWorkflowOperationHandlerTest, method testDetermineDimension:
@Test
public void testDetermineDimension() throws Exception {
  // Setup tracks
  VideoStreamImpl videoStream = new VideoStreamImpl("test1");
  videoStream.setFrameWidth(80);
  videoStream.setFrameHeight(30);
  VideoStreamImpl videoStream2 = new VideoStreamImpl("test2");
  videoStream2.setFrameWidth(101);
  videoStream2.setFrameHeight(50);
  TrackImpl videoTrack = new TrackImpl();
  videoTrack.setURI(URI.create("/test"));
  videoTrack.setVideo(Collections.list((VideoStream) videoStream));
  TrackImpl videoTrack2 = new TrackImpl();
  videoTrack2.setURI(URI.create("/test"));
  videoTrack2.setVideo(Collections.list((VideoStream) videoStream2));
  List<Track> tracks = Collections.list((Track) videoTrack, (Track) videoTrack2);
  EncodingProfileImpl encodingProfile = new EncodingProfileImpl();
  encodingProfile.setIdentifier("test");
  ComposerService composerService = EasyMock.createMock(ComposerService.class);
  EasyMock.expect(composerService.concat(encodingProfile.getIdentifier(), Dimension.dimension(101, 50),
          tracks.toArray(new Track[tracks.size()]))).andReturn(null).once();
  EasyMock.expect(composerService.concat(encodingProfile.getIdentifier(), Dimension.dimension(100, 50),
          tracks.toArray(new Track[tracks.size()]))).andReturn(null).once();
  EasyMock.replay(composerService);
  PartialImportWorkflowOperationHandler handler = new PartialImportWorkflowOperationHandler();
  handler.setComposerService(composerService);
  handler.startConcatJob(encodingProfile, tracks, -1.0F, false);
  handler.startConcatJob(encodingProfile, tracks, -1.0F, true);
}
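The mock expects the concat job to be started with Dimension.dimension(101, 50) when the divisible flag is false and Dimension.dimension(100, 50) when it is true. The sketch below spells out a selection rule consistent with those expectations: take the dimensions of the largest source video and, when the flag is set, round odd values down to even. It is an illustrative reading of the test, not the handler's actual determineDimension implementation; only TrackImpl.getVideo(), the VideoStream getters and Dimension.dimension come from the snippets above, and the method name pickConcatDimension is hypothetical.

// Sketch (assumption): dimension selection consistent with the test expectations above.
static Dimension pickConcatDimension(List<Track> tracks, boolean forceDivisible) {
  int width = 0;
  int height = 0;
  for (Track track : tracks) {
    for (VideoStream video : ((TrackImpl) track).getVideo()) {
      // keep the largest source resolution (80x30 vs 101x50 -> 101x50)
      if (video.getFrameWidth() * video.getFrameHeight() > width * height) {
        width = video.getFrameWidth();
        height = video.getFrameHeight();
      }
    }
  }
  if (forceDivisible) {
    // round odd dimensions down so the encoder gets even values (101x50 -> 100x50)
    width -= width % 2;
    height -= height % 2;
  }
  return Dimension.dimension(width, height);
}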
Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
In class CompositeWorkflowOperationHandler, method handleSingleTrack:
private WorkflowOperationResult handleSingleTrack(MediaPackage mediaPackage, WorkflowOperationInstance operation,
        CompositeSettings compositeSettings, Option<Attachment> watermarkAttachment)
        throws EncoderException, IOException, NotFoundException, MediaPackageException, WorkflowOperationException {
  if (compositeSettings.getSingleSourceLayout() == null) {
    throw new WorkflowOperationException("Single video layout must be set! Please verify that you have a "
            + LAYOUT_SINGLE + " property in your composite operation in your workflow definition.");
  }
  try {
    VideoStream[] videoStreams = TrackSupport.byType(compositeSettings.getSingleTrack().getStreams(), VideoStream.class);
    if (videoStreams.length == 0) {
      logger.warn("No video stream available to compose! {}", compositeSettings.getSingleTrack());
      return createResult(mediaPackage, Action.SKIP);
    }
    // Read the video dimensions from the mediapackage stream information
    Dimension videoDimension = Dimension.dimension(videoStreams[0].getFrameWidth(), videoStreams[0].getFrameHeight());
    // Create the video layout definitions
    List<Tuple<Dimension, HorizontalCoverageLayoutSpec>> shapes = new ArrayList<Tuple<Dimension, HorizontalCoverageLayoutSpec>>();
    shapes.add(0, Tuple.tuple(videoDimension, compositeSettings.getSingleSourceLayout()));
    // Determine dimension of output
    Dimension outputDimension = null;
    String outputResolutionSource = compositeSettings.getOutputResolutionSource();
    if (outputResolutionSource.equals(CompositeSettings.OUTPUT_RESOLUTION_FIXED)) {
      outputDimension = compositeSettings.getOutputDimension();
    } else if (outputResolutionSource.equals(CompositeSettings.OUTPUT_RESOLUTION_LOWER)) {
      outputDimension = videoDimension;
    } else if (outputResolutionSource.equals(CompositeSettings.OUTPUT_RESOLUTION_UPPER)) {
      outputDimension = videoDimension;
    }
    // Calculate the single layout
    MultiShapeLayout multiShapeLayout = LayoutManager.multiShapeLayout(outputDimension, shapes);
    // Create the laid out element for the videos
    LaidOutElement<Track> lowerLaidOutElement = new LaidOutElement<Track>(compositeSettings.getSingleTrack(),
            multiShapeLayout.getShapes().get(0));
    // Create the optionally laid out element for the watermark
    Option<LaidOutElement<Attachment>> watermarkOption = createWatermarkLaidOutElement(compositeSettings,
            outputDimension, watermarkAttachment);
    Job compositeJob = composerService.composite(outputDimension, Option.<LaidOutElement<Track>>none(),
            lowerLaidOutElement, watermarkOption, compositeSettings.getProfile().getIdentifier(),
            compositeSettings.getOutputBackground());
    // Wait for the jobs to return
    if (!waitForStatus(compositeJob).isSuccess())
      throw new WorkflowOperationException("The composite job did not complete successfully");
    if (compositeJob.getPayload().length() > 0) {
      Track compoundTrack = (Track) MediaPackageElementParser.getFromXml(compositeJob.getPayload());
      compoundTrack.setURI(workspace.moveTo(compoundTrack.getURI(), mediaPackage.getIdentifier().toString(),
              compoundTrack.getIdentifier(),
              "composite." + FilenameUtils.getExtension(compoundTrack.getURI().toString())));
      // Adjust the target tags
      for (String tag : compositeSettings.getTargetTags()) {
        logger.trace("Tagging compound track with '{}'", tag);
        compoundTrack.addTag(tag);
      }
      // Adjust the target flavor.
      compoundTrack.setFlavor(compositeSettings.getTargetFlavor());
      logger.debug("Compound track has flavor '{}'", compoundTrack.getFlavor());
      // store new tracks to mediaPackage
      mediaPackage.add(compoundTrack);
      WorkflowOperationResult result = createResult(mediaPackage, Action.CONTINUE, compositeJob.getQueueTime());
      logger.debug("Composite operation completed");
      return result;
    } else {
      logger.info("Composite operation unsuccessful, no payload returned: {}", compositeJob);
      return createResult(mediaPackage, Action.SKIP);
    }
  } finally {
    if (compositeSettings.getSourceUrlWatermark() != null)
      workspace.deleteFromCollection(COLLECTION, compositeSettings.getWatermarkIdentifier() + "."
              + FilenameUtils.getExtension(compositeSettings.getSourceUrlWatermark()));
  }
}
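In the single-track path above the OUTPUT_RESOLUTION_LOWER and OUTPUT_RESOLUTION_UPPER branches both assign videoDimension, because there is only one source resolution to choose from. The sketch below illustrates how the same setting could be resolved when two source videos are present; it is an assumption for illustration, not the handler's dual-track code, and it assumes Dimension exposes getWidth()/getHeight() accessors.

// Sketch (assumption): with two source videos, "lower"/"upper" would pick the smaller
// or larger resolution; with a single source both collapse to that video's dimension.
static Dimension resolveOutputDimension(String source, Dimension fixed, Dimension a, Dimension b) {
  if (CompositeSettings.OUTPUT_RESOLUTION_FIXED.equals(source)) {
    return fixed;
  }
  Dimension smaller = (a.getWidth() * a.getHeight() <= b.getWidth() * b.getHeight()) ? a : b;
  Dimension larger = (smaller == a) ? b : a;
  return CompositeSettings.OUTPUT_RESOLUTION_LOWER.equals(source) ? smaller : larger;
}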
Use of org.opencastproject.mediapackage.VideoStream in project opencast by opencast.
In class AnalyzeTracksWorkflowOperationHandler, method start:
@Override
public WorkflowOperationResult start(WorkflowInstance workflowInstance, JobContext context)
        throws WorkflowOperationException {
  logger.info("Running analyze-tracks workflow operation on workflow {}", workflowInstance.getId());
  final MediaPackage mediaPackage = workflowInstance.getMediaPackage();
  final String sourceFlavor = getConfig(workflowInstance, OPT_SOURCE_FLAVOR);
  Map<String, String> properties = new HashMap<>();
  final MediaPackageElementFlavor flavor = MediaPackageElementFlavor.parseFlavor(sourceFlavor);
  final Track[] tracks = mediaPackage.getTracks(flavor);
  if (tracks.length <= 0) {
    if (BooleanUtils.toBoolean(getConfig(workflowInstance, OPT_FAIL_NO_TRACK, "false"))) {
      throw new WorkflowOperationException("No matching tracks for flavor " + sourceFlavor);
    }
    logger.info("No tracks with specified flavors ({}) to analyse.", sourceFlavor);
    return createResult(mediaPackage, properties, Action.CONTINUE, 0);
  }
  List<Fraction> aspectRatios = getAspectRatio(getConfig(workflowInstance, OPT_VIDEO_ASPECT, ""));
  for (Track track : tracks) {
    final String varName = toVariableName(track.getFlavor());
    properties.put(varName + "_media", "true");
    properties.put(varName + "_video", Boolean.toString(track.hasVideo()));
    properties.put(varName + "_audio", Boolean.toString(track.hasAudio()));
    // Check resolution
    if (track.hasVideo()) {
      for (VideoStream video : ((TrackImpl) track).getVideo()) {
        // Set resolution variables
        properties.put(varName + "_resolution_x", video.getFrameWidth().toString());
        properties.put(varName + "_resolution_y", video.getFrameHeight().toString());
        Fraction trackAspect = Fraction.getReducedFraction(video.getFrameWidth(), video.getFrameHeight());
        properties.put(varName + "_aspect", trackAspect.toString());
        properties.put(varName + "_framerate", video.getFrameRate().toString());
        // Check if we should fall back to nearest defined aspect ratio
        if (!aspectRatios.isEmpty()) {
          trackAspect = getNearestAspectRatio(trackAspect, aspectRatios);
          properties.put(varName + "_aspect_snap", trackAspect.toString());
        }
      }
    }
  }
  logger.info("Finished analyze-tracks workflow operation adding the properties: {}", properties);
  return createResult(mediaPackage, properties, Action.CONTINUE, 0);
}
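The method above calls getNearestAspectRatio, which is not shown in this listing. The following is a plausible sketch of that snapping step, assuming the Fraction type is Apache Commons' org.apache.commons.lang3.math.Fraction (consistent with the Fraction.getReducedFraction call above); the distance metric is an assumption for illustration, not necessarily the Opencast implementation.

// Sketch (assumption): snap a measured aspect ratio to the closest configured one.
static Fraction getNearestAspectRatio(final Fraction videoAspect, final List<Fraction> aspects) {
  Fraction nearest = aspects.get(0);
  for (Fraction aspect : aspects) {
    // the configured ratio with the smallest absolute difference to the measured one wins
    if (videoAspect.subtract(aspect).abs().compareTo(videoAspect.subtract(nearest).abs()) < 0) {
      nearest = aspect;
    }
  }
  return nearest;
}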