use of org.opencastproject.composer.layout.Layout in project opencast by opencast.
the class ComposerServiceImpl method buildCompositeCommand.
/**
* Example composite command below. Use the `-filter_complex` option of ffmpeg if an upper video is available;
* otherwise use the `-filter:v` option for a single video.
*
* Dual video sample: The ffmpeg command needs two source files set with the `-i` option. The first media file is the
* `lower`, the second the `upper` one. Example filter: -filter_complex
* [0:v]scale=909:682,pad=1280:720:367:4:0x444345FF[lower];[1:v]scale=358:151[upper];[lower][upper]overlay=4:4[out]
*
* Single video sample: The ffmpeg command needs one source file set with the `-i` option. Example filter: -filter:v
* [in]scale=909:682,pad=1280:720:367:4:0x444345FF[out]
*
* @return commandline part with -filter_complex and -map options
*/
private static String buildCompositeCommand(Dimension compositeTrackSize, LaidOutElement<Track> lowerLaidOutElement,
    Option<LaidOutElement<Track>> upperLaidOutElement, Option<File> upperFile,
    Option<LaidOutElement<Attachment>> watermarkOption, File watermarkFile, String backgroundColor) {
final StringBuilder cmd = new StringBuilder();
final String videoId = watermarkOption.isNone() ? "[out]" : "[video]";
if (upperLaidOutElement.isNone()) {
// There is only one video track and possibly one watermark.
final Layout videoLayout = lowerLaidOutElement.getLayout();
final String videoPosition = videoLayout.getOffset().getX() + ":" + videoLayout.getOffset().getY();
final String scaleVideo = videoLayout.getDimension().getWidth() + ":" + videoLayout.getDimension().getHeight();
final String padLower = compositeTrackSize.getWidth() + ":" + compositeTrackSize.getHeight() + ":" + videoPosition + ":" + backgroundColor;
cmd.append("-filter:v [in]scale=").append(scaleVideo).append(",pad=").append(padLower).append(videoId);
} else if (upperFile.isSome() && upperLaidOutElement.isSome()) {
// There are two video tracks to handle.
final Layout lowerLayout = lowerLaidOutElement.getLayout();
final Layout upperLayout = upperLaidOutElement.get().getLayout();
final String upperPosition = upperLayout.getOffset().getX() + ":" + upperLayout.getOffset().getY();
final String lowerPosition = lowerLayout.getOffset().getX() + ":" + lowerLayout.getOffset().getY();
final String scaleUpper = upperLayout.getDimension().getWidth() + ":" + upperLayout.getDimension().getHeight();
final String scaleLower = lowerLayout.getDimension().getWidth() + ":" + lowerLayout.getDimension().getHeight();
final String padLower = compositeTrackSize.getWidth() + ":" + compositeTrackSize.getHeight() + ":" + lowerPosition + ":" + backgroundColor;
// Add input file for the upper track
cmd.append("-i ").append(upperFile.get().getAbsolutePath()).append(" ");
// Add filter complex mode
cmd.append("-filter_complex").append(" [0:v]scale=").append(scaleLower).append(",pad=").append(padLower).append("[lower]").append(";[1:v]scale=").append(scaleUpper).append("[upper]").append(";[lower][upper]overlay=").append(upperPosition).append(videoId);
}
for (final LaidOutElement<Attachment> watermarkLayout : watermarkOption) {
String watermarkPosition = watermarkLayout.getLayout().getOffset().getX() + ":" + watermarkLayout.getLayout().getOffset().getY();
cmd.append(";").append("movie=").append(watermarkFile.getAbsoluteFile()).append("[watermark];").append(videoId).append("[watermark]overlay=").append(watermarkPosition).append("[out]");
}
if (upperLaidOutElement.isSome()) {
// handle audio
// if both videos contain audio mix it into a single audio stream
final boolean lowerAudio = lowerLaidOutElement.getElement().hasAudio();
final boolean upperAudio = upperLaidOutElement.get().getElement().hasAudio();
if (lowerAudio && upperAudio) {
cmd.append(";[0:a][1:a]amix=inputs=2[aout] -map [out] -map [aout]");
} else if (lowerAudio) {
cmd.append(" -map [out] -map 0:a");
} else if (upperAudio) {
cmd.append(" -map [out] -map 1:a");
} else {
cmd.append(" -map [out]");
}
}
return cmd.toString();
}
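To make the Javadoc example above concrete, here is a small standalone sketch (not part of Opencast; the dimensions, offsets and background color are sample values taken from the Javadoc) that assembles the same dual-video -filter_complex fragment the method produces:

public class CompositeFilterSketch {
  public static void main(String[] args) {
    // Sample values matching the Javadoc example: a 1280x720 composite with the
    // lower video scaled to 909x682 at offset 367:4 and the upper video scaled to 358x151 at offset 4:4.
    final String scaleLower = "909:682";
    final String scaleUpper = "358:151";
    final String padLower = "1280:720:367:4:0x444345FF"; // width:height:x:y:backgroundColor
    final String upperPosition = "4:4";
    final StringBuilder cmd = new StringBuilder();
    cmd.append("-filter_complex")
        .append(" [0:v]scale=").append(scaleLower).append(",pad=").append(padLower).append("[lower]")
        .append(";[1:v]scale=").append(scaleUpper).append("[upper]")
        .append(";[lower][upper]overlay=").append(upperPosition).append("[out]");
    // Prints the same fragment shown in the Javadoc:
    // -filter_complex [0:v]scale=909:682,pad=1280:720:367:4:0x444345FF[lower];[1:v]scale=358:151[upper];[lower][upper]overlay=4:4[out]
    System.out.println(cmd);
  }
}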
use of org.opencastproject.composer.layout.Layout in project opencast by opencast.
the class ComposerRestService method composite.
/**
* Compose two videos into one with an optional watermark.
*
* @param compositeSizeJson
* The composite track dimension as JSON
* @param lowerTrackXml
* The lower track of the composition as XML
* @param lowerLayoutJson
* The lower layout as JSON
* @param upperTrackXml
* The upper track of the composition as XML
* @param upperLayoutJson
* The upper layout as JSON
* @param watermarkAttachmentXml
* The watermark image attachment of the composition as XML
* @param watermarkLayoutJson
* The watermark layout as JSON
* @param profileId
* The encoding profile to use
* @param background
* The background color
* @return A {@link Response} with the resulting track in the response body
* @throws Exception
*/
@POST
@Path("composite")
@Produces(MediaType.TEXT_XML)
@RestQuery(name = "composite", description = "Starts a video compositing process, based on the specified resolution, encoding profile ID, the source elements and their layouts", restParameters = { @RestParameter(description = "The resolution size of the resulting video as JSON", isRequired = true, name = "compositeSize", type = Type.STRING), @RestParameter(description = "The lower source track containing the lower video", isRequired = true, name = "lowerTrack", type = Type.TEXT), @RestParameter(description = "The lower layout containing the JSON definition of the layout", isRequired = true, name = "lowerLayout", type = Type.TEXT), @RestParameter(description = "The upper source track containing the upper video", isRequired = false, name = "upperTrack", type = Type.TEXT), @RestParameter(description = "The upper layout containing the JSON definition of the layout", isRequired = false, name = "upperLayout", type = Type.TEXT), @RestParameter(description = "The watermark source attachment containing watermark image", isRequired = false, name = "watermarkTrack", type = Type.TEXT), @RestParameter(description = "The watermark layout containing the JSON definition of the layout", isRequired = false, name = "watermarkLayout", type = Type.TEXT), @RestParameter(description = "The background color", isRequired = false, name = "background", type = Type.TEXT, defaultValue = "black"), @RestParameter(description = "The encoding profile to use", isRequired = true, name = "profileId", type = Type.STRING) }, reponses = { @RestResponse(description = "Results in an xml document containing the compound video track", responseCode = HttpServletResponse.SC_OK), @RestResponse(description = "If required parameters aren't set or if the source elements aren't from the right type", responseCode = HttpServletResponse.SC_BAD_REQUEST) }, returnDescription = "")
public Response composite(@FormParam("compositeSize") String compositeSizeJson, @FormParam("lowerTrack") String lowerTrackXml, @FormParam("lowerLayout") String lowerLayoutJson, @FormParam("upperTrack") String upperTrackXml, @FormParam("upperLayout") String upperLayoutJson, @FormParam("watermarkAttachment") String watermarkAttachmentXml, @FormParam("watermarkLayout") String watermarkLayoutJson, @FormParam("profileId") String profileId, @FormParam("background") @DefaultValue("black") String background) throws Exception {
// Ensure that the POST parameters are present
if (StringUtils.isBlank(compositeSizeJson) || StringUtils.isBlank(lowerTrackXml) || StringUtils.isBlank(lowerLayoutJson) || StringUtils.isBlank(profileId))
return Response.status(Response.Status.BAD_REQUEST).entity("One of the required parameters must not be null").build();
// Deserialize the source elements
MediaPackageElement lowerTrack = MediaPackageElementParser.getFromXml(lowerTrackXml);
Layout lowerLayout = Serializer.layout(JsonObj.jsonObj(lowerLayoutJson));
if (!Track.TYPE.equals(lowerTrack.getElementType()))
return Response.status(Response.Status.BAD_REQUEST).entity("lowerTrack element must be of type track").build();
LaidOutElement<Track> lowerLaidOutElement = new LaidOutElement<Track>((Track) lowerTrack, lowerLayout);
Option<LaidOutElement<Track>> upperLaidOutElement = Option.<LaidOutElement<Track>>none();
if (StringUtils.isNotBlank(upperTrackXml)) {
MediaPackageElement upperTrack = MediaPackageElementParser.getFromXml(upperTrackXml);
Layout upperLayout = Serializer.layout(JsonObj.jsonObj(upperLayoutJson));
if (!Track.TYPE.equals(upperTrack.getElementType())) {
return Response.status(Response.Status.BAD_REQUEST).entity("upperTrack element must be of type track").build();
}
upperLaidOutElement = Option.option(new LaidOutElement<Track>((Track) upperTrack, upperLayout));
}
Option<LaidOutElement<Attachment>> watermarkLaidOutElement = Option.<LaidOutElement<Attachment>>none();
if (StringUtils.isNotBlank(watermarkAttachmentXml)) {
Layout watermarkLayout = Serializer.layout(JsonObj.jsonObj(watermarkLayoutJson));
MediaPackageElement watermarkAttachment = MediaPackageElementParser.getFromXml(watermarkAttachmentXml);
if (!Attachment.TYPE.equals(watermarkAttachment.getElementType()))
return Response.status(Response.Status.BAD_REQUEST).entity("watermarkTrack element must be of type track").build();
watermarkLaidOutElement = Option.some(new LaidOutElement<Attachment>((Attachment) watermarkAttachment, watermarkLayout));
}
Dimension compositeTrackSize = Serializer.dimension(JsonObj.jsonObj(compositeSizeJson));
try {
// Asynchronously composite the specified source elements
Job job = composerService.composite(compositeTrackSize, upperLaidOutElement, lowerLaidOutElement, watermarkLaidOutElement, profileId, background);
return Response.ok().entity(new JaxbJob(job)).build();
} catch (EncoderException e) {
logger.warn("Unable to composite video: " + e.getMessage());
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
}
}
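For reference, a client could invoke this operation with a plain form POST. The following sketch uses Apache HttpClient; the base URL, the profile id, the placeholder XML/JSON payloads and the omitted authentication are assumptions for illustration and would have to be adapted to a real Opencast installation. The parameter names match the @FormParam annotations above.

import java.util.ArrayList;
import java.util.List;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;

public class CompositeClientSketch {
  public static void main(String[] args) throws Exception {
    // Assumed mount point of the composer endpoint; authentication is omitted in this sketch.
    HttpPost post = new HttpPost("http://localhost:8080/composer/composite");
    List<NameValuePair> form = new ArrayList<>();
    form.add(new BasicNameValuePair("compositeSize", "{\"w\":1280,\"h\":720}")); // assumed JSON shape for the Dimension
    form.add(new BasicNameValuePair("lowerTrack", "<track>...</track>"));        // placeholder serialized track XML
    form.add(new BasicNameValuePair("lowerLayout", "{...}"));                    // placeholder layout JSON
    form.add(new BasicNameValuePair("upperTrack", "<track>...</track>"));        // placeholder serialized track XML
    form.add(new BasicNameValuePair("upperLayout", "{...}"));                    // placeholder layout JSON
    form.add(new BasicNameValuePair("profileId", "composite.work"));             // assumed encoding profile id
    form.add(new BasicNameValuePair("background", "black"));
    post.setEntity(new UrlEncodedFormEntity(form, "UTF-8"));
    try (CloseableHttpClient client = HttpClients.createDefault();
         CloseableHttpResponse response = client.execute(post)) {
      // On success the body contains the serialized job (JaxbJob) as XML.
      System.out.println(EntityUtils.toString(response.getEntity()));
    }
  }
}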
use of org.opencastproject.composer.layout.Layout in project opencast by opencast.
the class ComposerServiceImpl method process.
/**
* {@inheritDoc}
*
* @see org.opencastproject.job.api.AbstractJobProducer#process(org.opencastproject.job.api.Job)
*/
@Override
protected String process(Job job) throws ServiceRegistryException {
String operation = job.getOperation();
List<String> arguments = job.getArguments();
try {
Operation op = Operation.valueOf(operation);
Track firstTrack;
Track secondTrack;
String encodingProfile = arguments.get(0);
final String serialized;
switch(op) {
case Encode:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
serialized = encode(job, Collections.map(tuple("video", firstTrack)), encodingProfile).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
case ParallelEncode:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
serialized = MediaPackageElementParser.getArrayAsXml(parallelEncode(job, firstTrack, encodingProfile));
break;
case Image:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
List<Attachment> resultingElements;
if (Boolean.parseBoolean(arguments.get(2))) {
double[] times = new double[arguments.size() - 3];
for (int i = 3; i < arguments.size(); i++) {
times[i - 3] = Double.parseDouble(arguments.get(i));
}
resultingElements = image(job, firstTrack, encodingProfile, times);
} else {
Map<String, String> properties = parseProperties(arguments.get(3));
resultingElements = image(job, firstTrack, encodingProfile, properties);
}
serialized = MediaPackageElementParser.getArrayAsXml(resultingElements);
break;
case ImageConversion:
Attachment sourceImage = (Attachment) MediaPackageElementParser.getFromXml(arguments.get(1));
serialized = convertImage(job, sourceImage, encodingProfile).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
case Mux:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
secondTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(2));
serialized = mux(job, firstTrack, secondTrack, encodingProfile).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
case Trim:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
long start = Long.parseLong(arguments.get(2));
long duration = Long.parseLong(arguments.get(3));
serialized = trim(job, firstTrack, encodingProfile, start, duration).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
case Composite:
Attachment watermarkAttachment;
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(LOWER_TRACK_INDEX));
Layout lowerLayout = Serializer.layout(JsonObj.jsonObj(arguments.get(LOWER_TRACK_LAYOUT_INDEX)));
LaidOutElement<Track> lowerLaidOutElement = new LaidOutElement<>(firstTrack, lowerLayout);
Option<LaidOutElement<Track>> upperLaidOutElement = Option.none();
if (NOT_AVAILABLE.equals(arguments.get(UPPER_TRACK_INDEX)) && NOT_AVAILABLE.equals(arguments.get(UPPER_TRACK_LAYOUT_INDEX))) {
logger.trace("This composite action does not use a second track.");
} else {
secondTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(UPPER_TRACK_INDEX));
Layout upperLayout = Serializer.layout(JsonObj.jsonObj(arguments.get(UPPER_TRACK_LAYOUT_INDEX)));
upperLaidOutElement = Option.option(new LaidOutElement<Track>(secondTrack, upperLayout));
}
Dimension compositeTrackSize = Serializer.dimension(JsonObj.jsonObj(arguments.get(COMPOSITE_TRACK_SIZE_INDEX)));
String backgroundColor = arguments.get(BACKGROUND_COLOR_INDEX);
Option<LaidOutElement<Attachment>> watermarkOption = Option.none();
if (arguments.size() == 9) {
watermarkAttachment = (Attachment) MediaPackageElementParser.getFromXml(arguments.get(WATERMARK_INDEX));
Layout watermarkLayout = Serializer.layout(JsonObj.jsonObj(arguments.get(WATERMARK_LAYOUT_INDEX)));
watermarkOption = Option.some(new LaidOutElement<>(watermarkAttachment, watermarkLayout));
}
serialized = composite(job, compositeTrackSize, lowerLaidOutElement, upperLaidOutElement, watermarkOption, encodingProfile, backgroundColor).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
case Concat:
String dimensionString = arguments.get(1);
String frameRateString = arguments.get(2);
Dimension outputDimension = null;
if (StringUtils.isNotBlank(dimensionString))
outputDimension = Serializer.dimension(JsonObj.jsonObj(dimensionString));
float outputFrameRate = NumberUtils.toFloat(frameRateString, -1.0f);
List<Track> tracks = new ArrayList<>();
for (int i = 3; i < arguments.size(); i++) {
tracks.add(i - 3, (Track) MediaPackageElementParser.getFromXml(arguments.get(i)));
}
serialized = concat(job, tracks, encodingProfile, outputDimension, outputFrameRate).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
case ImageToVideo:
Attachment image = (Attachment) MediaPackageElementParser.getFromXml(arguments.get(1));
double time = Double.parseDouble(arguments.get(2));
serialized = imageToVideo(job, image, encodingProfile, time).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
default:
throw new IllegalStateException("Don't know how to handle operation '" + operation + "'");
}
return serialized;
} catch (IllegalArgumentException e) {
throw new ServiceRegistryException(String.format("Cannot handle operations of type '%s'", operation), e);
} catch (IndexOutOfBoundsException e) {
throw new ServiceRegistryException(String.format("Invalid arguments for operation '%s'", operation), e);
} catch (Exception e) {
throw new ServiceRegistryException(String.format("Error handling operation '%s'", operation), e);
}
}
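The Composite branch above relies on a positional argument list in which an absent optional element is encoded with the NOT_AVAILABLE marker. The following standalone sketch illustrates that parse-an-optional-argument pattern in isolation; the marker value, the argument order and the helper method are made up for illustration and are not the constants or indices used by ComposerServiceImpl:

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class OptionalArgumentSketch {
  // Placeholder marker; ComposerServiceImpl defines its own NOT_AVAILABLE constant.
  private static final String NOT_AVAILABLE = "n/a";

  // Returns the argument at the given index, or empty if it carries the not-available marker.
  static Optional<String> optionalArg(List<String> arguments, int index) {
    final String value = arguments.get(index);
    return NOT_AVAILABLE.equals(value) ? Optional.empty() : Optional.of(value);
  }

  public static void main(String[] args) {
    // Hypothetical argument list: encoding profile, serialized lower track, absent upper track.
    final List<String> arguments = Arrays.asList("composite.work", "<lower-track-xml/>", "n/a");
    System.out.println(optionalArg(arguments, 1)); // Optional[<lower-track-xml/>]
    System.out.println(optionalArg(arguments, 2)); // Optional.empty
  }
}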