Use of org.opencastproject.mediapackage.Track in the Opencast project.
Class ComposerServiceImpl, method trim.
/**
 * Trims the given track using the encoding profile <code>profileId</code>, keeping the segment that begins at
 * <code>start</code> and lasts <code>duration</code> milliseconds.
 *
 * @param job
 *          the associated job
 * @param sourceTrack
 *          the source track
 * @param profileId
 *          the encoding profile identifier
 * @param start
 *          the trimming in-point in millis
 * @param duration
 *          the trimming duration in millis
 * @return the trimmed track or none if the operation does not return a track. This may happen for example when doing
 *         two pass encodings where the first pass only creates metadata for the second one
 * @throws EncoderException
 *           if trimming fails
 */
private Option<Track> trim(Job job, Track sourceTrack, String profileId, long start, long duration) throws EncoderException {
  final String newTrackId = idBuilder.createNew().toString();

  // Fetch the source track into the local workspace and resolve the encoding profile
  final File sourceFile = loadTrackIntoWorkspace(job, "source", sourceTrack);
  final EncodingProfile encodingProfile = getProfile(job, profileId);

  // Run the actual trimming, making sure the encoder engine is released again afterwards
  final EncoderEngine engine = getEncoderEngine();
  File trimmedFile;
  try {
    trimmedFile = engine.trim(sourceFile, encodingProfile, start, duration, null);
  } catch (EncoderException e) {
    // Record an incident carrying every parameter relevant for diagnosing the failure
    final Map<String, String> params = new HashMap<>();
    params.put("track", sourceTrack.getURI().toString());
    params.put("profile", encodingProfile.getIdentifier());
    params.put("start", Long.toString(start));
    params.put("duration", Long.toString(duration));
    incident().recordFailure(job, TRIMMING_FAILED, e, params, detailsFor(e, engine));
    throw e;
  } finally {
    activeEncoder.remove(engine);
  }

  // A missing or empty output means the profile deliberately produced no track
  // (for instance the first pass of a two-pass encoding that only writes metadata)
  if (!trimmedFile.exists() || trimmedFile.length() == 0)
    return none();

  // Move the result into the workspace and have it inspected
  final URI workspaceURI = putToCollection(job, trimmedFile, "trimmed file");
  final Job inspectionJob = inspect(job, workspaceURI);
  try {
    final Track trimmedTrack = (Track) MediaPackageElementParser.getFromXml(inspectionJob.getPayload());
    trimmedTrack.setIdentifier(newTrackId);
    return some(trimmedTrack);
  } catch (MediaPackageException e) {
    throw new EncoderException(e);
  }
}
Use of org.opencastproject.mediapackage.Track in the Opencast project.
Class ComposerServiceImpl, method composite.
/**
 * Composites the given lower video track, an optional upper video track and an optional watermark image into a
 * single video track of size <code>compositeTrackSize</code>, using the encoding profile <code>profileId</code>.
 *
 * @param job
 *          the associated job
 * @param compositeTrackSize
 *          the dimensions of the resulting composite track
 * @param lowerLaidOutElement
 *          the lower (background) video track together with its layout
 * @param upperLaidOutElement
 *          the optional upper (overlay) video track together with its layout
 * @param watermarkOption
 *          the optional watermark image together with its layout
 * @param profileId
 *          the encoding profile identifier
 * @param backgroundColor
 *          the background color used for areas not covered by the video tracks
 * @return the composite track wrapped in some()
 * @throws EncoderException
 *           if compositing fails or the encoder produces anything other than exactly one output file
 * @throws MediaPackageException
 *           if the inspection result cannot be deserialized
 */
private Option<Track> composite(Job job, Dimension compositeTrackSize, LaidOutElement<Track> lowerLaidOutElement, Option<LaidOutElement<Track>> upperLaidOutElement, Option<LaidOutElement<Attachment>> watermarkOption, String profileId, String backgroundColor) throws EncoderException, MediaPackageException {
// Get the encoding profile
final EncodingProfile profile = getProfile(job, profileId);
// Create the engine
final EncoderEngine encoderEngine = getEncoderEngine();
final String targetTrackId = idBuilder.createNew().toString();
Option<File> upperVideoFile = Option.none();
try {
// Get the tracks and make sure they exist
final File lowerVideoFile = loadTrackIntoWorkspace(job, "lower video", lowerLaidOutElement.getElement());
if (upperLaidOutElement.isSome()) {
upperVideoFile = Option.option(loadTrackIntoWorkspace(job, "upper video", upperLaidOutElement.get().getElement()));
}
// Fetch the optional watermark image from the workspace; failures are recorded as incidents
File watermarkFile = null;
if (watermarkOption.isSome()) {
try {
watermarkFile = workspace.get(watermarkOption.get().getElement().getURI());
} catch (NotFoundException e) {
incident().recordFailure(job, WORKSPACE_GET_NOT_FOUND, e, getWorkspaceMediapackageParams("watermark image", watermarkOption.get().getElement()), NO_DETAILS);
throw new EncoderException("Requested watermark image " + watermarkOption.get().getElement() + " is not found");
} catch (IOException e) {
incident().recordFailure(job, WORKSPACE_GET_IO_EXCEPTION, e, getWorkspaceMediapackageParams("watermark image", watermarkOption.get().getElement()), NO_DETAILS);
throw new EncoderException("Unable to access right watermark image " + watermarkOption.get().getElement());
}
// Log what is being composed: the message shape depends on which optional inputs are present
if (upperLaidOutElement.isSome()) {
logger.info("Composing lower video track {} {} and upper video track {} {} including watermark {} {} into {}", lowerLaidOutElement.getElement().getIdentifier(), lowerLaidOutElement.getElement().getURI(), upperLaidOutElement.get().getElement().getIdentifier(), upperLaidOutElement.get().getElement().getURI(), watermarkOption.get().getElement().getIdentifier(), watermarkOption.get().getElement().getURI(), targetTrackId);
} else {
logger.info("Composing video track {} {} including watermark {} {} into {}", lowerLaidOutElement.getElement().getIdentifier(), lowerLaidOutElement.getElement().getURI(), watermarkOption.get().getElement().getIdentifier(), watermarkOption.get().getElement().getURI(), targetTrackId);
}
} else {
if (upperLaidOutElement.isSome()) {
logger.info("Composing lower video track {} {} and upper video track {} {} into {}", lowerLaidOutElement.getElement().getIdentifier(), lowerLaidOutElement.getElement().getURI(), upperLaidOutElement.get().getElement().getIdentifier(), upperLaidOutElement.get().getElement().getURI(), targetTrackId);
} else {
logger.info("Composing video track {} {} into {}", lowerLaidOutElement.getElement().getIdentifier(), lowerLaidOutElement.getElement().getURI(), targetTrackId);
}
}
// Creating video filter command
final String compositeCommand = buildCompositeCommand(compositeTrackSize, lowerLaidOutElement, upperLaidOutElement, upperVideoFile, watermarkOption, watermarkFile, backgroundColor);
Map<String, String> properties = new HashMap<>();
properties.put(EncoderEngine.CMD_SUFFIX + ".compositeCommand", compositeCommand);
List<File> output;
try {
// The upper video is registered under the "audio" source key; the lower video under "video"
// NOTE(review): the "audio" key for the upper video looks intentional but surprising — confirm against EncoderEngine.process
Map<String, File> source = new HashMap<>();
if (upperVideoFile.isSome()) {
source.put("audio", upperVideoFile.get());
}
source.put("video", lowerVideoFile);
output = encoderEngine.process(source, profile, properties);
} catch (EncoderException e) {
// Record an incident carrying all inputs that participated in the failed composition
Map<String, String> params = new HashMap<>();
if (upperLaidOutElement.isSome()) {
params.put("upper", upperLaidOutElement.get().getElement().getURI().toString());
}
params.put("lower", lowerLaidOutElement.getElement().getURI().toString());
if (watermarkFile != null)
params.put("watermark", watermarkOption.get().getElement().getURI().toString());
params.put("profile", profile.getIdentifier());
params.put("properties", properties.toString());
incident().recordFailure(job, COMPOSITE_FAILED, e, params, detailsFor(e, encoderEngine));
throw e;
} finally {
// Always release the encoder engine, whether encoding succeeded or not
activeEncoder.remove(encoderEngine);
}
// We expect one file as output
if (output.size() != 1) {
// Ensure we do not leave behind old files in the workspace
for (File file : output) {
FileUtils.deleteQuietly(file);
}
throw new EncoderException("Composite does not support multiple files as output");
}
// Put the file in the workspace
URI workspaceURI = putToCollection(job, output.get(0), "compound file");
// Have the compound track inspected and return the result
Job inspectionJob = inspect(job, workspaceURI);
Track inspectedTrack = (Track) MediaPackageElementParser.getFromXml(inspectionJob.getPayload());
inspectedTrack.setIdentifier(targetTrackId);
if (profile.getMimeType() != null)
inspectedTrack.setMimeType(MimeTypes.parseMimeType(profile.getMimeType()));
return some(inspectedTrack);
} catch (Exception e) {
// Log with the set of inputs that were involved, then rethrow as an EncoderException
if (upperLaidOutElement.isSome()) {
logger.warn("Error composing {} and {}: {}", lowerLaidOutElement.getElement(), upperLaidOutElement.get().getElement(), getStackTrace(e));
} else {
logger.warn("Error composing {}: {}", lowerLaidOutElement.getElement(), getStackTrace(e));
}
if (e instanceof EncoderException) {
throw (EncoderException) e;
} else {
throw new EncoderException(e);
}
}
}
Use of org.opencastproject.mediapackage.Track in the Opencast project.
Class ComposerServiceImpl, method imageToVideo.
/**
 * Converts the given image attachment into a video track of length <code>time</code> seconds, using the encoding
 * profile <code>profileId</code>.
 *
 * @param job
 *          the associated job
 * @param sourceImage
 *          the image attachment to convert
 * @param profileId
 *          the encoding profile identifier
 * @param time
 *          the desired video length in seconds; a value of -1 is treated as 0
 * @return the resulting video track or none if the encoder did not produce a file
 * @throws EncoderException
 *           if the conversion fails or the source image cannot be fetched from the workspace
 * @throws MediaPackageException
 *           if the inspection result cannot be deserialized
 */
private Option<Track> imageToVideo(Job job, Attachment sourceImage, String profileId, Double time) throws EncoderException, MediaPackageException {
  final EncodingProfile encodingProfile = getProfile(job, profileId);
  final String newTrackId = idBuilder.createNew().toString();

  // Fetch the source image from the workspace; failures are recorded as incidents
  final File imageFile;
  try {
    imageFile = workspace.get(sourceImage.getURI());
  } catch (NotFoundException e) {
    incident().recordFailure(job, WORKSPACE_GET_NOT_FOUND, e, getWorkspaceMediapackageParams("source image", sourceImage), NO_DETAILS);
    throw new EncoderException("Requested source image " + sourceImage + " is not found");
  } catch (IOException e) {
    incident().recordFailure(job, WORKSPACE_GET_IO_EXCEPTION, e, getWorkspaceMediapackageParams("source image", sourceImage), NO_DETAILS);
    throw new EncoderException("Unable to access source image " + sourceImage);
  }

  final EncoderEngine engine = getEncoderEngine();
  logger.info("Converting image attachment {} into video {}", sourceImage.getIdentifier(), newTrackId);

  // -1 is the "no time given" marker; fall back to 0 seconds
  if (time == -1) {
    time = 0D;
  }
  // ffmpeg expects the duration with a '.' decimal separator regardless of the platform locale
  final DecimalFormatSymbols ffmpegFormat = new DecimalFormatSymbols();
  ffmpegFormat.setDecimalSeparator('.');
  final Map<String, String> properties = new HashMap<>();
  properties.put("time", new DecimalFormat("0.000", ffmpegFormat).format(time));

  // Run the encoding, making sure the encoder engine is released again afterwards
  final File videoFile;
  try {
    videoFile = engine.encode(imageFile, encodingProfile, properties);
  } catch (EncoderException e) {
    final Map<String, String> params = new HashMap<>();
    params.put("image", sourceImage.getURI().toString());
    params.put("profile", encodingProfile.getIdentifier());
    params.put("properties", properties.toString());
    incident().recordFailure(job, IMAGE_TO_VIDEO_FAILED, e, params, detailsFor(e, engine));
    throw e;
  } finally {
    activeEncoder.remove(engine);
  }

  // A missing or empty output means the profile produced no track
  if (!videoFile.exists() || videoFile.length() == 0)
    return none();

  // Move the result into the workspace, inspect it, and return the inspected track
  final URI workspaceURI = putToCollection(job, videoFile, "converted image file");
  final Job inspectionJob = inspect(job, workspaceURI);
  final Track videoTrack = (Track) MediaPackageElementParser.getFromXml(inspectionJob.getPayload());
  videoTrack.setIdentifier(newTrackId);
  if (encodingProfile.getMimeType() != null)
    videoTrack.setMimeType(MimeTypes.parseMimeType(encodingProfile.getMimeType()));
  return some(videoTrack);
}
Use of org.opencastproject.mediapackage.Track in the Opencast project.
Class ComposerServiceImpl, method process.
/**
 * {@inheritDoc}
 *
 * Dispatches a queued composer job to the matching operation. Argument 0 is always the encoding profile
 * identifier; the meaning of the remaining serialized arguments depends on the operation (see each case below).
 *
 * @see org.opencastproject.job.api.AbstractJobProducer#process(org.opencastproject.job.api.Job)
 */
@Override
protected String process(Job job) throws ServiceRegistryException {
String operation = job.getOperation();
List<String> arguments = job.getArguments();
try {
// Throws IllegalArgumentException for unknown operation names; mapped to ServiceRegistryException below
Operation op = Operation.valueOf(operation);
Track firstTrack;
Track secondTrack;
String encodingProfile = arguments.get(0);
final String serialized;
switch(op) {
// Encode a single track; an empty string signals that no track was produced
case Encode:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
serialized = encode(job, Collections.map(tuple("video", firstTrack)), encodingProfile).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
// Encode a single track into multiple outputs in parallel
case ParallelEncode:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
serialized = MediaPackageElementParser.getArrayAsXml(parallelEncode(job, firstTrack, encodingProfile));
break;
// Extract one or more images from a track; argument 2 selects time-based vs. properties-based extraction
case Image:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
List<Attachment> resultingElements;
if (Boolean.parseBoolean(arguments.get(2))) {
// Time-based: arguments 3..n are extraction positions in seconds
double[] times = new double[arguments.size() - 3];
for (int i = 3; i < arguments.size(); i++) {
times[i - 3] = Double.parseDouble(arguments.get(i));
}
resultingElements = image(job, firstTrack, encodingProfile, times);
} else {
// Properties-based: argument 3 is a serialized property map
Map<String, String> properties = parseProperties(arguments.get(3));
resultingElements = image(job, firstTrack, encodingProfile, properties);
}
serialized = MediaPackageElementParser.getArrayAsXml(resultingElements);
break;
// Convert an image attachment into another image format
case ImageConversion:
Attachment sourceImage = (Attachment) MediaPackageElementParser.getFromXml(arguments.get(1));
serialized = convertImage(job, sourceImage, encodingProfile).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
// Mux the streams of two tracks together
case Mux:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
secondTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(2));
serialized = mux(job, firstTrack, secondTrack, encodingProfile).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
// Trim a track; arguments 2 and 3 are start and duration in milliseconds
case Trim:
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(1));
long start = Long.parseLong(arguments.get(2));
long duration = Long.parseLong(arguments.get(3));
serialized = trim(job, firstTrack, encodingProfile, start, duration).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
// Composite a lower track, an optional upper track and an optional watermark into one track.
// Argument positions are defined by the *_INDEX constants; NOT_AVAILABLE marks an absent upper track.
case Composite:
Attachment watermarkAttachment;
firstTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(LOWER_TRACK_INDEX));
Layout lowerLayout = Serializer.layout(JsonObj.jsonObj(arguments.get(LOWER_TRACK_LAYOUT_INDEX)));
LaidOutElement<Track> lowerLaidOutElement = new LaidOutElement<>(firstTrack, lowerLayout);
Option<LaidOutElement<Track>> upperLaidOutElement = Option.none();
if (NOT_AVAILABLE.equals(arguments.get(UPPER_TRACK_INDEX)) && NOT_AVAILABLE.equals(arguments.get(UPPER_TRACK_LAYOUT_INDEX))) {
logger.trace("This composite action does not use a second track.");
} else {
secondTrack = (Track) MediaPackageElementParser.getFromXml(arguments.get(UPPER_TRACK_INDEX));
Layout upperLayout = Serializer.layout(JsonObj.jsonObj(arguments.get(UPPER_TRACK_LAYOUT_INDEX)));
upperLaidOutElement = Option.option(new LaidOutElement<Track>(secondTrack, upperLayout));
}
Dimension compositeTrackSize = Serializer.dimension(JsonObj.jsonObj(arguments.get(COMPOSITE_TRACK_SIZE_INDEX)));
String backgroundColor = arguments.get(BACKGROUND_COLOR_INDEX);
Option<LaidOutElement<Attachment>> watermarkOption = Option.none();
// A watermark is present only when the full 9-argument form was submitted
if (arguments.size() == 9) {
watermarkAttachment = (Attachment) MediaPackageElementParser.getFromXml(arguments.get(WATERMARK_INDEX));
Layout watermarkLayout = Serializer.layout(JsonObj.jsonObj(arguments.get(WATERMARK_LAYOUT_INDEX)));
watermarkOption = Option.some(new LaidOutElement<>(watermarkAttachment, watermarkLayout));
}
serialized = composite(job, compositeTrackSize, lowerLaidOutElement, upperLaidOutElement, watermarkOption, encodingProfile, backgroundColor).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
// Concatenate several tracks; argument 1 is an optional output dimension, argument 2 an optional frame rate,
// arguments 3..n are the serialized tracks in order
case Concat:
String dimensionString = arguments.get(1);
String frameRateString = arguments.get(2);
Dimension outputDimension = null;
if (StringUtils.isNotBlank(dimensionString))
outputDimension = Serializer.dimension(JsonObj.jsonObj(dimensionString));
// -1.0 means "keep the source frame rate"
float outputFrameRate = NumberUtils.toFloat(frameRateString, -1.0f);
List<Track> tracks = new ArrayList<>();
for (int i = 3; i < arguments.size(); i++) {
tracks.add(i - 3, (Track) MediaPackageElementParser.getFromXml(arguments.get(i)));
}
serialized = concat(job, tracks, encodingProfile, outputDimension, outputFrameRate).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
// Turn an image attachment into a video of the given length (argument 2, in seconds)
case ImageToVideo:
Attachment image = (Attachment) MediaPackageElementParser.getFromXml(arguments.get(1));
double time = Double.parseDouble(arguments.get(2));
serialized = imageToVideo(job, image, encodingProfile, time).map(MediaPackageElementParser.getAsXml()).getOrElse("");
break;
default:
throw new IllegalStateException("Don't know how to handle operation '" + operation + "'");
}
return serialized;
} catch (IllegalArgumentException e) {
throw new ServiceRegistryException(String.format("Cannot handle operations of type '%s'", operation), e);
} catch (IndexOutOfBoundsException e) {
throw new ServiceRegistryException(String.format("Invalid arguments for operation '%s'", operation), e);
} catch (Exception e) {
throw new ServiceRegistryException(String.format("Error handling operation '%s'", operation), e);
}
}
Use of org.opencastproject.mediapackage.Track in the Opencast project.
Class ComposerRestService, method trim.
/**
 * Trims a track to a new length.
 *
 * Out-of-range values are clamped rather than rejected: a negative start is reset to 0, and a non-positive or
 * overlong duration is reset to the remainder of the track after <code>start</code>.
 *
 * @param sourceTrackAsXml
 *          The source track
 * @param profileId
 *          the encoding profile to use for trimming
 * @param start
 *          the new trimming start time in milliseconds
 * @param duration
 *          the new video duration in milliseconds
 * @return A response containing the job for this encoding job in the response body.
 * @throws Exception
 *           if deserializing the source track or creating the trim job fails
 */
@POST
@Path("trim")
@Produces(MediaType.TEXT_XML)
@RestQuery(name = "trim", description = "Starts a trimming process, based on the specified track, start time and duration in ms", restParameters = { @RestParameter(description = "The track containing the stream", isRequired = true, name = "sourceTrack", type = Type.TEXT, defaultValue = "${this.videoTrackDefault}"), @RestParameter(description = "The encoding profile to use for trimming", isRequired = true, name = "profileId", type = Type.STRING, defaultValue = "trim.work"), @RestParameter(description = "The start time in milisecond", isRequired = true, name = "start", type = Type.STRING, defaultValue = "0"), @RestParameter(description = "The duration in milisecond", isRequired = true, name = "duration", type = Type.STRING, defaultValue = "10000") }, reponses = { @RestResponse(description = "Results in an xml document containing the job for the trimming task", responseCode = HttpServletResponse.SC_OK), @RestResponse(description = "If the start time is negative or exceeds the track duration", responseCode = HttpServletResponse.SC_BAD_REQUEST), @RestResponse(description = "If the duration is negative or, including the new start time, exceeds the track duration", responseCode = HttpServletResponse.SC_BAD_REQUEST) }, returnDescription = "")
public Response trim(@FormParam("sourceTrack") String sourceTrackAsXml, @FormParam("profileId") String profileId, @FormParam("start") long start, @FormParam("duration") long duration) throws Exception {
  // Ensure that the POST parameters are present
  if (StringUtils.isBlank(sourceTrackAsXml) || StringUtils.isBlank(profileId))
    return Response.status(Response.Status.BAD_REQUEST).entity("sourceTrack and profileId must not be null").build();

  // Deserialize the track
  MediaPackageElement sourceElement = MediaPackageElementParser.getFromXml(sourceTrackAsXml);
  if (!Track.TYPE.equals(sourceElement.getElementType()))
    return Response.status(Response.Status.BAD_REQUEST).entity("sourceTrack element must be of type track").build();

  // Make sure the trim times make sense. These checks are deliberately independent (not an else-if chain):
  // previously, clamping a negative start skipped the duration checks, so e.g. start=-5 with duration=0 or with
  // a duration exceeding the track length was passed through to the composer service unchanged.
  Track sourceTrack = (Track) sourceElement;
  if (sourceTrack.getDuration() == null)
    return Response.status(Response.Status.BAD_REQUEST).entity("sourceTrack element does not have a duration").build();
  if (start < 0) {
    start = 0;
  }
  if (duration <= 0 || start + duration > sourceTrack.getDuration()) {
    // Fall back to trimming from start to the end of the track
    duration = (sourceTrack.getDuration() - start);
  }
  try {
    // Asynchronously encode the specified tracks
    Job job = composerService.trim(sourceTrack, profileId, start, duration);
    return Response.ok().entity(new JaxbJob(job)).build();
  } catch (EncoderException e) {
    logger.warn("Unable to trim the track: " + e.getMessage());
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
  }
}
Aggregations