use of org.opencastproject.util.NotFoundException in project opencast by opencast.
the class ConcatWorkflowOperationHandler method concat.
private WorkflowOperationResult concat(MediaPackage src, WorkflowOperationInstance operation) throws EncoderException, IOException, NotFoundException, MediaPackageException, WorkflowOperationException {
MediaPackage mediaPackage = (MediaPackage) src.clone();
Map<Integer, Tuple<TrackSelector, Boolean>> trackSelectors = getTrackSelectors(operation);
String outputResolution = StringUtils.trimToNull(operation.getConfiguration(OUTPUT_RESOLUTION));
String outputFrameRate = StringUtils.trimToNull(operation.getConfiguration(OUTPUT_FRAMERATE));
String encodingProfile = StringUtils.trimToNull(operation.getConfiguration(ENCODING_PROFILE));
// Skip the workflow if no source-flavors or source-tags have been configured
if (trackSelectors.isEmpty()) {
logger.warn("No source-tags or source-flavors has been set.");
return createResult(mediaPackage, Action.SKIP);
}
String targetTagsOption = StringUtils.trimToNull(operation.getConfiguration(TARGET_TAGS));
String targetFlavorOption = StringUtils.trimToNull(operation.getConfiguration(TARGET_FLAVOR));
// Target tags
List<String> targetTags = asList(targetTagsOption);
// Target flavor
if (targetFlavorOption == null)
throw new WorkflowOperationException("Target flavor must be set!");
// Find the encoding profile
if (encodingProfile == null)
throw new WorkflowOperationException("Encoding profile must be set!");
EncodingProfile profile = composerService.getProfile(encodingProfile);
if (profile == null)
throw new WorkflowOperationException("Encoding profile '" + encodingProfile + "' was not found");
// Output resolution
if (outputResolution == null)
throw new WorkflowOperationException("Output resolution must be set!");
Dimension outputDimension = null;
if (outputResolution.startsWith(OUTPUT_PART_PREFIX)) {
if (!trackSelectors.keySet().contains(Integer.parseInt(outputResolution.substring(OUTPUT_PART_PREFIX.length()))))
throw new WorkflowOperationException("Output resolution part not set!");
} else {
try {
String[] outputResolutionArray = StringUtils.split(outputResolution, "x");
if (outputResolutionArray.length != 2) {
throw new WorkflowOperationException("Invalid format of output resolution!");
}
outputDimension = Dimension.dimension(Integer.parseInt(outputResolutionArray[0]), Integer.parseInt(outputResolutionArray[1]));
} catch (WorkflowOperationException e) {
throw e;
} catch (Exception e) {
throw new WorkflowOperationException("Unable to parse output resolution!", e);
}
}
float fps = -1.0f;
if (StringUtils.isNotEmpty(outputFrameRate)) {
if (StringUtils.startsWith(outputFrameRate, OUTPUT_PART_PREFIX)) {
if (!NumberUtils.isNumber(outputFrameRate.substring(OUTPUT_PART_PREFIX.length())) || !trackSelectors.keySet().contains(Integer.parseInt(outputFrameRate.substring(OUTPUT_PART_PREFIX.length())))) {
throw new WorkflowOperationException("Output frame rate part not set or invalid!");
}
} else if (NumberUtils.isNumber(outputFrameRate)) {
fps = NumberUtils.toFloat(outputFrameRate);
} else {
throw new WorkflowOperationException("Unable to parse output frame rate!");
}
}
MediaPackageElementFlavor targetFlavor = null;
try {
targetFlavor = MediaPackageElementFlavor.parseFlavor(targetFlavorOption);
if ("*".equals(targetFlavor.getType()) || "*".equals(targetFlavor.getSubtype()))
throw new WorkflowOperationException("Target flavor must have a type and a subtype, '*' are not allowed!");
} catch (IllegalArgumentException e) {
throw new WorkflowOperationException("Target flavor '" + targetFlavorOption + "' is malformed");
}
List<Track> tracks = new ArrayList<Track>();
for (Entry<Integer, Tuple<TrackSelector, Boolean>> trackSelector : trackSelectors.entrySet()) {
Collection<Track> tracksForSelector = trackSelector.getValue().getA().select(mediaPackage, false);
String currentFlavor = StringUtils.join(trackSelector.getValue().getA().getFlavors());
String currentTag = StringUtils.join(trackSelector.getValue().getA().getTags());
if (tracksForSelector.size() > 1) {
logger.warn("More than one track has been found with flavor '{}' and/or tag '{}' for concat operation, skipping concatenation!", currentFlavor, currentTag);
return createResult(mediaPackage, Action.SKIP);
} else if (tracksForSelector.size() == 0 && trackSelector.getValue().getB()) {
logger.warn("No track has been found with flavor '{}' and/or tag '{}' for concat operation, skipping concatenation!", currentFlavor, currentTag);
return createResult(mediaPackage, Action.SKIP);
} else if (tracksForSelector.size() == 0 && !trackSelector.getValue().getB()) {
logger.info("No track has been found with flavor '{}' and/or tag '{}' for concat operation, skipping track!", currentFlavor, currentTag);
continue;
}
for (Track t : tracksForSelector) {
tracks.add(t);
VideoStream[] videoStreams = TrackSupport.byType(t.getStreams(), VideoStream.class);
if (videoStreams.length == 0) {
logger.info("No video stream available in the track with flavor {}! {}", currentFlavor, t);
return createResult(mediaPackage, Action.SKIP);
}
if (StringUtils.startsWith(outputResolution, OUTPUT_PART_PREFIX) && NumberUtils.isNumber(outputResolution.substring(OUTPUT_PART_PREFIX.length())) && trackSelector.getKey() == Integer.parseInt(outputResolution.substring(OUTPUT_PART_PREFIX.length()))) {
outputDimension = new Dimension(videoStreams[0].getFrameWidth(), videoStreams[0].getFrameHeight());
if (!trackSelector.getValue().getB()) {
logger.warn("Output resolution track {} must be mandatory, skipping concatenation!", outputResolution);
return createResult(mediaPackage, Action.SKIP);
}
}
if (fps <= 0 && StringUtils.startsWith(outputFrameRate, OUTPUT_PART_PREFIX) && NumberUtils.isNumber(outputFrameRate.substring(OUTPUT_PART_PREFIX.length())) && trackSelector.getKey() == Integer.parseInt(outputFrameRate.substring(OUTPUT_PART_PREFIX.length()))) {
fps = videoStreams[0].getFrameRate();
}
}
}
if (tracks.size() == 0) {
logger.warn("No tracks found for concating operation, skipping concatenation!");
return createResult(mediaPackage, Action.SKIP);
} else if (tracks.size() == 1) {
Track track = (Track) tracks.get(0).clone();
track.setIdentifier(null);
addNewTrack(mediaPackage, track, targetTags, targetFlavor);
logger.info("At least two tracks are needed for the concating operation, skipping concatenation!");
return createResult(mediaPackage, Action.SKIP);
}
Job concatJob;
if (fps > 0) {
concatJob = composerService.concat(profile.getIdentifier(), outputDimension, fps, tracks.toArray(new Track[tracks.size()]));
} else {
concatJob = composerService.concat(profile.getIdentifier(), outputDimension, tracks.toArray(new Track[tracks.size()]));
}
// Wait for the jobs to return
if (!waitForStatus(concatJob).isSuccess())
throw new WorkflowOperationException("The concat job did not complete successfully");
if (concatJob.getPayload().length() > 0) {
Track concatTrack = (Track) MediaPackageElementParser.getFromXml(concatJob.getPayload());
concatTrack.setURI(workspace.moveTo(concatTrack.getURI(), mediaPackage.getIdentifier().toString(), concatTrack.getIdentifier(), "concat." + FilenameUtils.getExtension(concatTrack.getURI().toString())));
addNewTrack(mediaPackage, concatTrack, targetTags, targetFlavor);
WorkflowOperationResult result = createResult(mediaPackage, Action.CONTINUE, concatJob.getQueueTime());
logger.debug("Concat operation completed");
return result;
} else {
logger.info("concat operation unsuccessful, no payload returned: {}", concatJob);
return createResult(mediaPackage, Action.SKIP);
}
}
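The operation accepts either a fixed output resolution ("WIDTHxHEIGHT") or a "part-N" value that copies the resolution (and, analogously, the frame rate) from one of the mandatory source parts. The following stand-alone sketch illustrates only that parsing step; the class and method names are illustrative, not Opencast API, and the value of OUTPUT_PART_PREFIX is assumed to be "part-".
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;

public final class OutputResolutionExample {

  // assumption: the handler's OUTPUT_PART_PREFIX constant is "part-"
  private static final String OUTPUT_PART_PREFIX = "part-";

  /**
   * Returns {width, height} for a fixed "WIDTHxHEIGHT" value, or null when the value
   * refers to a source part ("part-N"), in which case the dimension is later copied
   * from that part's first video stream.
   */
  static int[] resolveOutputDimension(String outputResolution) {
    if (StringUtils.startsWith(outputResolution, OUTPUT_PART_PREFIX)) {
      return null;
    }
    String[] parts = StringUtils.split(outputResolution, "x");
    if (parts.length != 2 || !NumberUtils.isNumber(parts[0]) || !NumberUtils.isNumber(parts[1])) {
      throw new IllegalArgumentException("Invalid output resolution: " + outputResolution);
    }
    return new int[] { Integer.parseInt(parts[0]), Integer.parseInt(parts[1]) };
  }

  public static void main(String[] args) {
    System.out.println(java.util.Arrays.toString(resolveOutputDimension("1280x720"))); // [1280, 720]
    System.out.println(resolveOutputDimension("part-1")); // null: use the dimension of source part 1
  }
}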
use of org.opencastproject.util.NotFoundException in project opencast by opencast.
the class EventsLoader method addDublinCoreCatalog.
private MediaPackage addDublinCoreCatalog(InputStream in, MediaPackageElementFlavor flavor, MediaPackage mediaPackage) throws IOException {
try {
String elementId = UUID.randomUUID().toString();
URI catalogUrl = workspace.put(mediaPackage.getIdentifier().compact(), elementId, "dublincore.xml", in);
logger.info("Adding catalog with flavor {} to mediapackage {}", flavor, mediaPackage);
MediaPackageElement mpe = mediaPackage.add(catalogUrl, MediaPackageElement.Type.Catalog, flavor);
mpe.setIdentifier(elementId);
mpe.setChecksum(Checksum.create(ChecksumType.DEFAULT_TYPE, workspace.get(catalogUrl)));
return mediaPackage;
} catch (NotFoundException e) {
throw new RuntimeException(e);
}
}
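The NotFoundException is wrapped in a RuntimeException here because the catalog was written to the workspace in the line just above, so a missing file signals an inconsistent workspace rather than a recoverable condition. Below is a minimal stand-alone sketch of the same store-then-checksum pattern, using java.nio and MessageDigest in place of Opencast's Workspace and Checksum types, and assuming (not confirmed by this excerpt) that the default checksum type is MD5.
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

public final class PutAndChecksumExample {

  /** Stores the stream under targetDir/fileName and returns a hex digest of the stored bytes. */
  static String storeAndChecksum(InputStream in, Path targetDir, String fileName) throws Exception {
    Path target = targetDir.resolve(fileName);
    Files.copy(in, target);                   // analogous to workspace.put(...)
    byte[] data = Files.readAllBytes(target); // analogous to reading back via workspace.get(catalogUrl)
    byte[] digest = MessageDigest.getInstance("MD5").digest(data); // assumption: MD5 as the default type
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
      hex.append(String.format("%02x", b));
    }
    return hex.toString();
  }
}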
use of org.opencastproject.util.NotFoundException in project opencast by opencast.
the class ComposerRestServiceTest method testProfiles.
@Test
public void testProfiles() throws Exception {
Response response = restService.getProfile(profileId);
Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
Assert.assertEquals(profile, response.getEntity());
try {
restService.getProfile("some other ID");
Assert.fail("This ID should cause the rest endpoint to throw");
} catch (NotFoundException e) {
// expected
}
EncodingProfileList list = restService.listProfiles();
Assert.assertEquals(profileList, list);
}
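The try/fail/catch block above is the classic JUnit 4 idiom for asserting that a call throws. If the module is on JUnit 4.13 or later, the same check can be written more compactly with Assert.assertThrows; whether that applies here depends on the JUnit version the module actually uses.
// hypothetical additional test method inside the same test class (reuses its restService field)
@Test
public void testUnknownProfileThrows() {
    Assert.assertThrows(NotFoundException.class, () -> restService.getProfile("some other ID"));
}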
use of org.opencastproject.util.NotFoundException in project opencast by opencast.
the class VideoSegmenterServiceImpl method segment.
/**
* Starts segmentation on the given video track and returns a catalog
* containing the final result in the form of an Mpeg7Catalog.
*
* @param job
* the job this segmentation runs under
* @param track
* the element to analyze
* @return a catalog containing the resulting MPEG-7 segmentation
* @throws VideoSegmenterException
* if segmenting the track fails
* @throws MediaPackageException
* if the track has no duration
*/
protected Catalog segment(Job job, Track track) throws VideoSegmenterException, MediaPackageException {
if (!track.hasVideo()) {
logger.warn("Element {} is not a video track", track);
throw new VideoSegmenterException("Element is not a video track");
}
try {
Mpeg7Catalog mpeg7;
File mediaFile = null;
URL mediaUrl = null;
try {
mediaFile = workspace.get(track.getURI());
mediaUrl = mediaFile.toURI().toURL();
} catch (NotFoundException e) {
throw new VideoSegmenterException("Error finding the video file in the workspace", e);
} catch (IOException e) {
throw new VideoSegmenterException("Error reading the video file in the workspace", e);
}
if (track.getDuration() == null)
throw new MediaPackageException("Track " + track + " does not have a duration");
logger.info("Track {} loaded, duration is {} s", mediaUrl, track.getDuration() / 1000);
MediaTime contentTime = new MediaRelTimeImpl(0, track.getDuration());
MediaLocator contentLocator = new MediaLocatorImpl(track.getURI());
Video videoContent;
logger.debug("changesThreshold: {}, stabilityThreshold: {}", changesThreshold, stabilityThreshold);
logger.debug("prefNumber: {}, maxCycles: {}", prefNumber, maxCycles);
boolean endOptimization = false;
int cycleCount = 0;
LinkedList<Segment> segments;
LinkedList<OptimizationStep> optimizationList = new LinkedList<OptimizationStep>();
LinkedList<OptimizationStep> unusedResultsList = new LinkedList<OptimizationStep>();
OptimizationStep stepBest = new OptimizationStep();
// local copy of changesThreshold, that can safely be changed over optimization iterations
float changesThresholdLocal = changesThreshold;
// local copies of prefNumber, absoluteMin and absoluteMax, to make a dependency on track length possible
int prefNumberLocal = prefNumber;
int absoluteMaxLocal = absoluteMax;
int absoluteMinLocal = absoluteMin;
// if enabled, scale prefNumber, absoluteMax and absoluteMin with the duration of the track
if (durationDependent) {
double trackDurationInHours = track.getDuration() / 3600000.0;
prefNumberLocal = (int) Math.round(trackDurationInHours * prefNumberLocal);
absoluteMaxLocal = (int) Math.round(trackDurationInHours * absoluteMax);
absoluteMinLocal = (int) Math.round(trackDurationInHours * absoluteMin);
// make sure prefNumberLocal will never be 0 or negative
if (prefNumberLocal <= 0) {
prefNumberLocal = 1;
}
logger.info("Numbers of segments are set to be relative to track duration. Therefore for {} the preferred " + "number of segments is {}", mediaUrl, prefNumberLocal);
}
logger.info("Starting video segmentation of {}", mediaUrl);
// optimization loop: iterate the segmentation until the number of segments is close enough
// to the desired number of segments
while (!endOptimization) {
mpeg7 = mpeg7CatalogService.newInstance();
videoContent = mpeg7.addVideoContent("videosegment", contentTime, contentLocator);
// run the segmentation with FFmpeg
segments = runSegmentationFFmpeg(track, videoContent, mediaFile, changesThresholdLocal);
// calculate errors for "normal" and filtered segmentation
// and compare them to find better optimization.
// "normal"
OptimizationStep currentStep = new OptimizationStep(stabilityThreshold, changesThresholdLocal, segments.size(), prefNumberLocal, mpeg7, segments);
// filtered
LinkedList<Segment> segmentsNew = new LinkedList<Segment>();
OptimizationStep currentStepFiltered = new OptimizationStep(stabilityThreshold, changesThresholdLocal, 0, prefNumberLocal, filterSegmentation(segments, track, segmentsNew, stabilityThreshold * 1000), segments);
currentStepFiltered.setSegmentNumAndRecalcErrors(segmentsNew.size());
logger.info("Segmentation yields {} segments after filtering", segmentsNew.size());
OptimizationStep currentStepBest;
// keep the unfiltered segmentation if its error is not larger than the filtered one's, or if
// filtering drops below the preferred number while the raw segmentation is still very fine-grained
// - and the filtered segmentation is not already better than the maximum error
if (currentStep.getErrorAbs() <= currentStepFiltered.getErrorAbs() || (segmentsNew.size() < prefNumberLocal && currentStep.getSegmentNum() > (track.getDuration() / 1000.0f) / (stabilityThreshold / 2) && !(currentStepFiltered.getErrorAbs() <= maxError))) {
optimizationList.add(currentStep);
Collections.sort(optimizationList);
currentStepBest = currentStep;
unusedResultsList.add(currentStepFiltered);
} else {
optimizationList.add(currentStepFiltered);
Collections.sort(optimizationList);
currentStepBest = currentStepFiltered;
}
cycleCount++;
logger.debug("errorAbs = {}, error = {}", currentStep.getErrorAbs(), currentStep.getError());
logger.debug("changesThreshold = {}", changesThresholdLocal);
logger.debug("cycleCount = {}", cycleCount);
// end optimization if maximum number of cycles is reached or if the segmentation is good enough
if (cycleCount >= maxCycles || currentStepBest.getErrorAbs() <= maxError) {
endOptimization = true;
if (optimizationList.size() > 0) {
if (optimizationList.getFirst().getErrorAbs() <= optimizationList.getLast().getErrorAbs() && optimizationList.getFirst().getError() >= 0) {
stepBest = optimizationList.getFirst();
} else {
stepBest = optimizationList.getLast();
}
}
// just to be sure, check if one of the unused results was better
for (OptimizationStep currentUnusedStep : unusedResultsList) {
if (currentUnusedStep.getErrorAbs() < stepBest.getErrorAbs()) {
stepBest = unusedResultsList.getFirst();
}
}
// continue optimization, calculate new changes threshold for next iteration of optimization
} else {
OptimizationStep first = optimizationList.getFirst();
OptimizationStep last = optimizationList.getLast();
// estimate a new changesThreshold based on the one yielding the smallest error
if (optimizationList.size() == 1 || first.getError() < 0 || last.getError() > 0) {
if (currentStepBest.getError() >= 0) {
// if the error is smaller or equal to 1, increase changes threshold weighted with the error
if (currentStepBest.getError() <= 1) {
changesThresholdLocal += changesThresholdLocal * currentStepBest.getError();
} else {
// if the first iteration yields far too many segments, jump to a fixed threshold
// to reach reasonable segment numbers faster
if (cycleCount <= 1 && currentStep.getSegmentNum() > 2000) {
changesThresholdLocal = 0.2f;
// if the error is bigger than one, double the changes threshold, because multiplying
// with a large error can yield a much too high changes threshold
} else {
changesThresholdLocal *= 2;
}
}
} else {
changesThresholdLocal /= 2;
}
logger.debug("onesided optimization yields new changesThreshold = {}", changesThresholdLocal);
// if there are already iterations with positive and negative errors, choose a changesThreshold between those
} else {
// for simplicity a linear relationship between the changesThreshold
// and the number of generated segments is assumed and based on that
// the expected correct changesThreshold is calculated
// the new changesThreshold is calculated by averaging the mean and the mean weighted with errors
// because this seemed to yield better results in several cases
float x = (first.getSegmentNum() - prefNumberLocal) / (float) (first.getSegmentNum() - last.getSegmentNum());
float newX = ((x + 0.5f) * 0.5f);
changesThresholdLocal = first.getChangesThreshold() * (1 - newX) + last.getChangesThreshold() * newX;
logger.debug("doublesided optimization yields new changesThreshold = {}", changesThresholdLocal);
}
}
}
// after optimization of the changes threshold, the minimum duration for a segment
// (stability threshold) is optimized if the result is still not good enough
int threshLow = stabilityThreshold * 1000;
int threshHigh = threshLow + (threshLow / 2);
LinkedList<Segment> tmpSegments;
float smallestError = Float.MAX_VALUE;
int bestI = threshLow;
segments = stepBest.getSegments();
// if the error of the best segmentation is already smaller than the maximum error,
// the stability threshold will not be optimized
if (stepBest.getError() <= maxError) {
threshHigh = stabilityThreshold * 1000;
}
for (int i = threshLow; i <= threshHigh; i = i + 1000) {
tmpSegments = new LinkedList<Segment>();
filterSegmentation(segments, track, tmpSegments, i);
float newError = OptimizationStep.calculateErrorAbs(tmpSegments.size(), prefNumberLocal);
if (newError < smallestError) {
smallestError = newError;
bestI = i;
}
}
tmpSegments = new LinkedList<Segment>();
mpeg7 = filterSegmentation(segments, track, tmpSegments, bestI);
// for debugging: output of final segmentation after optimization
logger.debug("result segments:");
for (int i = 0; i < tmpSegments.size(); i++) {
int[] tmpLog2 = new int[7];
tmpLog2[0] = tmpSegments.get(i).getMediaTime().getMediaTimePoint().getHour();
tmpLog2[1] = tmpSegments.get(i).getMediaTime().getMediaTimePoint().getMinutes();
tmpLog2[2] = tmpSegments.get(i).getMediaTime().getMediaTimePoint().getSeconds();
tmpLog2[3] = tmpSegments.get(i).getMediaTime().getMediaDuration().getHours();
tmpLog2[4] = tmpSegments.get(i).getMediaTime().getMediaDuration().getMinutes();
tmpLog2[5] = tmpSegments.get(i).getMediaTime().getMediaDuration().getSeconds();
Object[] tmpLog1 = { tmpLog2[0], tmpLog2[1], tmpLog2[2], tmpLog2[3], tmpLog2[4], tmpLog2[5], tmpLog2[6] };
tmpLog1[6] = tmpSegments.get(i).getIdentifier();
logger.debug("s:{}:{}:{}, d:{}:{}:{}, {}", tmpLog1);
}
logger.info("Optimized Segmentation yields (after {} iteration" + (cycleCount == 1 ? "" : "s") + ") {} segments", cycleCount, tmpSegments.size());
// if no reasonable segmentation could be found, instead return a uniform segmentation
if (tmpSegments.size() < absoluteMinLocal || tmpSegments.size() > absoluteMaxLocal) {
mpeg7 = uniformSegmentation(track, tmpSegments, prefNumberLocal);
logger.info("Since no reasonable segmentation could be found, a uniform segmentation was created");
}
Catalog mpeg7Catalog = (Catalog) MediaPackageElementBuilderFactory.newInstance().newElementBuilder().newElement(Catalog.TYPE, MediaPackageElements.SEGMENTS);
URI uri;
try {
uri = workspace.putInCollection(COLLECTION_ID, job.getId() + ".xml", mpeg7CatalogService.serialize(mpeg7));
} catch (IOException e) {
throw new VideoSegmenterException("Unable to put the mpeg7 catalog into the workspace", e);
}
mpeg7Catalog.setURI(uri);
logger.info("Finished video segmentation of {}", mediaUrl);
return mpeg7Catalog;
} catch (Exception e) {
logger.warn("Error segmenting " + track, e);
if (e instanceof VideoSegmenterException) {
throw (VideoSegmenterException) e;
} else {
throw new VideoSegmenterException(e);
}
}
}
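The "doublesided" branch estimates the next changesThreshold by assuming a roughly linear relation between the threshold and the resulting segment count, then averaging that linear estimate with the midpoint of the two previous thresholds. A stand-alone numeric sketch of just that interpolation (class and method names are illustrative, not Opencast code):
public final class ThresholdInterpolationExample {

  /**
   * Given two previous iterations (one yielding too many segments, the other too few),
   * estimate a changes threshold expected to hit prefNumber, assuming a linear relation
   * between threshold and segment count, then average that estimate with the midpoint
   * of the two thresholds, as the code above does.
   */
  static float interpolateChangesThreshold(float firstThreshold, int firstSegments,
      float lastThreshold, int lastSegments, int prefNumber) {
    float x = (firstSegments - prefNumber) / (float) (firstSegments - lastSegments);
    float newX = (x + 0.5f) * 0.5f; // average of the linear estimate and the midpoint
    return firstThreshold * (1 - newX) + lastThreshold * newX;
  }

  public static void main(String[] args) {
    // e.g. threshold 0.1 gave 40 segments, threshold 0.3 gave 10, preferred number is 20
    System.out.println(interpolateChangesThreshold(0.1f, 40, 0.3f, 10, 20)); // ~0.217
  }
}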
use of org.opencastproject.util.NotFoundException in project opencast by opencast.
the class WaveformServiceImpl method extractWaveform.
/**
* Create and run waveform extraction ffmpeg command.
*
* @param track source audio/video track with at least one audio channel
* @return waveform image attachment
* @throws WaveformServiceException if processing fails
*/
private Attachment extractWaveform(Track track) throws WaveformServiceException {
if (!track.hasAudio()) {
throw new WaveformServiceException("Track has no audio");
}
// copy source file into workspace
File mediaFile;
try {
mediaFile = workspace.get(track.getURI());
} catch (NotFoundException e) {
throw new WaveformServiceException("Error finding the media file in the workspace", e);
} catch (IOException e) {
throw new WaveformServiceException("Error reading the media file in the workspace", e);
}
String waveformFilePath = FilenameUtils.removeExtension(mediaFile.getAbsolutePath()).concat('-' + track.getIdentifier()).concat("-waveform.png");
// create ffmpeg command
String[] command = new String[] { binary, "-nostats", "-i", mediaFile.getAbsolutePath(), "-lavfi", createWaveformFilter(track), "-an", "-vn", "-sn", "-y", waveformFilePath };
logger.debug("Start waveform ffmpeg process: {}", StringUtils.join(command, " "));
logger.info("Create waveform image file for track '{}' at {}", track.getIdentifier(), waveformFilePath);
// run ffmpeg
ProcessBuilder pb = new ProcessBuilder(command);
pb.redirectErrorStream(true);
Process ffmpegProcess = null;
int exitCode = 1;
BufferedReader errStream = null;
try {
ffmpegProcess = pb.start();
errStream = new BufferedReader(new InputStreamReader(ffmpegProcess.getInputStream()));
String line = errStream.readLine();
while (line != null) {
logger.debug(line);
line = errStream.readLine();
}
exitCode = ffmpegProcess.waitFor();
} catch (IOException ex) {
throw new WaveformServiceException("Start ffmpeg process failed", ex);
} catch (InterruptedException ex) {
throw new WaveformServiceException("Waiting for encoder process exited was interrupted unexpectly", ex);
} finally {
IoSupport.closeQuietly(ffmpegProcess);
IoSupport.closeQuietly(errStream);
if (exitCode != 0) {
try {
FileUtils.forceDelete(new File(waveformFilePath));
} catch (IOException e) {
// it is ok, no output file was generated by ffmpeg
}
}
}
if (exitCode != 0)
throw new WaveformServiceException("The encoder process exited abnormally with exit code " + exitCode);
// put waveform image into workspace
FileInputStream waveformFileInputStream = null;
URI waveformFileUri;
try {
waveformFileInputStream = new FileInputStream(waveformFilePath);
waveformFileUri = workspace.putInCollection(COLLECTION_ID, FilenameUtils.getName(waveformFilePath), waveformFileInputStream);
logger.info("Copied the created waveform to the workspace {}", waveformFileUri);
} catch (FileNotFoundException ex) {
throw new WaveformServiceException(String.format("Waveform image file '%s' not found", waveformFilePath), ex);
} catch (IOException ex) {
throw new WaveformServiceException(String.format("Can't write waveform image file '%s' to workspace", waveformFilePath), ex);
} catch (IllegalArgumentException ex) {
throw new WaveformServiceException(ex);
} finally {
IoSupport.closeQuietly(waveformFileInputStream);
logger.info("Deleted local waveform image file at {}", waveformFilePath);
FileUtils.deleteQuietly(new File(waveformFilePath));
}
// create media package element
MediaPackageElementBuilder mpElementBuilder = MediaPackageElementBuilderFactory.newInstance().newElementBuilder();
// it is up to the workflow operation handler to set the attachment flavor
Attachment waveformMpe = (Attachment) mpElementBuilder.elementFromURI(waveformFileUri, Type.Attachment, track.getFlavor());
waveformMpe.setIdentifier(IdBuilderFactory.newInstance().newIdBuilder().createNew().compact());
return waveformMpe;
}
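The filter string produced by createWaveformFilter(track) is not shown in this excerpt. As a rough, hypothetical illustration only: ffmpeg's showwavespic filter can render this kind of waveform image, and a simplified helper might look like the sketch below (the size and option choices are assumptions, not the values Opencast uses).
public final class WaveformFilterSketch {

  /** Builds an ffmpeg -lavfi filter that renders the input audio as a waveform image. */
  static String createWaveformFilter(int width, int height, boolean splitChannels) {
    return String.format("showwavespic=s=%dx%d:split_channels=%d",
        width, height, splitChannels ? 1 : 0);
  }

  public static void main(String[] args) {
    // prints: showwavespic=s=1920x240:split_channels=1
    System.out.println(createWaveformFilter(1920, 240, true));
  }
}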