Use of org.opencastproject.metadata.mpeg7.Segment in the Opencast project: class VideoSegmenterTest, method testAnalyze.
@Test
public void testAnalyze() throws Exception {
  // Kick off segmentation and block until the job has completed.
  Job job = vsegmenter.segment(track);
  new JobBarrier(null, serviceRegistry, 1000, job).waitForJobs();

  // Load the resulting mpeg-7 catalog from the job payload.
  Catalog catalog = (Catalog) MediaPackageElementParser.getFromXml(job.getPayload());
  Mpeg7Catalog mpeg7 = new Mpeg7CatalogImpl(catalog.getURI().toURL().openStream());

  // The catalog must describe audiovisual content.
  assertTrue("Audiovisual content was expected", mpeg7.hasVideoContent());
  assertNotNull("Audiovisual content expected", mpeg7.multimediaContent().next().elements().hasNext());
  MultimediaContentType content = mpeg7.multimediaContent().next().elements().next();

  // Walk the temporal decomposition: exactly two segments are expected.
  Iterator<? extends Segment> it = content.getTemporalDecomposition().segments();
  assertTrue(it.hasNext());

  // First segment: starts at 0 and lasts firstSegmentDuration.
  MediaTime first = it.next().getMediaTime();
  assertEquals("Unexpected start time of first segment", 0, first.getMediaTimePoint().getTimeInMilliseconds());
  assertEquals("Unexpected duration of first segment", firstSegmentDuration, first.getMediaDuration().getDurationInMilliseconds());

  // Second segment: starts where the first ended.
  assertTrue("Video is expected to have more than one segment", it.hasNext());
  MediaTime second = it.next().getMediaTime();
  assertEquals("Unexpected start time of second segment", firstSegmentDuration, second.getMediaTimePoint().getTimeInMilliseconds());
  assertEquals("Unexpected duration of second segment", secondSegmentDuration, second.getMediaDuration().getDurationInMilliseconds());

  // And nothing beyond the second segment.
  assertFalse("Found an unexpected third video segment", it.hasNext());
}
Use of org.opencastproject.metadata.mpeg7.Segment in the Opencast project: class VideoSegmenterTest, method testAnalyzeSegmentMerging.
/**
 * Verifies that {@code filterSegmentation} merges short segments into their
 * neighbors: for each test segmentation the filtered result must match the
 * predicted start times and durations exactly.
 */
@Test
public void testAnalyzeSegmentMerging() {
  Mpeg7CatalogService mpeg7CatalogService = vsegmenter.mpeg7CatalogService;
  MediaTime contentTime = new MediaRelTimeImpl(0, track.getDuration());
  MediaLocator contentLocator = new MediaLocatorImpl(track.getURI());
  Mpeg7Catalog mpeg7 = mpeg7CatalogService.newInstance();
  Video videoContent = mpeg7.addVideoContent("videosegment", contentTime, contentLocator);
  int segmentcount = 1;

  // Lists of segment durations (start times follow by accumulating them).
  int[] segmentArray1 = { 3000, 2000, 8000, 3000, 1000, 6000, 3000, 2000, 4000, 11000, 2000, 2000 };
  int[] segmentArray2 = { 1000, 2000, 8000, 3000, 1000, 6000, 3000, 2000, 4000, 11000, 2000, 4000 };
  int[] segmentArray3 = { 1000, 2000, 4000, 3000, 1000, 2000, 3000, 2000, 4000, 1000, 2000, 4000 };
  int[] segmentArray4 = { 6000, 7000, 13000, 9000, 8000, 11000, 5000, 16000 };
  // Predicted outcome of filtering each segmentation above.
  int[] prediction1 = { 5000, 10000, 8000, 9000, 15000 };
  int[] prediction2 = { 13000, 8000, 9000, 11000, 6000 };
  int[] prediction3 = { 29000 };
  int[] prediction4 = { 6000, 7000, 13000, 9000, 8000, 11000, 5000, 16000 };

  int[][] segmentArray = { segmentArray1, segmentArray2, segmentArray3, segmentArray4 };
  int[][] prediction = { prediction1, prediction2, prediction3, prediction4 };
  // Total track duration matching each segment array, set per iteration
  // (a blanket setDuration before the loop was dead code and is removed).
  long[] durations = { 47000L, 47000L, 29000L, 75000L };

  // Check for all test segmentations that "filterSegmentation" yields the expected result.
  for (int k = 0; k < segmentArray.length; k++) {
    LinkedList<Segment> segments = new LinkedList<Segment>();
    LinkedList<Segment> result = new LinkedList<Segment>();
    track.setDuration(durations[k]);

    // Build the input segmentation from the duration list.
    int previous = 0;
    for (int i = 0; i < segmentArray[k].length; i++) {
      Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount++);
      s.setMediaTime(new MediaRelTimeImpl(previous, segmentArray[k][i]));
      segments.add(s);
      previous += segmentArray[k][i];
    }

    vsegmenter.filterSegmentation(segments, track, result, 5000);
    assertEquals("segment merging yields wrong number of segments", prediction[k].length, result.size());

    // Compare start time and duration of each merged segment with the prediction.
    previous = 0;
    for (int i = 0; i < prediction[k].length; i++) {
      String message = "segment " + i + " in set " + k + " has the wrong start time.";
      String message1 = "segment " + i + " in set " + k + " has the wrong duration.";
      assertEquals(message, previous, result.get(i).getMediaTime().getMediaTimePoint().getTimeInMilliseconds());
      assertEquals(message1, prediction[k][i], result.get(i).getMediaTime().getMediaDuration().getDurationInMilliseconds());
      previous += prediction[k][i];
    }
  }
}
Use of org.opencastproject.metadata.mpeg7.Segment in the Opencast project: class TextAnalyzerServiceImpl, method extract.
/**
 * Starts text extraction on the image and returns a catalog element referencing the
 * resulting Mpeg7Catalog stored in the workspace.
 *
 * @param job
 *          the job this extraction runs under; its id names the catalog file in the workspace
 * @param image
 *          the attachment (image) to analyze
 * @return a catalog element referencing the resulting mpeg-7 catalog
 * @throws TextAnalyzerException
 *           if the image cannot be fetched, analyzed, serialized or stored
 * @throws MediaPackageException
 *           if the resulting catalog element cannot be created
 */
private Catalog extract(Job job, Attachment image) throws TextAnalyzerException, MediaPackageException {
final Attachment attachment = image;
final URI imageUrl = attachment.getURI();
File imageFile = null;
try {
Mpeg7CatalogImpl mpeg7 = Mpeg7CatalogImpl.newInstance();
logger.info("Starting text extraction from {}", imageUrl);
// Fetch the image into a local file; translate workspace failures into
// the service's own exception type.
try {
imageFile = workspace.get(imageUrl);
} catch (NotFoundException e) {
throw new TextAnalyzerException("Image " + imageUrl + " not found in workspace", e);
} catch (IOException e) {
throw new TextAnalyzerException("Unable to access " + imageUrl + " in workspace", e);
}
// Run the actual text analysis on the local copy of the image.
VideoText[] videoTexts = analyze(imageFile, image.getIdentifier());
// Create a temporal decomposition with a single zero-length media time.
MediaTime mediaTime = new MediaTimeImpl(0, 0);
Video avContent = mpeg7.addVideoContent(image.getIdentifier(), mediaTime, null);
// NOTE(review): unchecked cast — the decomposition is assumed to hold
// VideoSegment elements; confirm against the mpeg-7 metadata API.
TemporalDecomposition<VideoSegment> temporalDecomposition = (TemporalDecomposition<VideoSegment>) avContent.getTemporalDecomposition();
// Add a single segment covering the image.
VideoSegment videoSegment = temporalDecomposition.createSegment("segment-0");
videoSegment.setMediaTime(mediaTime);
// Add the video text to the spatio-temporal decomposition of the segment.
SpatioTemporalDecomposition spatioTemporalDecomposition = videoSegment.createSpatioTemporalDecomposition(true, false);
for (VideoText videoText : videoTexts) {
spatioTemporalDecomposition.addVideoText(videoText);
}
logger.info("Text extraction of {} finished, {} lines found", attachment.getURI(), videoTexts.length);
URI uri;
InputStream in;
// Serialize the catalog and store it in the workspace, named after the job id.
try {
in = mpeg7CatalogService.serialize(mpeg7);
} catch (IOException e) {
throw new TextAnalyzerException("Error serializing mpeg7", e);
}
try {
uri = workspace.putInCollection(COLLECTION_ID, job.getId() + ".xml", in);
} catch (IOException e) {
throw new TextAnalyzerException("Unable to put mpeg7 into the workspace", e);
}
// Wrap the stored mpeg-7 file in a media package catalog element of type TEXTS.
Catalog catalog = (Catalog) MediaPackageElementBuilderFactory.newInstance().newElementBuilder().newElement(Catalog.TYPE, MediaPackageElements.TEXTS);
catalog.setURI(uri);
logger.debug("Created MPEG7 catalog for {}", imageUrl);
return catalog;
} catch (Exception e) {
// Log once, then rethrow TextAnalyzerException unchanged; wrap anything
// else so callers only ever see the declared exception types.
logger.warn("Error extracting text from " + imageUrl, e);
if (e instanceof TextAnalyzerException) {
throw (TextAnalyzerException) e;
} else {
throw new TextAnalyzerException(e);
}
} finally {
// Best-effort cleanup of the source image; deletion failure is only logged.
try {
workspace.delete(imageUrl);
} catch (Exception e) {
logger.warn("Unable to delete temporary text analysis image {}: {}", imageUrl, e);
}
}
}
Use of org.opencastproject.metadata.mpeg7.Segment in the Opencast project: class TextAnalysisWorkflowOperationHandler, method start.
/**
 * {@inheritDoc}
 *
 * @see org.opencastproject.workflow.api.WorkflowOperationHandler#start(org.opencastproject.workflow.api.WorkflowInstance,
 *      JobContext)
 */
@Override
public WorkflowOperationResult start(WorkflowInstance workflowInstance, JobContext context) throws WorkflowOperationException {
  logger.debug("Running segments preview workflow operation on {}", workflowInstance);

  // Work on a copy of the media package and bail out early when it carries
  // no mpeg-7 segment catalog at all.
  MediaPackage mediaPackage = (MediaPackage) workflowInstance.getMediaPackage().clone();
  if (mediaPackage.getCatalogs(MediaPackageElements.SEGMENTS).length == 0) {
    logger.info("Media package {} does not contain segment information", mediaPackage);
    return createResult(Action.CONTINUE);
  }

  // Delegate the actual work; any failure is surfaced as an operation error.
  try {
    return extractVideoText(mediaPackage, workflowInstance.getCurrentOperation());
  } catch (Exception e) {
    throw new WorkflowOperationException(e);
  }
}
Use of org.opencastproject.metadata.mpeg7.Segment in the Opencast project: class Mpeg7CaptionConverter, method exportCaption.
/**
 * Serializes the given captions as an MPEG-7 catalog: each caption becomes an
 * audio segment carrying a free-text annotation in the requested language, and
 * the catalog XML is written to the output stream.
 *
 * @param outputStream
 *          stream the MPEG-7 XML is written to
 * @param captions
 *          the captions to export
 * @param language
 *          language identifier attached to each text annotation
 * @throws IOException
 *           if the catalog cannot be serialized to the stream
 */
@Override
public void exportCaption(OutputStream outputStream, List<Caption> captions, String language) throws IOException {
  Mpeg7Catalog mpeg7 = Mpeg7CatalogImpl.newInstance();
  MediaTime mediaTime = new MediaTimeImpl(0, 0);
  Audio audioContent = mpeg7.addAudioContent("captions", mediaTime, null);
  @SuppressWarnings("unchecked")
  TemporalDecomposition<AudioSegment> captionDecomposition = (TemporalDecomposition<AudioSegment>) audioContent.getTemporalDecomposition();

  int segmentCount = 0;
  for (Caption caption : captions) {
    // Get all the words/parts for the transcript; skip empty captions.
    String[] words = caption.getCaption();
    if (words.length == 0)
      continue;

    // Create a new segment covering exactly the caption's time span.
    AudioSegment segment = captionDecomposition.createSegment("segment-" + segmentCount++);
    long startTimeInMillis = toMilliseconds(caption.getStartTime());
    long duration = toMilliseconds(caption.getStopTime()) - startTimeInMillis;
    segment.setMediaTime(new MediaTimeImpl(startTimeInMillis, duration));

    // Attach the caption text as a single free-text annotation.
    TextAnnotation textAnnotation = segment.createTextAnnotation(0, 0, language);
    textAnnotation.addFreeTextAnnotation(new FreeTextAnnotationImpl(String.join(" ", words)));
  }

  try {
    Transformer tf = TransformerFactory.newInstance().newTransformer();
    DOMSource xmlSource = new DOMSource(mpeg7.toXml());
    tf.transform(xmlSource, new StreamResult(outputStream));
  } catch (TransformerException | TransformerFactoryConfigurationError | ParserConfigurationException e) {
    // Single handler for all serialization failures (the four previous catch
    // blocks were identical); the cause is preserved for callers.
    logger.warn("Error serializing mpeg7 captions catalog: {}", e.getMessage());
    throw new IOException(e);
  }
}

/**
 * Converts a caption {@code Time} (hours/minutes/seconds/milliseconds) to its
 * offset in milliseconds from zero.
 *
 * @param time
 *          the caption timestamp
 * @return the timestamp expressed as milliseconds
 */
private static long toMilliseconds(Time time) {
  // Accumulate on a zeroed UTC calendar so field overflow is normalized
  // exactly as in the original start/end-time computation.
  Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
  calendar.setTimeInMillis(0);
  calendar.add(Calendar.HOUR_OF_DAY, time.getHours());
  calendar.add(Calendar.MINUTE, time.getMinutes());
  calendar.add(Calendar.SECOND, time.getSeconds());
  calendar.add(Calendar.MILLISECOND, time.getMilliseconds());
  return calendar.getTimeInMillis();
}
Aggregations