Search in sources :

Example 11 with Segment

use of org.opencastproject.metadata.mpeg7.Segment in project opencast by opencast.

the class VideoSegmenterServiceImpl method runSegmentationFFmpeg.

/**
 * Does the actual segmentation with an FFmpeg call, adds the segments to the given videoContent of a catalog and
 * returns a list with the resulting segments
 *
 * @param track the element to analyze
 * @param videoContent the videoContent of the Mpeg7Catalog that the segments should be added to
 * @param mediaFile the file of the track to analyze
 * @param changesThreshold the changesThreshold that is used as option for the FFmpeg call
 * @return a list of the resulting segments
 * @throws IOException if the FFmpeg process cannot be started
 * @throws VideoSegmenterException if the FFmpeg output cannot be parsed
 */
private LinkedList<Segment> runSegmentationFFmpeg(Track track, Video videoContent, File mediaFile, float changesThreshold) throws IOException, VideoSegmenterException {
    // FFmpeg scene-change detection: the select filter passes only frames whose scene
    // score exceeds changesThreshold; showinfo then prints one line per selected frame.
    String[] command = new String[] { binary, "-nostats", "-nostdin", "-i", mediaFile.getAbsolutePath(), "-filter:v", "select=gt(scene\\," + changesThreshold + "),showinfo", "-f", "null", "-" };
    // Join the command so the log shows the actual command line instead of the array's identity hash
    logger.info("Detecting video segments using command: {}", String.join(" ", command));
    ProcessBuilder pbuilder = new ProcessBuilder(command);
    List<String> segmentsStrings = new LinkedList<>();
    Process process = pbuilder.start();
    // FFmpeg writes filter output to stderr, so read the error stream
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
        String line = reader.readLine();
        while (null != line) {
            if (line.startsWith("[Parsed_showinfo")) {
                segmentsStrings.add(line);
            }
            line = reader.readLine();
        }
    } catch (IOException e) {
        // Log the full stack trace (not just the message); segmentation then falls
        // back to a single full-length segment below.
        logger.error("Error executing ffmpeg", e);
    }
    // Example of a showinfo line to be parsed:
    // [Parsed_showinfo_1 @ 0x157fb40] n:0 pts:12 pts_time:12 pos:227495
    // fmt:rgb24 sar:0/1 s:320x240 i:P iskey:1 type:I checksum:8DF39EA9
    // plane_checksum:[8DF39EA9]
    int segmentcount = 1;
    LinkedList<Segment> segments = new LinkedList<>();
    if (segmentsStrings.isEmpty()) {
        // No scene changes detected: the whole track is one segment
        Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount);
        s.setMediaTime(new MediaRelTimeImpl(0, track.getDuration()));
        segments.add(s);
    } else {
        long starttime = 0;
        long endtime = 0;
        Pattern pattern = Pattern.compile("pts_time\\:\\d+(\\.\\d+)?");
        for (String seginfo : segmentsStrings) {
            Matcher matcher = pattern.matcher(seginfo);
            String time = "";
            while (matcher.find()) {
                // strip the "pts_time:" prefix (9 characters)
                time = matcher.group().substring(9);
            }
            if ("".equals(time)) {
                // showinfo lines without a pts_time field are skipped, since the
                // filter is used for multiple purposes.
                continue;
            }
            try {
                // convert seconds (possibly fractional) to milliseconds
                endtime = Math.round(Float.parseFloat(time) * 1000);
            } catch (NumberFormatException e) {
                logger.error("Unable to parse FFmpeg output, likely FFmpeg version mismatch!", e);
                throw new VideoSegmenterException(e);
            }
            long segmentLength = endtime - starttime;
            // accept a cut only if the resulting segment is longer than the stability prefilter
            if (1000 * stabilityThresholdPrefilter < segmentLength) {
                Segment segment = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount);
                segment.setMediaTime(new MediaRelTimeImpl(starttime, endtime - starttime));
                // log the actual duration (endtime - starttime), not the raw end time
                logger.debug("Created segment {} at start time {} with duration {}", segmentcount, starttime, endtime - starttime);
                segments.add(segment);
                segmentcount++;
                starttime = endtime;
            }
        }
        // Add last segment spanning from the last accepted cut to the end of the track
        Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount);
        s.setMediaTime(new MediaRelTimeImpl(starttime, track.getDuration() - starttime));
        // log the duration actually set (previously logged track.getDuration() - endtime)
        logger.debug("Created segment {} at start time {} with duration {}", segmentcount, starttime, track.getDuration() - starttime);
        segments.add(s);
    }
    logger.info("Segmentation of {} yields {} segments", mediaFile.toURI().toURL(), segments.size());
    return segments;
}
Also used : Pattern(java.util.regex.Pattern) InputStreamReader(java.io.InputStreamReader) Matcher(java.util.regex.Matcher) IOException(java.io.IOException) VideoSegmenterException(org.opencastproject.videosegmenter.api.VideoSegmenterException) LinkedList(java.util.LinkedList) MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) Segment(org.opencastproject.metadata.mpeg7.Segment) BufferedReader(java.io.BufferedReader) MediaRelTimeImpl(org.opencastproject.metadata.mpeg7.MediaRelTimeImpl)

Example 12 with Segment

use of org.opencastproject.metadata.mpeg7.Segment in project opencast by opencast.

the class VideoSegmenterTest method testAnalyze.

@Test
public void testAnalyze() throws Exception {
    // Run the segmentation job and block until it finishes
    Job job = vsegmenter.segment(track);
    JobBarrier barrier = new JobBarrier(null, serviceRegistry, 1000, job);
    barrier.waitForJobs();
    // Load the resulting MPEG-7 catalog from the job payload
    Catalog catalog = (Catalog) MediaPackageElementParser.getFromXml(job.getPayload());
    Mpeg7Catalog mpeg7 = new Mpeg7CatalogImpl(catalog.getURI().toURL().openStream());
    // The catalog must contain video content
    assertTrue("Audiovisual content was expected", mpeg7.hasVideoContent());
    MultimediaContentType contentType = mpeg7.multimediaContent().next().elements().next();
    // The temporal decomposition must contain at least one segment
    TemporalDecomposition<? extends Segment> decomposition = contentType.getTemporalDecomposition();
    Iterator<? extends Segment> segmentIterator = decomposition.segments();
    assertTrue(segmentIterator.hasNext());
    // First segment: starts at 0 and lasts firstSegmentDuration
    MediaTime firstTime = segmentIterator.next().getMediaTime();
    assertEquals("Unexpected start time of first segment", 0, firstTime.getMediaTimePoint().getTimeInMilliseconds());
    assertEquals("Unexpected duration of first segment", firstSegmentDuration, firstTime.getMediaDuration().getDurationInMilliseconds());
    // Second segment: starts where the first one ended
    assertTrue("Video is expected to have more than one segment", segmentIterator.hasNext());
    MediaTime secondTime = segmentIterator.next().getMediaTime();
    assertEquals("Unexpected start time of second segment", firstSegmentDuration, secondTime.getMediaTimePoint().getTimeInMilliseconds());
    assertEquals("Unexpected duration of second segment", secondSegmentDuration, secondTime.getMediaDuration().getDurationInMilliseconds());
    // No further segments expected
    assertFalse("Found an unexpected third video segment", segmentIterator.hasNext());
}
Also used : Mpeg7Catalog(org.opencastproject.metadata.mpeg7.Mpeg7Catalog) MediaTime(org.opencastproject.metadata.mpeg7.MediaTime) Mpeg7CatalogImpl(org.opencastproject.metadata.mpeg7.Mpeg7CatalogImpl) Job(org.opencastproject.job.api.Job) JobBarrier(org.opencastproject.job.api.JobBarrier) MultimediaContentType(org.opencastproject.metadata.mpeg7.MultimediaContentType) Catalog(org.opencastproject.mediapackage.Catalog) Mpeg7Catalog(org.opencastproject.metadata.mpeg7.Mpeg7Catalog) Segment(org.opencastproject.metadata.mpeg7.Segment) Test(org.junit.Test)

Example 13 with Segment

use of org.opencastproject.metadata.mpeg7.Segment in project opencast by opencast.

the class VideoSegmenterTest method testAnalyzeSegmentMerging.

@Test
public void testAnalyzeSegmentMerging() {
    Mpeg7CatalogService catalogService = vsegmenter.mpeg7CatalogService;
    MediaTime contentTime = new MediaRelTimeImpl(0, track.getDuration());
    MediaLocator contentLocator = new MediaLocatorImpl(track.getURI());
    Mpeg7Catalog mpeg7 = catalogService.newInstance();
    Video videoContent = mpeg7.addVideoContent("videosegment", contentTime, contentLocator);
    int segmentId = 1;
    track.setDuration(47000L);
    // list of segment durations per test set (start times can be derived from those)
    int[][] segmentArrays = {
        { 3000, 2000, 8000, 3000, 1000, 6000, 3000, 2000, 4000, 11000, 2000, 2000 },
        { 1000, 2000, 8000, 3000, 1000, 6000, 3000, 2000, 4000, 11000, 2000, 4000 },
        { 1000, 2000, 4000, 3000, 1000, 2000, 3000, 2000, 4000, 1000, 2000, 4000 },
        { 6000, 7000, 13000, 9000, 8000, 11000, 5000, 16000 }
    };
    // predicted outcome of filtering each segmentation
    int[][] predictions = {
        { 5000, 10000, 8000, 9000, 15000 },
        { 13000, 8000, 9000, 11000, 6000 },
        { 29000 },
        { 6000, 7000, 13000, 9000, 8000, 11000, 5000, 16000 }
    };
    // total duration of the respective segment arrays
    long[] durations = { 47000L, 47000L, 29000L, 75000L };
    // check for all test segmentations whether "filterSegmentation" yields the expected result
    for (int k = 0; k < segmentArrays.length; k++) {
        LinkedList<Segment> segments = new LinkedList<Segment>();
        LinkedList<Segment> result = new LinkedList<Segment>();
        track.setDuration(durations[k]);
        long offset = 0;
        for (int segmentDuration : segmentArrays[k]) {
            Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentId++);
            s.setMediaTime(new MediaRelTimeImpl(offset, segmentDuration));
            segments.add(s);
            offset += segmentDuration;
        }
        vsegmenter.filterSegmentation(segments, track, result, 5000);
        assertEquals("segment merging yields wrong number of segments", predictions[k].length, result.size());
        offset = 0;
        for (int i = 0; i < predictions[k].length; i++) {
            assertEquals("segment " + i + " in set " + k + " has the wrong start time.", offset, result.get(i).getMediaTime().getMediaTimePoint().getTimeInMilliseconds());
            assertEquals("segment " + i + " in set " + k + " has the wrong duration.", predictions[k][i], result.get(i).getMediaTime().getMediaDuration().getDurationInMilliseconds());
            offset += predictions[k][i];
        }
    }
}
Also used : Mpeg7CatalogService(org.opencastproject.metadata.mpeg7.Mpeg7CatalogService) Segment(org.opencastproject.metadata.mpeg7.Segment) Mpeg7Catalog(org.opencastproject.metadata.mpeg7.Mpeg7Catalog) MediaLocator(org.opencastproject.metadata.mpeg7.MediaLocator) Video(org.opencastproject.metadata.mpeg7.Video) MediaTime(org.opencastproject.metadata.mpeg7.MediaTime) MediaLocatorImpl(org.opencastproject.metadata.mpeg7.MediaLocatorImpl) MediaRelTimeImpl(org.opencastproject.metadata.mpeg7.MediaRelTimeImpl) Test(org.junit.Test)

Example 14 with Segment

use of org.opencastproject.metadata.mpeg7.Segment in project opencast by opencast.

the class VideoSegmenterServiceImpl method uniformSegmentation.

/**
 * Creates a uniform segmentation for a given track, with prefNumber as the number of segments
 * which will all have the same length
 *
 * @param track the track that is segmented
 * @param segmentsNew will be set to list of new segments (pass null if not required)
 * @param prefNumber number of generated segments, must be at least 1
 * @return Mpeg7Catalog that can later be saved in a Catalog as endresult
 * @throws IllegalArgumentException if prefNumber is less than 1
 */
protected Mpeg7Catalog uniformSegmentation(Track track, LinkedList<Segment> segmentsNew, int prefNumber) {
    // guard against division by zero (prefNumber == 0) and a nonsensical negative segment count
    if (prefNumber < 1) {
        throw new IllegalArgumentException("prefNumber must be at least 1, but was " + prefNumber);
    }
    if (segmentsNew == null) {
        segmentsNew = new LinkedList<Segment>();
    }
    MediaTime contentTime = new MediaRelTimeImpl(0, track.getDuration());
    MediaLocator contentLocator = new MediaLocatorImpl(track.getURI());
    Mpeg7Catalog mpeg7 = mpeg7CatalogService.newInstance();
    Video videoContent = mpeg7.addVideoContent("videosegment", contentTime, contentLocator);
    // integer division: any remainder is absorbed by the last segment below
    long segmentDuration = track.getDuration() / prefNumber;
    long currentSegStart = 0;
    // create "prefNumber"-many segments that all have the same length
    for (int i = 1; i < prefNumber; i++) {
        Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + i);
        s.setMediaTime(new MediaRelTimeImpl(currentSegStart, segmentDuration));
        segmentsNew.add(s);
        currentSegStart += segmentDuration;
    }
    // add last segment separately to make sure the last segment ends exactly at the end of the track
    Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + prefNumber);
    s.setMediaTime(new MediaRelTimeImpl(currentSegStart, track.getDuration() - currentSegStart));
    segmentsNew.add(s);
    return mpeg7;
}
Also used : Mpeg7Catalog(org.opencastproject.metadata.mpeg7.Mpeg7Catalog) MediaLocator(org.opencastproject.metadata.mpeg7.MediaLocator) Video(org.opencastproject.metadata.mpeg7.Video) MediaTime(org.opencastproject.metadata.mpeg7.MediaTime) Segment(org.opencastproject.metadata.mpeg7.Segment) MediaLocatorImpl(org.opencastproject.metadata.mpeg7.MediaLocatorImpl) MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) MediaRelTimeImpl(org.opencastproject.metadata.mpeg7.MediaRelTimeImpl)

Example 15 with Segment

use of org.opencastproject.metadata.mpeg7.Segment in project opencast by opencast.

the class VideoSegmenterServiceImpl method filterSegmentation.

/**
 * Merges small subsequent segments (with high difference) into a bigger one
 *
 * @param segments list of segments to be filtered
 * @param track the track that is segmented
 * @param segmentsNew will be set to list of new segments (pass null if not required)
 * @param mergeThresh minimum duration for a segment in milliseconds
 * @return Mpeg7Catalog that can later be saved in a Catalog as endresult
 */
protected Mpeg7Catalog filterSegmentation(LinkedList<Segment> segments, Track track, LinkedList<Segment> segmentsNew, int mergeThresh) {
    if (segmentsNew == null) {
        segmentsNew = new LinkedList<Segment>();
    }
    // true while consecutive short segments are being collected into one merged segment
    boolean merging = false;
    MediaTime contentTime = new MediaRelTimeImpl(0, track.getDuration());
    MediaLocator contentLocator = new MediaLocatorImpl(track.getURI());
    Mpeg7Catalog mpeg7 = mpeg7CatalogService.newInstance();
    Video videoContent = mpeg7.addVideoContent("videosegment", contentTime, contentLocator);
    // running counter used to number the segments written to segmentsNew
    int segmentcount = 1;
    // start of the merged segment currently under construction (only meaningful while merging)
    MediaTimePoint currentSegStart = new MediaTimePointImpl();
    for (Segment o : segments) {
        // if the current segment is shorter than merge treshold start merging
        if (o.getMediaTime().getMediaDuration().getDurationInMilliseconds() <= mergeThresh) {
            // start merging and save beginning of new segment that will be generated
            if (!merging) {
                currentSegStart = o.getMediaTime().getMediaTimePoint();
                merging = true;
            }
        // current segment is longer than merge threshold
        } else {
            long currentSegDuration = o.getMediaTime().getMediaDuration().getDurationInMilliseconds();
            long currentSegEnd = o.getMediaTime().getMediaTimePoint().getTimeInMilliseconds() + currentSegDuration;
            if (merging) {
                // the merged segment spans from its saved start to the start of this long segment
                long newDuration = o.getMediaTime().getMediaTimePoint().getTimeInMilliseconds() - currentSegStart.getTimeInMilliseconds();
                // save new segment that merges all previously skipped short segments
                if (newDuration >= mergeThresh) {
                    Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount++);
                    s.setMediaTime(new MediaRelTimeImpl(currentSegStart.getTimeInMilliseconds(), newDuration));
                    segmentsNew.add(s);
                    // copy the following long segment to new list
                    Segment s2 = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount++);
                    s2.setMediaTime(o.getMediaTime());
                    segmentsNew.add(s2);
                // if too short split new segment in middle and merge halves to
                // previous and following segments
                } else {
                    long followingStartOld = o.getMediaTime().getMediaTimePoint().getTimeInMilliseconds();
                    // split point halfway between the merged segment's start and this segment's start
                    long newSplit = (currentSegStart.getTimeInMilliseconds() + followingStartOld) / 2;
                    long followingEnd = followingStartOld + o.getMediaTime().getMediaDuration().getDurationInMilliseconds();
                    long followingDuration = followingEnd - newSplit;
                    // if at beginning, don't split, just merge to first large segment
                    if (segmentsNew.isEmpty()) {
                        Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount++);
                        s.setMediaTime(new MediaRelTimeImpl(0, followingEnd));
                        segmentsNew.add(s);
                    } else {
                        long previousStart = segmentsNew.getLast().getMediaTime().getMediaTimePoint().getTimeInMilliseconds();
                        // adjust end time of previous segment to split time
                        segmentsNew.getLast().setMediaTime(new MediaRelTimeImpl(previousStart, newSplit - previousStart));
                        // create new segment starting at split time
                        Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount++);
                        s.setMediaTime(new MediaRelTimeImpl(newSplit, followingDuration));
                        segmentsNew.add(s);
                    }
                }
                merging = false;
            // copy segments that are long enough to new list (with corrected number)
            } else {
                Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount++);
                s.setMediaTime(o.getMediaTime());
                segmentsNew.add(s);
            }
        }
    }
    // if there is an unfinished merging process after going through all segments
    if (merging && !segmentsNew.isEmpty()) {
        // the trailing merged segment runs to the end of the track
        long newDuration = track.getDuration() - currentSegStart.getTimeInMilliseconds();
        // if merged segment is long enough, create new segment
        if (newDuration >= mergeThresh) {
            Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount);
            s.setMediaTime(new MediaRelTimeImpl(currentSegStart.getTimeInMilliseconds(), newDuration));
            segmentsNew.add(s);
        // if not long enough, merge with previous segment
        } else {
            // extend the last segment so it ends exactly at the end of the track
            newDuration = track.getDuration() - segmentsNew.getLast().getMediaTime().getMediaTimePoint().getTimeInMilliseconds();
            segmentsNew.getLast().setMediaTime(new MediaRelTimeImpl(segmentsNew.getLast().getMediaTime().getMediaTimePoint().getTimeInMilliseconds(), newDuration));
        }
    }
    // if nothing was produced at all (e.g. every segment was short), fall back to a single
    // segment spanning the whole video
    if (segmentsNew.isEmpty()) {
        Segment s = videoContent.getTemporalDecomposition().createSegment("segment-" + segmentcount);
        s.setMediaTime(new MediaRelTimeImpl(0, track.getDuration()));
        segmentsNew.add(s);
    }
    return mpeg7;
}
Also used : MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) Segment(org.opencastproject.metadata.mpeg7.Segment) MediaTimePoint(org.opencastproject.metadata.mpeg7.MediaTimePoint) Mpeg7Catalog(org.opencastproject.metadata.mpeg7.Mpeg7Catalog) MediaLocator(org.opencastproject.metadata.mpeg7.MediaLocator) MediaTimePointImpl(org.opencastproject.metadata.mpeg7.MediaTimePointImpl) Video(org.opencastproject.metadata.mpeg7.Video) MediaTime(org.opencastproject.metadata.mpeg7.MediaTime) MediaLocatorImpl(org.opencastproject.metadata.mpeg7.MediaLocatorImpl) MediaRelTimeImpl(org.opencastproject.metadata.mpeg7.MediaRelTimeImpl)

Aggregations

Segment (org.opencastproject.metadata.mpeg7.Segment)9 Segment (org.apache.commons.compress.harmony.unpack200.Segment)8 Mpeg7Catalog (org.opencastproject.metadata.mpeg7.Mpeg7Catalog)8 MediaTimePoint (org.opencastproject.metadata.mpeg7.MediaTimePoint)7 Video (org.opencastproject.metadata.mpeg7.Video)7 BufferedReader (java.io.BufferedReader)6 Segment (org.dishevelled.bio.assembly.gfa1.Segment)6 MediaTime (org.opencastproject.metadata.mpeg7.MediaTime)6 PrintWriter (java.io.PrintWriter)5 LinkedList (java.util.LinkedList)5 CommandLineParseException (org.dishevelled.commandline.CommandLineParseException)5 Catalog (org.opencastproject.mediapackage.Catalog)5 MediaRelTimeImpl (org.opencastproject.metadata.mpeg7.MediaRelTimeImpl)5 HashMap (java.util.HashMap)4 Gfa1Adapter (org.dishevelled.bio.assembly.gfa1.Gfa1Adapter)4 Job (org.opencastproject.job.api.Job)4 MediaLocator (org.opencastproject.metadata.mpeg7.MediaLocator)4 MediaLocatorImpl (org.opencastproject.metadata.mpeg7.MediaLocatorImpl)4 ServiceRegistryException (org.opencastproject.serviceregistry.api.ServiceRegistryException)4 FileOutputStream (java.io.FileOutputStream)3