Search in sources :

Example 1 with FilterContext

use of de.serviceflow.frankenstein.plugin.api.FilterContext in project Frankenstein by olir.

the class MovieProcessor method seek.

public void seek(final ProcessingListener l, int frameId) {
    // System.out.println("MovieProcessor.seek @"+frameId);
    if (configuration.doInput && frameId < currentPos) {
        if (l != null)
            l.seeking(0);
        currentPos = 0;
        configuration.getSource().reopen(l);
    }
    ExecutorThread.getInstance().execute(new Runnable() {

        @Override
        public void run() {
            currentPos = configuration.getSource().seek(frameId, l);
            frame = configuration.getSource().getFrame();
            if (frame != null && !frame.empty()) {
                FilterContext context = new DefaultFilterContext();
                Mat newFrame = frame;
                for (VideoFilter filter : filters) {
                    // System.out.println("MovieProcessor process
                    // "+filter.getClass().getName());
                    newFrame = filter.process(newFrame, frameId, context);
                }
                if (localFilters != null && !localFilters.isEmpty()) {
                    for (FilterElement element : localFilters) {
                        if (element.filter != null) {
                            if (element.r.start <= currentPos && currentPos < element.r.end) {
                                // System.out.println("MovieProcessor
                                // processStreamFrame " +
                                // element.filter);
                                newFrame = element.filter.process(newFrame, currentPos, context);
                            }
                        }
                    }
                }
                if (previewFilter != null) {
                    // System.out.println("MovieProcessor processStreamFrame
                    // " +
                    // previewFilter);
                    newFrame = previewFilter.process(newFrame, currentPos, context);
                }
                if (l != null)
                    l.nextFrameProcessed(newFrame, currentPos);
            } else {
                if (frameId <= movie_frameCount && l != null)
                    l.prematureEnd(frameId - 1);
            }
            if (l != null)
                l.seekDone(frameId);
        }
    });
}
Also used : DefaultFilterContext(de.serviceflow.frankenstein.vf.DefaultFilterContext) FilterElement(de.serviceflow.frankenstein.vf.FilterElement) Mat(org.opencv.core.Mat) SegmentVideoFilter(de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) VideoFilter(de.serviceflow.frankenstein.vf.VideoFilter) FilterContext(de.serviceflow.frankenstein.plugin.api.FilterContext)
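
A filter that fits this call pattern is straightforward to sketch. The following hypothetical pass-through filter assumes that VideoFilter declares only the two methods MovieProcessor invokes in these examples, configure (see the init example below) and process; the real interface may declare more.

import org.opencv.core.Mat;

import de.serviceflow.frankenstein.plugin.api.FilterContext;
import de.serviceflow.frankenstein.vf.VideoFilter;

// Hypothetical example: the method signatures are inferred from the calls in MovieProcessor.
public class PassThroughFilter implements VideoFilter {

    @Override
    public Mat configure(Mat firstFrame) {
        // No per-movie setup is needed for a pass-through filter.
        return firstFrame;
    }

    @Override
    public Mat process(Mat frame, int frameId, FilterContext context) {
        // A real filter would transform the Mat here, optionally using the
        // FilterContext that the caller creates once per frame.
        return frame;
    }
}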

Example 2 with FilterContext

use of de.serviceflow.frankenstein.plugin.api.FilterContext in project Frankenstein by olir.

the class MovieProcessor method processStreamFrame.

public void processStreamFrame(ProcessingListener l) {
    currentPos = 1;
    frame = configuration.getSource().getFrame();
    if (frame != null && !frame.empty()) {
        FilterContext context = new DefaultFilterContext();
        Mat newFrame = frame;
        for (VideoFilter filter : filters) {
            // System.out.println("MovieProcessor processStreamFrame " +
            // filter.getClass().getName());
            newFrame = filter.process(newFrame, currentPos, context);
        }
        if (localFilters != null && !localFilters.isEmpty()) {
            for (FilterElement element : localFilters) {
                if (element.filter != null) {
                    // System.out.println("MovieProcessor
                    // processStreamFrame
                    // " +
                    // element.filter);
                    newFrame = element.filter.process(newFrame, currentPos, context);
                }
            }
        }
        if (l != null)
            l.nextFrameProcessed(newFrame, currentPos);
        movie_w = newFrame.cols();
        movie_h = newFrame.rows();
    } else {
        if (l != null)
            l.prematureEnd(1);
    }
}
Also used : DefaultFilterContext(de.serviceflow.frankenstein.vf.DefaultFilterContext) FilterElement(de.serviceflow.frankenstein.vf.FilterElement) Mat(org.opencv.core.Mat) SegmentVideoFilter(de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) VideoFilter(de.serviceflow.frankenstein.vf.VideoFilter) FilterContext(de.serviceflow.frankenstein.plugin.api.FilterContext)

Example 3 with FilterContext

use of de.serviceflow.frankenstein.plugin.api.FilterContext in project Frankenstein by olir.

the class MovieProcessor method processVideo.

public boolean processVideo(ProcessingListener l) {
    try {
        streamStopped = !configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource);
        if (!configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource)) {
            System.out.print("doOutput=" + configuration.doOutput + " with source=" + (configuration.getSource() != null ? configuration.getSource().getClass().getName() : "none"));
            // temporarily
            if (configuration.doOutput && configuration.doInput) {
                if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -i \"" + configuration.getInputVideo() + "\"" + " -f ffmetadata " + tempMetadataFile.getAbsolutePath() + " -vn -ar 44100 -ac 2 -ab 192k -f mp3 -r 21 " + tempAudioFile.getAbsolutePath(), new TimeTaskHandler(l, "Splitting Audio")).run())
                    return false;
                configuration.metadata.clear();
                configuration.metadata.load(tempMetadataFile);
                System.out.print("Meta Data:\n===================\n" + configuration.metadata + "===================\n");
            } else if (configuration.doOutput) {
                // Create silent mp3
                if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -f lavfi -i anullsrc=r=44100:cl=mono -t " + (movie_frameCount / movie_fps) + " -q:a 9 -acodec libmp3lame " + tempAudioFile.getAbsolutePath(), new TimeTaskHandler(l, "Creating Silent Audio")).run())
                    return false;
            }
        }
        // 2. Process video without audio
        System.out.print("Processing video: ");
        Mat newFrame = null;
        if (l != null)
            l.taskUpdate(null, "Processing video");
        int i = 0;
        while (!stopped && (configuration.getSource().getFrames() < 0 || i < configuration.getSource().getFrames() || !streamStopped)) {
            i++;
            if (streamStopped) {
                currentPos = configuration.getSource().seek(i, l);
                frame = configuration.getSource().getFrame();
            } else {
                ((VideoStreamSource) configuration.getSource()).pause();
                frame = configuration.getSource().getFrame();
            }
            if (frame != null && !frame.empty()) {
                if (!filters.isEmpty()) {
                    FilterContext context = new DefaultFilterContext();
                    newFrame = frame;
                    for (VideoFilter filter : filters) {
                        // System.out.println("MovieProcessor
                        // process"+filter.getClass().getName());
                        newFrame = filter.process(newFrame, i, context);
                    }
                } else {
                    newFrame = frame;
                }
                if (localFilters != null && !localFilters.isEmpty()) {
                    FilterContext context = new DefaultFilterContext();
                    for (FilterElement element : localFilters) {
                        if (element.filter != null) {
                            if (element.r.start <= i && (i < element.r.end || !streamStopped)) {
                                // System.out.println("MovieProcessor
                                // processStreamFrame
                                // " +
                                // element.filter);
                                newFrame = element.filter.process(newFrame, i, context);
                            }
                        }
                    }
                }
                if (configuration.doOutput) {
                    if (movie_w != newFrame.cols() || movie_h != newFrame.rows())
                        System.out.println("Warning: outputVideo.write changed size:" + new Size(newFrame.cols(), newFrame.rows()));
                    outputVideo.write(newFrame);
                    if ((i % 1000) == 0) {
                        // break;
                        System.out.print("+");
                    } else if ((i % 100) == 0)
                        System.out.print(".");
                }
                if (l != null)
                    l.nextFrameProcessed(newFrame, currentPos);
            } else {
                if (currentPos < movie_frameCount && l != null)
                    l.prematureEnd(currentPos);
                break;
            }
        }
        System.out.println("ok\nFrames proccessed: " + i);
        if (configuration.doOutput) {
            outputVideo.release();
        }
        if (stopped) {
            return false;
        }
        if (configuration.doOutput) {
            File of = findFreeFile(new File(configuration.outputVideo));
            if (!configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource)) {
                if (configuration.doInput) {
                    if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -i \"" + tempVideoFile.getAbsolutePath() + "\" -i " + tempAudioFile.getAbsolutePath() + " -i " + tempMetadataFile.getAbsolutePath() + " -map_metadata 2" + " -c:a aac -c:v libx264  -q 17 \"" + of.getAbsolutePath() + '"', new TimeTaskHandler(l, "Assembling Output")).run())
                        return false;
                } else {
                    if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -i \"" + tempVideoFile.getAbsolutePath() + "\" -i " + tempAudioFile.getAbsolutePath() + " -c:a aac -c:v libx264  -q 17 \"" + of.getAbsolutePath() + '"', new TimeTaskHandler(l, "Processing Output")).run())
                        System.out.println("Warning: Task failed");
                }
            } else {
                System.out.println("Renaming temp  file " + tempVideoFile.getAbsolutePath());
                tempVideoFile.renameTo(of);
            }
            if (!of.exists()) {
                System.err.println("Missing output " + of.getAbsolutePath());
                return false;
            } else {
                System.out.println("Video created: " + of.getAbsolutePath());
            }
            tempVideoFile.delete();
            tempAudioFile.delete();
            tempMetadataFile.delete();
        }
    } finally {
        // if (!configuration.doInput || !(configuration.getSource()
        // instanceof VideoStreamSource))
        // closeInput();
        closeOutput();
        openOutput(null);
    }
    return true;
}
Also used : VideoStreamSource(de.serviceflow.frankenstein.vf.VideoStreamSource) DefaultFilterContext(de.serviceflow.frankenstein.vf.DefaultFilterContext) FilterElement(de.serviceflow.frankenstein.vf.FilterElement) Mat(org.opencv.core.Mat) Task(de.serviceflow.frankenstein.task.Task) SegmentVideoFilter(de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) VideoFilter(de.serviceflow.frankenstein.vf.VideoFilter) Size(org.opencv.core.Size) TimeTaskHandler(de.serviceflow.frankenstein.task.TimeTaskHandler) File(java.io.File) FilterContext(de.serviceflow.frankenstein.plugin.api.FilterContext)
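
Stripped of I/O and progress reporting, the per-frame work in processVideo reduces to one pattern: create a fresh DefaultFilterContext, run the global filters in order, then apply each segment-local filter whose range covers the frame. The condensed sketch below is not part of the project; the field accesses on FilterElement are written as they appear above.

import java.util.List;

import org.opencv.core.Mat;

import de.serviceflow.frankenstein.plugin.api.FilterContext;
import de.serviceflow.frankenstein.vf.DefaultFilterContext;
import de.serviceflow.frankenstein.vf.FilterElement;
import de.serviceflow.frankenstein.vf.VideoFilter;

final class FramePipeline {

    private FramePipeline() {
    }

    // Runs one frame through the global chain, then through any local filters
    // whose range contains frameId (the same range check used in seek and processVideo).
    static Mat processFrame(Mat frame, int frameId, List<VideoFilter> filters, List<FilterElement> localFilters) {
        FilterContext context = new DefaultFilterContext();
        Mat out = frame;
        for (VideoFilter filter : filters) {
            out = filter.process(out, frameId, context);
        }
        if (localFilters != null) {
            for (FilterElement element : localFilters) {
                if (element.filter != null && element.r.start <= frameId && frameId < element.r.end) {
                    out = element.filter.process(out, frameId, context);
                }
            }
        }
        return out;
    }
}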

Example 4 with FilterContext

use of de.serviceflow.frankenstein.plugin.api.FilterContext in project Frankenstein by olir.

the class MovieProcessor method init.

public void init(ProcessingListener l) {
    currentPos = 1;
    if (!openInput(l)) {
        if (l != null)
            l.prematureEnd(1);
        return;
    }
    frame = configuration.getSource().getFrame();
    if (frame != null && !frame.empty()) {
        Mat newFrame = frame;
        if (!filters.isEmpty()) {
            for (VideoFilter filter : filters) {
                System.out.println("MovieProcessor configure " + filter.getClass().getName());
                newFrame = filter.configure(newFrame);
            }
        }
        FilterContext context = new DefaultFilterContext();
        newFrame = frame;
        for (VideoFilter filter : filters) {
            System.out.println("MovieProcessor process " + filter.getClass().getName());
            newFrame = filter.process(newFrame, 1, context);
        }
        if (l != null)
            l.nextFrameProcessed(newFrame, currentPos);
        movie_w = newFrame.cols();
        movie_h = newFrame.rows();
    } else {
        if (l != null)
            l.prematureEnd(1);
        return;
    }
    openOutput(l);
    // TODO: Currently Windows only
    ffmpeg = new File(ffmpegPath, "\\bin\\ffmpeg.exe");
    if (l != null)
        l.videoStarted(configuration.getSource().getFrames(), configuration.getSource().getFps());
    currentPos = 1;
}
Also used : DefaultFilterContext(de.serviceflow.frankenstein.vf.DefaultFilterContext) Mat(org.opencv.core.Mat) SegmentVideoFilter(de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) VideoFilter(de.serviceflow.frankenstein.vf.VideoFilter) File(java.io.File) FilterContext(de.serviceflow.frankenstein.plugin.api.FilterContext)
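
All four methods report progress through ProcessingListener callbacks. A minimal logging listener is sketched below; the callback names are taken from the examples above, but the parameter types (and any further methods the interface may declare) are assumptions, so this may need adjusting against the real de.serviceflow.frankenstein API.

import org.opencv.core.Mat;

// Hypothetical listener; the ProcessingListener import is omitted because its
// package is not shown in these excerpts, and the signatures below are inferred.
public class LoggingListener implements ProcessingListener {

    @Override
    public void videoStarted(int frames, double fps) {
        System.out.println("Video started: " + frames + " frames @ " + fps + " fps");
    }

    @Override
    public void taskUpdate(Object task, String message) {
        System.out.println("Task: " + message);
    }

    @Override
    public void seeking(int pos) {
        System.out.println("Seeking to frame " + pos);
    }

    @Override
    public void seekDone(int pos) {
        System.out.println("Seek done at frame " + pos);
    }

    @Override
    public void nextFrameProcessed(Mat frame, int pos) {
        System.out.println("Processed frame " + pos);
    }

    @Override
    public void prematureEnd(int lastFrame) {
        System.out.println("Premature end at frame " + lastFrame);
    }
}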

Aggregations

FilterContext (de.serviceflow.frankenstein.plugin.api.FilterContext) 4
SegmentVideoFilter (de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) 4
DefaultFilterContext (de.serviceflow.frankenstein.vf.DefaultFilterContext) 4
VideoFilter (de.serviceflow.frankenstein.vf.VideoFilter) 4
Mat (org.opencv.core.Mat) 4
FilterElement (de.serviceflow.frankenstein.vf.FilterElement) 3
File (java.io.File) 2
Task (de.serviceflow.frankenstein.task.Task) 1
TimeTaskHandler (de.serviceflow.frankenstein.task.TimeTaskHandler) 1
VideoStreamSource (de.serviceflow.frankenstein.vf.VideoStreamSource) 1
Size (org.opencv.core.Size) 1