Example 1 with FilterElement

Use of de.serviceflow.frankenstein.vf.FilterElement in project Frankenstein by olir.

From class MovieProcessor, method seek:

public void seek(final ProcessingListener l, int frameId) {
    // System.out.println("MovieProcessor.seek @"+frameId);
    if (configuration.doInput && frameId < currentPos) {
        if (l != null)
            l.seeking(0);
        currentPos = 0;
        configuration.getSource().reopen(l);
    }
    ExecutorThread.getInstance().execute(new Runnable() {

        @Override
        public void run() {
            currentPos = configuration.getSource().seek(frameId, l);
            frame = configuration.getSource().getFrame();
            if (frame != null && !frame.empty()) {
                FilterContext context = new DefaultFilterContext();
                Mat newFrame = frame;
                for (VideoFilter filter : filters) {
                    // System.out.println("MovieProcessor process
                    // "+filter.getClass().getName());
                    newFrame = filter.process(newFrame, frameId, context);
                }
                if (localFilters != null && !localFilters.isEmpty()) {
                    for (FilterElement element : localFilters) {
                        if (element.filter != null) {
                            if (element.r.start <= currentPos && currentPos < element.r.end) {
                                // System.out.println("MovieProcessor
                                // processStreamFrame " +
                                // element.filter);
                                newFrame = element.filter.process(newFrame, currentPos, context);
                            }
                        }
                    }
                }
                if (previewFilter != null) {
                    // System.out.println("MovieProcessor processStreamFrame
                    // " +
                    // previewFilter);
                    newFrame = previewFilter.process(newFrame, currentPos, context);
                }
                if (l != null)
                    l.nextFrameProcessed(newFrame, currentPos);
            } else {
                if (frameId <= movie_frameCount && l != null)
                    l.prematureEnd(frameId - 1);
            }
            if (l != null)
                l.seekDone(frameId);
        }
    });
}
Also used: DefaultFilterContext(de.serviceflow.frankenstein.vf.DefaultFilterContext) FilterElement(de.serviceflow.frankenstein.vf.FilterElement) Mat(org.opencv.core.Mat) SegmentVideoFilter(de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) VideoFilter(de.serviceflow.frankenstein.vf.VideoFilter) FilterContext(de.serviceflow.frankenstein.plugin.api.FilterContext)
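
The local-filter loop above applies a FilterElement only while the current frame position lies inside its Range (inclusive start, exclusive end). Below is a minimal, self-contained sketch of that predicate; RangeGateSketch and isActiveAt are illustrative names, not part of the Frankenstein code base.

import org.opencv.core.Range;

public class RangeGateSketch {

    // Hypothetical helper mirroring "element.r.start <= currentPos && currentPos < element.r.end".
    static boolean isActiveAt(Range r, int position) {
        return r.start <= position && position < r.end;
    }

    public static void main(String[] args) {
        Range r = new Range(100, 200);          // segment covering frames 100..199
        System.out.println(isActiveAt(r, 99));  // false
        System.out.println(isActiveAt(r, 100)); // true
        System.out.println(isActiveAt(r, 199)); // true
        System.out.println(isActiveAt(r, 200)); // false
    }
}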

Example 2 with FilterElement

Use of de.serviceflow.frankenstein.vf.FilterElement in project Frankenstein by olir.

From class MovieProcessor, method processStreamFrame:

public void processStreamFrame(ProcessingListener l) {
    currentPos = 1;
    frame = configuration.getSource().getFrame();
    if (frame != null && !frame.empty()) {
        FilterContext context = new DefaultFilterContext();
        Mat newFrame = frame;
        for (VideoFilter filter : filters) {
            // System.out.println("MovieProcessor processStreamFrame " +
            // filter.getClass().getName());
            newFrame = filter.process(newFrame, currentPos, context);
        }
        if (localFilters != null && !localFilters.isEmpty()) {
            for (FilterElement element : localFilters) {
                if (element.filter != null) {
                    // System.out.println("MovieProcessor
                    // processStreamFrame
                    // " +
                    // element.filter);
                    newFrame = element.filter.process(newFrame, currentPos, context);
                }
            }
        }
        if (l != null)
            l.nextFrameProcessed(newFrame, currentPos);
        movie_w = newFrame.cols();
        movie_h = newFrame.rows();
    } else {
        if (l != null)
            l.prematureEnd(1);
    }
}
Also used: DefaultFilterContext(de.serviceflow.frankenstein.vf.DefaultFilterContext) FilterElement(de.serviceflow.frankenstein.vf.FilterElement) Mat(org.opencv.core.Mat) SegmentVideoFilter(de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) VideoFilter(de.serviceflow.frankenstein.vf.VideoFilter) FilterContext(de.serviceflow.frankenstein.plugin.api.FilterContext)
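
processStreamFrame pushes each frame through the global filters first and then through all local FilterElements, with every filter consuming the previous filter's output. A reduced sketch of that chaining pattern, using java.util.function.UnaryOperator as a stand-in for VideoFilter.process(...); FilterChainSketch is an illustrative name only.

import java.util.List;
import java.util.function.UnaryOperator;

public class FilterChainSketch {

    // Same shape as "newFrame = filter.process(newFrame, currentPos, context)":
    // each stage receives the previous stage's output.
    static <T> T applyChain(T input, List<UnaryOperator<T>> filters) {
        T current = input;
        for (UnaryOperator<T> f : filters) {
            current = f.apply(current);
        }
        return current;
    }

    public static void main(String[] args) {
        List<UnaryOperator<String>> filters = List.of(s -> s + "->blur", s -> s + "->crop");
        // prints: frame->blur->crop
        System.out.println(applyChain("frame", filters));
    }
}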

Example 3 with FilterElement

Use of de.serviceflow.frankenstein.vf.FilterElement in project Frankenstein by olir.

From class MovieProcessor, method processVideo:

public boolean processVideo(ProcessingListener l) {
    try {
        streamStopped = !configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource);
        if (!configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource)) {
            System.out.print("doOutput=" + configuration.doOutput + " with source=" + (configuration.getSource() != null ? configuration.getSource().getClass().getName() : "none"));
            // temporarily
            if (configuration.doOutput && configuration.doInput) {
                if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -i \"" + configuration.getInputVideo() + "\"" + " -f ffmetadata " + tempMetadataFile.getAbsolutePath() + " -vn -ar 44100 -ac 2 -ab 192k -f mp3 -r 21 " + tempAudioFile.getAbsolutePath(), new TimeTaskHandler(l, "Splitting Audio")).run())
                    return false;
                configuration.metadata.clear();
                configuration.metadata.load(tempMetadataFile);
                System.out.print("Meta Data:\n===================\n" + configuration.metadata + "===================\n");
            } else if (configuration.doOutput) {
                // Create silent mp3
                if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -f lavfi -i anullsrc=r=44100:cl=mono -t " + (movie_frameCount / movie_fps) + " -q:a 9 -acodec libmp3lame " + tempAudioFile.getAbsolutePath(), new TimeTaskHandler(l, "Creating Silent Audio Audio")).run())
                    return false;
            }
        }
        // 2. Process video without audio
        System.out.print("Processing video: ");
        Mat newFrame = null;
        if (l != null)
            l.taskUpdate(null, "Processing video");
        int i = 0;
        while (!stopped && (configuration.getSource().getFrames() < 0 || i < configuration.getSource().getFrames() || !streamStopped)) {
            i++;
            if (streamStopped) {
                currentPos = configuration.getSource().seek(i, l);
                frame = configuration.getSource().getFrame();
            } else {
                ((VideoStreamSource) configuration.getSource()).pause();
                frame = configuration.getSource().getFrame();
            }
            if (frame != null && !frame.empty()) {
                if (!filters.isEmpty()) {
                    FilterContext context = new DefaultFilterContext();
                    newFrame = frame;
                    for (VideoFilter filter : filters) {
                        // System.out.println("MovieProcessor
                        // process"+filter.getClass().getName());
                        newFrame = filter.process(newFrame, i, context);
                    }
                } else {
                    newFrame = frame;
                }
                if (localFilters != null && !localFilters.isEmpty()) {
                    FilterContext context = new DefaultFilterContext();
                    for (FilterElement element : localFilters) {
                        if (element.filter != null) {
                            if (element.r.start <= i && (i < element.r.end || !streamStopped)) {
                                // System.out.println("MovieProcessor
                                // processStreamFrame
                                // " +
                                // element.filter);
                                newFrame = element.filter.process(newFrame, i, context);
                            }
                        }
                    }
                }
                if (configuration.doOutput) {
                    if (movie_w != newFrame.cols() || movie_h != newFrame.rows())
                        System.out.println("Warning: outputVideo.write changed size:" + new Size(newFrame.cols(), newFrame.rows()));
                    outputVideo.write(newFrame);
                    if ((i % 1000) == 0) {
                        // break;
                        System.out.print("+");
                    } else if ((i % 100) == 0)
                        System.out.print(".");
                }
                if (l != null)
                    l.nextFrameProcessed(newFrame, currentPos);
            } else {
                if (currentPos < movie_frameCount && l != null)
                    l.prematureEnd(currentPos);
                break;
            }
        }
        System.out.println("ok\nFrames proccessed: " + i);
        if (configuration.doOutput) {
            outputVideo.release();
        }
        if (stopped) {
            return false;
        }
        if (configuration.doOutput) {
            File of = findFreeFile(new File(configuration.outputVideo));
            if (!configuration.doInput || !(configuration.getSource() instanceof VideoStreamSource)) {
                if (configuration.doInput) {
                    if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -i \"" + tempVideoFile.getAbsolutePath() + "\" -i " + tempAudioFile.getAbsolutePath() + " -i " + tempMetadataFile.getAbsolutePath() + " -map_metadata 2" + " -c:a aac -c:v libx264  -q 17 \"" + of.getAbsolutePath() + '"', new TimeTaskHandler(l, "Assembling Output")).run())
                        return false;
                } else {
                    if (!new Task(this, ffmpeg.getAbsolutePath() + " -y -i \"" + tempVideoFile.getAbsolutePath() + "\" -i " + tempAudioFile.getAbsolutePath() + " -c:a aac -c:v libx264  -q 17 \"" + of.getAbsolutePath() + '"', new TimeTaskHandler(l, "Processing Output")).run())
                        System.out.println("Warning: Task failed");
                }
            } else {
                System.out.println("Renaming temp  file " + tempVideoFile.getAbsolutePath());
                tempVideoFile.renameTo(of);
            }
            if (!of.exists()) {
                System.err.println("Missing output " + of.getAbsolutePath());
                return false;
            } else {
                System.out.println("Video created: " + of.getAbsolutePath());
            }
            tempVideoFile.delete();
            tempAudioFile.delete();
            tempMetadataFile.delete();
        }
    } finally {
        // if (!configuration.doInput || !(configuration.getSource()
        // instanceof VideoStreamSource))
        // closeInput();
        closeOutput();
        openOutput(null);
    }
    return true;
}
Also used: VideoStreamSource(de.serviceflow.frankenstein.vf.VideoStreamSource) DefaultFilterContext(de.serviceflow.frankenstein.vf.DefaultFilterContext) FilterElement(de.serviceflow.frankenstein.vf.FilterElement) Mat(org.opencv.core.Mat) Task(de.serviceflow.frankenstein.task.Task) SegmentVideoFilter(de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter) VideoFilter(de.serviceflow.frankenstein.vf.VideoFilter) Size(org.opencv.core.Size) TimeTaskHandler(de.serviceflow.frankenstein.task.TimeTaskHandler) File(java.io.File) FilterContext(de.serviceflow.frankenstein.plugin.api.FilterContext)
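
processVideo delegates audio extraction, silent-audio generation, and the final mux to external ffmpeg calls wrapped in Task and TimeTaskHandler. The sketch below only assembles the silent-audio command string from the listing; the paths and frame/fps values are placeholder assumptions, and the Task invocation itself is not reproduced.

public class SilentAudioCommandSketch {

    public static void main(String[] args) {
        String ffmpegPath = "/usr/bin/ffmpeg";              // placeholder for ffmpeg.getAbsolutePath()
        String tempAudioPath = "/tmp/frankenstein-tmp.mp3"; // placeholder for tempAudioFile.getAbsolutePath()
        double movie_frameCount = 2400;                     // placeholder frame count
        double movie_fps = 24.0;                            // placeholder frame rate

        // Same flags as the "Creating Silent Audio" branch above:
        String cmd = ffmpegPath + " -y -f lavfi -i anullsrc=r=44100:cl=mono -t "
                + (movie_frameCount / movie_fps)
                + " -q:a 9 -acodec libmp3lame " + tempAudioPath;
        System.out.println(cmd);
        // In the listing this string is handed to: new Task(this, cmd, new TimeTaskHandler(l, ...)).run()
    }
}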

Example 4 with FilterElement

Use of de.serviceflow.frankenstein.vf.FilterElement in project Frankenstein by olir.

From class ProcessingSceneController, method drawEditCanvas:

public void drawEditCanvas() {
    if (frames > 0) {
        GraphicsContext gc = editCanvas.getGraphicsContext2D();
        // Time in seconds at position
        // double t = (position - 1) / fps;
        // Total time in seconds
        double tt = (frames - 1) / fps;
        // Background
        try {
            gc.setFill(rootBorder.getBackground().getFills().get(0).getFill());
        } catch (Exception e) {
            gc.setFill(Color.WHITE);
        }
        gc.fillRect(0, 0, editCanvas.getWidth(), editCanvas.getHeight());
        if (taskErrorMessage != null) {
            gc.setFill(Color.DARKRED);
            gc.fillText(taskErrorMessage, 10, 16, 900);
            return;
        } else if (processingRunning) {
            // Task Progress
            if (taskPosition != -1) {
                int x = (int) ((editCanvas.getWidth() - 1) * taskPosition / ((double) frames - 1) * fps);
                // System.out.println("taskPosition "+taskPosition+" "+x);
                gc.setFill(Color.DARKGREEN);
                gc.fillRect(0, 0, x, 2);
                gc.setFill(Color.BLACK);
                gc.fillText(taskMessage, 10, 16, 900);
            } else {
                // Video Progress
                int x = (int) ((editCanvas.getWidth() - 1) * (position - 1) / (frames - 1));
                gc.setFill(Color.DARKOLIVEGREEN);
                gc.fillRect(0, 0, x, 2);
                gc.setFill(Color.BLACK);
                gc.fillText("Processing Video", 10, 16, 900);
            }
        } else {
            // Show Segments
            // Base color
            gc.setFill(Color.LIGHTGRAY);
            gc.fillRect(0, 3, editCanvas.getWidth(), 6);
            // Visualize Filter ranges
            gc.setFill(Color.CADETBLUE.deriveColor(1.0, 1.0, 1.0, 0.5));
            for (FilterElement fe : filterListData) {
                int x = (int) ((editCanvas.getWidth() - 1) * (fe.r.start - 1) / (frames - 1));
                int w = (int) ((editCanvas.getWidth() - 1) * (fe.r.end - fe.r.start) / (frames - 1));
                if (w < 2)
                    w = 2;
                gc.fillRect(x, 3, w, 6);
            }
            // Visualize selectedFilter
            if (selectedFilter != null) {
                gc.setFill(Color.DODGERBLUE);
                int x = (int) ((editCanvas.getWidth() - 1) * (selectedFilter.r.start - 1) / (frames - 1));
                int w = (int) ((editCanvas.getWidth() - 1) * (selectedFilter.r.end - selectedFilter.r.start) / (frames - 1));
                if (w < 2)
                    w = 2;
                gc.fillRect(x, 3, w, 6);
            }
        }
        if (seeking && seekPos > 0) {
            // Seek running
            int x = (int) ((editCanvas.getWidth() - 1) * (seekPos - 1) / (frames - 1));
            gc.setFill(Color.RED);
            gc.fillRect(0, 0, x, 2);
        }
        // Time Ruler
        double t1off = 1.0;
        double t2off = 0.1;
        int tlevel = 1;
        int ttlevel = 0;
        if (tt >= 10.0) {
            t1off = 10.0;
            t2off = 1.0;
            tlevel++;
        }
        if (tt >= 60.0) {
            t1off = 60.0;
            t2off = 10.0;
            tlevel++;
            ttlevel++;
        }
        if (tt >= 3600.0) {
            t1off = 3600.0;
            t2off = 900.0;
            tlevel++;
            ttlevel++;
        }
        double tm = (((double) editCanvas.getWidth()) - 1.0) / tt;
        gc.setFill(colorByLevel(tlevel));
        for (double tx = 0.0; tx < tt; tx += t1off) gc.fillRect(tx * tm - ttlevel, 0, 1 + ttlevel + ttlevel, tlevel * 2);
        gc.setFill(colorByLevel(tlevel - 1));
        for (double tx = 0.0; tx < tt; tx += t2off) gc.fillRect(tx * tm, 1, 1, tlevel * 2 - 1);
        gc.setFill(Color.BLACK);
        gc.fillRect(0, 2, editCanvas.getWidth(), 1);
        gc.fillRect(0, 2, 1, 16);
        gc.fillRect(editCanvas.getWidth() - 1, 2, 1, 16);
        // Mark to Position
        Range r = clipBoardRange;
        if (r != null) {
            if (markPosition != -1)
                gc.setFill(Color.rgb(64, 64, 192, 0.5));
            else
                gc.setFill(Color.rgb(128, 128, 64, 0.5));
            gc.fillRect(xForPosition(r.start), 7, xForPosition(r.end) - xForPosition(r.start) + 1, 10);
        }
        // Clipboard
        r = currentRange();
        if (r != null) {
            gc.setFill(Color.rgb(128, 128, 64, 0.5));
            gc.fillRect(xForPosition(r.start), 7, xForPosition(r.end) - xForPosition(r.start), 10);
        }
        // Current Position
        int x = xForPosition(position);
        gc.setFill(Color.RED);
        gc.fillRect(x, 0, 1, editCanvas.getHeight());
    }
}
Also used: FilterElement(de.serviceflow.frankenstein.vf.FilterElement) GraphicsContext(javafx.scene.canvas.GraphicsContext) Range(org.opencv.core.Range) CvException(org.opencv.core.CvException) IOException(java.io.IOException) DateTimeParseException(java.time.format.DateTimeParseException)
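
drawEditCanvas repeatedly maps a 1-based frame position onto a pixel x-coordinate before drawing segments, the seek bar, and the cursor. A small sketch of that mapping, using the same formula as the segment drawing above; the canvas width and frame count are example values, and TimelineMappingSketch is an illustrative name only.

public class TimelineMappingSketch {

    // x = (width - 1) * (position - 1) / (frames - 1), for 1-based frame positions.
    static int xForPosition(double canvasWidth, int frames, int position) {
        return (int) ((canvasWidth - 1) * (position - 1) / (frames - 1));
    }

    public static void main(String[] args) {
        double width = 901.0; // example canvas width in pixels
        int frames = 1001;    // example total frame count
        System.out.println(xForPosition(width, frames, 1));      // 0   (left edge)
        System.out.println(xForPosition(width, frames, 501));    // 450 (middle)
        System.out.println(xForPosition(width, frames, frames)); // 900 (right edge)
    }
}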

Example 5 with FilterElement

Use of de.serviceflow.frankenstein.vf.FilterElement in project Frankenstein by olir.

From class ProcessingSceneController, method filterAdd:

@FXML
public void filterAdd() {
    FilterElement val = new FilterElement(currentRange(), this);
    filterListData.add(val);
    Platform.runLater(() -> {
        drawEditCanvas();
    });
}
Also used: FilterElement(de.serviceflow.frankenstein.vf.FilterElement) FXML(javafx.fxml.FXML)
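
filterAdd updates the model first and then defers the canvas redraw via Platform.runLater so it runs on the JavaFX application thread. The sketch below imitates that ordering with a plain Runnable queue standing in for Platform.runLater; the list entries and range values are placeholders, not project types.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

public class DeferredRedrawSketch {

    public static void main(String[] args) {
        List<int[]> filterListData = new ArrayList<>();      // stand-in for the FilterElement list
        Queue<Runnable> uiThreadQueue = new ArrayDeque<>();  // stand-in for Platform.runLater(...)

        // 1. Mutate the model (filterAdd adds a FilterElement built from currentRange()).
        filterListData.add(new int[] { 120, 360 });          // placeholder selection range

        // 2. Defer the redraw instead of drawing inline.
        uiThreadQueue.add(() -> System.out.println("drawEditCanvas() with " + filterListData.size() + " segment(s)"));

        // 3. Later, the "UI thread" drains the queue once the model is consistent.
        Runnable task;
        while ((task = uiThreadQueue.poll()) != null) {
            task.run();
        }
    }
}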

Aggregations

FilterElement (de.serviceflow.frankenstein.vf.FilterElement): 6
FilterContext (de.serviceflow.frankenstein.plugin.api.FilterContext): 3
SegmentVideoFilter (de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter): 3
DefaultFilterContext (de.serviceflow.frankenstein.vf.DefaultFilterContext): 3
VideoFilter (de.serviceflow.frankenstein.vf.VideoFilter): 3
Mat (org.opencv.core.Mat): 3
Range (org.opencv.core.Range): 2
Task (de.serviceflow.frankenstein.task.Task): 1
TimeTaskHandler (de.serviceflow.frankenstein.task.TimeTaskHandler): 1
VideoStreamSource (de.serviceflow.frankenstein.vf.VideoStreamSource): 1
File (java.io.File): 1
IOException (java.io.IOException): 1
DateTimeParseException (java.time.format.DateTimeParseException): 1
FXML (javafx.fxml.FXML): 1
GraphicsContext (javafx.scene.canvas.GraphicsContext): 1
CvException (org.opencv.core.CvException): 1
Size (org.opencv.core.Size): 1