
Example 1 with Exception

Use of org.bytedeco.javacv.FrameGrabber.Exception in project javacv by bytedeco.

From the class DeinterlacedVideoPlayer, the method start:

public void start() {
    FrameFilter filter = null;
    try {
        startFrameGrabber();
        Frame frame = null;
        while ((frame = grabber.grab()) != null) {
            // Lazily create the filter once the first frame reveals the video's dimensions.
            if (filter == null) {
                filter = new FFmpegFrameFilter(ffmpegString, frame.imageWidth, frame.imageHeight);
                filter.setPixelFormat(PIXEL_FORMAT);
                filter.start();
            }
            filter.push(frame);
            frame = filter.pull();
            // do something with the filtered frame
        }
    } catch (Exception | org.bytedeco.javacv.FrameFilter.Exception e) {
        // The unqualified Exception resolves to org.bytedeco.javacv.FrameGrabber.Exception (see imports below).
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        releaseGrabberAndFilter(this.grabber, filter);
    }
}
Also used : Frame(org.bytedeco.javacv.Frame) FFmpegFrameFilter(org.bytedeco.javacv.FFmpegFrameFilter) FrameFilter(org.bytedeco.javacv.FrameFilter) Exception(org.bytedeco.javacv.FrameGrabber.Exception)
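The fields and helper methods referenced above (grabber, ffmpegString, PIXEL_FORMAT, startFrameGrabber, releaseGrabberAndFilter) are defined elsewhere in DeinterlacedVideoPlayer and are not part of this snippet. A minimal sketch of what they might look like, assuming an FFmpegFrameGrabber opened on a placeholder file and a "yadif" deinterlacing filter string (the path, filter string, and pixel format are illustrative assumptions, not taken from the project):

// Hypothetical fields and helpers; the names mirror the example above, the bodies are assumptions.
private FFmpegFrameGrabber grabber;
private final String ffmpegString = "yadif"; // assumed deinterlacing filter
private static final int PIXEL_FORMAT = avutil.AV_PIX_FMT_BGR24; // assumed pixel format

private void startFrameGrabber() throws FrameGrabber.Exception {
    grabber = new FFmpegFrameGrabber("input.mp4"); // placeholder path
    grabber.start();
}

private void releaseGrabberAndFilter(FrameGrabber grabber, FrameFilter filter) {
    try {
        if (filter != null) {
            filter.stop();
            filter.release();
        }
        if (grabber != null) {
            grabber.stop();
            grabber.release();
        }
    } catch (FrameGrabber.Exception | FrameFilter.Exception e) {
        // Best-effort cleanup; log and continue.
        System.err.println("Cleanup failed: " + e.getMessage());
    }
}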

Example 2 with Exception

Use of org.bytedeco.javacv.FrameGrabber.Exception in project javacv by bytedeco.

From the class FaceRecognizerInVideo, the method main:

public static void main(String[] args) throws Exception {
    OpenCVFrameConverter.ToMat converterToMat = new OpenCVFrameConverter.ToMat();
    if (args.length < 2) {
        System.out.println("Two parameters are required to run this program, first parameter is the analyzed video and second parameter is the trained result for fisher faces.");
        // Without both arguments the array accesses below would throw ArrayIndexOutOfBoundsException.
        return;
    }
    String videoFileName = args[0];
    String trainedResult = args[1];
    CascadeClassifier face_cascade = new CascadeClassifier("data\\haarcascade_frontalface_default.xml");
    FaceRecognizer lbphFaceRecognizer = LBPHFaceRecognizer.create();
    lbphFaceRecognizer.read(trainedResult);
    File f = new File(videoFileName);
    OpenCVFrameGrabber grabber = null;
    try {
        grabber = OpenCVFrameGrabber.createDefault(f);
        grabber.start();
    } catch (Exception e) {
        System.err.println("Failed to start the grabber.");
        // Bail out: without a started grabber the loop below cannot run.
        return;
    }
    Frame videoFrame = null;
    Mat videoMat = new Mat();
    while (true) {
        videoFrame = grabber.grab();
        if (videoFrame == null) {
            // End of the video stream.
            break;
        }
        videoMat = converterToMat.convert(videoFrame);
        Mat videoMatGray = new Mat();
        // Convert the current frame to grayscale:
        cvtColor(videoMat, videoMatGray, COLOR_BGRA2GRAY);
        equalizeHist(videoMatGray, videoMatGray);
        Point p = new Point();
        RectVector faces = new RectVector();
        // Find the faces in the frame:
        face_cascade.detectMultiScale(videoMatGray, faces);
        // Get each detected face, make a prediction, and annotate it in the video. Cool or what?
        for (int i = 0; i < faces.size(); i++) {
            Rect face_i = faces.get(i);
            Mat face = new Mat(videoMatGray, face_i);
            // If a Fisher face recognizer is used, the face needs to be
            // resized, for example:
            // resize(face, face_resized, new Size(im_width, im_height),
            // 1.0, 1.0, INTER_CUBIC);
            // Now perform the prediction, see how easy that is:
            IntPointer label = new IntPointer(1);
            DoublePointer confidence = new DoublePointer(1);
            lbphFaceRecognizer.predict(face, label, confidence);
            int prediction = label.get(0);
            // And finally write all we've found out to the original image!
            // First of all draw a green rectangle around the detected face:
            rectangle(videoMat, face_i, new Scalar(0, 255, 0, 1));
            // Create the text we will annotate the box with:
            String box_text = "Prediction = " + prediction;
            // Calculate the position for annotated text (make sure we don't
            // put illegal values in there):
            int pos_x = Math.max(face_i.tl().x() - 10, 0);
            int pos_y = Math.max(face_i.tl().y() - 10, 0);
            // And now put it into the image:
            putText(videoMat, box_text, new Point(pos_x, pos_y), FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0, 2.0));
        }
        // Show the result:
        imshow("face_recognizer", videoMat);
        char key = (char) waitKey(20);
        // Exit this loop on escape:
        if (key == 27) {
            destroyAllWindows();
            break;
        }
    }
}
Also used : Frame(org.bytedeco.javacv.Frame) DoublePointer(org.bytedeco.javacpp.DoublePointer) OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber) Exception(org.bytedeco.javacv.FrameGrabber.Exception) IntPointer(org.bytedeco.javacpp.IntPointer) OpenCVFrameConverter(org.bytedeco.javacv.OpenCVFrameConverter) File(java.io.File)
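In both examples the unqualified Exception in the catch clauses and throws declarations is the imported org.bytedeco.javacv.FrameGrabber.Exception, not java.lang.Exception. A minimal, self-contained sketch of a grab loop that spells the type out explicitly (the file name video.mp4 is a placeholder, not taken from the examples):

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;

public class GrabLoopSketch {
    public static void main(String[] args) {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("video.mp4"); // placeholder path
        try {
            grabber.start();
            Frame frame;
            while ((frame = grabber.grab()) != null) {
                // process the frame here
            }
        } catch (FrameGrabber.Exception e) {
            // start(), grab(), stop(), and release() all declare FrameGrabber.Exception.
            System.err.println("Grabbing failed: " + e.getMessage());
        } finally {
            try {
                grabber.stop();
                grabber.release();
            } catch (FrameGrabber.Exception e) {
                // Best-effort cleanup.
            }
        }
    }
}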

Aggregations

Frame (org.bytedeco.javacv.Frame)2 Exception (org.bytedeco.javacv.FrameGrabber.Exception)2 File (java.io.File)1 DoublePointer (org.bytedeco.javacpp.DoublePointer)1 IntPointer (org.bytedeco.javacpp.IntPointer)1 FFmpegFrameFilter (org.bytedeco.javacv.FFmpegFrameFilter)1 FrameFilter (org.bytedeco.javacv.FrameFilter)1 OpenCVFrameConverter (org.bytedeco.javacv.OpenCVFrameConverter)1 OpenCVFrameGrabber (org.bytedeco.javacv.OpenCVFrameGrabber)1