Search in sources:

Example 1 with OpenCVFrameGrabber

use of org.bytedeco.javacv.OpenCVFrameGrabber in project javacv by bytedeco.

the class FaceRecognizerInVideo method main.

public static void main(String[] args) throws Exception {
    OpenCVFrameConverter.ToMat converterToMat = new OpenCVFrameConverter.ToMat();
    if (args.length < 2) {
        System.out.println("Two parameters are required to run this program: the first is the video to analyze and the second is the trained model for the LBPH face recognizer.");
        // Exit early; otherwise reading args[0]/args[1] below would throw ArrayIndexOutOfBoundsException.
        return;
    }
    String videoFileName = args[0];
    String trainedResult = args[1];
    CascadeClassifier face_cascade = new CascadeClassifier("data\\haarcascade_frontalface_default.xml");
    FaceRecognizer lbphFaceRecognizer = LBPHFaceRecognizer.create();
    lbphFaceRecognizer.read(trainedResult);
    File f = new File(videoFileName);
    OpenCVFrameGrabber grabber = null;
    try {
        grabber = OpenCVFrameGrabber.createDefault(f);
        grabber.start();
    } catch (Exception e) {
        System.err.println("Failed to start the grabber.");
        return;
    }
    Frame videoFrame = null;
    Mat videoMat = new Mat();
    while (true) {
        videoFrame = grabber.grab();
        // Stop when the end of the video is reached (grab() returns null).
        if (videoFrame == null) {
            break;
        }
        videoMat = converterToMat.convert(videoFrame);
        Mat videoMatGray = new Mat();
        // Convert the current frame to grayscale:
        cvtColor(videoMat, videoMatGray, COLOR_BGRA2GRAY);
        equalizeHist(videoMatGray, videoMatGray);
        Point p = new Point();
        RectVector faces = new RectVector();
        // Find the faces in the frame:
        face_cascade.detectMultiScale(videoMatGray, faces);
        // For each detected face, run the prediction and annotate it in the video. Cool or what?
        for (int i = 0; i < faces.size(); i++) {
            Rect face_i = faces.get(i);
            Mat face = new Mat(videoMatGray, face_i);
            // If a Fisherfaces recognizer is used instead, the face needs to be
            // resized to the size of the training images, e.g.:
            // resize(face, face_resized, new Size(im_width, im_height),
            // 1.0, 1.0, INTER_CUBIC);
            // Now perform the prediction, see how easy that is:
            IntPointer label = new IntPointer(1);
            DoublePointer confidence = new DoublePointer(1);
            lbphFaceRecognizer.predict(face, label, confidence);
            int prediction = label.get(0);
            // And finally write all we've found out to the original image!
            // First of all draw a green rectangle around the detected face:
            rectangle(videoMat, face_i, new Scalar(0, 255, 0, 1));
            // Create the text we will annotate the box with:
            String box_text = "Prediction = " + prediction;
            // Calculate the position for annotated text (make sure we don't
            // put illegal values in there):
            int pos_x = Math.max(face_i.tl().x() - 10, 0);
            int pos_y = Math.max(face_i.tl().y() - 10, 0);
            // And now put it into the image:
            putText(videoMat, box_text, new Point(pos_x, pos_y), FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0, 2.0));
        }
        // Show the result:
        imshow("face_recognizer", videoMat);
        char key = (char) waitKey(20);
        // Exit this loop on escape:
        if (key == 27) {
            destroyAllWindows();
            break;
        }
    }
}
Also used : Frame(org.bytedeco.javacv.Frame) DoublePointer(org.bytedeco.javacpp.DoublePointer) OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber) Exception(org.bytedeco.javacv.FrameGrabber.Exception) IntPointer(org.bytedeco.javacpp.IntPointer) OpenCVFrameConverter(org.bytedeco.javacv.OpenCVFrameConverter) File(java.io.File)
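
The example above reads a previously trained LBPH model from disk. As a rough sketch of how such a model could be produced with the same face module (assuming JavaCV 1.5-style package names under org.bytedeco.opencv.*; the image file names, label values, and output path are placeholders):

import java.nio.IntBuffer;

import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.MatVector;
import org.bytedeco.opencv.opencv_face.FaceRecognizer;
import org.bytedeco.opencv.opencv_face.LBPHFaceRecognizer;

import static org.bytedeco.opencv.global.opencv_core.CV_32SC1;
import static org.bytedeco.opencv.global.opencv_imgcodecs.IMREAD_GRAYSCALE;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imread;

public class TrainLbphModel {

    public static void main(String[] args) {
        // Placeholder training set: one pre-cropped grayscale face image per person.
        String[] imageFiles = { "faces/person0.png", "faces/person1.png" };
        int[] personIds = { 0, 1 };

        MatVector images = new MatVector(imageFiles.length);
        Mat labels = new Mat(imageFiles.length, 1, CV_32SC1);
        IntBuffer labelsBuf = labels.createBuffer();
        for (int i = 0; i < imageFiles.length; i++) {
            images.put(i, imread(imageFiles[i], IMREAD_GRAYSCALE));
            labelsBuf.put(i, personIds[i]);
        }

        // Train an LBPH model and persist it so it can be loaded with read(),
        // as done in the example above.
        FaceRecognizer recognizer = LBPHFaceRecognizer.create();
        recognizer.train(images, labels);
        recognizer.write("lbph_trained_result.xml");
    }
}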

Example 2 with OpenCVFrameGrabber

use of org.bytedeco.javacv.OpenCVFrameGrabber in project BoofCV by lessthanoptimal.

the class WebcamOpenCV method open.

@Override
public <T extends ImageBase<T>> SimpleImageSequence<T> open(String device, int width, int height, ImageType<T> imageType) {
    OpenCVFrameGrabber grabber = null;
    if (device != null) {
        try {
            int which = Integer.parseInt(device);
            grabber = new OpenCVFrameGrabber(which);
        } catch (NumberFormatException ignore) {
            grabber = new OpenCVFrameGrabber(0);
        }
        if (grabber == null) {
            throw new RuntimeException("Can't find webcam with ID or name at " + device);
        }
    } else {
        grabber = new OpenCVFrameGrabber(0);
    }
    grabber.setImageWidth(width);
    grabber.setImageHeight(height);
    return new SimpleSequence<>(grabber, imageType);
}
Also used : OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber)
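
For reference, once a grabber like the one returned here has been configured, the usual JavaCV lifecycle is start(), grab(), and stop(); each Frame can then be converted to whatever image type the caller needs. A minimal stand-alone sketch (plain JavaCV rather than BoofCV's SimpleSequence; the device index 0 and 640x480 size are assumptions):

import java.awt.image.BufferedImage;

import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameGrabber;

public class GrabSingleFrame {

    public static void main(String[] args) throws Exception {
        OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0); // default webcam
        grabber.setImageWidth(640);
        grabber.setImageHeight(480);
        grabber.start();
        try {
            // Pull one frame and convert it to a BufferedImage for further processing.
            Frame frame = grabber.grab();
            BufferedImage image = new Java2DFrameConverter().convert(frame);
            System.out.println("Grabbed a " + image.getWidth() + "x" + image.getHeight() + " frame");
        } finally {
            grabber.stop();
            grabber.release();
        }
    }
}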

Example 3 with OpenCVFrameGrabber

use of org.bytedeco.javacv.OpenCVFrameGrabber in project javacv by bytedeco.

the class WebcamAndMicrophoneCapture method main.

public static void main(String[] args) throws Exception, org.bytedeco.javacv.FrameGrabber.Exception {
    final int captureWidth = 1280;
    final int captureHeight = 720;
    // The available FrameGrabber classes include OpenCVFrameGrabber (opencv_videoio),
    // DC1394FrameGrabber, FlyCapture2FrameGrabber, OpenKinectFrameGrabber,
    // PS3EyeFrameGrabber, VideoInputFrameGrabber, and FFmpegFrameGrabber.
    final OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(WEBCAM_DEVICE_INDEX);
    grabber.setImageWidth(captureWidth);
    grabber.setImageHeight(captureHeight);
    grabber.start();
    // org.bytedeco.javacv.FFmpegFrameRecorder.FFmpegFrameRecorder(String
    // filename, int imageWidth, int imageHeight, int audioChannels)
    // For each param, we're passing in...
    // filename = either a path to a local file we wish to create, or an
    // RTMP url to an FMS / Wowza server
    // imageWidth = width we specified for the grabber
    // imageHeight = height we specified for the grabber
    // audioChannels = 2, because we like stereo
    final FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("rtmp://my-streaming-server/app_name_here/instance_name/stream_name", captureWidth, captureHeight, 2);
    recorder.setInterleaved(true);
    // decrease "startup" latency in FFMPEG (see:
    // https://trac.ffmpeg.org/wiki/StreamingGuide)
    recorder.setVideoOption("tune", "zerolatency");
    // tradeoff between quality and encode speed
    // possible values are ultrafast, superfast, veryfast, faster, fast,
    // medium, slow, slower, veryslow
    // ultrafast offers us the least amount of compression (lower encoder
    // CPU) at the cost of a larger stream size
    // at the other end, veryslow provides the best compression (high
    // encoder CPU) while lowering the stream size
    // (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("preset", "ultrafast");
    // Constant Rate Factor (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("crf", "28");
    // 2000 kb/s, a reasonable "sane" bitrate for 720p
    recorder.setVideoBitrate(2000000);
    recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
    recorder.setFormat("flv");
    // FPS (frames per second)
    recorder.setFrameRate(FRAME_RATE);
    // Key frame interval, in our case every 2 seconds -> 30 (fps) * 2 = 60
    // (gop length)
    recorder.setGopSize(GOP_LENGTH_IN_FRAMES);
    // We don't want variable bitrate audio
    recorder.setAudioOption("crf", "0");
    // Highest quality
    recorder.setAudioQuality(0);
    // 192 Kbps
    recorder.setAudioBitrate(192000);
    recorder.setSampleRate(44100);
    recorder.setAudioChannels(2);
    recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
    // Jack 'n coke... do it...
    recorder.start();
    // Thread for audio capture, this could be in a nested private class if you prefer...
    new Thread(new Runnable() {

        @Override
        public void run() {
            // Pick a format...
            // NOTE: It is better to enumerate the formats that the system supports,
            // because getLine() can error out with any particular format...
            // For us: 44.1 sample rate, 16 bits, stereo, signed, little endian
            AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
            // Get TargetDataLine with that format
            Mixer.Info[] minfoSet = AudioSystem.getMixerInfo();
            Mixer mixer = AudioSystem.getMixer(minfoSet[AUDIO_DEVICE_INDEX]);
            DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
            try {
                // Open and start capturing audio
                // It's possible to have more control over the chosen audio device with this line:
                // TargetDataLine line = (TargetDataLine)mixer.getLine(dataLineInfo);
                final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
                line.open(audioFormat);
                line.start();
                final int sampleRate = (int) audioFormat.getSampleRate();
                final int numChannels = audioFormat.getChannels();
                // Let's initialize our audio buffer...
                final int audioBufferSize = sampleRate * numChannels;
                final byte[] audioBytes = new byte[audioBufferSize];
                // Using a ScheduledThreadPoolExecutor instead of a while loop with
                // Thread.sleep lets us work around some OS-specific timing issues
                // and keep a more precise clock, since the fixed rate accounts for
                // garbage collection time, etc. A similar approach could be used
                // for the webcam capture as well, if you wish.
                ScheduledThreadPoolExecutor exec = new ScheduledThreadPoolExecutor(1);
                exec.scheduleAtFixedRate(new Runnable() {

                    @Override
                    public void run() {
                        try {
                            // Read from the line... non-blocking
                            int nBytesRead = 0;
                            while (nBytesRead == 0) {
                                nBytesRead = line.read(audioBytes, 0, line.available());
                            }
                            // Since we specified 16 bits in the AudioFormat,
                            // we need to convert our read byte[] to short[]
                            // (see source from FFmpegFrameRecorder.recordSamples for AV_SAMPLE_FMT_S16)
                            // Let's initialize our short[] array
                            int nSamplesRead = nBytesRead / 2;
                            short[] samples = new short[nSamplesRead];
                            // Let's wrap our short[] into a ShortBuffer and
                            // pass it to recordSamples
                            ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                            ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                            // recorder is instance of
                            // org.bytedeco.javacv.FFmpegFrameRecorder
                            recorder.recordSamples(sampleRate, numChannels, sBuff);
                        } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                            e.printStackTrace();
                        }
                    }
                }, 0, (long) 1000 / FRAME_RATE, TimeUnit.MILLISECONDS);
            } catch (LineUnavailableException e1) {
                e1.printStackTrace();
            }
        }
    }).start();
    // A really nice hardware accelerated component for our preview...
    final CanvasFrame cFrame = new CanvasFrame("Capture Preview", CanvasFrame.getDefaultGamma() / grabber.getGamma());
    Frame capturedFrame = null;
    // While we are capturing...
    while ((capturedFrame = grabber.grab()) != null) {
        if (cFrame.isVisible()) {
            // Show our frame in the preview
            cFrame.showImage(capturedFrame);
        }
        // Initialize startTime as close to its first use as possible,
        // as the delta from assignment to computed time could be too high
        if (startTime == 0)
            startTime = System.currentTimeMillis();
        // Create timestamp for this frame
        videoTS = 1000 * (System.currentTimeMillis() - startTime);
        // Check for AV drift
        if (videoTS > recorder.getTimestamp()) {
            System.out.println("Lip-flap correction: " + videoTS + " : " + recorder.getTimestamp() + " -> " + (videoTS - recorder.getTimestamp()));
            // We tell the recorder to write this frame at this timestamp
            recorder.setTimestamp(videoTS);
        }
        // Send the frame to the org.bytedeco.javacv.FFmpegFrameRecorder
        recorder.record(capturedFrame);
    }
    cFrame.dispose();
    recorder.stop();
    grabber.stop();
}
Also used : Frame(org.bytedeco.javacv.Frame) CanvasFrame(org.bytedeco.javacv.CanvasFrame) ScheduledThreadPoolExecutor(java.util.concurrent.ScheduledThreadPoolExecutor) Mixer(javax.sound.sampled.Mixer) TargetDataLine(javax.sound.sampled.TargetDataLine) DataLine(javax.sound.sampled.DataLine) LineUnavailableException(javax.sound.sampled.LineUnavailableException) OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber) Exception(org.bytedeco.javacv.FrameRecorder.Exception) FFmpegFrameRecorder(org.bytedeco.javacv.FFmpegFrameRecorder) AudioFormat(javax.sound.sampled.AudioFormat) ShortBuffer(java.nio.ShortBuffer)
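
If the goal is recording to disk rather than streaming, the RTMP URL above can be replaced with a local file path while keeping essentially the same recorder setup. A hedged, video-only sketch (the file name, codec, container, frame count, and the 1.5-style avcodec import are illustrative assumptions, not part of the original example):

import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameGrabber;

public class RecordWebcamToFile {

    public static void main(String[] args) throws Exception {
        OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0);
        grabber.setImageWidth(1280);
        grabber.setImageHeight(720);
        grabber.start();

        // H.264 video in an MP4 container, no audio channels.
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("capture.mp4", 1280, 720, 0);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        recorder.setFormat("mp4");
        recorder.setFrameRate(30);
        recorder.start();

        // Record roughly ten seconds of video at 30 fps.
        for (int i = 0; i < 300; i++) {
            Frame frame = grabber.grab();
            if (frame == null) {
                break;
            }
            recorder.record(frame);
        }

        recorder.stop();
        grabber.stop();
    }
}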

Example 4 with OpenCVFrameGrabber

use of org.bytedeco.javacv.OpenCVFrameGrabber in project javacv by bytedeco.

the class DeinterlacedVideoPlayer method startFrameGrabber.

private void startFrameGrabber() throws Exception {
    grabber = new OpenCVFrameGrabber(DEVICE_ID);
    grabber.setImageWidth(WIDTH);
    grabber.setImageHeight(HEIGHT);
    grabber.setFrameRate(FRAMERATE);
    grabber.setPixelFormat(PIXEL_FORMAT);
    grabber.start();
}
Also used : OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber)
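
The method above only covers configuration; the grabber it starts would normally be drained in a playback loop elsewhere in DeinterlacedVideoPlayer. A minimal sketch of such a loop with a CanvasFrame preview (DEVICE_ID, WIDTH, HEIGHT, and FRAMERATE are fields of the original class and are replaced with placeholder literals here; the pixel format setting is omitted):

import org.bytedeco.javacv.CanvasFrame;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameGrabber;

public class SimpleGrabberPlayer {

    public static void main(String[] args) throws Exception {
        OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0); // DEVICE_ID placeholder
        grabber.setImageWidth(640);   // WIDTH placeholder
        grabber.setImageHeight(480);  // HEIGHT placeholder
        grabber.setFrameRate(30);     // FRAMERATE placeholder
        grabber.start();

        CanvasFrame canvas = new CanvasFrame("Player", CanvasFrame.getDefaultGamma() / grabber.getGamma());
        Frame frame;
        // Show frames until the stream ends or the preview window is closed.
        while (canvas.isVisible() && (frame = grabber.grab()) != null) {
            canvas.showImage(frame);
        }

        canvas.dispose();
        grabber.stop();
    }
}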

Aggregations

OpenCVFrameGrabber (org.bytedeco.javacv.OpenCVFrameGrabber): 4
Frame (org.bytedeco.javacv.Frame): 2
File (java.io.File): 1
ShortBuffer (java.nio.ShortBuffer): 1
ScheduledThreadPoolExecutor (java.util.concurrent.ScheduledThreadPoolExecutor): 1
AudioFormat (javax.sound.sampled.AudioFormat): 1
DataLine (javax.sound.sampled.DataLine): 1
LineUnavailableException (javax.sound.sampled.LineUnavailableException): 1
Mixer (javax.sound.sampled.Mixer): 1
TargetDataLine (javax.sound.sampled.TargetDataLine): 1
DoublePointer (org.bytedeco.javacpp.DoublePointer): 1
IntPointer (org.bytedeco.javacpp.IntPointer): 1
CanvasFrame (org.bytedeco.javacv.CanvasFrame): 1
FFmpegFrameRecorder (org.bytedeco.javacv.FFmpegFrameRecorder): 1
Exception (org.bytedeco.javacv.FrameGrabber.Exception): 1
Exception (org.bytedeco.javacv.FrameRecorder.Exception): 1
OpenCVFrameConverter (org.bytedeco.javacv.OpenCVFrameConverter): 1