Search in sources:

Example 6 with Frame

use of org.bytedeco.javacv.Frame in project javacv by bytedeco.

Class FFmpegStreamingTimeout, method rtspStreamingTest.

/**
 * Grabs frames from SOURCE_RTSP with a connection timeout, logging each
 * frame's timestamp until the stream ends or the connection is lost.
 */
private static void rtspStreamingTest() {
    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(SOURCE_RTSP);
    try {
        /*
         * "rw_timeout" is IGNORED when the network cable has been unplugged
         * before a connection is made, but the option takes effect after a
         * connection has been established.
         *
         * "timeout" works fine in both cases.
         */
        // The timeout value is in microseconds.
        grabber.setOption(TimeoutOption.TIMEOUT.getKey(), String.valueOf(TIMEOUT * 1000000));
        grabber.start();
        Frame frame = null;
        /*
         * When the network is disabled (before the grabber was started) the grabber
         * throws exception: "org.bytedeco.javacv.FrameGrabber$Exception:
         * avformat_open_input() error -138: Could not open input...".
         *
         * When the connection is lost (after a few grabbed frames)
         * grabber.grab() returns null without exception.
         */
        while ((frame = grabber.grab()) != null) {
            System.out.println("frame grabbed at " + grabber.getTimestamp());
        }
        System.out.println("loop end with frame: " + frame);
    } catch (FrameGrabber.Exception ex) {
        System.out.println("exception: " + ex);
    } finally {
        // Release native FFmpeg resources; the original leaked them.
        try {
            grabber.release();
        } catch (FrameGrabber.Exception ex) {
            System.out.println("exception on release: " + ex);
        }
    }
    System.out.println("end");
}
Also used : FFmpegFrameGrabber(org.bytedeco.javacv.FFmpegFrameGrabber) Frame(org.bytedeco.javacv.Frame) FrameGrabber(org.bytedeco.javacv.FrameGrabber) FFmpegFrameGrabber(org.bytedeco.javacv.FFmpegFrameGrabber)

Example 7 with Frame

use of org.bytedeco.javacv.Frame in project javacv by bytedeco.

the class FFmpegStreamingTimeout method testWithCallback.

/**
 * Demonstrates using an AVIOInterruptCB callback to interrupt grabbing:
 * a background thread flips an interrupt flag after TIMEOUT seconds, and
 * FFmpeg polls the callback to decide whether to abort blocking I/O.
 */
private static void testWithCallback() {
    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(SOURCE_RTSP);
    try {
        /*
         * grabber.getFormatContext() is null before grabber.start().
         *
         * But if the network is disabled grabber.start() will never return.
         *
         * That's why interrupt_callback is not suitable for the "network
         * disabled" case.
         */
        grabber.start();
        final AtomicBoolean interruptFlag = new AtomicBoolean(false);
        AVIOInterruptCB.Callback_Pointer cp = new AVIOInterruptCB.Callback_Pointer() {

            @Override
            public int call(Pointer pointer) {
                // 0 - continue, 1 - exit
                int interruptFlagInt = interruptFlag.get() ? 1 : 0;
                System.out.println("callback, interrupt flag == " + interruptFlagInt);
                return interruptFlagInt;
            }
        };
        // Install the interrupt callback on the grabber's own format context.
        // NOTE: the original also called avformat_alloc_context() here and
        // discarded the result, leaking an AVFormatContext — removed.
        AVFormatContext oc = grabber.getFormatContext();
        AVIOInterruptCB cb = new AVIOInterruptCB();
        cb.callback(cp);
        oc.interrupt_callback(cb);
        new Thread(new Runnable() {

            public void run() {
                try {
                    TimeUnit.SECONDS.sleep(TIMEOUT);
                    interruptFlag.set(true);
                    System.out.println("interrupt flag was changed");
                } catch (InterruptedException ex) {
                    // Restore the interrupt status so callers can observe it.
                    Thread.currentThread().interrupt();
                    System.out.println("exception in interruption thread: " + ex);
                }
            }
        }).start();
        Frame frame = null;
        /*
         * On one of my RTSP cams the grabber stops calling the callback on
         * connection lost. I think it has something to do with the message:
         * "[swscaler @ 0000000029af49e0] deprecated pixel format used, make
         * sure you did set range correctly".
         *
         * So there is at least one case where the grabber stops calling the
         * callback.
         */
        while ((frame = grabber.grab()) != null) {
            System.out.println("frame grabbed at " + grabber.getTimestamp());
        }
        System.out.println("loop end with frame: " + frame);
    } catch (FrameGrabber.Exception ex) {
        System.out.println("exception: " + ex);
    } finally {
        // Release native FFmpeg resources; the original leaked them.
        try {
            grabber.release();
        } catch (FrameGrabber.Exception ex) {
            System.out.println("exception on release: " + ex);
        }
    }
    System.out.println("end");
}
Also used : Frame(org.bytedeco.javacv.Frame) Pointer(org.bytedeco.javacpp.Pointer) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) FFmpegFrameGrabber(org.bytedeco.javacv.FFmpegFrameGrabber) FrameGrabber(org.bytedeco.javacv.FrameGrabber) FFmpegFrameGrabber(org.bytedeco.javacv.FFmpegFrameGrabber)

Example 8 with Frame

use of org.bytedeco.javacv.Frame in project javacv by bytedeco.

the class FaceRecognizerInVideo method main.

/**
 * Runs LBPH face recognition over a video file: detects faces per frame with
 * a Haar cascade, predicts a label for each face, and annotates the preview
 * window. Press ESC to exit.
 *
 * @param args args[0] = path of the video to analyze,
 *             args[1] = trained recognizer model file
 * @throws Exception on grabber or OpenCV failures
 */
public static void main(String[] args) throws Exception {
    OpenCVFrameConverter.ToMat converterToMat = new OpenCVFrameConverter.ToMat();
    if (args.length < 2) {
        System.out.println("Two parameters are required to run this program, first parameter is the analized video and second parameter is the trained result for fisher faces.");
        // BUGFIX: the original fell through and threw ArrayIndexOutOfBoundsException.
        return;
    }
    String videoFileName = args[0];
    String trainedResult = args[1];
    CascadeClassifier face_cascade = new CascadeClassifier("data\\haarcascade_frontalface_default.xml");
    FaceRecognizer lbphFaceRecognizer = LBPHFaceRecognizer.create();
    lbphFaceRecognizer.read(trainedResult);
    File f = new File(videoFileName);
    OpenCVFrameGrabber grabber = null;
    try {
        grabber = OpenCVFrameGrabber.createDefault(f);
        grabber.start();
    } catch (Exception e) {
        // BUGFIX: the original continued and dereferenced the dead grabber (NPE).
        System.err.println("Failed start the grabber.");
        return;
    }
    Frame videoFrame = null;
    Mat videoMat = new Mat();
    while (true) {
        videoFrame = grabber.grab();
        // BUGFIX: grab() returns null at end of stream; the original crashed
        // in cvtColor with a null Mat.
        if (videoFrame == null) {
            break;
        }
        videoMat = converterToMat.convert(videoFrame);
        Mat videoMatGray = new Mat();
        // Convert the current frame to grayscale:
        cvtColor(videoMat, videoMatGray, COLOR_BGRA2GRAY);
        equalizeHist(videoMatGray, videoMatGray);
        Point p = new Point();
        RectVector faces = new RectVector();
        // Find the faces in the frame:
        face_cascade.detectMultiScale(videoMatGray, faces);
        // annotate it in the video. Cool or what?
        for (int i = 0; i < faces.size(); i++) {
            Rect face_i = faces.get(i);
            Mat face = new Mat(videoMatGray, face_i);
            // If a fisher face recognizer is used, the face needs to be
            // resized:
            // resize(face, face_resized, new Size(im_width, im_height),
            // 1.0, 1.0, INTER_CUBIC);
            // Now perform the prediction, see how easy that is:
            IntPointer label = new IntPointer(1);
            DoublePointer confidence = new DoublePointer(1);
            lbphFaceRecognizer.predict(face, label, confidence);
            int prediction = label.get(0);
            // And finally write all we've found out to the original image!
            // First of all draw a green rectangle around the detected face:
            rectangle(videoMat, face_i, new Scalar(0, 255, 0, 1));
            // Create the text we will annotate the box with:
            String box_text = "Prediction = " + prediction;
            // Calculate the position for annotated text (make sure we don't
            // put illegal values in there):
            int pos_x = Math.max(face_i.tl().x() - 10, 0);
            int pos_y = Math.max(face_i.tl().y() - 10, 0);
            // And now put it into the image:
            putText(videoMat, box_text, new Point(pos_x, pos_y), FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0, 2.0));
        }
        // Show the result:
        imshow("face_recognizer", videoMat);
        char key = (char) waitKey(20);
        // Exit this loop on escape:
        if (key == 27) {
            destroyAllWindows();
            break;
        }
    }
    // Release capture resources; the original never stopped the grabber.
    grabber.stop();
}
Also used : Frame(org.bytedeco.javacv.Frame) DoublePointer(org.bytedeco.javacpp.DoublePointer) OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber) Exception(org.bytedeco.javacv.FrameGrabber.Exception) IntPointer(org.bytedeco.javacpp.IntPointer) OpenCVFrameConverter(org.bytedeco.javacv.OpenCVFrameConverter) File(java.io.File)

Example 9 with Frame

use of org.bytedeco.javacv.Frame in project javacv by bytedeco.

the class RecordActivity method initRecorder.

// ---------------------------------------
// initialize ffmpeg_recorder
// ---------------------------------------
/**
 * Sets up the FFmpegFrameRecorder (FLV output), the transpose/crop frame
 * filter, the frame buffers used when RECORD_LENGTH > 0, and the audio
 * capture thread. Does not start recording.
 */
private void initRecorder() {
    Log.w(LOG_TAG, "init recorder");
    Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
    recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
    recorder.setFormat("flv");
    recorder.setSampleRate(sampleAudioRateInHz);
    recorder.setFrameRate(frameRate); // set in the surface-changed method
    // filterString may be any ffmpeg filter; for the full list see
    // https://ffmpeg.org/ffmpeg-filters.html
    filterString = "transpose=2,crop=w=200:h=200:x=0:y=0";
    filter = new FFmpegFrameFilter(filterString, imageWidth, imageHeight);
    filter.setPixelFormat(avutil.AV_PIX_FMT_NV21); // default camera format on Android
    if (RECORD_LENGTH > 0) {
        // Pre-allocate a ring of frames plus matching timestamps (-1 = unused).
        imagesIndex = 0;
        final int capacity = RECORD_LENGTH * frameRate;
        images = new Frame[capacity];
        timestamps = new long[capacity];
        int slot = 0;
        while (slot < capacity) {
            images[slot] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
            timestamps[slot] = -1;
            slot++;
        }
    } else if (yuvImage == null) {
        yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
        Log.i(LOG_TAG, "create yuvImage");
    }
    Log.i(LOG_TAG, "recorder initialize success");
    audioRecordRunnable = new AudioRecordRunnable();
    audioThread = new Thread(audioRecordRunnable);
    runAudioThread = true;
}
Also used : Frame(org.bytedeco.javacv.Frame) FFmpegFrameFilter(org.bytedeco.javacv.FFmpegFrameFilter) FFmpegFrameRecorder(org.bytedeco.javacv.FFmpegFrameRecorder)

Example 10 with Frame

use of org.bytedeco.javacv.Frame in project javacv by bytedeco.

the class WebcamAndMicrophoneCapture method main.

/**
 * Captures webcam video and microphone audio simultaneously and streams
 * both to an RTMP server with FFmpegFrameRecorder, while showing a local
 * preview. Audio is sampled on a fixed-rate scheduler; video frames are
 * grabbed in the main loop with a lip-sync timestamp correction.
 *
 * @throws Exception on recorder or grabber failures
 */
public static void main(String[] args) throws Exception, org.bytedeco.javacv.FrameGrabber.Exception {
    final int captureWidth = 1280;
    final int captureHeight = 720;
    // The available FrameGrabber classes include OpenCVFrameGrabber (opencv_videoio),
    // DC1394FrameGrabber, FlyCapture2FrameGrabber, OpenKinectFrameGrabber,
    // PS3EyeFrameGrabber, VideoInputFrameGrabber, and FFmpegFrameGrabber.
    final OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(WEBCAM_DEVICE_INDEX);
    grabber.setImageWidth(captureWidth);
    grabber.setImageHeight(captureHeight);
    grabber.start();
    // FFmpegFrameRecorder(String filename, int imageWidth, int imageHeight, int audioChannels):
    // filename = a local file path, or an RTMP url to an FMS / Wowza server;
    // imageWidth/imageHeight = the size we configured on the grabber;
    // audioChannels = 2, because we like stereo.
    final FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("rtmp://my-streaming-server/app_name_here/instance_name/stream_name", captureWidth, captureHeight, 2);
    recorder.setInterleaved(true);
    // decrease "startup" latency in FFMPEG (see:
    // https://trac.ffmpeg.org/wiki/StreamingGuide)
    recorder.setVideoOption("tune", "zerolatency");
    // Tradeoff between quality and encode speed; possible values are
    // ultrafast, superfast, veryfast, faster, fast, medium, slow, slower,
    // veryslow. ultrafast = least compression / lowest encoder CPU,
    // veryslow = best compression / smallest stream at high encoder CPU.
    // (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("preset", "ultrafast");
    // Constant Rate Factor (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("crf", "28");
    // 2000 kb/s, reasonable "sane" area for 720p
    recorder.setVideoBitrate(2000000);
    recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
    recorder.setFormat("flv");
    // FPS (frames per second)
    recorder.setFrameRate(FRAME_RATE);
    // Key frame interval, in our case every 2 seconds -> 30 (fps) * 2 = 60 (gop length)
    recorder.setGopSize(GOP_LENGTH_IN_FRAMES);
    // We don't want variable bitrate audio
    recorder.setAudioOption("crf", "0");
    // Highest quality
    recorder.setAudioQuality(0);
    // 192 Kbps
    recorder.setAudioBitrate(192000);
    recorder.setSampleRate(44100);
    recorder.setAudioChannels(2);
    recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
    // Jack 'n coke... do it...
    recorder.start();
    // Audio capture. scheduleAtFixedRate returns immediately, so no extra
    // wrapper thread is needed (the original spawned one and leaked both the
    // executor and the data line). Keeping the executor and line as locals
    // here lets us shut them down cleanly after the capture loop.
    // NOTE: It is better to enumerate the formats that the system supports,
    // because getLine() can error out with any particular format.
    // For us: 44.1 kHz sample rate, 16 bits, stereo, signed, little endian.
    final AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
    ScheduledThreadPoolExecutor audioExec = null;
    TargetDataLine audioCapture = null;
    try {
        DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
        // For more control over the chosen audio device, go through a mixer:
        // AudioSystem.getMixer(AudioSystem.getMixerInfo()[AUDIO_DEVICE_INDEX]).getLine(dataLineInfo)
        audioCapture = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
        audioCapture.open(audioFormat);
        audioCapture.start();
        final TargetDataLine line = audioCapture;
        final int sampleRate = (int) audioFormat.getSampleRate();
        final int numChannels = audioFormat.getChannels();
        // One second worth of bytes for our audio buffer.
        final byte[] audioBytes = new byte[sampleRate * numChannels];
        // Using a ScheduledThreadPoolExecutor instead of a sleep loop avoids
        // OS-specific timing issues and keeps a more precise clock, since the
        // fixed rate accounts for GC time etc. A similar approach could be
        // used for the webcam capture as well, if you wish.
        audioExec = new ScheduledThreadPoolExecutor(1);
        audioExec.scheduleAtFixedRate(new Runnable() {

            @Override
            public void run() {
                try {
                    // Read from the line... non-blocking
                    int nBytesRead = 0;
                    while (nBytesRead == 0) {
                        nBytesRead = line.read(audioBytes, 0, line.available());
                    }
                    // Since we specified 16 bits in the AudioFormat, convert
                    // the read byte[] to short[] (see source from
                    // FFmpegFrameRecorder.recordSamples for AV_SAMPLE_FMT_S16),
                    // wrap it into a ShortBuffer and pass it to recordSamples.
                    int nSamplesRead = nBytesRead / 2;
                    short[] samples = new short[nSamplesRead];
                    ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                    ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                    recorder.recordSamples(sampleRate, numChannels, sBuff);
                } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        }, 0, 1000L / FRAME_RATE, TimeUnit.MILLISECONDS);
    } catch (LineUnavailableException e1) {
        // Best-effort: keep streaming video even if audio is unavailable.
        e1.printStackTrace();
    }
    // A really nice hardware accelerated component for our preview...
    final CanvasFrame cFrame = new CanvasFrame("Capture Preview", CanvasFrame.getDefaultGamma() / grabber.getGamma());
    Frame capturedFrame = null;
    // While we are capturing...
    while ((capturedFrame = grabber.grab()) != null) {
        if (cFrame.isVisible()) {
            // Show our frame in the preview
            cFrame.showImage(capturedFrame);
        }
        // Initialize on the first frame, as the delta from assignment to
        // computed time could be too high otherwise.
        if (startTime == 0)
            startTime = System.currentTimeMillis();
        // Create timestamp for this frame (microseconds)
        videoTS = 1000 * (System.currentTimeMillis() - startTime);
        // Check for AV drift
        if (videoTS > recorder.getTimestamp()) {
            System.out.println("Lip-flap correction: " + videoTS + " : " + recorder.getTimestamp() + " -> " + (videoTS - recorder.getTimestamp()));
            // We tell the recorder to write this frame at this timestamp
            recorder.setTimestamp(videoTS);
        }
        // Send the frame to the org.bytedeco.javacv.FFmpegFrameRecorder
        recorder.record(capturedFrame);
    }
    cFrame.dispose();
    // Shut down audio capture first so no samples are recorded after stop();
    // the original leaked both the executor thread and the open data line.
    if (audioExec != null) {
        audioExec.shutdownNow();
    }
    if (audioCapture != null) {
        audioCapture.stop();
        audioCapture.close();
    }
    recorder.stop();
    grabber.stop();
}
Also used : Frame(org.bytedeco.javacv.Frame) CanvasFrame(org.bytedeco.javacv.CanvasFrame) ScheduledThreadPoolExecutor(java.util.concurrent.ScheduledThreadPoolExecutor) Mixer(javax.sound.sampled.Mixer) TargetDataLine(javax.sound.sampled.TargetDataLine) DataLine(javax.sound.sampled.DataLine) LineUnavailableException(javax.sound.sampled.LineUnavailableException) OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber) LineUnavailableException(javax.sound.sampled.LineUnavailableException) Exception(org.bytedeco.javacv.FrameRecorder.Exception) TargetDataLine(javax.sound.sampled.TargetDataLine) FFmpegFrameRecorder(org.bytedeco.javacv.FFmpegFrameRecorder) AudioFormat(javax.sound.sampled.AudioFormat) ShortBuffer(java.nio.ShortBuffer) CanvasFrame(org.bytedeco.javacv.CanvasFrame)

Aggregations

Frame (org.bytedeco.javacv.Frame)10 FFmpegFrameGrabber (org.bytedeco.javacv.FFmpegFrameGrabber)3 ShortBuffer (java.nio.ShortBuffer)2 AudioFormat (javax.sound.sampled.AudioFormat)2 DataLine (javax.sound.sampled.DataLine)2 CanvasFrame (org.bytedeco.javacv.CanvasFrame)2 FFmpegFrameFilter (org.bytedeco.javacv.FFmpegFrameFilter)2 FFmpegFrameRecorder (org.bytedeco.javacv.FFmpegFrameRecorder)2 FrameGrabber (org.bytedeco.javacv.FrameGrabber)2 Exception (org.bytedeco.javacv.FrameGrabber.Exception)2 OpenCVFrameGrabber (org.bytedeco.javacv.OpenCVFrameGrabber)2 AWTException (java.awt.AWTException)1 BasicStroke (java.awt.BasicStroke)1 Graphics2D (java.awt.Graphics2D)1 MouseAdapter (java.awt.event.MouseAdapter)1 MouseEvent (java.awt.event.MouseEvent)1 BufferedImage (java.awt.image.BufferedImage)1 File (java.io.File)1 IOException (java.io.IOException)1 ByteBuffer (java.nio.ByteBuffer)1