Example 1 with PointerScope

Use of org.bytedeco.javacpp.PointerScope in project javacv by bytedeco.

From class FrameGrabberTest, method testFFmpegFrameGrabberLockingTest.

@Test
public void testFFmpegFrameGrabberLockingTest() {
    final boolean[] failed = { false };
    final int numberOfInstances = 20;
    System.out.println("FFmpegFrameGrabberLocking");
    Runnable[] runnables = new Runnable[numberOfInstances];
    Thread[] threads = new Thread[numberOfInstances];
    final boolean[] finish = new boolean[numberOfInstances];
    for (int instance = 0; instance < numberOfInstances; instance++) {
        final int instance_final = instance;
        Runnable r = new Runnable() {

            public void run() {
                File tempFile = new File(Loader.getTempDir(), "test" + instance_final + ".mkv");
                try (PointerScope scope = new PointerScope()) {
                    FFmpegLogCallback.set();
                    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(new FileOutputStream(tempFile), 640, 480, 2);
                    // Use Matroska because MP4 does not support streaming output
                    recorder.setFormat("matroska");
                    recorder.setPixelFormat(AV_PIX_FMT_BGR24);
                    recorder.setVideoCodecName("jpegls");
                    // lossless
                    recorder.setVideoQuality(0);
                    recorder.setSampleFormat(AV_SAMPLE_FMT_S16);
                    recorder.setSampleRate(44100);
                    recorder.setAudioCodecName("pcm_s16le");
                    recorder.startUnsafe();
                    Frame[] frames = new Frame[10];
                    for (int n = 0; n < frames.length; n++) {
                        Frame frame = new Frame(640, 480, Frame.DEPTH_UBYTE, 3);
                        UByteIndexer frameIdx = frame.createIndexer();
                        for (int i = 0; i < frameIdx.rows(); i++) {
                            for (int j = 0; j < frameIdx.cols(); j++) {
                                for (int k = 0; k < frameIdx.channels(); k++) {
                                    frameIdx.put(i, j, k, n + i + j + k);
                                }
                            }
                        }
                        recorder.record(frame);
                        frames[n] = frame;
                    }
                    Frame audioFrame = new Frame();
                    ShortBuffer audioBuffer = ShortBuffer.allocate(64 * 1024);
                    audioFrame.sampleRate = 44100;
                    audioFrame.audioChannels = 2;
                    audioFrame.samples = new ShortBuffer[] { audioBuffer };
                    for (int i = 0; i < audioBuffer.capacity(); i++) {
                        audioBuffer.put(i, (short) i);
                    }
                    recorder.record(audioFrame);
                    recorder.stop();
                    recorder.release();
                    Thread.sleep(1000);
                    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(new FileInputStream(tempFile));
                    grabber.setSampleMode(FrameGrabber.SampleMode.FLOAT);
                    grabber.startUnsafe();
                    int n = 0, m = 0;
                    Frame frame2;
                    while ((frame2 = grabber.grab()) != null) {
                        if (frame2.image != null) {
                            Frame frame = frames[n++];
                            assertEquals(frame.imageWidth, frame2.imageWidth);
                            assertEquals(frame.imageHeight, frame2.imageHeight);
                            assertEquals(frame.imageChannels, frame2.imageChannels);
                            UByteIndexer frameIdx = frame.createIndexer();
                            UByteIndexer frame2Idx = frame2.createIndexer();
                            for (int i = 0; i < frameIdx.rows(); i++) {
                                for (int j = 0; j < frameIdx.cols(); j++) {
                                    for (int k = 0; k < frameIdx.channels(); k++) {
                                        int b = frameIdx.get(i, j, k);
                                        assertEquals(b, frame2Idx.get(i, j, k));
                                    }
                                }
                            }
                        } else {
                            FloatBuffer audioBuffer2 = (FloatBuffer) frame2.samples[0];
                            while (audioBuffer2.hasRemaining()) {
                                assertEquals((float) audioBuffer.get(m++) / (Short.MAX_VALUE + 1), audioBuffer2.get(), 0);
                            }
                        }
                    }
                    assertEquals(frames.length, n);
                    assertEquals(null, grabber.grab());
                    grabber.restart();
                    grabber.stop();
                    grabber.release();
                    for (n = 0; n < frames.length; n++) {
                        frames[n].close();
                    }
                } catch (Error | Exception e) {
                    failed[0] = true;
                    e.printStackTrace();
                    fail("Exception should not have been thrown: " + e);
                } finally {
                    tempFile.delete();
                    finish[instance_final] = true;
                }
            }
        };
        runnables[instance_final] = r;
    }
    for (int instance = 0; instance < numberOfInstances; instance++) {
        threads[instance] = new Thread(runnables[instance]);
        threads[instance].setName("TestThread-" + instance);
    }
    for (int instance = 0; instance < numberOfInstances; instance++) {
        threads[instance].start();
    }
    while (true) {
        boolean finished = true;
        for (int instance = 0; instance < numberOfInstances; instance++) {
            if (!finish[instance]) {
                finished = false;
                break;
            }
        }
        if (!finished) {
            System.out.println("Still waiting...");
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                // Interrupted while polling; log it and keep waiting
                e.printStackTrace();
            }
        } else {
            break;
        }
    }
    assertFalse(failed[0]);
}
Also used: FloatBuffer (java.nio.FloatBuffer), UByteIndexer (org.bytedeco.javacpp.indexer.UByteIndexer), PointerScope (org.bytedeco.javacpp.PointerScope), FileInputStream (java.io.FileInputStream), IOException (java.io.IOException), FileOutputStream (java.io.FileOutputStream), File (java.io.File), ShortBuffer (java.nio.ShortBuffer), Test (org.junit.Test)
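
The try-with-resources line is the point of this example: PointerScope collects every Pointer that gets a deallocator while the scope is open and frees them when the scope closes, so none of the threads above can leak native memory on an exception path. A minimal, standalone sketch of that behavior (the class name, buffer size, and use of Pointer.totalBytes() are illustrative, not taken from the test):

import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.Pointer;
import org.bytedeco.javacpp.PointerScope;

public class PointerScopeSketch {

    public static void main(String[] args) {
        System.out.println("tracked native bytes before: " + Pointer.totalBytes());
        try (PointerScope scope = new PointerScope()) {
            // Any Pointer created while the scope is open is attached to it
            BytePointer temp = new BytePointer(1024 * 1024); // 1 MB of native memory
            System.out.println("tracked native bytes inside: " + Pointer.totalBytes());
        } // close() releases the scope's references, deallocating temp immediately
        System.out.println("tracked native bytes after: " + Pointer.totalBytes());
    }
}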

Example 2 with PointerScope

Use of org.bytedeco.javacpp.PointerScope in project javacv by bytedeco.

From class FFmpegFrameGrabber, method grabFrame.

public synchronized Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (oc == null || oc.isNull()) {
            throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)");
        } else if ((!doVideo || video_st == null) && (!doAudio || audio_st == null) && !doData) {
            return null;
        }
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        boolean videoFrameGrabbed = frameGrabbed && frame.image != null;
        boolean audioFrameGrabbed = frameGrabbed && frame.samples != null;
        boolean dataFrameGrabbed = frameGrabbed && frame.data != null;
        frameGrabbed = false;
        if (doVideo && videoFrameGrabbed) {
            if (doProcessing) {
                processImage();
            }
            frame.keyFrame = picture.key_frame() != 0;
            return frame;
        } else if (doAudio && audioFrameGrabbed) {
            if (doProcessing) {
                processSamples();
            }
            frame.keyFrame = samples_frame.key_frame() != 0;
            return frame;
        } else if (doData && dataFrameGrabbed) {
            return frame;
        }
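        // Reset the reusable frame before decoding the next packet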
        frame.keyFrame = false;
        frame.imageWidth = 0;
        frame.imageHeight = 0;
        frame.imageDepth = 0;
        frame.imageChannels = 0;
        frame.imageStride = 0;
        frame.image = null;
        frame.sampleRate = 0;
        frame.audioChannels = 0;
        frame.samples = null;
        frame.data = null;
        frame.opaque = null;
        frame.type = null;
        boolean done = false;
        boolean readPacket = pkt.stream_index() == -1;
        while (!done) {
            int ret = 0;
            if (readPacket) {
                if (pkt.stream_index() != -1) {
                    // Free the packet that was allocated by av_read_frame
                    av_packet_unref(pkt);
                }
                if ((ret = av_read_frame(oc, pkt)) < 0) {
                    if (doVideo && video_st != null) {
                        // The video codec may have buffered some frames
                        pkt.stream_index(video_st.index());
                        pkt.flags(AV_PKT_FLAG_KEY);
                        pkt.data(null);
                        pkt.size(0);
                    } else {
                        pkt.stream_index(-1);
                        return null;
                    }
                }
            }
            frame.streamIndex = pkt.stream_index();
            // Is this a packet from the video stream?
            if (doVideo && video_st != null && frame.streamIndex == video_st.index() && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) {
                // Decode video frame
                if (readPacket) {
                    ret = avcodec_send_packet(video_c, pkt);
                    if (pkt.data() == null && pkt.size() == 0) {
                        pkt.stream_index(-1);
                    }
                    if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                    // The video codec may have buffered some frames
                    } else if (ret < 0) {
                    // Ignore errors to emulate the behavior of the old API
                    // throw new Exception("avcodec_send_packet() error " + ret + ": Error sending a video packet for decoding.");
                    }
                }
                // Did we get a video frame?
                while (!done) {
                    ret = avcodec_receive_frame(video_c, picture);
                    if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                        if (pkt.data() == null && pkt.size() == 0) {
                            return null;
                        } else {
                            readPacket = true;
                            break;
                        }
                    } else if (ret < 0) {
                        // Ignore errors to emulate the behavior of the old API
                        // throw new Exception("avcodec_receive_frame() error " + ret + ": Error during video decoding.");
                        readPacket = true;
                        break;
                    }
                    if (!keyFrames || picture.pict_type() == AV_PICTURE_TYPE_I) {
                        long pts = picture.best_effort_timestamp();
                        AVRational time_base = video_st.time_base();
                        timestamp = 1000000L * pts * time_base.num() / time_base.den();
                        // best guess, AVCodecContext.frame_number = number of decoded frames...
                        frameNumber = (int) Math.floor(timestamp * getFrameRate() / 1000000L);
                        frame.image = image_buf;
                        if (doProcessing) {
                            processImage();
                        }
                        /* the picture is allocated by the decoder. no need to
                           free it */
                        done = true;
                        frame.timestamp = timestamp;
                        frame.keyFrame = picture.key_frame() != 0;
                        frame.pictType = (char) av_get_picture_type_char(picture.pict_type());
                        frame.type = Frame.Type.VIDEO;
                    }
                }
            } else if (doAudio && audio_st != null && frame.streamIndex == audio_st.index()) {
                // Decode audio frame
                if (readPacket) {
                    ret = avcodec_send_packet(audio_c, pkt);
                    if (ret < 0) {
                    // Ignore errors to emulate the behavior of the old API
                    // throw new Exception("avcodec_send_packet() error " + ret + ": Error sending an audio packet for decoding.");
                    }
                }
                // Did we get an audio frame?
                while (!done) {
                    ret = avcodec_receive_frame(audio_c, samples_frame);
                    if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                        readPacket = true;
                        break;
                    } else if (ret < 0) {
                        // Ignore errors to emulate the behavior of the old API
                        // throw new Exception("avcodec_receive_frame() error " + ret + ": Error during audio decoding.");
                        readPacket = true;
                        break;
                    }
                    long pts = samples_frame.best_effort_timestamp();
                    AVRational time_base = audio_st.time_base();
                    timestamp = 1000000L * pts * time_base.num() / time_base.den();
                    frame.samples = samples_buf;
                    /* if a frame has been decoded, output it */
                    if (doProcessing) {
                        processSamples();
                    }
                    done = true;
                    frame.timestamp = timestamp;
                    frame.keyFrame = samples_frame.key_frame() != 0;
                    frame.type = Frame.Type.AUDIO;
                }
            } else if (readPacket && doData && frame.streamIndex > -1 && frame.streamIndex < streams.length && streams[frame.streamIndex] != AVMEDIA_TYPE_VIDEO && streams[frame.streamIndex] != AVMEDIA_TYPE_AUDIO) {
                // Export the stream byte data for non audio / video frames
                frame.data = pkt.data().position(0).capacity(pkt.size()).asByteBuffer();
                frame.opaque = pkt;
                done = true;
                switch(streams[frame.streamIndex]) {
                    case AVMEDIA_TYPE_DATA:
                        frame.type = Frame.Type.DATA;
                        break;
                    case AVMEDIA_TYPE_SUBTITLE:
                        frame.type = Frame.Type.SUBTITLE;
                        break;
                    case AVMEDIA_TYPE_ATTACHMENT:
                        frame.type = Frame.Type.ATTACHMENT;
                        break;
                    default:
                        frame.type = null;
                }
            } else {
                // Current packet is not needed (different stream index required)
                readPacket = true;
            }
        }
        return frame;
    }
}
Also used: PointerScope (org.bytedeco.javacpp.PointerScope), IOException (java.io.IOException)
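
Note that grabFrame() returns frame from inside the scope: the Frame and the buffers behind it are long-lived fields created in startUnsafe(), so only the temporaries allocated during decoding die with the scope. The practical consequence for callers is that the returned Frame is reused by the next grab() and must be cloned to be kept. A caller-side sketch, assuming only the public javacv API (the input path is a placeholder):

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

public class GrabAndKeep {

    public static void main(String[] args) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mkv"); // placeholder path
        grabber.start();
        try {
            Frame live = grabber.grab();   // backed by buffers the grabber reuses
            if (live != null) {
                Frame kept = live.clone(); // deep copy that survives later grab() calls
                System.out.println("kept " + kept.imageWidth + "x" + kept.imageHeight);
                kept.close();
            }
        } finally {
            grabber.stop();
            grabber.release();
        }
    }
}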

Example 3 with PointerScope

Use of org.bytedeco.javacpp.PointerScope in project javacv by bytedeco.

From class FFmpegFrameGrabber, method startUnsafe.

public synchronized void startUnsafe(boolean findStreamInfo) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (oc != null && !oc.isNull()) {
            throw new Exception("start() has already been called: Call stop() before calling start() again.");
        }
        int ret;
        img_convert_ctx = null;
        oc = new AVFormatContext(null);
        video_c = null;
        audio_c = null;
        plane_ptr = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
        plane_ptr2 = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
        pkt = new AVPacket().retainReference();
        frameGrabbed = false;
        frame = new Frame();
        timestamp = 0;
        frameNumber = 0;
        pkt.stream_index(-1);
        // Open video file
        AVInputFormat f = null;
        if (format != null && format.length() > 0) {
            if ((f = av_find_input_format(format)) == null) {
                throw new Exception("av_find_input_format() error: Could not find input format \"" + format + "\".");
            }
        }
        AVDictionary options = new AVDictionary(null);
        if (frameRate > 0) {
            AVRational r = av_d2q(frameRate, 1001000);
            av_dict_set(options, "framerate", r.num() + "/" + r.den(), 0);
        }
        if (pixelFormat >= 0) {
            av_dict_set(options, "pixel_format", av_get_pix_fmt_name(pixelFormat).getString(), 0);
        } else if (imageMode != ImageMode.RAW) {
            av_dict_set(options, "pixel_format", imageMode == ImageMode.COLOR ? "bgr24" : "gray8", 0);
        }
        if (imageWidth > 0 && imageHeight > 0) {
            av_dict_set(options, "video_size", imageWidth + "x" + imageHeight, 0);
        }
        if (sampleRate > 0) {
            av_dict_set(options, "sample_rate", "" + sampleRate, 0);
        }
        if (audioChannels > 0) {
            av_dict_set(options, "channels", "" + audioChannels, 0);
        }
        for (Entry<String, String> e : this.options.entrySet()) {
            av_dict_set(options, e.getKey(), e.getValue(), 0);
        }
        if (inputStream != null) {
            if (!inputStream.markSupported()) {
                inputStream = new BufferedInputStream(inputStream);
            }
            inputStream.mark(maximumSize);
            oc = avformat_alloc_context();
            avio = avio_alloc_context(new BytePointer(av_malloc(4096)), 4096, 0, oc, readCallback, null, maximumSize > 0 ? seekCallback : null);
            oc.pb(avio);
            filename = inputStream.toString();
            inputStreams.put(oc, inputStream);
        }
        if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
            av_dict_set(options, "pixel_format", null, 0);
            if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
                throw new Exception("avformat_open_input() error " + ret + ": Could not open input \"" + filename + "\". (Has setFormat() been called?)");
            }
        }
        av_dict_free(options);
        oc.max_delay(maxDelay);
        // Retrieve stream information, if desired
        if (findStreamInfo && (ret = avformat_find_stream_info(oc, (PointerPointer) null)) < 0) {
            throw new Exception("avformat_find_stream_info() error " + ret + ": Could not find stream information.");
        }
        if (av_log_get_level() >= AV_LOG_INFO) {
            // Dump information about file onto standard error
            av_dump_format(oc, 0, filename, 0);
        }
        // Find the first video and audio stream, unless the user specified otherwise
        video_st = audio_st = null;
        AVCodecParameters video_par = null, audio_par = null;
        int nb_streams = oc.nb_streams();
        streams = new int[nb_streams];
        for (int i = 0; i < nb_streams; i++) {
            AVStream st = oc.streams(i);
            // Get a pointer to the codec context for the video or audio stream
            AVCodecParameters par = st.codecpar();
            streams[i] = par.codec_type();
            if (video_st == null && par.codec_type() == AVMEDIA_TYPE_VIDEO && (videoStream < 0 || videoStream == i)) {
                video_st = st;
                video_par = par;
                videoStream = i;
            } else if (audio_st == null && par.codec_type() == AVMEDIA_TYPE_AUDIO && (audioStream < 0 || audioStream == i)) {
                audio_st = st;
                audio_par = par;
                audioStream = i;
            }
        }
        if (video_st == null && audio_st == null) {
            throw new Exception("Did not find a video or audio stream inside \"" + filename + "\" for videoStream == " + videoStream + " and audioStream == " + audioStream + ".");
        }
        if (video_st != null) {
            // Find the decoder for the video stream
            AVCodec codec = avcodec_find_decoder_by_name(videoCodecName);
            if (codec == null) {
                codec = avcodec_find_decoder(video_par.codec_id());
            }
            if (codec == null) {
                throw new Exception("avcodec_find_decoder() error: Unsupported video format or codec not found: " + video_par.codec_id() + ".");
            }
            /* Allocate a codec context for the decoder */
            if ((video_c = avcodec_alloc_context3(codec)) == null) {
                throw new Exception("avcodec_alloc_context3() error: Could not allocate video decoding context.");
            }
            /* copy the stream parameters from the muxer */
            if ((ret = avcodec_parameters_to_context(video_c, video_st.codecpar())) < 0) {
                releaseUnsafe();
                throw new Exception("avcodec_parameters_to_context() error " + ret + ": Could not copy the video stream parameters.");
            }
            options = new AVDictionary(null);
            for (Entry<String, String> e : videoOptions.entrySet()) {
                av_dict_set(options, e.getKey(), e.getValue(), 0);
            }
            // Enable multithreading when available
            video_c.thread_count(0);
            // Open video codec
            if ((ret = avcodec_open2(video_c, codec, options)) < 0) {
                throw new Exception("avcodec_open2() error " + ret + ": Could not open video codec.");
            }
            av_dict_free(options);
            // Hack to correct wrong frame rates that seem to be generated by some codecs
            if (video_c.time_base().num() > 1000 && video_c.time_base().den() == 1) {
                video_c.time_base().den(1000);
            }
            // Allocate video frame and an AVFrame structure for the RGB image
            if ((picture = av_frame_alloc()) == null) {
                throw new Exception("av_frame_alloc() error: Could not allocate raw picture frame.");
            }
            if ((picture_rgb = av_frame_alloc()) == null) {
                throw new Exception("av_frame_alloc() error: Could not allocate RGB picture frame.");
            }
            initPictureRGB();
        }
        if (audio_st != null) {
            // Find the decoder for the audio stream
            AVCodec codec = avcodec_find_decoder_by_name(audioCodecName);
            if (codec == null) {
                codec = avcodec_find_decoder(audio_par.codec_id());
            }
            if (codec == null) {
                throw new Exception("avcodec_find_decoder() error: Unsupported audio format or codec not found: " + audio_par.codec_id() + ".");
            }
            /* Allocate a codec context for the decoder */
            if ((audio_c = avcodec_alloc_context3(codec)) == null) {
                throw new Exception("avcodec_alloc_context3() error: Could not allocate audio decoding context.");
            }
            /* copy the stream parameters from the muxer */
            if ((ret = avcodec_parameters_to_context(audio_c, audio_st.codecpar())) < 0) {
                releaseUnsafe();
                throw new Exception("avcodec_parameters_to_context() error " + ret + ": Could not copy the audio stream parameters.");
            }
            options = new AVDictionary(null);
            for (Entry<String, String> e : audioOptions.entrySet()) {
                av_dict_set(options, e.getKey(), e.getValue(), 0);
            }
            // Enable multithreading when available
            audio_c.thread_count(0);
            // Open audio codec
            if ((ret = avcodec_open2(audio_c, codec, options)) < 0) {
                throw new Exception("avcodec_open2() error " + ret + ": Could not open audio codec.");
            }
            av_dict_free(options);
            // Allocate audio samples frame
            if ((samples_frame = av_frame_alloc()) == null) {
                throw new Exception("av_frame_alloc() error: Could not allocate audio frame.");
            }
            samples_ptr = new BytePointer[] { null };
            samples_buf = new Buffer[] { null };
        }
        started = true;
    }
}
Also used: PointerPointer (org.bytedeco.javacpp.PointerPointer), BytePointer (org.bytedeco.javacpp.BytePointer), PointerScope (org.bytedeco.javacpp.PointerScope), IOException (java.io.IOException), BufferedInputStream (java.io.BufferedInputStream)
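
The .retainReference() calls on plane_ptr, plane_ptr2, and pkt are the escape hatch from the scope that startUnsafe() opens: the extra reference keeps those pointers alive after close(), in exchange for a matching releaseReference() later (done in releaseUnsafe(), not shown here). A minimal sketch of that pattern, with names and sizes chosen for illustration:

import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.PointerScope;

public class RetainSketch {

    static BytePointer keeper; // plays the role of pkt and plane_ptr above

    public static void main(String[] args) {
        try (PointerScope scope = new PointerScope()) {
            BytePointer temp = new BytePointer(256);         // deallocated when the scope closes
            keeper = new BytePointer(256).retainReference(); // the extra reference outlives the scope
        }
        keeper.putString("still valid after the scope");     // safe: the retained pointer is live
        System.out.println(keeper.getString());
        keeper.releaseReference();                           // manual cleanup, as releaseUnsafe() does
    }
}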

Example 4 with PointerScope

Use of org.bytedeco.javacpp.PointerScope in project javacv by bytedeco.

From class FFmpegFrameRecorder, method recordSamples.

public synchronized boolean recordSamples(int sampleRate, int audioChannels, Buffer... samples) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (audio_st == null) {
            throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
        }
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        if (samples == null && samples_out[0].position() > 0) {
            // Typically samples_out[0].limit() is double the audio_input_frame_size --> sampleDivisor = 2
            double sampleDivisor = Math.floor((int) Math.min(samples_out[0].limit(), Integer.MAX_VALUE) / audio_input_frame_size);
            writeSamples((int) Math.floor((int) samples_out[0].position() / sampleDivisor));
            return writeFrame((AVFrame) null);
        }
        int ret;
        if (sampleRate <= 0) {
            sampleRate = audio_c.sample_rate();
        }
        if (audioChannels <= 0) {
            audioChannels = audio_c.channels();
        }
        int inputSize = samples != null ? samples[0].limit() - samples[0].position() : 0;
        int inputFormat = samples_format;
        int inputChannels = samples != null && samples.length > 1 ? 1 : audioChannels;
        int inputDepth = 0;
        int outputFormat = audio_c.sample_fmt();
        int outputChannels = samples_out.length > 1 ? 1 : audio_c.channels();
        int outputDepth = av_get_bytes_per_sample(outputFormat);
        if (samples != null && samples[0] instanceof ByteBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
            inputDepth = 1;
            for (int i = 0; i < samples.length; i++) {
                ByteBuffer b = (ByteBuffer) samples[i];
                if (samples_in[i] instanceof BytePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((BytePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new BytePointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof ShortBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
            inputDepth = 2;
            for (int i = 0; i < samples.length; i++) {
                ShortBuffer b = (ShortBuffer) samples[i];
                if (samples_in[i] instanceof ShortPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((ShortPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new ShortPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof IntBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
            inputDepth = 4;
            for (int i = 0; i < samples.length; i++) {
                IntBuffer b = (IntBuffer) samples[i];
                if (samples_in[i] instanceof IntPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((IntPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new IntPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof FloatBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
            inputDepth = 4;
            for (int i = 0; i < samples.length; i++) {
                FloatBuffer b = (FloatBuffer) samples[i];
                if (samples_in[i] instanceof FloatPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((FloatPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new FloatPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof DoubleBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
            inputDepth = 8;
            for (int i = 0; i < samples.length; i++) {
                DoubleBuffer b = (DoubleBuffer) samples[i];
                if (samples_in[i] instanceof DoublePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((DoublePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new DoublePointer(b).retainReference();
                }
            }
        } else if (samples != null) {
            throw new Exception("Audio samples Buffer has unsupported type: " + samples);
        }
        if (samples_convert_ctx == null || samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate) {
            samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, audio_c.channel_layout(), outputFormat, audio_c.sample_rate(), av_get_default_channel_layout(audioChannels), inputFormat, sampleRate, 0, null);
            if (samples_convert_ctx == null) {
                throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
            } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
                throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
            }
            samples_channels = audioChannels;
            samples_format = inputFormat;
            samples_rate = sampleRate;
        }
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in[i].position(samples_in[i].position() * inputDepth).limit((samples_in[i].position() + inputSize) * inputDepth);
        }
        while (true) {
            int inputCount = (int) Math.min(samples != null ? (samples_in[0].limit() - samples_in[0].position()) / (inputChannels * inputDepth) : 0, Integer.MAX_VALUE);
            int outputCount = (int) Math.min((samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth), Integer.MAX_VALUE);
            inputCount = Math.min(inputCount, (outputCount * sampleRate + audio_c.sample_rate() - 1) / audio_c.sample_rate());
            for (int i = 0; samples != null && i < samples.length; i++) {
                plane_ptr.put(i, samples_in[i]);
            }
            for (int i = 0; i < samples_out.length; i++) {
                plane_ptr2.put(i, samples_out[i]);
            }
            if ((ret = swr_convert(samples_convert_ctx, plane_ptr2, outputCount, plane_ptr, inputCount)) < 0) {
                throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
            } else if (ret == 0) {
                break;
            }
            for (int i = 0; samples != null && i < samples.length; i++) {
                samples_in[i].position(samples_in[i].position() + inputCount * inputChannels * inputDepth);
            }
            for (int i = 0; i < samples_out.length; i++) {
                samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth);
            }
            if (samples == null || samples_out[0].position() >= samples_out[0].limit()) {
                writeSamples(audio_input_frame_size);
            }
        }
        return samples != null ? frame.key_frame() != 0 : writeFrame((AVFrame) null);
    }
}
Also used: DoubleBuffer (java.nio.DoubleBuffer), BytePointer (org.bytedeco.javacpp.BytePointer), DoublePointer (org.bytedeco.javacpp.DoublePointer), FloatBuffer (java.nio.FloatBuffer), PointerScope (org.bytedeco.javacpp.PointerScope), ByteBuffer (java.nio.ByteBuffer), IOException (java.io.IOException), ShortPointer (org.bytedeco.javacpp.ShortPointer), FloatPointer (org.bytedeco.javacpp.FloatPointer), IntBuffer (java.nio.IntBuffer), IntPointer (org.bytedeco.javacpp.IntPointer), ShortBuffer (java.nio.ShortBuffer)
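
The chain of instanceof branches above means recordSamples() infers the FFmpeg sample format from the Buffer type it is handed: ByteBuffer becomes AV_SAMPLE_FMT_U8, ShortBuffer becomes AV_SAMPLE_FMT_S16, and so on, with the planar (*P) variants selected when more than one buffer is passed. A caller-side sketch along those lines, assuming only the public recorder API (the output path and the generated tone are illustrative):

import java.nio.ShortBuffer;

import org.bytedeco.javacv.FFmpegFrameRecorder;

public class RecordSamplesSketch {

    public static void main(String[] args) throws Exception {
        // Audio-only recorder: zero width/height, 2 channels; "out.mkv" is a placeholder
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("out.mkv", 0, 0, 2);
        recorder.setFormat("matroska");
        recorder.setSampleRate(44100);
        recorder.setAudioCodecName("pcm_s16le");
        recorder.start();
        try {
            // One second of interleaved stereo; a ShortBuffer selects AV_SAMPLE_FMT_S16
            ShortBuffer samples = ShortBuffer.allocate(44100 * 2);
            for (int i = 0; i < samples.capacity(); i++) {
                samples.put(i, (short) (Short.MAX_VALUE / 4 * Math.sin(i / 20.0)));
            }
            recorder.recordSamples(44100, 2, samples);
        } finally {
            recorder.stop();
            recorder.release();
        }
    }
}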

Example 5 with PointerScope

Use of org.bytedeco.javacpp.PointerScope in project javacv by bytedeco.

From class FFmpegFrameFilter, method pushSamples.

public synchronized void pushSamples(int n, int audioChannels, int sampleRate, int sampleFormat, Buffer... samples) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        int ret;
        Pointer[] data = new Pointer[samples.length];
        int sampleSize = samples != null ? ((samples[0].limit() - samples[0].position()) / (samples.length > 1 ? 1 : audioChannels)) : 0;
        if (samples != null && samples[0] instanceof ByteBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
            for (int i = 0; i < data.length; i++) {
                data[i] = new BytePointer((ByteBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof ShortBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
            for (int i = 0; i < data.length; i++) {
                data[i] = new ShortPointer((ShortBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof IntBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
            for (int i = 0; i < data.length; i++) {
                data[i] = new IntPointer((IntBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof FloatBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
            for (int i = 0; i < data.length; i++) {
                data[i] = new FloatPointer((FloatBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof DoubleBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
            for (int i = 0; i < data.length; i++) {
                data[i] = new DoublePointer((DoubleBuffer) samples[i]);
            }
        } else if (samples != null) {
            for (int i = 0; i < data.length; i++) {
                data[i] = new Pointer(samples[i]);
            }
        }
        av_samples_fill_arrays(new PointerPointer(samples_frame), samples_frame.linesize(), new BytePointer(data[0]), audioChannels, sampleSize, sampleFormat, 1);
        for (int i = 0; i < samples.length; i++) {
            samples_frame.data(i, new BytePointer(data[i]));
        }
        samples_frame.channels(audioChannels);
        samples_frame.channel_layout(av_get_default_channel_layout(audioChannels));
        samples_frame.nb_samples(sampleSize);
        samples_frame.format(sampleFormat);
        samples_frame.sample_rate(sampleRate);
        /* push the decoded frame into the filtergraph */
        if ((ret = av_buffersrc_add_frame_flags(abuffersrc_ctx[n], samples_frame, AV_BUFFERSRC_FLAG_KEEP_REF | AV_BUFFERSRC_FLAG_PUSH)) < 0) {
            throw new Exception("av_buffersrc_add_frame_flags() error " + ret + ": Error while feeding the filtergraph.");
        }
    }
}
Also used: DoubleBuffer (java.nio.DoubleBuffer), PointerPointer (org.bytedeco.javacpp.PointerPointer), BytePointer (org.bytedeco.javacpp.BytePointer), DoublePointer (org.bytedeco.javacpp.DoublePointer), ShortPointer (org.bytedeco.javacpp.ShortPointer), IntPointer (org.bytedeco.javacpp.IntPointer), FloatPointer (org.bytedeco.javacpp.FloatPointer), Pointer (org.bytedeco.javacpp.Pointer), FloatBuffer (java.nio.FloatBuffer), PointerScope (org.bytedeco.javacpp.PointerScope), ByteBuffer (java.nio.ByteBuffer), IntBuffer (java.nio.IntBuffer), ShortBuffer (java.nio.ShortBuffer)
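
pushSamples() wraps each Java buffer in the matching Pointer type, fills samples_frame, and pushes it into the graph through av_buffersrc_add_frame_flags(). A caller-side sketch of the surrounding workflow; the audio-only constructor, setSampleRate(), and pullSamples() are assumptions about the public FFmpegFrameFilter API, and the "volume=0.5" graph and buffer sizes are illustrative:

import java.nio.ShortBuffer;

import org.bytedeco.javacv.FFmpegFrameFilter;
import org.bytedeco.javacv.Frame;

import static org.bytedeco.ffmpeg.global.avutil.AV_SAMPLE_FMT_S16;

public class PushSamplesSketch {

    public static void main(String[] args) throws Exception {
        // Audio-only filter graph that halves the volume of a stereo signal
        FFmpegFrameFilter filter = new FFmpegFrameFilter("volume=0.5", 2);
        filter.setSampleRate(44100);
        filter.start();
        try {
            ShortBuffer samples = ShortBuffer.allocate(1024 * 2); // interleaved stereo
            for (int i = 0; i < samples.capacity(); i++) {
                samples.put(i, (short) i);
            }
            // n == 0: feed the first (and only) audio input of the graph
            filter.pushSamples(0, 2, 44100, AV_SAMPLE_FMT_S16, samples);
            Frame filtered;
            while ((filtered = filter.pullSamples()) != null) {
                System.out.println("pulled " + filtered.samples[0].limit() + " samples");
            }
        } finally {
            filter.stop();
            filter.release();
        }
    }
}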

Aggregations

PointerScope (org.bytedeco.javacpp.PointerScope): 11
BytePointer (org.bytedeco.javacpp.BytePointer): 8
IOException (java.io.IOException): 6
IntPointer (org.bytedeco.javacpp.IntPointer): 6
PointerPointer (org.bytedeco.javacpp.PointerPointer): 6
ByteBuffer (java.nio.ByteBuffer): 5
DoublePointer (org.bytedeco.javacpp.DoublePointer): 4
FloatPointer (org.bytedeco.javacpp.FloatPointer): 4
ShortPointer (org.bytedeco.javacpp.ShortPointer): 4
FloatBuffer (java.nio.FloatBuffer): 3
ShortBuffer (java.nio.ShortBuffer): 3
Pointer (org.bytedeco.javacpp.Pointer): 3
DoubleBuffer (java.nio.DoubleBuffer): 2
IntBuffer (java.nio.IntBuffer): 2
BufferedInputStream (java.io.BufferedInputStream): 1
File (java.io.File): 1
FileInputStream (java.io.FileInputStream): 1
FileOutputStream (java.io.FileOutputStream): 1
UByteIndexer (org.bytedeco.javacpp.indexer.UByteIndexer): 1
Test (org.junit.Test): 1