Example 1 with DoublePointer

Use of org.bytedeco.javacpp.DoublePointer in the project bigbluebutton by bigbluebutton.

From the class Frame, method createIndexer:

/** Returns an {@link Indexer} for the <i>i</i>th image plane. */
public <I extends Indexer> I createIndexer(boolean direct, int i) {
    long[] sizes = { imageHeight, imageWidth, imageChannels };
    long[] strides = { imageStride, imageChannels, 1 };
    Buffer buffer = image[i];
    Object array = buffer.hasArray() ? buffer.array() : null;
    // Pick the Indexer flavor matching the image depth: heap arrays are
    // indexed directly, direct NIO buffers go through the Buffer overloads,
    // and any other buffer is wrapped in a JavaCPP Pointer view.
    switch (imageDepth) {
        case DEPTH_UBYTE:
            return array != null ? (I) UByteIndexer.create((byte[]) array, sizes, strides)
                 : direct ? (I) UByteIndexer.create((ByteBuffer) buffer, sizes, strides)
                 : (I) UByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false);
        case DEPTH_BYTE:
            return array != null ? (I) ByteIndexer.create((byte[]) array, sizes, strides)
                 : direct ? (I) ByteIndexer.create((ByteBuffer) buffer, sizes, strides)
                 : (I) ByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false);
        case DEPTH_USHORT:
            return array != null ? (I) UShortIndexer.create((short[]) array, sizes, strides)
                 : direct ? (I) UShortIndexer.create((ShortBuffer) buffer, sizes, strides)
                 : (I) UShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false);
        case DEPTH_SHORT:
            return array != null ? (I) ShortIndexer.create((short[]) array, sizes, strides)
                 : direct ? (I) ShortIndexer.create((ShortBuffer) buffer, sizes, strides)
                 : (I) ShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false);
        case DEPTH_INT:
            return array != null ? (I) IntIndexer.create((int[]) array, sizes, strides)
                 : direct ? (I) IntIndexer.create((IntBuffer) buffer, sizes, strides)
                 : (I) IntIndexer.create(new IntPointer((IntBuffer) buffer), sizes, strides, false);
        case DEPTH_LONG:
            return array != null ? (I) LongIndexer.create((long[]) array, sizes, strides)
                 : direct ? (I) LongIndexer.create((LongBuffer) buffer, sizes, strides)
                 : (I) LongIndexer.create(new LongPointer((LongBuffer) buffer), sizes, strides, false);
        case DEPTH_FLOAT:
            return array != null ? (I) FloatIndexer.create((float[]) array, sizes, strides)
                 : direct ? (I) FloatIndexer.create((FloatBuffer) buffer, sizes, strides)
                 : (I) FloatIndexer.create(new FloatPointer((FloatBuffer) buffer), sizes, strides, false);
        case DEPTH_DOUBLE:
            return array != null ? (I) DoubleIndexer.create((double[]) array, sizes, strides)
                 : direct ? (I) DoubleIndexer.create((DoubleBuffer) buffer, sizes, strides)
                 : (I) DoubleIndexer.create(new DoublePointer((DoubleBuffer) buffer), sizes, strides, false);
        default:
            assert false;
    }
    return null;
}
Also used: Buffer(java.nio.Buffer) ByteBuffer(java.nio.ByteBuffer) DoubleBuffer(java.nio.DoubleBuffer) FloatBuffer(java.nio.FloatBuffer) IntBuffer(java.nio.IntBuffer) LongBuffer(java.nio.LongBuffer) ShortBuffer(java.nio.ShortBuffer) BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) IntPointer(org.bytedeco.javacpp.IntPointer) LongPointer(org.bytedeco.javacpp.LongPointer) ShortPointer(org.bytedeco.javacpp.ShortPointer)
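
For context, here is a minimal sketch of how this method is typically consumed from a grabbed Frame. The input path "input.mp4" is illustrative, and it assumes the no-argument createIndexer() overload (which delegates to createIndexer(true, 0) in JavaCV) is available:

import org.bytedeco.javacpp.indexer.UByteIndexer;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

public class IndexerSketch {
    public static void main(String[] args) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mp4");
        grabber.start();
        Frame frame = grabber.grab();
        if (frame != null && frame.image != null) {
            // DEPTH_UBYTE frames yield a UByteIndexer; get() widens each
            // unsigned byte to an int in the range 0..255.
            UByteIndexer idx = frame.createIndexer();
            int value = idx.get(0, 0, 0); // row, column, channel
            System.out.println("pixel(0,0,0) = " + value);
            idx.release();
        }
        grabber.stop();
    }
}

Because sizes and strides are set up as { height, width, channels }, the indexer addresses pixels as (row, column, channel) with no manual stride arithmetic at the call site.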

Example 2 with DoublePointer

Use of org.bytedeco.javacpp.DoublePointer in the project bigbluebutton by bigbluebutton.

From the class FFmpegFrameGrabber, method processImage:

private void processImage() throws Exception {
    frame.imageWidth = imageWidth > 0 ? imageWidth : video_c.width();
    frame.imageHeight = imageHeight > 0 ? imageHeight : video_c.height();
    frame.imageDepth = Frame.DEPTH_UBYTE;
    switch(imageMode) {
        case COLOR:
        case GRAY:
            // Deinterlace Picture
            if (deinterlace) {
                throw new Exception("Cannot deinterlace: Functionality moved to FFmpegFrameFilter.");
            }
            // Convert the image into the BGR or GRAY format that OpenCV uses
            img_convert_ctx = sws_getCachedContext(img_convert_ctx,
                    video_c.width(), video_c.height(), video_c.pix_fmt(),
                    frame.imageWidth, frame.imageHeight, getPixelFormat(),
                    SWS_BILINEAR, null, null, (DoublePointer) null);
            if (img_convert_ctx == null) {
                throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
            }
            // Convert the image from its native format to BGR or GRAY
            sws_scale(img_convert_ctx, new PointerPointer(picture), picture.linesize(),
                    0, video_c.height(),
                    new PointerPointer(picture_rgb), picture_rgb.linesize());
            frame.imageStride = picture_rgb.linesize(0);
            frame.image = image_buf;
            break;
        case RAW:
            frame.imageStride = picture.linesize(0);
            BytePointer ptr = picture.data(0);
            if (ptr != null && !ptr.equals(image_ptr[0])) {
                image_ptr[0] = ptr.capacity(frame.imageHeight * frame.imageStride);
                image_buf[0] = ptr.asBuffer();
            }
            frame.image = image_buf;
            break;
        default:
            assert false;
    }
    frame.image[0].limit(frame.imageHeight * frame.imageStride);
    frame.imageChannels = frame.imageStride / frame.imageWidth;
}
Also used: BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) PointerPointer(org.bytedeco.javacpp.PointerPointer)
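
The (DoublePointer) null argument fills sws_getCachedContext's trailing param slot (optional tuning coefficients for some scalers); the cast exists only so the Java compiler can pick the right native overload. A standalone sketch of the same cached-context pattern follows, where srcFrame and dstFrame are hypothetical AVFrames assumed to be allocated with valid buffers elsewhere:

// srcFrame and dstFrame are hypothetical, already-allocated AVFrames.
SwsContext ctx = sws_getCachedContext(null,
        srcFrame.width(), srcFrame.height(), srcFrame.format(),
        dstFrame.width(), dstFrame.height(), dstFrame.format(),
        SWS_BILINEAR, null, null, (DoublePointer) null);
if (ctx == null) {
    throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
}
// One call converts pixel format and rescales in a single pass.
sws_scale(ctx, new PointerPointer(srcFrame), srcFrame.linesize(),
        0, srcFrame.height(),
        new PointerPointer(dstFrame), dstFrame.linesize());

Passing the previous context back into sws_getCachedContext, as processImage does with img_convert_ctx, lets libswscale reuse it whenever the conversion parameters have not changed.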

Example 3 with DoublePointer

Use of org.bytedeco.javacpp.DoublePointer in the project bigbluebutton by bigbluebutton.

From the class FFmpegFrameRecorder, method recordImage:

public boolean recordImage(int width, int height, int depth, int channels, int stride, int pixelFormat, long frameTimestamp, Buffer... image) throws Exception {
    if (video_st == null) {
        throw new Exception("No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
    }
    int ret;
    if (image == null || image.length == 0) {
        /* No more frames to compress. The codec has a latency of a few
           frames if using B-frames, so we get the last frames by
           passing the same picture again. */
    } else {
        int step = stride * Math.abs(depth) / 8;
        BytePointer data = image[0] instanceof ByteBuffer
                ? new BytePointer((ByteBuffer) image[0].position(0))
                : new BytePointer(new Pointer(image[0].position(0)));
        if (pixelFormat == AV_PIX_FMT_NONE) {
            if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 3) {
                pixelFormat = AV_PIX_FMT_BGR24;
            } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 1) {
                pixelFormat = AV_PIX_FMT_GRAY8;
            } else if ((depth == Frame.DEPTH_USHORT || depth == Frame.DEPTH_SHORT) && channels == 1) {
                pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
            } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 4) {
                pixelFormat = AV_PIX_FMT_RGBA;
            } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 2) {
                // Android's camera capture format
                pixelFormat = AV_PIX_FMT_NV21;
                step = width;
            } else {
                throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
            }
        }
        if (video_c.pix_fmt() != pixelFormat || video_c.width() != width || video_c.height() != height) {
            /* convert to the codec pixel format if needed */
            img_convert_ctx = sws_getCachedContext(img_convert_ctx,
                    width, height, pixelFormat,
                    video_c.width(), video_c.height(), video_c.pix_fmt(),
                    SWS_BILINEAR, null, null, (DoublePointer) null);
            if (img_convert_ctx == null) {
                throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
            }
            avpicture_fill(new AVPicture(tmp_picture), data, pixelFormat, width, height);
            avpicture_fill(new AVPicture(picture), picture_buf, video_c.pix_fmt(), video_c.width(), video_c.height());
            tmp_picture.linesize(0, step);
            tmp_picture.format(pixelFormat);
            tmp_picture.width(width);
            tmp_picture.height(height);
            picture.format(video_c.pix_fmt());
            picture.width(video_c.width());
            picture.height(video_c.height());
            sws_scale(img_convert_ctx, new PointerPointer(tmp_picture), tmp_picture.linesize(),
                    0, height,
                    new PointerPointer(picture), picture.linesize());
        } else {
            avpicture_fill(new AVPicture(picture), data, pixelFormat, width, height);
            picture.linesize(0, step);
            picture.format(pixelFormat);
            picture.width(width);
            picture.height(height);
        }
    }
    if ((oformat.flags() & AVFMT_RAWPICTURE) != 0) {
        if (image == null || image.length == 0) {
            return false;
        }
        /* raw video case. The API may change slightly in the future for that? */
        av_init_packet(video_pkt);
        video_pkt.flags(video_pkt.flags() | AV_PKT_FLAG_KEY);
        video_pkt.stream_index(video_st.index());
        video_pkt.data(new BytePointer(picture));
        video_pkt.size(Loader.sizeof(AVPicture.class));
    } else {
        /* encode the image */
        av_init_packet(video_pkt);
        video_pkt.data(video_outbuf);
        video_pkt.size(video_outbuf_size);
        picture.quality(video_c.global_quality());
        if ((ret = avcodec_encode_video2(video_c, video_pkt, image == null || image.length == 0 ? null : picture, got_video_packet)) < 0) {
            throw new Exception("avcodec_encode_video2() error " + ret + ": Could not encode video packet.");
        }
        // magic required by libx264
        picture.pts(picture.pts() + 1);
        /* if zero size, it means the image was buffered */
        if (got_video_packet[0] != 0) {
            if (video_pkt.pts() != AV_NOPTS_VALUE) {
                // Override the timestamp from the system screen grabber; otherwise we will have a skewed recorded file.
                // FFmpegFrameRecorder needs to propagate this timestamp into the AVPacket sent to the server.
                // ralam - Sept. 14, 2016
                video_pkt.pts(frameTimestamp);
            //video_pkt.pts(av_rescale_q(video_pkt.pts(), video_c.time_base(), video_st.time_base()));
            }
            if (video_pkt.dts() != AV_NOPTS_VALUE) {
                video_pkt.dts(frameTimestamp);
            //video_pkt.dts(av_rescale_q(video_pkt.dts(), video_c.time_base(), video_st.time_base()));
            }
            video_pkt.stream_index(video_st.index());
        } else {
            return false;
        }
    }
    writePacket(AVMEDIA_TYPE_VIDEO, video_pkt);
    return image != null ? (video_pkt.flags() & AV_PKT_FLAG_KEY) != 0 : got_video_packet[0] != 0;
}
Also used: ByteBuffer(java.nio.ByteBuffer) BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) IntPointer(org.bytedeco.javacpp.IntPointer) Pointer(org.bytedeco.javacpp.Pointer) PointerPointer(org.bytedeco.javacpp.PointerPointer) ShortPointer(org.bytedeco.javacpp.ShortPointer)
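
recordImage(...) is normally reached through the public record(Frame) entry point rather than called directly. A minimal sketch of that path; the output name, codec, dimensions, and frame rate are illustrative, and frame stands for a hypothetical Frame whose depth and channel count match one of the pixel-format guesses above (e.g. DEPTH_UBYTE with 3 channels maps to AV_PIX_FMT_BGR24):

FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("out.mp4", 640, 480);
recorder.setVideoCodec(org.bytedeco.javacpp.avcodec.AV_CODEC_ID_H264);
recorder.setFrameRate(30);
recorder.start();
// frame is a hypothetical Frame produced elsewhere, e.g. by a grabber.
recorder.record(frame);
recorder.stop();
recorder.release();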

Example 4 with DoublePointer

Use of org.bytedeco.javacpp.DoublePointer in the project bigbluebutton by bigbluebutton.

From the class FFmpegFrameRecorder, method recordSamples:

public boolean recordSamples(int sampleRate, int audioChannels, Buffer... samples) throws Exception {
    if (audio_st == null) {
        throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
    }
    int ret;
    if (sampleRate <= 0) {
        sampleRate = audio_c.sample_rate();
    }
    if (audioChannels <= 0) {
        audioChannels = audio_c.channels();
    }
    int inputSize = samples != null ? samples[0].limit() - samples[0].position() : 0;
    int inputFormat = AV_SAMPLE_FMT_NONE;
    int inputChannels = samples != null && samples.length > 1 ? 1 : audioChannels;
    int inputDepth = 0;
    int outputFormat = audio_c.sample_fmt();
    int outputChannels = samples_out.length > 1 ? 1 : audio_c.channels();
    int outputDepth = av_get_bytes_per_sample(outputFormat);
    if (samples != null && samples[0] instanceof ByteBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
        inputDepth = 1;
        for (int i = 0; i < samples.length; i++) {
            ByteBuffer b = (ByteBuffer) samples[i];
            if (samples_in[i] instanceof BytePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((BytePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new BytePointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof ShortBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
        inputDepth = 2;
        for (int i = 0; i < samples.length; i++) {
            ShortBuffer b = (ShortBuffer) samples[i];
            if (samples_in[i] instanceof ShortPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((ShortPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new ShortPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof IntBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            IntBuffer b = (IntBuffer) samples[i];
            if (samples_in[i] instanceof IntPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((IntPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new IntPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof FloatBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            FloatBuffer b = (FloatBuffer) samples[i];
            if (samples_in[i] instanceof FloatPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((FloatPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new FloatPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof DoubleBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
        inputDepth = 8;
        for (int i = 0; i < samples.length; i++) {
            DoubleBuffer b = (DoubleBuffer) samples[i];
            if (samples_in[i] instanceof DoublePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((DoublePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new DoublePointer(b);
            }
        }
    } else if (samples != null) {
        throw new Exception("Audio samples Buffer has unsupported type: " + samples);
    }
    if (samples_convert_ctx == null || samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate) {
        samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, audio_c.channel_layout(), outputFormat, audio_c.sample_rate(), av_get_default_channel_layout(audioChannels), inputFormat, sampleRate, 0, null);
        if (samples_convert_ctx == null) {
            throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
        } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
            throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
        }
        samples_channels = audioChannels;
        samples_format = inputFormat;
        samples_rate = sampleRate;
    }
    for (int i = 0; samples != null && i < samples.length; i++) {
        samples_in[i].position(samples_in[i].position() * inputDepth).limit((samples_in[i].position() + inputSize) * inputDepth);
    }
    while (true) {
        int inputCount = (int) Math.min(samples != null ? (samples_in[0].limit() - samples_in[0].position()) / (inputChannels * inputDepth) : 0, Integer.MAX_VALUE);
        int outputCount = (int) Math.min((samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth), Integer.MAX_VALUE);
        inputCount = Math.min(inputCount, (outputCount * sampleRate + audio_c.sample_rate() - 1) / audio_c.sample_rate());
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in_ptr.put(i, samples_in[i]);
        }
        for (int i = 0; i < samples_out.length; i++) {
            samples_out_ptr.put(i, samples_out[i]);
        }
        if ((ret = swr_convert(samples_convert_ctx, samples_out_ptr, outputCount, samples_in_ptr, inputCount)) < 0) {
            throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
        } else if (ret == 0) {
            break;
        }
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in[i].position(samples_in[i].position() + inputCount * inputChannels * inputDepth);
        }
        for (int i = 0; i < samples_out.length; i++) {
            samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth);
        }
        if (samples == null || samples_out[0].position() >= samples_out[0].limit()) {
            frame.nb_samples(audio_input_frame_size);
            avcodec_fill_audio_frame(frame, audio_c.channels(), outputFormat, samples_out[0], (int) Math.min(samples_out[0].limit(), Integer.MAX_VALUE), 0);
            for (int i = 0; i < samples_out.length; i++) {
                frame.data(i, samples_out[i].position(0));
                frame.linesize(i, (int) Math.min(samples_out[i].limit(), Integer.MAX_VALUE));
            }
            frame.quality(audio_c.global_quality());
            record(frame);
        }
    }
    return samples != null ? frame.key_frame() != 0 : record((AVFrame) null);
}
Also used: ByteBuffer(java.nio.ByteBuffer) DoubleBuffer(java.nio.DoubleBuffer) FloatBuffer(java.nio.FloatBuffer) IntBuffer(java.nio.IntBuffer) ShortBuffer(java.nio.ShortBuffer) BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) IntPointer(org.bytedeco.javacpp.IntPointer) ShortPointer(org.bytedeco.javacpp.ShortPointer)
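
A minimal sketch of feeding this method interleaved 16-bit PCM; per the branches above, a single ShortBuffer maps to AV_SAMPLE_FMT_S16. The sample rate, channel count, and silent buffer are illustrative, and recorder is assumed to be a started FFmpegFrameRecorder with audioChannels > 0:

import java.nio.ShortBuffer;

// One second of interleaved stereo silence at 44.1 kHz.
short[] pcm = new short[44100 * 2];
ShortBuffer samples = ShortBuffer.wrap(pcm);
recorder.recordSamples(44100, 2, samples);

For planar formats, pass one Buffer per channel instead; the samples.length > 1 checks above then select the corresponding AV_SAMPLE_FMT_*P planar format.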

Aggregations

BytePointer (org.bytedeco.javacpp.BytePointer): 4
DoublePointer (org.bytedeco.javacpp.DoublePointer): 4
ByteBuffer (java.nio.ByteBuffer): 3
FloatPointer (org.bytedeco.javacpp.FloatPointer): 3
IntPointer (org.bytedeco.javacpp.IntPointer): 3
ShortPointer (org.bytedeco.javacpp.ShortPointer): 3
DoubleBuffer (java.nio.DoubleBuffer): 2
FloatBuffer (java.nio.FloatBuffer): 2
IntBuffer (java.nio.IntBuffer): 2
ShortBuffer (java.nio.ShortBuffer): 2
PointerPointer (org.bytedeco.javacpp.PointerPointer): 2
Buffer (java.nio.Buffer): 1
LongBuffer (java.nio.LongBuffer): 1
LongPointer (org.bytedeco.javacpp.LongPointer): 1
Pointer (org.bytedeco.javacpp.Pointer): 1