Example 16 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class OpenCVFrameConverter, the method convertToOrgOpenCvCoreMat:

public org.opencv.core.Mat convertToOrgOpenCvCoreMat(Frame frame) {
    if (frame == null || frame.image == null) {
        return null;
    } else if (frame.opaque instanceof org.opencv.core.Mat) {
        return (org.opencv.core.Mat) frame.opaque;
    } else if (!isEqual(frame, mat)) {
        int depth = getMatDepth(frame.imageDepth);
        orgOpenCvCoreMat = depth < 0 ? null : new org.opencv.core.Mat(
                frame.imageHeight, frame.imageWidth, CV_MAKETYPE(depth, frame.imageChannels),
                new BytePointer(new Pointer(frame.image[0].position(0)))
                        .capacity(frame.image[0].capacity() * Math.abs(frame.imageDepth) / 8)
                        .asByteBuffer(),
                frame.imageStride * Math.abs(frame.imageDepth) / 8);
    }
    return orgOpenCvCoreMat;
}
Also used: org.bytedeco.opencv.global.opencv_core, org.bytedeco.opencv.opencv_core, Pointer (org.bytedeco.javacpp.Pointer), BytePointer (org.bytedeco.javacpp.BytePointer)
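
For context, a minimal calling sketch. This is hedged: the class name MatConversionExample and the comments are ours, but OpenCVFrameConverter.ToOrgOpenCvCoreMat is the converter subclass whose convert(Frame) delegates to the method above.

import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;

public class MatConversionExample {
    // Wraps a Frame's pixel buffer as an org.opencv.core.Mat without copying.
    static org.opencv.core.Mat toCoreMat(Frame frame) {
        OpenCVFrameConverter.ToOrgOpenCvCoreMat converter = new OpenCVFrameConverter.ToOrgOpenCvCoreMat();
        // The Mat shares memory with the Frame, so it stays valid only until
        // the Frame's buffer is reused (e.g. by the next grab()).
        return converter.convert(frame);
    }
}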

Example 17 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FFmpegFrameFilter, the method pushSamples:

public synchronized void pushSamples(int n, int audioChannels, int sampleRate, int sampleFormat, Buffer... samples) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        int ret;
        Pointer[] data = new Pointer[samples.length];
        // Samples per channel: planar input (several buffers) already holds one channel
        // per buffer; interleaved input packs all channels into samples[0].
        int sampleSize = samples != null ? ((samples[0].limit() - samples[0].position()) / (samples.length > 1 ? 1 : audioChannels)) : 0;
        // Infer the FFmpeg sample format from the Buffer subtype: several planes select
        // the planar variant, a single plane the packed (interleaved) one.
        if (samples != null && samples[0] instanceof ByteBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
            for (int i = 0; i < data.length; i++) {
                data[i] = new BytePointer((ByteBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof ShortBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
            for (int i = 0; i < data.length; i++) {
                data[i] = new ShortPointer((ShortBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof IntBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
            for (int i = 0; i < data.length; i++) {
                data[i] = new IntPointer((IntBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof FloatBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
            for (int i = 0; i < data.length; i++) {
                data[i] = new FloatPointer((FloatBuffer) samples[i]);
            }
        } else if (samples != null && samples[0] instanceof DoubleBuffer) {
            sampleFormat = data.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
            for (int i = 0; i < data.length; i++) {
                data[i] = new DoublePointer((DoubleBuffer) samples[i]);
            }
        } else if (samples != null) {
            for (int i = 0; i < data.length; i++) {
                data[i] = new Pointer(samples[i]);
            }
        }
        // Let FFmpeg compute the plane layout, then point each data plane at the caller's buffers.
        av_samples_fill_arrays(new PointerPointer(samples_frame), samples_frame.linesize(), new BytePointer(data[0]), audioChannels, sampleSize, sampleFormat, 1);
        for (int i = 0; i < samples.length; i++) {
            samples_frame.data(i, new BytePointer(data[i]));
        }
        samples_frame.channels(audioChannels);
        samples_frame.channel_layout(av_get_default_channel_layout(audioChannels));
        samples_frame.nb_samples(sampleSize);
        samples_frame.format(sampleFormat);
        samples_frame.sample_rate(sampleRate);
        /* push the decoded frame into the filtergraph */
        if ((ret = av_buffersrc_add_frame_flags(abuffersrc_ctx[n], samples_frame, AV_BUFFERSRC_FLAG_KEEP_REF | AV_BUFFERSRC_FLAG_PUSH)) < 0) {
            throw new Exception("av_buffersrc_add_frame_flags() error " + ret + ": Error while feeding the filtergraph.");
        }
    }
}
Also used: ByteBuffer (java.nio.ByteBuffer), ShortBuffer (java.nio.ShortBuffer), IntBuffer (java.nio.IntBuffer), FloatBuffer (java.nio.FloatBuffer), DoubleBuffer (java.nio.DoubleBuffer), Pointer (org.bytedeco.javacpp.Pointer), BytePointer (org.bytedeco.javacpp.BytePointer), ShortPointer (org.bytedeco.javacpp.ShortPointer), IntPointer (org.bytedeco.javacpp.IntPointer), FloatPointer (org.bytedeco.javacpp.FloatPointer), DoublePointer (org.bytedeco.javacpp.DoublePointer), PointerPointer (org.bytedeco.javacpp.PointerPointer), PointerScope (org.bytedeco.javacpp.PointerScope)
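
A rough sketch of how pushSamples might be driven from user code. Hedged: the "volume=0.5" filter string, the buffer size, and the class name AudioFilterExample are illustrative, not from the project.

import java.nio.ShortBuffer;
import org.bytedeco.javacv.FFmpegFrameFilter;
import org.bytedeco.javacv.Frame;
import static org.bytedeco.ffmpeg.global.avutil.AV_SAMPLE_FMT_S16;

public class AudioFilterExample {
    public static void main(String[] args) throws Exception {
        int channels = 1, sampleRate = 44100;
        // Audio-only filter graph; "volume=0.5" halves the amplitude (illustrative).
        FFmpegFrameFilter filter = new FFmpegFrameFilter("volume=0.5", channels);
        filter.setSampleRate(sampleRate);
        filter.start();
        // One interleaved 16-bit plane; pushSamples re-derives the format from the Buffer type.
        ShortBuffer samples = ShortBuffer.allocate(1024); // silence, for demonstration
        filter.pushSamples(0, channels, sampleRate, AV_SAMPLE_FMT_S16, samples); // n = 0: first audio input
        Frame filtered = filter.pull(); // filtered.samples holds the attenuated audio
        filter.stop();
        filter.release();
    }
}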

Example 18 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FFmpegFrameFilter, the method pushImage:

public synchronized void pushImage(int n, int width, int height, int depth, int channels, int stride, int pixelFormat, Buffer... image) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        int ret;
        int step = stride * Math.abs(depth) / 8; // row stride in bytes
        BytePointer data = image[0] instanceof ByteBuffer
                ? new BytePointer((ByteBuffer) image[0]).position(0)
                : new BytePointer(new Pointer(image[0]).position(0));
        // Guess the pixel format from the sample depth and channel count when none is given.
        if (pixelFormat == AV_PIX_FMT_NONE) {
            if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 3) {
                pixelFormat = AV_PIX_FMT_BGR24;
            } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 1) {
                pixelFormat = AV_PIX_FMT_GRAY8;
            } else if ((depth == Frame.DEPTH_USHORT || depth == Frame.DEPTH_SHORT) && channels == 1) {
                pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
            } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 4) {
                pixelFormat = AV_PIX_FMT_RGBA;
            } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 2) {
                // Android's camera capture format
                pixelFormat = AV_PIX_FMT_NV21;
            } else {
                throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
            }
        }
        if (pixelFormat == AV_PIX_FMT_NV21) {
            // NV21's luma plane is tightly packed: its stride equals the image width.
            step = width;
        }
        av_image_fill_arrays(new PointerPointer(image_frame), image_frame.linesize(), data, pixelFormat, width, height, 1);
        image_frame.linesize(0, step);
        image_frame.format(pixelFormat);
        image_frame.width(width);
        image_frame.height(height);
        /* push the decoded frame into the filtergraph */
        if ((ret = av_buffersrc_add_frame_flags(buffersrc_ctx[n], image_frame, AV_BUFFERSRC_FLAG_KEEP_REF | AV_BUFFERSRC_FLAG_PUSH)) < 0) {
            throw new Exception("av_buffersrc_add_frame_flags() error " + ret + ": Error while feeding the filtergraph.");
        }
    }
}
Also used: ByteBuffer (java.nio.ByteBuffer), Pointer (org.bytedeco.javacpp.Pointer), BytePointer (org.bytedeco.javacpp.BytePointer), ShortPointer (org.bytedeco.javacpp.ShortPointer), IntPointer (org.bytedeco.javacpp.IntPointer), FloatPointer (org.bytedeco.javacpp.FloatPointer), DoublePointer (org.bytedeco.javacpp.DoublePointer), PointerPointer (org.bytedeco.javacpp.PointerPointer), PointerScope (org.bytedeco.javacpp.PointerScope)
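
A similar sketch for pushImage. Hedged: the "hflip" filter string, the frame dimensions, and the class name ImageFilterExample are illustrative.

import java.nio.ByteBuffer;
import org.bytedeco.javacv.FFmpegFrameFilter;
import org.bytedeco.javacv.Frame;
import static org.bytedeco.ffmpeg.global.avutil.AV_PIX_FMT_BGR24;

public class ImageFilterExample {
    public static void main(String[] args) throws Exception {
        int width = 640, height = 480, channels = 3;
        // Video-only filter graph; "hflip" mirrors the image horizontally (illustrative).
        FFmpegFrameFilter filter = new FFmpegFrameFilter("hflip", width, height);
        filter.start();
        ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * channels); // all-black BGR image
        // n = 0: first video input; passing an explicit pixel format skips the guessing above.
        filter.pushImage(0, width, height, Frame.DEPTH_UBYTE, channels,
                width * channels, AV_PIX_FMT_BGR24, pixels);
        Frame filtered = filter.pull(); // filtered.image holds the mirrored picture
        filter.stop();
        filter.release();
    }
}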

Example 19 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FFmpegFrameGrabber, the method processSamples:

private void processSamples() throws Exception {
    int ret;
    int sample_format = samples_frame.format();
    // Planar formats keep one plane per channel; packed formats keep everything in plane 0.
    int planes = av_sample_fmt_is_planar(sample_format) != 0 ? (int) samples_frame.channels() : 1;
    int data_size = av_samples_get_buffer_size((IntPointer) null, audio_c.channels(), samples_frame.nb_samples(), audio_c.sample_fmt(), 1) / planes;
    if (samples_buf == null || samples_buf.length != planes) {
        samples_ptr = new BytePointer[planes];
        samples_buf = new Buffer[planes];
    }
    frame.sampleRate = audio_c.sample_rate();
    frame.audioChannels = audio_c.channels();
    frame.samples = samples_buf;
    frame.opaque = samples_frame;
    int sample_size = data_size / av_get_bytes_per_sample(sample_format);
    for (int i = 0; i < planes; i++) {
        BytePointer p = samples_frame.data(i);
        if (!p.equals(samples_ptr[i]) || samples_ptr[i].capacity() < data_size) {
            samples_ptr[i] = p.capacity(data_size);
            ByteBuffer b = p.asBuffer();
            switch(sample_format) {
                case AV_SAMPLE_FMT_U8:
                case AV_SAMPLE_FMT_U8P:
                    samples_buf[i] = b;
                    break;
                case AV_SAMPLE_FMT_S16:
                case AV_SAMPLE_FMT_S16P:
                    samples_buf[i] = b.asShortBuffer();
                    break;
                case AV_SAMPLE_FMT_S32:
                case AV_SAMPLE_FMT_S32P:
                    samples_buf[i] = b.asIntBuffer();
                    break;
                case AV_SAMPLE_FMT_FLT:
                case AV_SAMPLE_FMT_FLTP:
                    samples_buf[i] = b.asFloatBuffer();
                    break;
                case AV_SAMPLE_FMT_DBL:
                case AV_SAMPLE_FMT_DBLP:
                    samples_buf[i] = b.asDoubleBuffer();
                    break;
                default:
                    assert false;
            }
        }
        samples_buf[i].position(0).limit(sample_size);
    }
    // If the caller requested a different channel count, sample format, or rate, lazily
    // (re)create a swresample context and convert into separate output buffers.
    if (audio_c.channels() != getAudioChannels() || audio_c.sample_fmt() != getSampleFormat() || audio_c.sample_rate() != getSampleRate()) {
        if (samples_convert_ctx == null || samples_channels != getAudioChannels() || samples_format != getSampleFormat() || samples_rate != getSampleRate()) {
            samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, av_get_default_channel_layout(getAudioChannels()), getSampleFormat(), getSampleRate(), av_get_default_channel_layout(audio_c.channels()), audio_c.sample_fmt(), audio_c.sample_rate(), 0, null);
            if (samples_convert_ctx == null) {
                throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
            } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
                throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
            }
            samples_channels = getAudioChannels();
            samples_format = getSampleFormat();
            samples_rate = getSampleRate();
        }
        int sample_size_in = samples_frame.nb_samples();
        int planes_out = av_sample_fmt_is_planar(samples_format) != 0 ? (int) samples_frame.channels() : 1;
        int sample_size_out = swr_get_out_samples(samples_convert_ctx, sample_size_in);
        int sample_bytes_out = av_get_bytes_per_sample(samples_format);
        int buffer_size_out = sample_size_out * sample_bytes_out * (planes_out > 1 ? 1 : samples_channels);
        if (samples_buf_out == null || samples_buf.length != planes_out || samples_ptr_out[0].capacity() < buffer_size_out) {
            for (int i = 0; samples_ptr_out != null && i < samples_ptr_out.length; i++) {
                av_free(samples_ptr_out[i].position(0));
            }
            samples_ptr_out = new BytePointer[planes_out];
            samples_buf_out = new Buffer[planes_out];
            for (int i = 0; i < planes_out; i++) {
                samples_ptr_out[i] = new BytePointer(av_malloc(buffer_size_out)).capacity(buffer_size_out);
                ByteBuffer b = samples_ptr_out[i].asBuffer();
                switch(samples_format) {
                    case AV_SAMPLE_FMT_U8:
                    case AV_SAMPLE_FMT_U8P:
                        samples_buf_out[i] = b;
                        break;
                    case AV_SAMPLE_FMT_S16:
                    case AV_SAMPLE_FMT_S16P:
                        samples_buf_out[i] = b.asShortBuffer();
                        break;
                    case AV_SAMPLE_FMT_S32:
                    case AV_SAMPLE_FMT_S32P:
                        samples_buf_out[i] = b.asIntBuffer();
                        break;
                    case AV_SAMPLE_FMT_FLT:
                    case AV_SAMPLE_FMT_FLTP:
                        samples_buf_out[i] = b.asFloatBuffer();
                        break;
                    case AV_SAMPLE_FMT_DBL:
                    case AV_SAMPLE_FMT_DBLP:
                        samples_buf_out[i] = b.asDoubleBuffer();
                        break;
                    default:
                        assert false;
                }
            }
        }
        frame.sampleRate = samples_rate;
        frame.audioChannels = samples_channels;
        frame.samples = samples_buf_out;
        if ((ret = swr_convert(samples_convert_ctx, plane_ptr.put(samples_ptr_out), sample_size_out, plane_ptr2.put(samples_ptr), sample_size_in)) < 0) {
            throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
        }
        for (int i = 0; i < planes_out; i++) {
            samples_ptr_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels));
            samples_buf_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels));
        }
    }
}
Also used: IntPointer (org.bytedeco.javacpp.IntPointer), BytePointer (org.bytedeco.javacpp.BytePointer), ByteBuffer (java.nio.ByteBuffer), IOException (java.io.IOException)
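
processSamples is private: it runs inside grabFrame()/grabSamples() whenever an audio packet is decoded. A hedged sketch of the public path that exercises it (the file name input.mp4 is a placeholder):

import java.nio.Buffer;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import static org.bytedeco.ffmpeg.global.avutil.AV_SAMPLE_FMT_S16;

public class GrabSamplesExample {
    public static void main(String[] args) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mp4"); // placeholder path
        // Requesting a layout that differs from the stream's forces the
        // swr_convert branch of processSamples above.
        grabber.setSampleFormat(AV_SAMPLE_FMT_S16);
        grabber.setSampleRate(44100);
        grabber.setAudioChannels(2);
        grabber.start();
        Frame frame;
        while ((frame = grabber.grabSamples()) != null) {
            Buffer channel = frame.samples[0]; // filled by processSamples; valid until the next grab
            // ... consume interleaved 16-bit samples ...
        }
        grabber.stop();
        grabber.release();
    }
}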

Example 20 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FFmpegFrameGrabber, the method processImage:

private void processImage() throws Exception {
    frame.imageWidth = imageWidth > 0 ? imageWidth : video_c.width();
    frame.imageHeight = imageHeight > 0 ? imageHeight : video_c.height();
    frame.imageDepth = Frame.DEPTH_UBYTE;
    switch(imageMode) {
        case COLOR:
        case GRAY:
            // Deinterlace Picture
            if (deinterlace) {
                throw new Exception("Cannot deinterlace: Functionality moved to FFmpegFrameFilter.");
            }
            // Has the size changed?
            if (frame.imageWidth != picture_rgb.width() || frame.imageHeight != picture_rgb.height()) {
                initPictureRGB();
            }
            // Copy "metadata" fields
            av_frame_copy_props(picture_rgb, picture);
            // Convert the image into BGR or GRAY format that OpenCV uses
            img_convert_ctx = sws_getCachedContext(img_convert_ctx, video_c.width(), video_c.height(), video_c.pix_fmt(), frame.imageWidth, frame.imageHeight, getPixelFormat(), imageScalingFlags != 0 ? imageScalingFlags : SWS_BILINEAR, null, null, (DoublePointer) null);
            if (img_convert_ctx == null) {
                throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
            }
            // Convert the image from its native format to RGB or GRAY
            sws_scale(img_convert_ctx, new PointerPointer(picture), picture.linesize(), 0, video_c.height(), new PointerPointer(picture_rgb), picture_rgb.linesize());
            frame.imageStride = picture_rgb.linesize(0);
            frame.image = image_buf;
            frame.opaque = picture_rgb;
            break;
        case RAW:
            frame.imageStride = picture.linesize(0);
            BytePointer ptr = picture.data(0);
            if (ptr != null && !ptr.equals(image_ptr[0])) {
                image_ptr[0] = ptr.capacity(frame.imageHeight * frame.imageStride);
                image_buf[0] = ptr.asBuffer();
            }
            frame.image = image_buf;
            frame.opaque = picture;
            break;
        default:
            assert false;
    }
    frame.image[0].limit(frame.imageHeight * frame.imageStride);
    // For 8-bit data the stride is in bytes, so this recovers the channel
    // count (assuming no row padding).
    frame.imageChannels = frame.imageStride / frame.imageWidth;
}
Also used: PointerPointer (org.bytedeco.javacpp.PointerPointer), DoublePointer (org.bytedeco.javacpp.DoublePointer), BytePointer (org.bytedeco.javacpp.BytePointer), IOException (java.io.IOException)
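
Likewise, processImage runs inside grabImage(). A hedged sketch of the calling side (again, the file name is a placeholder):

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

public class GrabImageExample {
    public static void main(String[] args) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mp4"); // placeholder path
        grabber.start(); // default ImageMode.COLOR delivers BGR frames
        Frame frame;
        while ((frame = grabber.grabImage()) != null) {
            // These fields were set by processImage above; the buffer is reused,
            // so clone the Frame if it must outlive the next grab.
            int stride = frame.imageStride;
            int channels = frame.imageChannels;
            // ... consume frame.image[0] ...
        }
        grabber.stop();
        grabber.release();
    }
}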

Aggregations

BytePointer (org.bytedeco.javacpp.BytePointer): 72
IntPointer (org.bytedeco.javacpp.IntPointer): 21
PointerPointer (org.bytedeco.javacpp.PointerPointer): 19
ByteBuffer (java.nio.ByteBuffer): 18
Pointer (org.bytedeco.javacpp.Pointer): 16
IOException (java.io.IOException): 14
PointerScope (org.bytedeco.javacpp.PointerScope): 13
DoublePointer (org.bytedeco.javacpp.DoublePointer): 12
FloatPointer (org.bytedeco.javacpp.FloatPointer): 10
CompressedDataBuffer (org.nd4j.linalg.compression.CompressedDataBuffer): 10
CompressionDescriptor (org.nd4j.linalg.compression.CompressionDescriptor): 10
ShortBuffer (java.nio.ShortBuffer): 9
ShortPointer (org.bytedeco.javacpp.ShortPointer): 9
DoubleBuffer (java.nio.DoubleBuffer): 6
FloatBuffer (java.nio.FloatBuffer): 6
IntBuffer (java.nio.IntBuffer): 6
Nonnull (javax.annotation.Nonnull): 5
LongPointer (org.bytedeco.javacpp.LongPointer): 5
TF_Status (org.tensorflow.internal.c_api.TF_Status): 4
ByteOrder (java.nio.ByteOrder): 3