Search in sources:

Example 66 with Pointer

use of org.bytedeco.javacpp.Pointer in project javacv by bytedeco.

The class OpenCVFrameConverter, method convertToIplImage.

/**
 * Converts a {@link Frame} into an {@code IplImage}, reusing a cached image when possible.
 *
 * @param frame the frame to convert; may be null
 * @return the converted image, the frame's own opaque {@code IplImage} if it carries one,
 *         or null when the frame has no pixel data or an unsupported depth
 */
public IplImage convertToIplImage(Frame frame) {
    // Nothing to convert without pixel data.
    if (frame == null || frame.image == null) {
        return null;
    }
    // Fast path: the frame already carries an IplImage in its opaque slot.
    if (frame.opaque instanceof IplImage) {
        return (IplImage) frame.opaque;
    }
    // Rebuild the cached image only when the frame no longer matches it.
    if (!isEqual(frame, img)) {
        int depth = getIplImageDepth(frame.imageDepth);
        if (img != null) {
            img.releaseReference();
        }
        if (depth < 0) {
            img = null;
        } else {
            Pointer data = new Pointer(frame.image[0].position(0));
            // Row stride and total size in bytes (imageDepth is in bits, hence / 8).
            int stepBytes = frame.imageStride * Math.abs(frame.imageDepth) / 8;
            int sizeBytes = frame.image[0].capacity() * Math.abs(frame.imageDepth) / 8;
            img = (IplImage) IplImage.create(frame.imageWidth, frame.imageHeight, depth, frame.imageChannels, data)
                    .widthStep(stepBytes)
                    .imageSize(sizeBytes)
                    .retainReference();
        }
    }
    return img;
}
Also used : Pointer(org.bytedeco.javacpp.Pointer) BytePointer(org.bytedeco.javacpp.BytePointer)

Example 67 with Pointer

use of org.bytedeco.javacpp.Pointer in project javacv by bytedeco.

The class FFmpegFrameRecorder, method recordImage.

/**
 * Encodes one raw image into the video stream, or flushes delayed frames when
 * {@code image} is null/empty.
 *
 * @param width       image width in pixels
 * @param height      image height in pixels
 * @param depth       bits per sample; negative values are allowed (abs() is taken)
 * @param channels    number of interleaved channels (used to guess the pixel format)
 * @param stride      row stride in samples (converted to bytes below)
 * @param pixelFormat an AV_PIX_FMT_* constant, or AV_PIX_FMT_NONE to auto-detect
 * @param image       the pixel buffer(s); null or empty triggers encoder flush
 * @return for a real image: whether the produced packet was a key frame;
 *         for a flush call: whether any packet was produced at all
 * @throws Exception if the stream is missing, start() was not called, the pixel
 *         format cannot be guessed, or an FFmpeg call fails
 */
public synchronized boolean recordImage(int width, int height, int depth, int channels, int stride, int pixelFormat, Buffer... image) throws Exception {
    // PointerScope ensures all temporary native pointers allocated below are freed on exit.
    try (PointerScope scope = new PointerScope()) {
        if (video_st == null) {
            throw new Exception("No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
        }
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        int ret;
        if (image == null || image.length == 0) {
        /* no more frame to compress. The codec has a latency of a few
               frames if using B frames, so we get the last frames by
               passing the same picture again */
        } else {
            // Row stride in bytes (depth is in bits per sample).
            int step = stride * Math.abs(depth) / 8;
            // Wrap the caller's buffer without copying; both paths rewind to position 0.
            BytePointer data = image[0] instanceof ByteBuffer ? new BytePointer((ByteBuffer) image[0]).position(0) : new BytePointer(new Pointer(image[0]).position(0));
            if (pixelFormat == AV_PIX_FMT_NONE) {
                // Guess a pixel format from depth + channel count when the caller did not specify one.
                if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 3) {
                    pixelFormat = AV_PIX_FMT_BGR24;
                } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 1) {
                    pixelFormat = AV_PIX_FMT_GRAY8;
                } else if ((depth == Frame.DEPTH_USHORT || depth == Frame.DEPTH_SHORT) && channels == 1) {
                    // 16-bit gray must match the host byte order.
                    pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
                } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 4) {
                    pixelFormat = AV_PIX_FMT_RGBA;
                } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 2) {
                    // Android's camera capture format
                    pixelFormat = AV_PIX_FMT_NV21;
                } else {
                    throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
                }
            }
            if (pixelFormat == AV_PIX_FMT_NV21) {
                // NOTE(review): for NV21 the line size is forced to the width — presumably
                // because its Y plane is tightly packed; confirm against FFmpeg's layout.
                step = width;
            }
            if (video_c.pix_fmt() != pixelFormat || video_c.width() != width || video_c.height() != height) {
                /* convert to the codec pixel format if needed */
                img_convert_ctx = sws_getCachedContext(img_convert_ctx, width, height, pixelFormat, video_c.width(), video_c.height(), video_c.pix_fmt(), imageScalingFlags != 0 ? imageScalingFlags : SWS_BILINEAR, null, null, (DoublePointer) null);
                if (img_convert_ctx == null) {
                    throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
                }
                // Point tmp_picture at the caller's data and picture at the codec-format buffer.
                av_image_fill_arrays(new PointerPointer(tmp_picture), tmp_picture.linesize(), data, pixelFormat, width, height, 1);
                av_image_fill_arrays(new PointerPointer(picture), picture.linesize(), picture_buf, video_c.pix_fmt(), video_c.width(), video_c.height(), 1);
                tmp_picture.linesize(0, step);
                tmp_picture.format(pixelFormat);
                tmp_picture.width(width);
                tmp_picture.height(height);
                picture.format(video_c.pix_fmt());
                picture.width(video_c.width());
                picture.height(video_c.height());
                // Scale/convert the source frame into the codec's format.
                sws_scale(img_convert_ctx, new PointerPointer(tmp_picture), tmp_picture.linesize(), 0, height, new PointerPointer(picture), picture.linesize());
            } else {
                // Formats match: feed the caller's data to the encoder directly, no conversion.
                av_image_fill_arrays(new PointerPointer(picture), picture.linesize(), data, pixelFormat, width, height, 1);
                picture.linesize(0, step);
                picture.format(pixelFormat);
                picture.width(width);
                picture.height(height);
            }
        }
        // if ((oformat.flags() & AVFMT_RAWPICTURE) != 0) {
        // if (image == null || image.length == 0) {
        // return false;
        // }
        // /* raw video case. The API may change slightly in the future for that? */
        // av_init_packet(video_pkt);
        // video_pkt.flags(video_pkt.flags() | AV_PKT_FLAG_KEY);
        // video_pkt.stream_index(video_st.index());
        // video_pkt.data(new BytePointer(picture));
        // video_pkt.size(Loader.sizeof(AVFrame.class));
        // } else {
        /* encode the image */
        picture.quality(video_c.global_quality());
        // A null frame signals end-of-stream flush; send errors are only fatal for real frames.
        if ((ret = avcodec_send_frame(video_c, image == null || image.length == 0 ? null : picture)) < 0 && image != null && image.length != 0) {
            throw new Exception("avcodec_send_frame() error " + ret + ": Error sending a video frame for encoding.");
        }
        // magic required by libx264
        picture.pts(picture.pts() + 1);
        /* if zero size, it means the image was buffered */
        got_video_packet[0] = 0;
        // Drain every packet the encoder has ready (send/receive API can emit 0..n packets per frame).
        while (ret >= 0) {
            av_new_packet(video_pkt, video_outbuf_size);
            ret = avcodec_receive_packet(video_c, video_pkt);
            if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                // EAGAIN: encoder needs more input; EOF: fully flushed. Neither is an error.
                av_packet_unref(video_pkt);
                break;
            } else if (ret < 0) {
                av_packet_unref(video_pkt);
                throw new Exception("avcodec_receive_packet() error " + ret + ": Error during video encoding.");
            }
            got_video_packet[0] = 1;
            // Rescale timestamps from codec time base to stream time base.
            if (video_pkt.pts() != AV_NOPTS_VALUE) {
                video_pkt.pts(av_rescale_q(video_pkt.pts(), video_c.time_base(), video_st.time_base()));
            }
            if (video_pkt.dts() != AV_NOPTS_VALUE) {
                video_pkt.dts(av_rescale_q(video_pkt.dts(), video_c.time_base(), video_st.time_base()));
            }
            video_pkt.stream_index(video_st.index());
            /* write the compressed frame in the media file */
            writePacket(AVMEDIA_TYPE_VIDEO, video_pkt);
        }
        // }
        return image != null ? (video_pkt.flags() & AV_PKT_FLAG_KEY) != 0 : got_video_packet[0] != 0;
    }
}
Also used : PointerPointer(org.bytedeco.javacpp.PointerPointer) BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) ShortPointer(org.bytedeco.javacpp.ShortPointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) IntPointer(org.bytedeco.javacpp.IntPointer) BytePointer(org.bytedeco.javacpp.BytePointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) Pointer(org.bytedeco.javacpp.Pointer) PointerPointer(org.bytedeco.javacpp.PointerPointer) PointerScope(org.bytedeco.javacpp.PointerScope) ByteBuffer(java.nio.ByteBuffer) IOException(java.io.IOException)

Example 68 with Pointer

use of org.bytedeco.javacpp.Pointer in project javacv by bytedeco.

The class Frame, method close.

/**
 * Releases the native references held when this frame's opaque slot contains
 * a {@code Pointer[]}, then drops the array so it can be garbage collected.
 * Safe to call when {@code opaque} holds anything else (no-op).
 */
@Override
public void close() {
    if (opaque instanceof Pointer[]) {
        for (Pointer p : (Pointer[]) opaque) {
            if (p != null) {
                p.releaseReference();
                // Removed the previous "p = null;" here: assigning to the
                // enhanced-for loop variable never touches the array slot,
                // so it was dead code with no effect.
            }
        }
        opaque = null;
    }
}
Also used : LongPointer(org.bytedeco.javacpp.LongPointer) ShortPointer(org.bytedeco.javacpp.ShortPointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) IntPointer(org.bytedeco.javacpp.IntPointer) BytePointer(org.bytedeco.javacpp.BytePointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) Pointer(org.bytedeco.javacpp.Pointer)

Aggregations

Pointer (org.bytedeco.javacpp.Pointer)68 FloatPointer (org.bytedeco.javacpp.FloatPointer)33 DoublePointer (org.bytedeco.javacpp.DoublePointer)31 IntPointer (org.bytedeco.javacpp.IntPointer)29 BytePointer (org.bytedeco.javacpp.BytePointer)23 CudaContext (org.nd4j.linalg.jcublas.context.CudaContext)23 INDArray (org.nd4j.linalg.api.ndarray.INDArray)21 ShortPointer (org.bytedeco.javacpp.ShortPointer)20 CudaPointer (org.nd4j.jita.allocator.pointers.CudaPointer)19 DataBuffer (org.nd4j.linalg.api.buffer.DataBuffer)18 GridExecutioner (org.nd4j.linalg.api.ops.executioner.GridExecutioner)16 ByteBuffer (java.nio.ByteBuffer)13 PointerPointer (org.bytedeco.javacpp.PointerPointer)12 LongPointer (org.bytedeco.javacpp.LongPointer)10 CUstream_st (org.bytedeco.javacpp.cuda.CUstream_st)10 org.nd4j.jita.allocator.pointers.cuda.cusolverDnHandle_t (org.nd4j.jita.allocator.pointers.cuda.cusolverDnHandle_t)10 CublasPointer (org.nd4j.linalg.jcublas.CublasPointer)10 FunctionPointer (org.bytedeco.javacpp.FunctionPointer)9 BoolPointer (org.bytedeco.javacpp.BoolPointer)8 CLongPointer (org.bytedeco.javacpp.CLongPointer)8