Example 1 with DoublePointer

Use of com.googlecode.javacpp.DoublePointer in project VideoRecorder by qdrzwd.

From the class NewFFmpegFrameRecorder, the record(int, Buffer...) overload. It wraps the caller's audio sample buffers in JavaCPP pointers, resamples them to the encoder's sample format with libswresample, and encodes each full frame of converted samples:

@Override
public boolean record(int sampleRate, Buffer... samples) throws Exception {
    if (audioSt == null) {
        throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
    }
    int inputSize = samples[0].limit() - samples[0].position();
    int inputDepth;
    if (sampleRate <= 0) {
        sampleRate = audioC.sample_rate();
    }
    int inputFormat;
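    // More than one buffer means planar audio (one plane per channel);
    // a single buffer carries packed/interleaved samples.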
    if (samples[0] instanceof ByteBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
        inputDepth = 1;
        for (int i = 0; i < samples.length; i++) {
            ByteBuffer b = (ByteBuffer) samples[i];
            if (samplesIn[i] instanceof BytePointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((BytePointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new BytePointer(b);
            }
        }
    } else if (samples[0] instanceof ShortBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
        inputDepth = 2;
        for (int i = 0; i < samples.length; i++) {
            ShortBuffer b = (ShortBuffer) samples[i];
            if (samplesIn[i] instanceof ShortPointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((ShortPointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new ShortPointer(b);
            }
        }
    } else if (samples[0] instanceof IntBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            IntBuffer b = (IntBuffer) samples[i];
            if (samplesIn[i] instanceof IntPointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((IntPointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new IntPointer(b);
            }
        }
    } else if (samples[0] instanceof FloatBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            FloatBuffer b = (FloatBuffer) samples[i];
            if (samplesIn[i] instanceof FloatPointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((FloatPointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new FloatPointer(b);
            }
        }
    } else if (samples[0] instanceof DoubleBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
        inputDepth = 8;
        for (int i = 0; i < samples.length; i++) {
            DoubleBuffer b = (DoubleBuffer) samples[i];
            if (samplesIn[i] instanceof DoublePointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((DoublePointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new DoublePointer(b);
            }
        }
    } else {
        throw new Exception("Audio samples Buffer has unsupported type: " + samples);
    }
    int ret;
    int outputFormat = audioC.sample_fmt();
    if (samplesConvertCtx == null) {
        samplesConvertCtx = swr_alloc_set_opts(null, audioC.channel_layout(), outputFormat, audioC.sample_rate(), audioC.channel_layout(), inputFormat, sampleRate, 0, null);
        if (samplesConvertCtx == null) {
            throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
        } else if ((ret = swr_init(samplesConvertCtx)) < 0) {
            throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
        }
    }
    for (int i = 0; i < samples.length; i++) {
        samplesIn[i].position(samplesIn[i].position() * inputDepth).limit((samplesIn[i].position() + inputSize) * inputDepth);
    }
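    // With planar layouts each plane holds one channel, so the per-plane
    // channel count is 1; packed layouts keep all audioChannels in one plane.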
    int outputChannels = samplesOut.length > 1 ? 1 : audioChannels;
    int outputDepth = av_get_bytes_per_sample(outputFormat);
    int inputChannels = samples.length > 1 ? 1 : audioChannels;
    while (true) {
        int inputCount = (samplesIn[0].limit() - samplesIn[0].position()) / (inputChannels * inputDepth);
        int outputCount = (samplesOut[0].limit() - samplesOut[0].position()) / (outputChannels * outputDepth);
        inputCount = Math.min(inputCount, 2 * (outputCount * sampleRate) / audioC.sample_rate());
        for (int i = 0; i < samples.length; i++) {
            samplesInPtr.put(i, samplesIn[i]);
        }
        for (int i = 0; i < samplesOut.length; i++) {
            samplesOutPtr.put(i, samplesOut[i]);
        }
        if ((ret = swr_convert(samplesConvertCtx, samplesOutPtr, outputCount, samplesInPtr, inputCount)) < 0) {
            throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
        } else if (ret == 0) {
            break;
        }
        for (int i = 0; i < samples.length; i++) {
            samplesIn[i].position(samplesIn[i].position() + inputCount * inputChannels * inputDepth);
        }
        for (int i = 0; i < samplesOut.length; i++) {
            samplesOut[i].position(samplesOut[i].position() + ret * outputChannels * outputDepth);
        }
        if (samplesOut[0].position() >= samplesOut[0].limit()) {
            frame.nb_samples(audioInputFrameSize);
            avcodec_fill_audio_frame(frame, audioC.channels(), outputFormat, samplesOut[0], samplesOut[0].limit(), 0);
            for (int i = 0; i < samplesOut.length; i++) {
                frame.data(i, samplesOut[i].position(0));
                frame.linesize(i, samplesOut[i].limit());
            }
            frame.quality(audioC.global_quality());
            record(frame);
        }
    }
    return frame.key_frame() != 0;
}
Also used: java.nio.ByteBuffer, java.nio.ShortBuffer, java.nio.IntBuffer, java.nio.FloatBuffer, java.nio.DoubleBuffer, com.googlecode.javacpp.BytePointer, com.googlecode.javacpp.ShortPointer, com.googlecode.javacpp.IntPointer, com.googlecode.javacpp.FloatPointer, com.googlecode.javacpp.DoublePointer
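
A minimal driver sketch for this overload. The constructor and setter names below are assumptions borrowed from JavaCV's FrameRecorder conventions, not code from this project; only the record() call corresponds to the method above.

// Hypothetical setup; constructor/setter names assumed from JavaCV conventions.
NewFFmpegFrameRecorder recorder = new NewFFmpegFrameRecorder("out.mp4", 640, 480, 2);
recorder.setSampleRate(44100);
recorder.start();

// A single interleaved ShortBuffer takes the AV_SAMPLE_FMT_S16 (packed) branch;
// one buffer per channel would take the AV_SAMPLE_FMT_S16P (planar) branch.
short[] pcm = new short[4096]; // stereo 16-bit PCM, e.g. from a capture callback
recorder.record(44100, java.nio.ShortBuffer.wrap(pcm));

recorder.stop();
recorder.release();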

Example 2 with DoublePointer

Use of com.googlecode.javacpp.DoublePointer in project VideoRecorder by qdrzwd.

From the class NewFFmpegFrameRecorder, the record(IplImage, int) overload. It converts an incoming OpenCV IplImage to the codec's pixel format with libswscale when needed, then encodes and writes a video packet:

public boolean record(IplImage image, int pixelFormat) throws Exception {
    if (videoSt == null) {
        throw new Exception("No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
    }
    if (image == null) {
        /* No more frames to compress. The codec has a latency of a few
           frames when using B-frames, so we get the last frames by
           passing the same picture again. */
    } else {
        // image = rotate(image,90);
        int width = image.width();
        int step = image.widthStep();
        if (pixelFormat == AV_PIX_FMT_NONE) {
            int depth = image.depth();
            int channels = image.nChannels();
            if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 3) {
                pixelFormat = AV_PIX_FMT_BGR24;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 1) {
                pixelFormat = AV_PIX_FMT_GRAY8;
            } else if ((depth == IPL_DEPTH_16U || depth == IPL_DEPTH_16S) && channels == 1) {
                pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 4) {
                pixelFormat = AV_PIX_FMT_RGBA;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 2) {
                // Android's camera capture format
                pixelFormat = AV_PIX_FMT_NV21;
                step = width;
            } else {
                throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
            }
        }
        BytePointer data = image.imageData();
        int height = image.height();
        if (videoC.pix_fmt() != pixelFormat || videoC.width() != width || videoC.height() != height) {
            /* convert to the codec pixel format if needed */
            // sws_getCachedContext() takes the source (image) geometry first,
            // then the destination (codec) geometry.
            imgConvertCtx = sws_getCachedContext(imgConvertCtx, width, height, pixelFormat, videoC.width(), videoC.height(), videoC.pix_fmt(), SWS_BILINEAR, null, null, (DoublePointer) null);
            if (imgConvertCtx == null) {
                throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
            }
            avpicture_fill(new AVPicture(tmpPicture), data, pixelFormat, width, height);
            avpicture_fill(new AVPicture(picture), pictureBuf, videoC.pix_fmt(), videoC.width(), videoC.height());
            tmpPicture.linesize(0, step);
            sws_scale(imgConvertCtx, new PointerPointer(tmpPicture), tmpPicture.linesize(), 0, height, new PointerPointer(picture), picture.linesize());
        } else {
            avpicture_fill(new AVPicture(picture), data, pixelFormat, width, height);
            picture.linesize(0, step);
        }
    }
    int ret;
    if ((oformat.flags() & AVFMT_RAWPICTURE) != 0) {
        if (image == null) {
            return false;
        }
        /* raw video case. The API may change slightly in the future for that? */
        av_init_packet(videoPkt);
        videoPkt.flags(videoPkt.flags() | AV_PKT_FLAG_KEY);
        videoPkt.stream_index(videoSt.index());
        videoPkt.data(new BytePointer(picture));
        videoPkt.size(Loader.sizeof(AVPicture.class));
    } else {
        /* encode the image */
        av_init_packet(videoPkt);
        videoPkt.data(videoOutbuf);
        videoPkt.size(videoOutbufSize);
        picture.quality(videoC.global_quality());
        if ((ret = avcodec_encode_video2(videoC, videoPkt, image == null ? null : picture, gotVideoPacket)) < 0) {
            throw new Exception("avcodec_encode_video2() error " + ret + ": Could not encode video packet.");
        }
        // magic required by libx264
        picture.pts(picture.pts() + 1);
        /* if zero size, it means the image was buffered */
        if (gotVideoPacket[0] != 0) {
            if (videoPkt.pts() != AV_NOPTS_VALUE) {
                videoPkt.pts(av_rescale_q(videoPkt.pts(), videoC.time_base(), videoSt.time_base()));
            }
            if (videoPkt.dts() != AV_NOPTS_VALUE) {
                videoPkt.dts(av_rescale_q(videoPkt.dts(), videoC.time_base(), videoSt.time_base()));
            }
            videoPkt.stream_index(videoSt.index());
        } else {
            return false;
        }
    }
    synchronized (oc) {
        /* write the compressed frame in the media file */
        if (interleaved && audioSt != null) {
            if ((ret = av_interleaved_write_frame(oc, videoPkt)) < 0) {
                throw new Exception("av_interleaved_write_frame() error " + ret + " while writing interleaved video frame.");
            }
        } else {
            if ((ret = av_write_frame(oc, videoPkt)) < 0) {
                throw new Exception("av_write_frame() error " + ret + " while writing video frame.");
            }
        }
    }
    return picture.key_frame() != 0;
}
Also used: com.googlecode.javacpp.BytePointer, com.googlecode.javacpp.DoublePointer, com.googlecode.javacpp.PointerPointer
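
A usage sketch for the image overload, reusing the hypothetical recorder from the Example 1 sketch. The frame source is illustrative; only the record(image, pixelFormat) call maps to the method above.

// Illustrative 8-bit, 3-channel BGR frame loaded with OpenCV's highgui.
IplImage frame = cvLoadImage("frame.png");
// AV_PIX_FMT_NONE exercises the guessing branch, which infers AV_PIX_FMT_BGR24
// from depth/channels; passing a null image instead flushes buffered B-frames.
recorder.record(frame, AV_PIX_FMT_NONE);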

Example 3 with DoublePointer

Use of com.googlecode.javacpp.DoublePointer in project VideoRecorder by qdrzwd.

From the class FFmpegFrameRecorder, the record(IplImage, int) overload, near-identical to Example 2:

public boolean record(IplImage image, int pixelFormat) throws Exception {
    if (videoSt == null) {
        throw new Exception("No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
    }
    if (image == null) {
        /* No more frames to compress. The codec has a latency of a few
           frames when using B-frames, so we get the last frames by
           passing the same picture again. */
    } else {
        int width = image.width();
        int step = image.widthStep();
        if (pixelFormat == AV_PIX_FMT_NONE) {
            int depth = image.depth();
            int channels = image.nChannels();
            if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 3) {
                pixelFormat = AV_PIX_FMT_BGR24;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 1) {
                pixelFormat = AV_PIX_FMT_GRAY8;
            } else if ((depth == IPL_DEPTH_16U || depth == IPL_DEPTH_16S) && channels == 1) {
                pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 4) {
                pixelFormat = AV_PIX_FMT_RGBA;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 2) {
                // Android's camera capture format
                pixelFormat = AV_PIX_FMT_NV21;
                step = width;
            } else {
                throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
            }
        }
        int height = image.height();
        BytePointer data = image.imageData();
        if (videoC.pix_fmt() != pixelFormat || videoC.width() != width || videoC.height() != height) {
            /* convert to the codec pixel format if needed */
            // sws_getCachedContext() takes the source (image) geometry first,
            // then the destination (codec) geometry.
            imgConvertCtx = sws_getCachedContext(imgConvertCtx, width, height, pixelFormat, videoC.width(), videoC.height(), videoC.pix_fmt(), SWS_BILINEAR, null, null, (DoublePointer) null);
            if (imgConvertCtx == null) {
                throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
            }
            avpicture_fill(new AVPicture(tmpPicture), data, pixelFormat, width, height);
            avpicture_fill(new AVPicture(picture), pictureBuf, videoC.pix_fmt(), videoC.width(), videoC.height());
            tmpPicture.linesize(0, step);
            sws_scale(imgConvertCtx, new PointerPointer(tmpPicture), tmpPicture.linesize(), 0, height, new PointerPointer(picture), picture.linesize());
        } else {
            avpicture_fill(new AVPicture(picture), data, pixelFormat, width, height);
            picture.linesize(0, step);
        }
    }
    int ret;
    if ((oformat.flags() & AVFMT_RAWPICTURE) != 0) {
        if (image == null) {
            return false;
        }
        /* raw video case. The API may change slightly in the future for that? */
        av_init_packet(videoPkt);
        videoPkt.flags(videoPkt.flags() | AV_PKT_FLAG_KEY);
        videoPkt.stream_index(videoSt.index());
        videoPkt.data(new BytePointer(picture));
        videoPkt.size(Loader.sizeof(AVPicture.class));
    } else {
        /* encode the image */
        av_init_packet(videoPkt);
        videoPkt.data(videoOutbuf);
        videoPkt.size(videoOutbufSize);
        picture.quality(videoC.global_quality());
        if ((ret = avcodec_encode_video2(videoC, videoPkt, image == null ? null : picture, gotVideoPacket)) < 0) {
            throw new Exception("avcodec_encode_video2() error " + ret + ": Could not encode video packet.");
        }
        // magic required by libx264
        picture.pts(picture.pts() + 1);
        /* if zero size, it means the image was buffered */
        if (gotVideoPacket[0] != 0) {
            if (videoPkt.pts() != AV_NOPTS_VALUE) {
                videoPkt.pts(av_rescale_q(videoPkt.pts(), videoC.time_base(), videoSt.time_base()));
            }
            if (videoPkt.dts() != AV_NOPTS_VALUE) {
                videoPkt.dts(av_rescale_q(videoPkt.dts(), videoC.time_base(), videoSt.time_base()));
            }
            videoPkt.stream_index(videoSt.index());
        } else {
            return false;
        }
    }
    synchronized (oc) {
        /* write the compressed frame in the media file */
        if (interleaved && audioSt != null) {
            if ((ret = av_interleaved_write_frame(oc, videoPkt)) < 0) {
                throw new Exception("av_interleaved_write_frame() error " + ret + " while writing interleaved video frame.");
            }
        } else {
            if ((ret = av_write_frame(oc, videoPkt)) < 0) {
                throw new Exception("av_write_frame() error " + ret + " while writing video frame.");
            }
        }
    }
    return picture.key_frame() != 0;
}
Also used: com.googlecode.javacpp.BytePointer, com.googlecode.javacpp.DoublePointer, com.googlecode.javacpp.PointerPointer
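
Since this method is the same as Example 2, the sketch below unpacks the timestamp handling instead: av_rescale_q(a, bq, cq) returns a * bq / cq with rounding, which is how the packet pts/dts above move from codec ticks to stream ticks. The time bases are illustrative, not taken from this project.

// Codec time base 1/25 (25 fps), stream time base 1/90000 (90 kHz).
AVRational codecTb = new AVRational().num(1).den(25);
AVRational streamTb = new AVRational().num(1).den(90000);
// Frame 10 in codec ticks: 10 * (1/25) / (1/90000) = 36000 stream ticks.
long streamPts = av_rescale_q(10, codecTb, streamTb); // == 36000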

Example 4 with DoublePointer

Use of com.googlecode.javacpp.DoublePointer in project VideoRecorder by qdrzwd.

From the class FFmpegFrameRecorder, the record(int, Buffer...) overload, near-identical to Example 1:

@Override
public boolean record(int sampleRate, Buffer... samples) throws Exception {
    if (audioSt == null) {
        throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
    }
    int inputSize = samples[0].limit() - samples[0].position();
    int inputFormat;
    int inputDepth;
    if (sampleRate <= 0) {
        sampleRate = audioC.sample_rate();
    }
    if (samples[0] instanceof ByteBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
        inputDepth = 1;
        for (int i = 0; i < samples.length; i++) {
            ByteBuffer b = (ByteBuffer) samples[i];
            if (samplesIn[i] instanceof BytePointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((BytePointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new BytePointer(b);
            }
        }
    } else if (samples[0] instanceof ShortBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
        inputDepth = 2;
        for (int i = 0; i < samples.length; i++) {
            ShortBuffer b = (ShortBuffer) samples[i];
            if (samplesIn[i] instanceof ShortPointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((ShortPointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new ShortPointer(b);
            }
        }
    } else if (samples[0] instanceof IntBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            IntBuffer b = (IntBuffer) samples[i];
            if (samplesIn[i] instanceof IntPointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((IntPointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new IntPointer(b);
            }
        }
    } else if (samples[0] instanceof FloatBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            FloatBuffer b = (FloatBuffer) samples[i];
            if (samplesIn[i] instanceof FloatPointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((FloatPointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new FloatPointer(b);
            }
        }
    } else if (samples[0] instanceof DoubleBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
        inputDepth = 8;
        for (int i = 0; i < samples.length; i++) {
            DoubleBuffer b = (DoubleBuffer) samples[i];
            if (samplesIn[i] instanceof DoublePointer && samplesIn[i].capacity() >= inputSize && b.hasArray()) {
                ((DoublePointer) samplesIn[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samplesIn[i] = new DoublePointer(b);
            }
        }
    } else {
        throw new Exception("Audio samples Buffer has unsupported type: " + samples);
    }
    int ret;
    int outputFormat = audioC.sample_fmt();
    if (samplesConvertCtx == null) {
        samplesConvertCtx = swr_alloc_set_opts(null, audioC.channel_layout(), outputFormat, audioC.sample_rate(), audioC.channel_layout(), inputFormat, sampleRate, 0, null);
        if (samplesConvertCtx == null) {
            throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
        } else if ((ret = swr_init(samplesConvertCtx)) < 0) {
            throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
        }
    }
    for (int i = 0; i < samples.length; i++) {
        samplesIn[i].position(samplesIn[i].position() * inputDepth).limit((samplesIn[i].position() + inputSize) * inputDepth);
    }
    int outputChannels = samplesOut.length > 1 ? 1 : audioChannels;
    int inputChannels = samples.length > 1 ? 1 : audioChannels;
    int outputDepth = av_get_bytes_per_sample(outputFormat);
    while (true) {
        int inputCount = (samplesIn[0].limit() - samplesIn[0].position()) / (inputChannels * inputDepth);
        int outputCount = (samplesOut[0].limit() - samplesOut[0].position()) / (outputChannels * outputDepth);
        inputCount = Math.min(inputCount, 2 * (outputCount * sampleRate) / audioC.sample_rate());
        for (int i = 0; i < samples.length; i++) {
            samplesInPtr.put(i, samplesIn[i]);
        }
        for (int i = 0; i < samplesOut.length; i++) {
            samplesOutPtr.put(i, samplesOut[i]);
        }
        if ((ret = swr_convert(samplesConvertCtx, samplesOutPtr, outputCount, samplesInPtr, inputCount)) < 0) {
            throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
        } else if (ret == 0) {
            break;
        }
        for (int i = 0; i < samples.length; i++) {
            samplesIn[i].position(samplesIn[i].position() + inputCount * inputChannels * inputDepth);
        }
        for (int i = 0; i < samplesOut.length; i++) {
            samplesOut[i].position(samplesOut[i].position() + ret * outputChannels * outputDepth);
        }
        if (samplesOut[0].position() >= samplesOut[0].limit()) {
            frame.nb_samples(audioInputFrameSize);
            avcodec_fill_audio_frame(frame, audioC.channels(), outputFormat, samplesOut[0], samplesOut[0].limit(), 0);
            for (int i = 0; i < samplesOut.length; i++) {
                frame.data(i, samplesOut[i].position(0));
                frame.linesize(i, samplesOut[i].limit());
            }
            frame.quality(audioC.global_quality());
            record(frame);
        }
    }
    return frame.key_frame() != 0;
}
Also used: java.nio.ByteBuffer, java.nio.ShortBuffer, java.nio.IntBuffer, java.nio.FloatBuffer, java.nio.DoubleBuffer, com.googlecode.javacpp.BytePointer, com.googlecode.javacpp.ShortPointer, com.googlecode.javacpp.IntPointer, com.googlecode.javacpp.FloatPointer, com.googlecode.javacpp.DoublePointer
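
This overload matches Example 1, so the sketch below shows only the planar path: passing one buffer per channel makes samples.length > 1, which selects the planar (*P) sample formats. The buffers and recorder instance are illustrative.

// Two FloatBuffers (one per channel) select AV_SAMPLE_FMT_FLTP, and each
// buffer is treated as a single-channel plane (inputChannels == 1).
float[] left = new float[1024], right = new float[1024];
recorder.record(44100, java.nio.FloatBuffer.wrap(left), java.nio.FloatBuffer.wrap(right));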

Aggregations

BytePointer (com.googlecode.javacpp.BytePointer): 4 usages
DoublePointer (com.googlecode.javacpp.DoublePointer): 4 usages
FloatPointer (com.googlecode.javacpp.FloatPointer): 2 usages
IntPointer (com.googlecode.javacpp.IntPointer): 2 usages
PointerPointer (com.googlecode.javacpp.PointerPointer): 2 usages
ShortPointer (com.googlecode.javacpp.ShortPointer): 2 usages
ByteBuffer (java.nio.ByteBuffer): 2 usages
DoubleBuffer (java.nio.DoubleBuffer): 2 usages
FloatBuffer (java.nio.FloatBuffer): 2 usages
IntBuffer (java.nio.IntBuffer): 2 usages
ShortBuffer (java.nio.ShortBuffer): 2 usages