Example 21 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FFmpegFrameGrabber, method initPictureRGB.

private void initPictureRGB() {
    int width = imageWidth > 0 ? imageWidth : video_c.width();
    int height = imageHeight > 0 ? imageHeight : video_c.height();
    switch(imageMode) {
        case COLOR:
        case GRAY:
            // If the size changes, a new allocation is needed -> free the old one.
            if (image_ptr != null) {
                // First kill all references, then free it.
                image_buf = null;
                BytePointer[] temp = image_ptr;
                image_ptr = null;
                av_free(temp[0]);
            }
            int fmt = getPixelFormat();
            // Work around a bug in swscale: https://trac.ffmpeg.org/ticket/1031
            int align = 32;
            int stride = width;
            for (int i = 1; i <= align; i += i) {
                // Round the width up to a multiple of i (a power of two), doubling i
                // until av_image_fill_linesizes() produces a 32-byte-aligned linesize.
                stride = (width + (i - 1)) & ~(i - 1);
                av_image_fill_linesizes(picture_rgb.linesize(), fmt, stride);
                if ((picture_rgb.linesize(0) & (align - 1)) == 0) {
                    break;
                }
            }
            // Determine required buffer size and allocate buffer
            int size = av_image_get_buffer_size(fmt, stride, height, 1);
            image_ptr = new BytePointer[] { new BytePointer(av_malloc(size)).capacity(size) };
            image_buf = new Buffer[] { image_ptr[0].asBuffer() };
            // Assign appropriate parts of buffer to image planes in picture_rgb
            // Note that picture_rgb is an AVFrame, but AVFrame is a superset of AVPicture
            av_image_fill_arrays(new PointerPointer(picture_rgb), picture_rgb.linesize(), image_ptr[0], fmt, stride, height, 1);
            picture_rgb.format(fmt);
            picture_rgb.width(width);
            picture_rgb.height(height);
            break;
        case RAW:
            image_ptr = new BytePointer[] { null };
            image_buf = new Buffer[] { null };
            break;
        default:
            assert false;
    }
}
Also used: PointerPointer(org.bytedeco.javacpp.PointerPointer), BytePointer(org.bytedeco.javacpp.BytePointer)
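
The alignment loop in initPictureRGB exists because swscale can read past the end of buffers whose linesize is not sufficiently aligned (see the linked ticket). As a minimal standalone sketch of the same rounding, in plain Java with no FFmpeg calls (alignUp is a hypothetical helper, not part of javacv):

static int alignUp(int width, int i) {
    // Rounds width up to the next multiple of i; valid because i is a power of two.
    return (width + (i - 1)) & ~(i - 1);
}

public static void main(String[] args) {
    // Double the trial alignment from 1 to 32, as initPictureRGB does.
    for (int i = 1; i <= 32; i += i) {
        System.out.println("i=" + i + " -> stride=" + alignUp(1001, i));
    }
    // Prints 1001, 1002, 1004, 1008, 1008, 1024 for a width of 1001.
}

initPictureRGB stops at the first i whose resulting linesize is already 32-byte aligned, so it never pads the stride more than necessary.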

Example 22 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FFmpegFrameGrabber, method startUnsafe.

public synchronized void startUnsafe(boolean findStreamInfo) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (oc != null && !oc.isNull()) {
            throw new Exception("start() has already been called: Call stop() before calling start() again.");
        }
        int ret;
        img_convert_ctx = null;
        oc = new AVFormatContext(null);
        video_c = null;
        audio_c = null;
        plane_ptr = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
        plane_ptr2 = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
        pkt = new AVPacket().retainReference();
        frameGrabbed = false;
        frame = new Frame();
        timestamp = 0;
        frameNumber = 0;
        pkt.stream_index(-1);
        // Open video file
        AVInputFormat f = null;
        if (format != null && format.length() > 0) {
            if ((f = av_find_input_format(format)) == null) {
                throw new Exception("av_find_input_format() error: Could not find input format \"" + format + "\".");
            }
        }
        AVDictionary options = new AVDictionary(null);
        if (frameRate > 0) {
            AVRational r = av_d2q(frameRate, 1001000);
            av_dict_set(options, "framerate", r.num() + "/" + r.den(), 0);
        }
        if (pixelFormat >= 0) {
            av_dict_set(options, "pixel_format", av_get_pix_fmt_name(pixelFormat).getString(), 0);
        } else if (imageMode != ImageMode.RAW) {
            av_dict_set(options, "pixel_format", imageMode == ImageMode.COLOR ? "bgr24" : "gray8", 0);
        }
        if (imageWidth > 0 && imageHeight > 0) {
            av_dict_set(options, "video_size", imageWidth + "x" + imageHeight, 0);
        }
        if (sampleRate > 0) {
            av_dict_set(options, "sample_rate", "" + sampleRate, 0);
        }
        if (audioChannels > 0) {
            av_dict_set(options, "channels", "" + audioChannels, 0);
        }
        for (Entry<String, String> e : this.options.entrySet()) {
            av_dict_set(options, e.getKey(), e.getValue(), 0);
        }
        if (inputStream != null) {
            if (!inputStream.markSupported()) {
                inputStream = new BufferedInputStream(inputStream);
            }
            inputStream.mark(maximumSize);
            oc = avformat_alloc_context();
            avio = avio_alloc_context(new BytePointer(av_malloc(4096)), 4096, 0, oc, readCallback, null, maximumSize > 0 ? seekCallback : null);
            oc.pb(avio);
            filename = inputStream.toString();
            inputStreams.put(oc, inputStream);
        }
        if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
            av_dict_set(options, "pixel_format", null, 0);
            if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
                throw new Exception("avformat_open_input() error " + ret + ": Could not open input \"" + filename + "\". (Has setFormat() been called?)");
            }
        }
        av_dict_free(options);
        oc.max_delay(maxDelay);
        // Retrieve stream information, if desired
        if (findStreamInfo && (ret = avformat_find_stream_info(oc, (PointerPointer) null)) < 0) {
            throw new Exception("avformat_find_stream_info() error " + ret + ": Could not find stream information.");
        }
        if (av_log_get_level() >= AV_LOG_INFO) {
            // Dump information about file onto standard error
            av_dump_format(oc, 0, filename, 0);
        }
        // Find the first video and audio stream, unless the user specified otherwise
        video_st = audio_st = null;
        AVCodecParameters video_par = null, audio_par = null;
        int nb_streams = oc.nb_streams();
        streams = new int[nb_streams];
        for (int i = 0; i < nb_streams; i++) {
            AVStream st = oc.streams(i);
            // Get a pointer to the codec context for the video or audio stream
            AVCodecParameters par = st.codecpar();
            streams[i] = par.codec_type();
            if (video_st == null && par.codec_type() == AVMEDIA_TYPE_VIDEO && (videoStream < 0 || videoStream == i)) {
                video_st = st;
                video_par = par;
                videoStream = i;
            } else if (audio_st == null && par.codec_type() == AVMEDIA_TYPE_AUDIO && (audioStream < 0 || audioStream == i)) {
                audio_st = st;
                audio_par = par;
                audioStream = i;
            }
        }
        if (video_st == null && audio_st == null) {
            throw new Exception("Did not find a video or audio stream inside \"" + filename + "\" for videoStream == " + videoStream + " and audioStream == " + audioStream + ".");
        }
        if (video_st != null) {
            // Find the decoder for the video stream
            AVCodec codec = avcodec_find_decoder_by_name(videoCodecName);
            if (codec == null) {
                codec = avcodec_find_decoder(video_par.codec_id());
            }
            if (codec == null) {
                throw new Exception("avcodec_find_decoder() error: Unsupported video format or codec not found: " + video_par.codec_id() + ".");
            }
            /* Allocate a codec context for the decoder */
            if ((video_c = avcodec_alloc_context3(codec)) == null) {
                throw new Exception("avcodec_alloc_context3() error: Could not allocate video decoding context.");
            }
            /* copy the stream parameters from the muxer */
            if ((ret = avcodec_parameters_to_context(video_c, video_st.codecpar())) < 0) {
                releaseUnsafe();
                throw new Exception("avcodec_parameters_to_context() error " + ret + ": Could not copy the video stream parameters.");
            }
            options = new AVDictionary(null);
            for (Entry<String, String> e : videoOptions.entrySet()) {
                av_dict_set(options, e.getKey(), e.getValue(), 0);
            }
            // Enable multithreading when available
            video_c.thread_count(0);
            // Open video codec
            if ((ret = avcodec_open2(video_c, codec, options)) < 0) {
                throw new Exception("avcodec_open2() error " + ret + ": Could not open video codec.");
            }
            av_dict_free(options);
            // Hack to correct wrong frame rates that seem to be generated by some codecs
            if (video_c.time_base().num() > 1000 && video_c.time_base().den() == 1) {
                video_c.time_base().den(1000);
            }
            // Allocate video frame and an AVFrame structure for the RGB image
            if ((picture = av_frame_alloc()) == null) {
                throw new Exception("av_frame_alloc() error: Could not allocate raw picture frame.");
            }
            if ((picture_rgb = av_frame_alloc()) == null) {
                throw new Exception("av_frame_alloc() error: Could not allocate RGB picture frame.");
            }
            initPictureRGB();
        }
        if (audio_st != null) {
            // Find the decoder for the audio stream
            AVCodec codec = avcodec_find_decoder_by_name(audioCodecName);
            if (codec == null) {
                codec = avcodec_find_decoder(audio_par.codec_id());
            }
            if (codec == null) {
                throw new Exception("avcodec_find_decoder() error: Unsupported audio format or codec not found: " + audio_par.codec_id() + ".");
            }
            /* Allocate a codec context for the decoder */
            if ((audio_c = avcodec_alloc_context3(codec)) == null) {
                throw new Exception("avcodec_alloc_context3() error: Could not allocate audio decoding context.");
            }
            /* copy the stream parameters from the muxer */
            if ((ret = avcodec_parameters_to_context(audio_c, audio_st.codecpar())) < 0) {
                releaseUnsafe();
                throw new Exception("avcodec_parameters_to_context() error " + ret + ": Could not copy the audio stream parameters.");
            }
            options = new AVDictionary(null);
            for (Entry<String, String> e : audioOptions.entrySet()) {
                av_dict_set(options, e.getKey(), e.getValue(), 0);
            }
            // Enable multithreading when available
            audio_c.thread_count(0);
            // Open audio codec
            if ((ret = avcodec_open2(audio_c, codec, options)) < 0) {
                throw new Exception("avcodec_open2() error " + ret + ": Could not open audio codec.");
            }
            av_dict_free(options);
            // Allocate audio samples frame
            if ((samples_frame = av_frame_alloc()) == null) {
                throw new Exception("av_frame_alloc() error: Could not allocate audio frame.");
            }
            samples_ptr = new BytePointer[] { null };
            samples_buf = new Buffer[] { null };
        }
        started = true;
    }
}
Also used: PointerPointer(org.bytedeco.javacpp.PointerPointer), BytePointer(org.bytedeco.javacpp.BytePointer), PointerScope(org.bytedeco.javacpp.PointerScope), IOException(java.io.IOException), BufferedInputStream(java.io.BufferedInputStream)
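
startUnsafe() is normally reached through start(), which ultimately delegates to it. A minimal caller sketch, assuming a local media file (the path and method name are placeholders):

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

public static void dumpFrames() throws Exception {
    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mp4"); // placeholder path
    grabber.start(); // ends up in startUnsafe(), opening the input and its codecs
    Frame frame;
    while ((frame = grabber.grab()) != null) {
        // frame.image holds the video planes, frame.samples the audio buffers
    }
    grabber.stop();
    grabber.release();
}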

Example 23 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FFmpegFrameRecorder, method recordSamples.

public synchronized boolean recordSamples(int sampleRate, int audioChannels, Buffer... samples) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (audio_st == null) {
            throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
        }
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        if (samples == null && samples_out[0].position() > 0) {
            // Typically samples_out[0].limit() is double the audio_input_frame_size --> sampleDivisor = 2
            double sampleDivisor = Math.floor((int) Math.min(samples_out[0].limit(), Integer.MAX_VALUE) / audio_input_frame_size);
            writeSamples((int) Math.floor((int) samples_out[0].position() / sampleDivisor));
            return writeFrame((AVFrame) null);
        }
        int ret;
        if (sampleRate <= 0) {
            sampleRate = audio_c.sample_rate();
        }
        if (audioChannels <= 0) {
            audioChannels = audio_c.channels();
        }
        int inputSize = samples != null ? samples[0].limit() - samples[0].position() : 0;
        int inputFormat = samples_format;
        int inputChannels = samples != null && samples.length > 1 ? 1 : audioChannels;
        int inputDepth = 0;
        int outputFormat = audio_c.sample_fmt();
        int outputChannels = samples_out.length > 1 ? 1 : audio_c.channels();
        int outputDepth = av_get_bytes_per_sample(outputFormat);
        if (samples != null && samples[0] instanceof ByteBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
            inputDepth = 1;
            for (int i = 0; i < samples.length; i++) {
                ByteBuffer b = (ByteBuffer) samples[i];
                if (samples_in[i] instanceof BytePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((BytePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new BytePointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof ShortBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
            inputDepth = 2;
            for (int i = 0; i < samples.length; i++) {
                ShortBuffer b = (ShortBuffer) samples[i];
                if (samples_in[i] instanceof ShortPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((ShortPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new ShortPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof IntBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
            inputDepth = 4;
            for (int i = 0; i < samples.length; i++) {
                IntBuffer b = (IntBuffer) samples[i];
                if (samples_in[i] instanceof IntPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((IntPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new IntPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof FloatBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
            inputDepth = 4;
            for (int i = 0; i < samples.length; i++) {
                FloatBuffer b = (FloatBuffer) samples[i];
                if (samples_in[i] instanceof FloatPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((FloatPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new FloatPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof DoubleBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
            inputDepth = 8;
            for (int i = 0; i < samples.length; i++) {
                DoubleBuffer b = (DoubleBuffer) samples[i];
                if (samples_in[i] instanceof DoublePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((DoublePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new DoublePointer(b).retainReference();
                }
            }
        } else if (samples != null) {
            throw new Exception("Audio samples Buffer has unsupported type: " + samples);
        }
        if (samples_convert_ctx == null || samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate) {
            samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, audio_c.channel_layout(), outputFormat, audio_c.sample_rate(), av_get_default_channel_layout(audioChannels), inputFormat, sampleRate, 0, null);
            if (samples_convert_ctx == null) {
                throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
            } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
                throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
            }
            samples_channels = audioChannels;
            samples_format = inputFormat;
            samples_rate = sampleRate;
        }
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in[i].position(samples_in[i].position() * inputDepth).limit((samples_in[i].position() + inputSize) * inputDepth);
        }
        while (true) {
            int inputCount = (int) Math.min(samples != null ? (samples_in[0].limit() - samples_in[0].position()) / (inputChannels * inputDepth) : 0, Integer.MAX_VALUE);
            int outputCount = (int) Math.min((samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth), Integer.MAX_VALUE);
            inputCount = Math.min(inputCount, (outputCount * sampleRate + audio_c.sample_rate() - 1) / audio_c.sample_rate());
            for (int i = 0; samples != null && i < samples.length; i++) {
                plane_ptr.put(i, samples_in[i]);
            }
            for (int i = 0; i < samples_out.length; i++) {
                plane_ptr2.put(i, samples_out[i]);
            }
            if ((ret = swr_convert(samples_convert_ctx, plane_ptr2, outputCount, plane_ptr, inputCount)) < 0) {
                throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
            } else if (ret == 0) {
                break;
            }
            for (int i = 0; samples != null && i < samples.length; i++) {
                samples_in[i].position(samples_in[i].position() + inputCount * inputChannels * inputDepth);
            }
            for (int i = 0; i < samples_out.length; i++) {
                samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth);
            }
            if (samples == null || samples_out[0].position() >= samples_out[0].limit()) {
                writeSamples(audio_input_frame_size);
            }
        }
        return samples != null ? frame.key_frame() != 0 : writeFrame((AVFrame) null);
    }
}
Also used: DoubleBuffer(java.nio.DoubleBuffer), BytePointer(org.bytedeco.javacpp.BytePointer), DoublePointer(org.bytedeco.javacpp.DoublePointer), FloatBuffer(java.nio.FloatBuffer), PointerScope(org.bytedeco.javacpp.PointerScope), ByteBuffer(java.nio.ByteBuffer), IOException(java.io.IOException), ShortPointer(org.bytedeco.javacpp.ShortPointer), FloatPointer(org.bytedeco.javacpp.FloatPointer), IntBuffer(java.nio.IntBuffer), IntPointer(org.bytedeco.javacpp.IntPointer), ShortBuffer(java.nio.ShortBuffer)
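
A hedged sketch of feeding interleaved 16-bit PCM into recordSamples(); the file name, tone, and rates are illustrative. ShortBuffer.wrap() returns an array-backed buffer, so the call takes the ShortBuffer branch above and copies the data through a ShortPointer:

import java.nio.ShortBuffer;
import org.bytedeco.javacv.FFmpegFrameRecorder;

public static void writeTone() throws Exception {
    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("tone.wav", 2); // audio-only
    recorder.setSampleRate(44100);
    recorder.start();
    short[] pcm = new short[44100 * 2]; // one second of interleaved stereo
    for (int i = 0; i < pcm.length; i += 2) {
        short s = (short) (8192 * Math.sin(2 * Math.PI * 440 * (i / 2) / 44100.0));
        pcm[i] = s;     // left channel
        pcm[i + 1] = s; // right channel, same 440 Hz tone
    }
    recorder.recordSamples(44100, 2, ShortBuffer.wrap(pcm));
    recorder.stop();
    recorder.release();
}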

Example 24 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class FlyCapture2FrameGrabber, method grab.

public Frame grab() throws FrameGrabber.Exception {
    Error error = camera.RetrieveBuffer(raw_image);
    if (error.notEquals(PGRERROR_OK)) {
        throw new FrameGrabber.Exception("flycaptureGrabImage2() Error " + error + " (Has start() been called?)");
    }
    int w = raw_image.GetCols();
    int h = raw_image.GetRows();
    int format = raw_image.GetPixelFormat();
    int depth = getDepth(format);
    int stride = raw_image.GetStride();
    int size = h * stride;
    int numChannels = getNumChannels(format);
    error = camera.ReadRegister(IMAGE_DATA_FORMAT, regOut);
    if (error.notEquals(PGRERROR_OK)) {
        throw new FrameGrabber.Exception("flycaptureGetCameraRegister() Error " + error);
    }
    ByteOrder frameEndian = (regOut[0] & 0x1) != 0 ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN;
    boolean alreadySwapped = false;
    boolean colorbayer = raw_image.GetBayerTileFormat() != NONE;
    boolean colorrgb = format == PIXEL_FORMAT_RGB8 || format == PIXEL_FORMAT_RGB16 || format == PIXEL_FORMAT_BGR || format == PIXEL_FORMAT_BGRU;
    boolean coloryuv = format == PIXEL_FORMAT_411YUV8 || format == PIXEL_FORMAT_422YUV8 || format == PIXEL_FORMAT_444YUV8;
    BytePointer imageData = raw_image.GetData().capacity(raw_image.GetDataSize());
    if ((depth == IPL_DEPTH_8U || frameEndian.equals(ByteOrder.nativeOrder())) && (imageMode == FrameGrabber.ImageMode.RAW || (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 3) || (imageMode == FrameGrabber.ImageMode.GRAY && numChannels == 1 && !colorbayer))) {
        if (return_image == null) {
            return_image = IplImage.createHeader(w, h, depth, numChannels);
        }
        return_image.widthStep(stride);
        return_image.imageSize(size);
        return_image.imageData(imageData);
    } else {
        if (return_image == null) {
            return_image = IplImage.create(w, h, depth, imageMode == FrameGrabber.ImageMode.COLOR ? 3 : 1);
        }
        if (temp_image == null) {
            if (imageMode == FrameGrabber.ImageMode.COLOR && (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) {
                temp_image = IplImage.create(w, h, depth, numChannels);
            } else if (imageMode == FrameGrabber.ImageMode.GRAY && colorbayer) {
                temp_image = IplImage.create(w, h, depth, 3);
            } else if (imageMode == FrameGrabber.ImageMode.GRAY && colorrgb) {
                temp_image = IplImage.createHeader(w, h, depth, 3);
            } else if (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
                temp_image = IplImage.createHeader(w, h, depth, 1);
            } else {
                temp_image = return_image;
            }
        }
        setStride(conv_image, temp_image.widthStep());
        conv_image.SetData(temp_image.imageData(), temp_image.width() * temp_image.height() * temp_image.depth());
        if (depth == IPL_DEPTH_8U) {
            setPixelFormat(conv_image, imageMode == FrameGrabber.ImageMode.RAW ? PIXEL_FORMAT_RAW8 : temp_image.nChannels() == 1 ? PIXEL_FORMAT_MONO8 : PIXEL_FORMAT_BGR);
        } else {
            setPixelFormat(conv_image, imageMode == FrameGrabber.ImageMode.RAW ? PIXEL_FORMAT_RAW16 : temp_image.nChannels() == 1 ? PIXEL_FORMAT_MONO16 : PIXEL_FORMAT_RGB16);
        }
        if (depth != IPL_DEPTH_8U && conv_image.GetPixelFormat() == format && conv_image.GetStride() == stride) {
            // We just need a copy to swap the bytes.
            ShortBuffer in = imageData.asByteBuffer().order(frameEndian).asShortBuffer();
            ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
            alreadySwapped = true;
        } else if ((imageMode == FrameGrabber.ImageMode.GRAY && colorrgb) || (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) {
            temp_image.widthStep(stride);
            temp_image.imageSize(size);
            temp_image.imageData(imageData);
        } else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) {
            error = raw_image.Convert(conv_image);
            // error = flycaptureConvertImage(context, raw_image, conv_image);
            if (error.notEquals(PGRERROR_OK)) {
                PrintError(error);
                throw new FrameGrabber.Exception("raw_image.Convert Error " + error);
            }
        }
        if (!alreadySwapped && depth != IPL_DEPTH_8U && !frameEndian.equals(ByteOrder.nativeOrder())) {
            // The camera's endianness doesn't match the machine's:
            // swap the bytes of the 16-bit image.
            ByteBuffer bb = temp_image.getByteBuffer();
            ShortBuffer in = bb.order(frameEndian).asShortBuffer();
            ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
        }
        if (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
            cvCvtColor(temp_image, return_image, CV_GRAY2BGR);
        } else if (imageMode == FrameGrabber.ImageMode.GRAY && (colorbayer || colorrgb)) {
            cvCvtColor(temp_image, return_image, CV_BGR2GRAY);
        }
    }
    int bayerFormat = cameraInfo.bayerTileFormat();
    switch(bayerFormat) {
        case BGGR:
            sensorPattern = SENSOR_PATTERN_BGGR;
            break;
        case GBRG:
            sensorPattern = SENSOR_PATTERN_GBRG;
            break;
        case GRBG:
            sensorPattern = SENSOR_PATTERN_GRBG;
            break;
        case RGGB:
            sensorPattern = SENSOR_PATTERN_RGGB;
            break;
        default:
            sensorPattern = -1L;
    }
    TimeStamp timeStamp = raw_image.GetTimeStamp();
    timestamp = timeStamp.seconds() * 1000000L + timeStamp.microSeconds();
    return converter.convert(return_image);
}
Also used: BytePointer(org.bytedeco.javacpp.BytePointer), Error(org.bytedeco.flycapture.FlyCapture2.Error), ByteOrder(java.nio.ByteOrder), ShortBuffer(java.nio.ShortBuffer), ByteBuffer(java.nio.ByteBuffer)
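
The 16-bit byte swap near the end of grab() relies on viewing the same bytes as shorts twice, once in the camera's byte order and once in the machine's; copying between the two views swaps every sample in place. A standalone sketch of that trick (swapToNativeOrder is a hypothetical helper):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

static void swapToNativeOrder(ByteBuffer bb, ByteOrder frameEndian) {
    if (frameEndian.equals(ByteOrder.nativeOrder())) {
        return; // already in the right order
    }
    // Two short views of the same memory with opposite byte orders:
    ShortBuffer in = bb.duplicate().order(frameEndian).asShortBuffer();
    ShortBuffer out = bb.duplicate().order(ByteOrder.nativeOrder()).asShortBuffer();
    out.put(in); // each element is read in one order and written back in the other
}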

Example 25 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

From the class Frame, method createIndexer.

/**
 * Returns an {@link Indexer} for the <i>i</i>th image plane.
 */
public <I extends Indexer> I createIndexer(boolean direct, int i) {
    long[] sizes = { imageHeight, imageWidth, imageChannels };
    long[] strides = { imageStride, imageChannels, 1 };
    Buffer buffer = image[i];
    Object array = buffer.hasArray() ? buffer.array() : null;
    switch(imageDepth) {
        case DEPTH_UBYTE:
            return array != null ? (I) UByteIndexer.create((byte[]) array, sizes, strides).indexable(this)
                    : direct ? (I) UByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this)
                    : (I) UByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_BYTE:
            return array != null ? (I) ByteIndexer.create((byte[]) array, sizes, strides).indexable(this)
                    : direct ? (I) ByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this)
                    : (I) ByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_USHORT:
            return array != null ? (I) UShortIndexer.create((short[]) array, sizes, strides).indexable(this)
                    : direct ? (I) UShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this)
                    : (I) UShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_SHORT:
            return array != null ? (I) ShortIndexer.create((short[]) array, sizes, strides).indexable(this)
                    : direct ? (I) ShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this)
                    : (I) ShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_INT:
            return array != null ? (I) IntIndexer.create((int[]) array, sizes, strides).indexable(this)
                    : direct ? (I) IntIndexer.create((IntBuffer) buffer, sizes, strides).indexable(this)
                    : (I) IntIndexer.create(new IntPointer((IntBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_LONG:
            return array != null ? (I) LongIndexer.create((long[]) array, sizes, strides).indexable(this)
                    : direct ? (I) LongIndexer.create((LongBuffer) buffer, sizes, strides).indexable(this)
                    : (I) LongIndexer.create(new LongPointer((LongBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_FLOAT:
            return array != null ? (I) FloatIndexer.create((float[]) array, sizes, strides).indexable(this)
                    : direct ? (I) FloatIndexer.create((FloatBuffer) buffer, sizes, strides).indexable(this)
                    : (I) FloatIndexer.create(new FloatPointer((FloatBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_DOUBLE:
            return array != null ? (I) DoubleIndexer.create((double[]) array, sizes, strides).indexable(this)
                    : direct ? (I) DoubleIndexer.create((DoubleBuffer) buffer, sizes, strides).indexable(this)
                    : (I) DoubleIndexer.create(new DoublePointer((DoubleBuffer) buffer), sizes, strides, false).indexable(this);
        default:
            assert false;
    }
    return null;
}
Also used: FloatBuffer(java.nio.FloatBuffer), ShortBuffer(java.nio.ShortBuffer), ByteBuffer(java.nio.ByteBuffer), IntBuffer(java.nio.IntBuffer), Buffer(java.nio.Buffer), DoubleBuffer(java.nio.DoubleBuffer), LongBuffer(java.nio.LongBuffer), BytePointer(org.bytedeco.javacpp.BytePointer), DoublePointer(org.bytedeco.javacpp.DoublePointer), ShortPointer(org.bytedeco.javacpp.ShortPointer), LongPointer(org.bytedeco.javacpp.LongPointer), FloatPointer(org.bytedeco.javacpp.FloatPointer), IntPointer(org.bytedeco.javacpp.IntPointer)
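
A minimal sketch of consuming the returned indexer, assuming an 8-bit BGR frame grabbed elsewhere (blueAt is a hypothetical helper; the no-argument createIndexer() overload defaults to direct access on plane 0):

import org.bytedeco.javacpp.indexer.UByteIndexer;
import org.bytedeco.javacv.Frame;

static int blueAt(Frame frame, int row, int col) {
    // DEPTH_UBYTE frames yield a UByteIndexer; Indexer is AutoCloseable.
    try (UByteIndexer idx = frame.createIndexer()) {
        return idx.get(row, col, 0); // channel 0 is blue in a BGR frame
    }
}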

Aggregations

BytePointer (org.bytedeco.javacpp.BytePointer): 79
IntPointer (org.bytedeco.javacpp.IntPointer): 22
PointerPointer (org.bytedeco.javacpp.PointerPointer): 20
ByteBuffer (java.nio.ByteBuffer): 19
IOException (java.io.IOException): 16
Pointer (org.bytedeco.javacpp.Pointer): 16
PointerScope (org.bytedeco.javacpp.PointerScope): 13
DoublePointer (org.bytedeco.javacpp.DoublePointer): 12
FloatPointer (org.bytedeco.javacpp.FloatPointer): 12
CompressedDataBuffer (org.nd4j.linalg.compression.CompressedDataBuffer): 10
CompressionDescriptor (org.nd4j.linalg.compression.CompressionDescriptor): 10
ShortBuffer (java.nio.ShortBuffer): 9
ShortPointer (org.bytedeco.javacpp.ShortPointer): 9
IntBuffer (java.nio.IntBuffer): 7
DoubleBuffer (java.nio.DoubleBuffer): 6
FloatBuffer (java.nio.FloatBuffer): 6
Nonnull (javax.annotation.Nonnull): 5
LongPointer (org.bytedeco.javacpp.LongPointer): 5
TF_Status (org.tensorflow.internal.c_api.TF_Status): 4
ByteOrder (java.nio.ByteOrder): 3