
Example 66 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

The class FFmpegFrameRecorder, method recordImage.

public synchronized boolean recordImage(int width, int height, int depth, int channels, int stride, int pixelFormat, Buffer... image) throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (video_st == null) {
            throw new Exception("No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
        }
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        int ret;
        if (image == null || image.length == 0) {
            /* No more frames to compress. The codec has a latency of a few
               frames if using B-frames, so we get the last frames by
               passing the same picture again. */
        } else {
            int step = stride * Math.abs(depth) / 8;
            BytePointer data = image[0] instanceof ByteBuffer
                    ? new BytePointer((ByteBuffer) image[0]).position(0)
                    : new BytePointer(new Pointer(image[0]).position(0));
            if (pixelFormat == AV_PIX_FMT_NONE) {
                if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 3) {
                    pixelFormat = AV_PIX_FMT_BGR24;
                } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 1) {
                    pixelFormat = AV_PIX_FMT_GRAY8;
                } else if ((depth == Frame.DEPTH_USHORT || depth == Frame.DEPTH_SHORT) && channels == 1) {
                    pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
                } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 4) {
                    pixelFormat = AV_PIX_FMT_RGBA;
                } else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 2) {
                    // Android's camera capture format
                    pixelFormat = AV_PIX_FMT_NV21;
                } else {
                    throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
                }
            }
            if (pixelFormat == AV_PIX_FMT_NV21) {
                step = width;
            }
            if (video_c.pix_fmt() != pixelFormat || video_c.width() != width || video_c.height() != height) {
                /* convert to the codec pixel format if needed */
                img_convert_ctx = sws_getCachedContext(img_convert_ctx,
                        width, height, pixelFormat,
                        video_c.width(), video_c.height(), video_c.pix_fmt(),
                        imageScalingFlags != 0 ? imageScalingFlags : SWS_BILINEAR,
                        null, null, (DoublePointer) null);
                if (img_convert_ctx == null) {
                    throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
                }
                av_image_fill_arrays(new PointerPointer(tmp_picture), tmp_picture.linesize(), data, pixelFormat, width, height, 1);
                av_image_fill_arrays(new PointerPointer(picture), picture.linesize(), picture_buf, video_c.pix_fmt(), video_c.width(), video_c.height(), 1);
                tmp_picture.linesize(0, step);
                tmp_picture.format(pixelFormat);
                tmp_picture.width(width);
                tmp_picture.height(height);
                picture.format(video_c.pix_fmt());
                picture.width(video_c.width());
                picture.height(video_c.height());
                sws_scale(img_convert_ctx, new PointerPointer(tmp_picture), tmp_picture.linesize(), 0, height, new PointerPointer(picture), picture.linesize());
            } else {
                av_image_fill_arrays(new PointerPointer(picture), picture.linesize(), data, pixelFormat, width, height, 1);
                picture.linesize(0, step);
                picture.format(pixelFormat);
                picture.width(width);
                picture.height(height);
            }
        }
        // if ((oformat.flags() & AVFMT_RAWPICTURE) != 0) {
        // if (image == null || image.length == 0) {
        // return false;
        // }
        // /* raw video case. The API may change slightly in the future for that? */
        // av_init_packet(video_pkt);
        // video_pkt.flags(video_pkt.flags() | AV_PKT_FLAG_KEY);
        // video_pkt.stream_index(video_st.index());
        // video_pkt.data(new BytePointer(picture));
        // video_pkt.size(Loader.sizeof(AVFrame.class));
        // } else {
        /* encode the image */
        picture.quality(video_c.global_quality());
        if ((ret = avcodec_send_frame(video_c, image == null || image.length == 0 ? null : picture)) < 0 && image != null && image.length != 0) {
            throw new Exception("avcodec_send_frame() error " + ret + ": Error sending a video frame for encoding.");
        }
        // magic required by libx264
        picture.pts(picture.pts() + 1);
        /* if zero size, it means the image was buffered */
        got_video_packet[0] = 0;
        while (ret >= 0) {
            av_new_packet(video_pkt, video_outbuf_size);
            ret = avcodec_receive_packet(video_c, video_pkt);
            if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
                av_packet_unref(video_pkt);
                break;
            } else if (ret < 0) {
                av_packet_unref(video_pkt);
                throw new Exception("avcodec_receive_packet() error " + ret + ": Error during video encoding.");
            }
            got_video_packet[0] = 1;
            if (video_pkt.pts() != AV_NOPTS_VALUE) {
                video_pkt.pts(av_rescale_q(video_pkt.pts(), video_c.time_base(), video_st.time_base()));
            }
            if (video_pkt.dts() != AV_NOPTS_VALUE) {
                video_pkt.dts(av_rescale_q(video_pkt.dts(), video_c.time_base(), video_st.time_base()));
            }
            video_pkt.stream_index(video_st.index());
            /* write the compressed frame in the media file */
            writePacket(AVMEDIA_TYPE_VIDEO, video_pkt);
        }
        // }
        return image != null ? (video_pkt.flags() & AV_PKT_FLAG_KEY) != 0 : got_video_packet[0] != 0;
    }
}
Also used : BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) IntPointer(org.bytedeco.javacpp.IntPointer) Pointer(org.bytedeco.javacpp.Pointer) PointerPointer(org.bytedeco.javacpp.PointerPointer) PointerScope(org.bytedeco.javacpp.PointerScope) ShortPointer(org.bytedeco.javacpp.ShortPointer) ByteBuffer(java.nio.ByteBuffer) IOException(java.io.IOException)
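
A minimal caller sketch for recordImage, assuming a packed BGR24 frame in a direct ByteBuffer; the file name, dimensions, and frame rate are placeholders, and the pixel format constant is assumed to come from org.bytedeco.ffmpeg.global.avutil (javacv 1.5+):

import java.nio.ByteBuffer;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import static org.bytedeco.ffmpeg.global.avutil.AV_PIX_FMT_BGR24;

public class RecordImageSketch {
    public static void main(String[] args) throws Exception {
        int width = 640, height = 480, channels = 3;
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("out.mp4", width, height);
        recorder.setFormat("mp4");
        recorder.setFrameRate(30);
        recorder.start();
        // One 8-bit packed BGR frame; the stride argument is in samples per row.
        ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * channels);
        recorder.recordImage(width, height, Frame.DEPTH_UBYTE, channels,
                width * channels, AV_PIX_FMT_BGR24, pixels);
        recorder.stop();
        recorder.release();
    }
}

Note the empty branch at the top of recordImage: calling it with no image buffers drains frames still buffered in the encoder, which is how B-frame latency avoids losing frames at the end of a recording.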

Example 67 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

The class FlyCaptureFrameGrabber, method grab.

public Frame grab() throws Exception {
    int error = flycaptureGrabImage2(context, raw_image);
    if (error != FLYCAPTURE_OK) {
        throw new Exception("flycaptureGrabImage2() Error " + error + " (Has start() been called?)");
    }
    int w = raw_image.iCols();
    int h = raw_image.iRows();
    int format = raw_image.pixelFormat();
    int depth = getDepth(format);
    int stride = raw_image.iRowInc();
    int size = h * stride;
    int numChannels = getNumChannels(format);
    error = flycaptureGetCameraRegister(context, IMAGE_DATA_FORMAT, regOut);
    if (error != FLYCAPTURE_OK) {
        throw new Exception("flycaptureGetCameraRegister() Error " + error);
    }
    ByteOrder frameEndian = (regOut[0] & 0x1) != 0 ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN;
    boolean alreadySwapped = false;
    boolean colorbayer = raw_image.bStippled();
    boolean colorrgb = format == FLYCAPTURE_RGB8 || format == FLYCAPTURE_RGB16 || format == FLYCAPTURE_BGR || format == FLYCAPTURE_BGRU;
    boolean coloryuv = format == FLYCAPTURE_411YUV8 || format == FLYCAPTURE_422YUV8 || format == FLYCAPTURE_444YUV8;
    BytePointer imageData = raw_image.pData();
    if ((depth == IPL_DEPTH_8U || frameEndian.equals(ByteOrder.nativeOrder()))
            && (imageMode == ImageMode.RAW
                    || (imageMode == ImageMode.COLOR && numChannels == 3)
                    || (imageMode == ImageMode.GRAY && numChannels == 1 && !colorbayer))) {
        if (return_image == null) {
            return_image = IplImage.createHeader(w, h, depth, numChannels);
        }
        return_image.widthStep(stride);
        return_image.imageSize(size);
        return_image.imageData(imageData);
    } else {
        if (return_image == null) {
            return_image = IplImage.create(w, h, depth, imageMode == ImageMode.COLOR ? 3 : 1);
        }
        if (temp_image == null) {
            if (imageMode == ImageMode.COLOR && (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) {
                temp_image = IplImage.create(w, h, depth, numChannels);
            } else if (imageMode == ImageMode.GRAY && colorbayer) {
                temp_image = IplImage.create(w, h, depth, 3);
            } else if (imageMode == ImageMode.GRAY && colorrgb) {
                temp_image = IplImage.createHeader(w, h, depth, 3);
            } else if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
                temp_image = IplImage.createHeader(w, h, depth, 1);
            } else {
                temp_image = return_image;
            }
        }
        conv_image.iRowInc(temp_image.widthStep());
        conv_image.pData(temp_image.imageData());
        if (depth == IPL_DEPTH_8U) {
            conv_image.pixelFormat(imageMode == ImageMode.RAW ? FLYCAPTURE_RAW8 : temp_image.nChannels() == 1 ? FLYCAPTURE_MONO8 : FLYCAPTURE_BGR);
        } else {
            conv_image.pixelFormat(imageMode == ImageMode.RAW ? FLYCAPTURE_RAW16 : temp_image.nChannels() == 1 ? FLYCAPTURE_MONO16 : FLYCAPTURE_RGB16);
        }
        if (depth != IPL_DEPTH_8U && conv_image.pixelFormat() == format && conv_image.iRowInc() == stride) {
            // we just need a copy to swap the bytes
            ShortBuffer in = raw_image.getByteBuffer().order(frameEndian).asShortBuffer();
            ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
            alreadySwapped = true;
        } else if ((imageMode == ImageMode.GRAY && colorrgb) || (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) {
            temp_image.widthStep(stride);
            temp_image.imageSize(size);
            temp_image.imageData(imageData);
        } else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) {
            error = flycaptureConvertImage(context, raw_image, conv_image);
            if (error != FLYCAPTURE_OK) {
                throw new Exception("flycaptureConvertImage() Error " + error);
            }
        }
        if (!alreadySwapped && depth != IPL_DEPTH_8U && !frameEndian.equals(ByteOrder.nativeOrder())) {
            // ack, the camera's endianness doesn't correspond to our machine ...
            // swap bytes of 16-bit images
            ByteBuffer bb = temp_image.getByteBuffer();
            ShortBuffer in = bb.order(frameEndian).asShortBuffer();
            ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
        }
        if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
            cvCvtColor(temp_image, return_image, CV_GRAY2BGR);
        } else if (imageMode == ImageMode.GRAY && (colorbayer || colorrgb)) {
            cvCvtColor(temp_image, return_image, CV_BGR2GRAY);
        }
    }
    error = flycaptureGetColorTileFormat(context, regOut);
    if (error != FLYCAPTURE_OK) {
        sensorPattern = -1L;
    } else {
        switch (regOut[0]) {
            case FLYCAPTURE_STIPPLEDFORMAT_BGGR:
                sensorPattern = SENSOR_PATTERN_BGGR;
                break;
            case FLYCAPTURE_STIPPLEDFORMAT_GBRG:
                sensorPattern = SENSOR_PATTERN_GBRG;
                break;
            case FLYCAPTURE_STIPPLEDFORMAT_GRBG:
                sensorPattern = SENSOR_PATTERN_GRBG;
                break;
            case FLYCAPTURE_STIPPLEDFORMAT_RGGB:
                sensorPattern = SENSOR_PATTERN_RGGB;
                break;
            default:
                sensorPattern = -1L;
        }
    }
    FlyCaptureTimestamp timeStamp = raw_image.timeStamp();
    timestamp = timeStamp.ulSeconds() * 1000000L + timeStamp.ulMicroSeconds();
    return converter.convert(return_image);
}
Also used : BytePointer(org.bytedeco.javacpp.BytePointer) ByteOrder(java.nio.ByteOrder) ShortBuffer(java.nio.ShortBuffer) ByteBuffer(java.nio.ByteBuffer)
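
For context, a sketch of how grab() is typically driven, assuming a single FlyCapture camera at device index 0 (the index and image mode are placeholders):

import org.bytedeco.javacv.FlyCaptureFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber.ImageMode;

public class GrabSketch {
    public static void main(String[] args) throws Exception {
        FlyCaptureFrameGrabber grabber = new FlyCaptureFrameGrabber(0);
        grabber.setImageMode(ImageMode.COLOR); // selects the conversion branches in grab()
        grabber.start();
        Frame frame = grabber.grab();          // the method shown above
        grabber.stop();
        grabber.release();
    }
}

The byte-order handling is the subtle part: the IMAGE_DATA_FORMAT register reports the camera's endianness, and the ShortBuffer copies swap the bytes of 16-bit images only when that order differs from ByteOrder.nativeOrder().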

Example 68 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacpp-presets by bytedeco.

The class NvEncoder, method getSequenceParams.

/**
 * @brief Gets the sequence and picture parameter headers.
 * The application can call this function after the encoder has been initialized to get
 * the SPS and PPS NAL units for the current encoder instance. The sequence header data
 * may change when the application calls the Reconfigure() function.
 */
public void getSequenceParams(Vector<Byte> seqParams) {
    // Assume the maximum SPS/PPS payload is 1 KB or less
    byte[] data = new byte[1024];
    BytePointer spsppsData = new BytePointer(data);
    memset(spsppsData, 0, data.length);
    NV_ENC_SEQUENCE_PARAM_PAYLOAD payload = new NV_ENC_SEQUENCE_PARAM_PAYLOAD();
    payload.version(NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER);
    IntPointer spsppsSize = new IntPointer(1); // out parameter: allocate room for one int
    payload.spsppsBuffer(spsppsData);
    payload.inBufferSize((int) spsppsData.capacity()); // buffer length in bytes; sizeof() would only return the 1-byte element size
    payload.outSPSPPSPayloadSize(spsppsSize);
    try {
        checkNvCodecApiCall(this.nvEncodeApiFunctionList.nvEncGetSequenceParams().call(this.encoder, payload));
    } catch (NvCodecException e) {
        e.printStackTrace();
    }
    // Copy back only the bytes the API actually wrote, as reported via outSPSPPSPayloadSize.
    int payloadSize = spsppsSize.get();
    spsppsData.asBuffer().get(data, 0, payloadSize);
    seqParams.clear();
    for (int index = 0; index < payloadSize; index++) {
        seqParams.add(data[index]);
    }
}
Also used : NvCodecException(org.bytedeco.nvcodec.samples.exceptions.NvCodecException) IntPointer(org.bytedeco.javacpp.IntPointer) BytePointer(org.bytedeco.javacpp.BytePointer)
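
A short usage sketch, assuming an already initialized NvEncoder instance named encoder (a hypothetical variable). The vector receives the SPS/PPS header bytes, which a muxer typically writes ahead of the first encoded packet:

java.util.Vector<Byte> header = new java.util.Vector<>();
encoder.getSequenceParams(header);
byte[] spspps = new byte[header.size()];
for (int i = 0; i < spspps.length; i++) {
    spspps[i] = header.get(i); // unbox into a contiguous array for writing
}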

Example 69 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacpp-presets by bytedeco.

The class AppDec, method decodeMediaFile.

/**
 * @param cuContext       - Handle to CUDA context
 * @param inFilePath      - Path to file to be decoded
 * @param outFilePath     - Path to output file into which raw frames are stored
 * @param outPlanar       - Flag to indicate whether output needs to be converted to planar format
 * @param cropRectangle   - Cropping rectangle coordinates
 * @param resizeDimension - Resizing dimensions for output
 * @brief Function to decode a media file and write its raw frames into an output file.
 */
public static void decodeMediaFile(CUctx_st cuContext, String inFilePath, String outFilePath, boolean outPlanar, Rectangle cropRectangle, Dimension resizeDimension) {
    try (FileOutputStream outputStream = new FileOutputStream(outFilePath)) {
        try (MP4Demuxer demuxer = MP4Demuxer.createMP4Demuxer(NIOUtils.readableChannel(new File(inFilePath)))) {
            DemuxerTrack videoTrack = demuxer.getVideoTrack();
            int codec = convertToNvCodec(videoTrack.getMeta().getCodec());
            NvDecoder nvDecoder = new NvDecoder(cuContext, false, codec, false, false, cropRectangle, resizeDimension, 0, 0, 1000);
            int frame = 0;
            int frameReturned;
            BytePointer framePointer;
            boolean decodedOutSemiPlanar;
            Packet packet;
            do {
                packet = videoTrack.nextFrame();
                byte[] packetDataArray;
                if (packet == null) {
                    packetDataArray = new byte[0];
                } else {
                    packetDataArray = packet.getData().array();
                }
                BytePointer bytePointer = new BytePointer(packetDataArray);
                frameReturned = nvDecoder.decode(bytePointer, packetDataArray.length, 0, 0);
                if (frame == 0 && frameReturned != 0) {
                    System.out.println(nvDecoder.getVideoInfo());
                }
                decodedOutSemiPlanar = (nvDecoder.getOutputFormat() == cudaVideoSurfaceFormat_NV12) || (nvDecoder.getOutputFormat() == cudaVideoSurfaceFormat_P016);
                for (int index = 0; index < frameReturned; index++) {
                    framePointer = nvDecoder.getFrame(null);
                    if (outPlanar && decodedOutSemiPlanar) {
                        convertSemiPlanarToPlanar(framePointer, nvDecoder.getWidth(), nvDecoder.getHeight(), nvDecoder.getBitDepth());
                    }
                    byte[] frameData = new byte[nvDecoder.getFrameSize()];
                    framePointer.get(frameData);
                    outputStream.write(frameData);
                }
                frame += frameReturned;
            } while (packet != null);
            String[] aszDecodeOutFormat = new String[] { "NV12", "P016", "YUV444", "YUV444P16" };
            if (outPlanar) {
                aszDecodeOutFormat[0] = "iyuv";
                aszDecodeOutFormat[1] = "yuv420p16";
            }
            System.out.println("Total frame decoded: " + frame);
            System.out.println("Saved in file " + outFilePath + " in " + aszDecodeOutFormat[nvDecoder.getOutputFormat()] + " format");
            nvDecoder.dispose();
        }
    } catch (NvCodecException | IOException e) {
        e.printStackTrace();
    }
}
Also used : Packet(org.jcodec.common.model.Packet) MP4Demuxer(org.jcodec.containers.mp4.demuxer.MP4Demuxer) BytePointer(org.bytedeco.javacpp.BytePointer) NvDecoder(org.bytedeco.nvcodec.samples.decoder.NvDecoder) NvCodecException(org.bytedeco.nvcodec.samples.exceptions.NvCodecException) DemuxerTrack(org.jcodec.common.DemuxerTrack) NvCodecUtil.checkInputFile(org.bytedeco.nvcodec.samples.util.NvCodecUtil.checkInputFile)
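
Mirroring the CUDA setup from Example 70 below, a hedged driver sketch for decodeMediaFile; the file paths are placeholders, checkCudaApiCall is the samples' helper seen in Example 70, and passing null for the crop and resize arguments is an assumption about the sample's NvDecoder:

public static void decodeSketch() throws Exception {
    checkCudaApiCall(cuInit(0));
    IntPointer cuDevice = new IntPointer(1);
    checkCudaApiCall(cuDeviceGet(cuDevice, 0)); // GPU ordinal 0
    CUctx_st cuContext = new CUctx_st();
    checkCudaApiCall(cuCtxCreate(cuContext, 0, cuDevice.get()));
    decodeMediaFile(cuContext, "in.mp4", "out.yuv", true, null, null);
}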

Example 70 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacpp-presets by bytedeco.

The class AppEncCuda, method main.

public static void main(String[] args) {
    try {
        parseCommandLine(args.length, args);
        NvCodecUtil.checkInputFile(szInputFilePath);
        NvCodecUtil.validateResolution(width, height);
        if (szOutputFilePath == null) {
            szOutputFilePath = initParam.isCodecH264() ? "out.h264" : "out.hevc";
        }
        try {
            checkCudaApiCall(cuInit(0));
            IntPointer nGpu = new IntPointer(1);
            checkCudaApiCall(cuDeviceGetCount(nGpu));
            if (iGpu < 0 || iGpu >= nGpu.get()) {
                System.out.println("GPU ordinal out of range. Should be within [0 ," + (nGpu.get() - 1) + "]");
                return;
            }
            IntPointer cuDevice = new IntPointer(1);
            checkCudaApiCall(cuDeviceGet(cuDevice, iGpu));
            BytePointer szDeviceName = new BytePointer(80);
            checkCudaApiCall(cuDeviceGetName(szDeviceName, (int) szDeviceName.limit(), cuDevice.get()));
            System.out.println("GPU in use: " + szDeviceName.getString());
            CUctx_st cuContext = new CUctx_st();
            checkCudaApiCall(cuCtxCreate(cuContext, 0, cuDevice.get()));
            // Open input file
            FileInputStream input = new FileInputStream(szInputFilePath);
            // Open output file
            FileOutputStream output = new FileOutputStream(szOutputFilePath);
            // Encode
            if (bOutputInVidMem) {
                encodeCudaOpInVidMem(width, height, eFormat, initParam, cuContext, input, output, cuStreamType);
            } else {
                encodeCuda(width, height, eFormat, initParam, cuContext, input, output);
            }
            output.close();
            input.close();
            System.out.println("Bitstream saved in file " + szOutputFilePath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    } catch (CudaException | InvalidArgument e) {
        e.printStackTrace();
    }
}
Also used : CUctx_st(org.bytedeco.cuda.cudart.CUctx_st) CudaException(org.bytedeco.nvcodec.samples.exceptions.CudaException) InvalidArgument(org.bytedeco.nvcodec.samples.exceptions.InvalidArgument) IntPointer(org.bytedeco.javacpp.IntPointer) FileOutputStream(java.io.FileOutputStream) BytePointer(org.bytedeco.javacpp.BytePointer) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream)
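
The szDeviceName lines above show the usual BytePointer pattern for C string out-parameters; condensed, with dev standing in for a device ordinal obtained from cuDeviceGet:

BytePointer name = new BytePointer(80);         // fixed-size native char buffer
cuDeviceGetName(name, (int) name.limit(), dev); // limit() equals the allocated capacity here
String deviceName = name.getString();           // copies bytes up to the NUL terminator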

Aggregations

BytePointer (org.bytedeco.javacpp.BytePointer): 84
IntPointer (org.bytedeco.javacpp.IntPointer): 23
ByteBuffer (java.nio.ByteBuffer): 20
PointerPointer (org.bytedeco.javacpp.PointerPointer): 20
IOException (java.io.IOException): 16
Pointer (org.bytedeco.javacpp.Pointer): 16
PointerScope (org.bytedeco.javacpp.PointerScope): 13
DoublePointer (org.bytedeco.javacpp.DoublePointer): 12
FloatPointer (org.bytedeco.javacpp.FloatPointer): 12
CompressedDataBuffer (org.nd4j.linalg.compression.CompressedDataBuffer): 10
CompressionDescriptor (org.nd4j.linalg.compression.CompressionDescriptor): 10
ShortBuffer (java.nio.ShortBuffer): 9
ShortPointer (org.bytedeco.javacpp.ShortPointer): 9
IntBuffer (java.nio.IntBuffer): 7
DoubleBuffer (java.nio.DoubleBuffer): 6
FloatBuffer (java.nio.FloatBuffer): 6
Nonnull (javax.annotation.Nonnull): 5
LongPointer (org.bytedeco.javacpp.LongPointer): 5
TF_Status (org.tensorflow.internal.c_api.TF_Status): 4
ByteOrder (java.nio.ByteOrder): 3