Example 31 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.

The class OpticalFlowTracker, method main.

public static void main(String[] args) {
    // Load two images and allocate other structures
    IplImage imgA = cvLoadImage("image0.png", CV_LOAD_IMAGE_GRAYSCALE);
    IplImage imgB = cvLoadImage("image1.png", CV_LOAD_IMAGE_GRAYSCALE);
    CvSize img_sz = cvGetSize(imgA);
    int win_size = 15;
    // IplImage imgC = cvLoadImage("OpticalFlow1.png",
    // CV_LOAD_IMAGE_UNCHANGED);
    IplImage imgC = cvLoadImage("image0.png", CV_LOAD_IMAGE_UNCHANGED);
    // Get the features for tracking
    IplImage eig_image = cvCreateImage(img_sz, IPL_DEPTH_32F, 1);
    IplImage tmp_image = cvCreateImage(img_sz, IPL_DEPTH_32F, 1);
    IntPointer corner_count = new IntPointer(1).put(MAX_CORNERS);
    CvPoint2D32f cornersA = new CvPoint2D32f(MAX_CORNERS);
    CvArr mask = null;
    cvGoodFeaturesToTrack(imgA, eig_image, tmp_image, cornersA, corner_count, 0.05, 5.0, mask, 3, 0, 0.04);
    cvFindCornerSubPix(imgA, cornersA, corner_count.get(), cvSize(win_size, win_size), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));
    // Call Lucas Kanade algorithm
    BytePointer features_found = new BytePointer(MAX_CORNERS);
    FloatPointer feature_errors = new FloatPointer(MAX_CORNERS);
    CvSize pyr_sz = cvSize(imgA.width() + 8, imgB.height() / 3);
    IplImage pyrA = cvCreateImage(pyr_sz, IPL_DEPTH_32F, 1);
    IplImage pyrB = cvCreateImage(pyr_sz, IPL_DEPTH_32F, 1);
    CvPoint2D32f cornersB = new CvPoint2D32f(MAX_CORNERS);
    cvCalcOpticalFlowPyrLK(imgA, imgB, pyrA, pyrB, cornersA, cornersB, corner_count.get(), cvSize(win_size, win_size), 5, features_found, feature_errors, cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.3), 0);
    // Make an image of the results
    for (int i = 0; i < corner_count.get(); i++) {
        if (features_found.get(i) == 0 || feature_errors.get(i) > 550) {
            System.out.println("Error is " + feature_errors.get(i) + "/n");
            continue;
        }
        System.out.println("Got it/n");
        cornersA.position(i);
        cornersB.position(i);
        CvPoint p0 = cvPoint(Math.round(cornersA.x()), Math.round(cornersA.y()));
        CvPoint p1 = cvPoint(Math.round(cornersB.x()), Math.round(cornersB.y()));
        cvLine(imgC, p0, p1, CV_RGB(255, 0, 0), 2, 8, 0);
    }
    cvSaveImage("image0-1.png", imgC);
    cvNamedWindow("LKpyr_OpticalFlow", 0);
    cvShowImage("LKpyr_OpticalFlow", imgC);
    cvWaitKey(0);
}
Also used: FloatPointer (org.bytedeco.javacpp.FloatPointer), IntPointer (org.bytedeco.javacpp.IntPointer), BytePointer (org.bytedeco.javacpp.BytePointer)
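
The BytePointer above backs the features_found status array that cvCalcOpticalFlowPyrLK fills with one byte per tracked corner. Below is a minimal, OpenCV-free sketch of that allocate-and-read idiom; the class name and values are illustrative only.

import org.bytedeco.javacpp.BytePointer;

public class BytePointerBasics {
    public static void main(String[] args) {
        // Allocate 10 native bytes; try-with-resources deallocates them on close
        try (BytePointer status = new BytePointer(10)) {
            // Native allocations start uninitialized, so clear them first
            status.zero();
            // Mark the first element as "found"
            status.put(0, (byte) 1);
            for (int i = 0; i < status.capacity(); i++) {
                System.out.println(i + " -> " + status.get(i));
            }
        }
    }
}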

Example 32 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacpp-presets by bytedeco.

The class AppEncCuda, method encodeCuda.

public static void encodeCuda(int nWidth, int nHeight, int eFormat, NvEncoderInitParam encodeCLIOptions, CUctx_st cuContext, FileInputStream input, FileOutputStream output) throws IOException {
    NvEncoderCuda encoder = new NvEncoderCuda(cuContext, nWidth, nHeight, eFormat);
    initializeEncoder(encoder, encodeCLIOptions, eFormat);
    int nFrameSize = encoder.getFrameSize();
    byte[] pHostFrame = new byte[nFrameSize];
    int nFrame = 0;
    while (true) {
        // Load the next frame from disk
        int nRead = input.read(pHostFrame);
        // For receiving encoded packets
        Vector<Vector<Byte>> vPacket = new Vector<>();
        if (nRead == nFrameSize) {
            final NvEncoderInputFrame encoderInputFrame = encoder.getNextInputFrame();
            NvEncoderCuda.copyToDeviceFrame(cuContext, new BytePointer(pHostFrame), 0, encoderInputFrame.getInputPointer().getPointer(LongPointer.class), encoderInputFrame.getPitch(), encoder.getEncodeWidth(), encoder.getEncodeHeight(), CU_MEMORYTYPE_HOST, encoderInputFrame.getBufferFormat(), encoderInputFrame.getChromaOffsets(), encoderInputFrame.getNumChromaPlanes());
            encoder.encodeFrame(vPacket);
        } else {
            encoder.endEncode(vPacket);
        }
        nFrame += vPacket.size();
        for (Vector<Byte> packet : vPacket) {
            // For each encoded packet
            byte[] bytes = new byte[packet.size()];
            for (int index = 0; index < packet.size(); index++) {
                bytes[index] = packet.get(index);
            }
            output.write(bytes);
        }
        if (nRead != nFrameSize)
            break;
    }
    encoder.dispose();
    encoder.destroyEncoder();
    System.out.println("Total frames encoded: " + nFrame);
}
Also used: LongPointer (org.bytedeco.javacpp.LongPointer), BytePointer (org.bytedeco.javacpp.BytePointer), Vector (java.util.Vector)
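
encodeCuda stages each frame read from disk into native memory with new BytePointer(pHostFrame) before the CUDA copy. A CUDA-free sketch of that heap-to-native round trip follows; the class name and data are made up for illustration.

import java.util.Arrays;
import org.bytedeco.javacpp.BytePointer;

public class HostFrameCopy {
    public static void main(String[] args) {
        byte[] hostFrame = {10, 20, 30, 40};
        // The varargs constructor allocates native memory and copies the array into it
        try (BytePointer nativeFrame = new BytePointer(hostFrame)) {
            byte[] roundTrip = new byte[(int) nativeFrame.capacity()];
            // Bulk copy from native memory back to the Java heap
            nativeFrame.get(roundTrip);
            System.out.println(Arrays.toString(roundTrip));
        }
    }
}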

Example 33 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacpp-presets by bytedeco.

The class AppEncCuda, method showEncoderCapability.

public static void showEncoderCapability() {
    StringBuilder sb = new StringBuilder();
    try {
        checkCudaApiCall(cuInit(0));
        IntPointer gpuNum = new IntPointer();
        checkCudaApiCall(cuDeviceGetCount(gpuNum));
        sb.append("Encoder Capability \n\n");
        for (int iGpu = 0; iGpu < gpuNum.get(); iGpu++) {
            IntPointer cuDevice = new IntPointer();
            checkCudaApiCall(cuDeviceGet(cuDevice, iGpu));
            BytePointer szDeviceName = new BytePointer(80);
            // Pass the buffer's capacity; sizeof() is 1 for BytePointer and would truncate the name
            checkCudaApiCall(cuDeviceGetName(szDeviceName, (int) szDeviceName.capacity(), cuDevice.get()));
            CUctx_st cuContext = new CUctx_st();
            checkCudaApiCall(cuCtxCreate(cuContext, 0, cuDevice.get()));
            NvEncoderCuda enc = new NvEncoderCuda(cuContext, 1280, 720, NV_ENC_BUFFER_FORMAT_NV12);
            sb.append("GPU ").append(iGpu).append(" - ").append(szDeviceName.getString()).append("\n");
            sb.append("\tH264:\t\t  ").append(enc.getCapabilityValue(NV_ENC_CODEC_H264_GUID(), NV_ENC_CAPS_SUPPORTED_RATECONTROL_MODES) != 0 ? "yes" : "no").append("\n").append("\tH264_444:\t  ").append(enc.getCapabilityValue(NV_ENC_CODEC_H264_GUID(), NV_ENC_CAPS_SUPPORT_YUV444_ENCODE) != 0 ? "yes" : "no").append("\n").append("\tH264_ME:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_H264_GUID(), NV_ENC_CAPS_SUPPORT_MEONLY_MODE) != 0 ? "yes" : "no").append("\n").append("\tH264_WxH:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_H264_GUID(), NV_ENC_CAPS_WIDTH_MAX)).append("*").append(enc.getCapabilityValue(NV_ENC_CODEC_H264_GUID(), NV_ENC_CAPS_HEIGHT_MAX)).append("\n").append("\tHEVC:\t\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_SUPPORTED_RATECONTROL_MODES) != 0 ? "yes" : "no").append("\n").append("\tHEVC_Main10:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_SUPPORT_10BIT_ENCODE) != 0 ? "yes" : "no").append("\n").append("\tHEVC_Lossless:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_SUPPORT_LOSSLESS_ENCODE) != 0 ? "yes" : "no").append("\n").append("\tHEVC_SAO:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_SUPPORT_SAO) != 0 ? "yes" : "no").append("\n").append("\tHEVC_444:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_SUPPORT_YUV444_ENCODE) != 0 ? "yes" : "no").append("\n").append("\tHEVC_ME:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_SUPPORT_MEONLY_MODE) != 0 ? "yes" : "no").append("\n").append("\tHEVC_WxH:\t").append("  ").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_WIDTH_MAX)).append("*").append(enc.getCapabilityValue(NV_ENC_CODEC_HEVC_GUID(), NV_ENC_CAPS_HEIGHT_MAX)).append("\n\n");
            enc.destroyEncoder();
            checkCudaApiCall(cuCtxDestroy(cuContext));
        }
        // Print the report once, after every GPU has been queried, so earlier entries are not repeated
        System.out.println(sb);
    } catch (CudaException e) {
        e.printStackTrace();
    }
}
Also used: CUctx_st (org.bytedeco.cuda.cudart.CUctx_st), CudaException (org.bytedeco.nvcodec.samples.exceptions.CudaException), IntPointer (org.bytedeco.javacpp.IntPointer), BytePointer (org.bytedeco.javacpp.BytePointer)
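
showEncoderCapability hands a fixed-size BytePointer to cuDeviceGetName as an output string buffer and decodes it with getString(). The sketch below shows the same buffer pattern without CUDA; filling the buffer with putString is our stand-in for the driver call, not part of the sample.

import org.bytedeco.javacpp.BytePointer;

public class NameBuffer {
    public static void main(String[] args) {
        // An 80-byte native buffer, like szDeviceName above
        try (BytePointer name = new BytePointer(80)) {
            // Stand-in for cuDeviceGetName: writes the bytes plus a null terminator
            name.putString("Illustrative device");
            // Reads back up to the null terminator
            System.out.println(name.getString());
        }
    }
}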

Example 34 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacpp-presets by bytedeco.

The class EmitBitcode, method EvaluateBitcode.

/**
 * Sample code for importing a LLVM bitcode file and running a function
 * inside of the imported module
 * <p>
 * This sample depends on EmitBitcode to produce the bitcode file. Make sure
 * you've run the EmitBitcode sample and have the 'sum.bc' bitcode file.
 * <p>
 * This sample contains code for the following steps:
 * <p>
 * 1. Initializing required LLVM components
 * 2. Load and parse the bitcode
 * 3. Run the 'sum' function inside the module
 * 4. Dispose of the allocated resources
 */
public static void EvaluateBitcode() {
    // Stage 1: Initialize LLVM components
    LLVMInitializeCore(LLVMGetGlobalPassRegistry());
    LLVMInitializeNativeAsmPrinter();
    LLVMInitializeNativeAsmParser();
    LLVMInitializeNativeTarget();
    // Stage 2: Load and parse bitcode
    LLVMContextRef context = LLVMContextCreate();
    LLVMTypeRef i32Type = LLVMInt32TypeInContext(context);
    LLVMModuleRef module = new LLVMModuleRef();
    LLVMMemoryBufferRef membuf = new LLVMMemoryBufferRef();
    BytePointer inputFile = new BytePointer("./sum.bc");
    // Receives any error message the LLVM C API reports
    BytePointer error = new BytePointer((Pointer) null);
    if (LLVMCreateMemoryBufferWithContentsOfFile(inputFile, membuf, error) != 0) {
        System.err.println("Failed to read file into memory buffer: " + error.getString());
        LLVMDisposeMessage(error);
        return;
    }
    if (LLVMParseBitcodeInContext2(context, membuf, module) != 0) {
        System.err.println("Failed to parser module from bitcode");
        return;
    }
    LLVMExecutionEngineRef engine = new LLVMExecutionEngineRef();
    if (LLVMCreateInterpreterForModule(engine, module, error) != 0) {
        System.err.println("Failed to create LLVM interpreter: " + error.getString());
        LLVMDisposeMessage(error);
        return;
    }
    // Stage 3: Run the 'sum' function inside the module
    LLVMValueRef sum = LLVMGetNamedFunction(module, "sum");
    PointerPointer<Pointer> arguments = new PointerPointer<>(2)
            .put(0, LLVMCreateGenericValueOfInt(i32Type, 42, /* signExtend */ 0))
            .put(1, LLVMCreateGenericValueOfInt(i32Type, 30, /* signExtend */ 0));
    LLVMGenericValueRef result = LLVMRunFunction(engine, sum, 2, arguments);
    System.out.println();
    System.out.print("The result of add(42, 30) imported from bitcode and executed with LLVM interpreter is: ");
    System.out.println(LLVMGenericValueToInt(result, /* signExtend */
    0));
    // Stage 4: Dispose of the allocated resources
    LLVMDisposeModule(module);
    LLVMContextDispose(context);
}
Also used: LLVMExecutionEngineRef (org.bytedeco.llvm.LLVM.LLVMExecutionEngineRef), LLVMContextRef (org.bytedeco.llvm.LLVM.LLVMContextRef), LLVMMemoryBufferRef (org.bytedeco.llvm.LLVM.LLVMMemoryBufferRef), PointerPointer (org.bytedeco.javacpp.PointerPointer), BytePointer (org.bytedeco.javacpp.BytePointer), LLVMValueRef (org.bytedeco.llvm.LLVM.LLVMValueRef), Pointer (org.bytedeco.javacpp.Pointer), LLVMTypeRef (org.bytedeco.llvm.LLVM.LLVMTypeRef), LLVMModuleRef (org.bytedeco.llvm.LLVM.LLVMModuleRef), LLVMGenericValueRef (org.bytedeco.llvm.LLVM.LLVMGenericValueRef)
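
EvaluateBitcode passes the bitcode path to the LLVM C API as a native, null-terminated string via new BytePointer("./sum.bc"). Here is a small standalone sketch of that constructor; the reported size assumes the default platform charset.

import org.bytedeco.javacpp.BytePointer;

public class CStringArg {
    public static void main(String[] args) {
        // Allocates a native copy of the text followed by a null terminator
        try (BytePointer path = new BytePointer("./sum.bc")) {
            System.out.println(path.getString()); // ./sum.bc
            System.out.println("bytes including terminator: " + path.capacity());
        }
    }
}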

Example 35 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacpp-presets by bytedeco.

The class RealSense2Show, method main.

public static void main(String[] args) {
    final int width = 640;
    final int height = 480;
    final int fps = 30;
    final int COLOR_STREAM_INDEX = -1;
    final int DEPTH_STREAM_INDEX = -1;
    rs2_error e = new rs2_error();
    realsense2.rs2_log_to_console(realsense2.RS2_LOG_SEVERITY_ERROR, e);
    if (!check_error(e)) {
        return;
    }
    rs2_context ctx = realsense2.rs2_create_context(realsense2.RS2_API_VERSION, e);
    if (!check_error(e)) {
        return;
    }
    rs2_device_list list = realsense2.rs2_query_devices(ctx, e);
    if (!check_error(e)) {
        return;
    }
    int rs2_list_size = realsense2.rs2_get_device_count(list, e);
    if (!check_error(e)) {
        return;
    }
    System.out.printf("realsense device %d\n", rs2_list_size);
    if (rs2_list_size == 0) {
        return;
    }
    rs2_device rsdev = realsense2.rs2_create_device(list, 0, e);
    if (!check_error(e)) {
        return;
    }
    if (rsdev == null) {
        System.err.println("device not found. serial number = ");
        return;
    }
    // Declare RealSense pipeline, encapsulating the actual device and sensors
    rs2_pipeline pipe = realsense2.rs2_create_pipeline(ctx, e);
    // Create a configuration for configuring the pipeline with a non default profile
    rs2_config cfg = realsense2.rs2_create_config(e);
    // Add desired streams to configuration
    realsense2.rs2_config_enable_stream(cfg, realsense2.RS2_STREAM_COLOR, COLOR_STREAM_INDEX, width, height, realsense2.RS2_FORMAT_RGB8, fps, e);
    realsense2.rs2_config_enable_stream(cfg, realsense2.RS2_STREAM_DEPTH, DEPTH_STREAM_INDEX, width, height, realsense2.RS2_FORMAT_Z16, fps, e);
    if (!check_error(e)) {
        return;
    }
    // Start streaming with default recommended configuration
    // The default video configuration contains Depth and Color streams
    // If a device is capable to stream IMU data, both Gyro and Accelerometer are enabled by default
    rs2_pipeline_profile selection = realsense2.rs2_pipeline_start_with_config(pipe, cfg, e);
    if (!check_error(e)) {
        return;
    }
    // Define align object that will be used to align to color viewport.
    // Creating align object is an expensive operation
    // that should not be performed in the main loop.
    rs2_processing_block align_to_color = realsense2.rs2_create_align(realsense2.RS2_STREAM_COLOR, e);
    if (!check_error(e)) {
        return;
    }
    rs2_frame_queue align_queue = realsense2.rs2_create_frame_queue(1, e);
    if (!check_error(e)) {
        return;
    }
    realsense2.rs2_start_processing_queue(align_to_color, align_queue, e);
    if (!check_error(e)) {
        return;
    }
    int psize = 100;
    IntPointer stream = new IntPointer(psize);
    IntPointer format = new IntPointer(psize);
    IntPointer indexP = new IntPointer(psize);
    IntPointer unique_id = new IntPointer(psize);
    IntPointer framerate = new IntPointer(psize);
    rs2_frame color_frame = null;
    rs2_frame depth_frame = null;
    while (true) {
        rs2_frame tmpFrames = realsense2.rs2_pipeline_wait_for_frames(pipe, realsense2.RS2_DEFAULT_TIMEOUT, e);
        if (!check_error(e)) {
            continue;
        }
        // Align depth frame to color viewport
        realsense2.rs2_frame_add_ref(tmpFrames, e);
        if (!check_error(e)) {
            rs2_release_frames(color_frame, depth_frame, tmpFrames);
            continue;
        }
        realsense2.rs2_process_frame(align_to_color, tmpFrames, e);
        rs2_frame frames = realsense2.rs2_wait_for_frame(align_queue, 5000, e);
        if (!check_error(e)) {
            rs2_release_frames(color_frame, depth_frame, tmpFrames, frames);
            continue;
        }
        rs2_release_frames(tmpFrames);
        int num_of_frames = realsense2.rs2_embedded_frames_count(frames, e);
        // retrieve each frame
        for (int i = 0; i < num_of_frames; i++) {
            rs2_frame frame = realsense2.rs2_extract_frame(frames, i, e);
            rs2_stream_profile mode = realsense2.rs2_get_frame_stream_profile(frame, e);
            realsense2.rs2_get_stream_profile_data(mode, stream, format, indexP, unique_id, framerate, e);
            String stream_type = realsense2.rs2_stream_to_string(stream.get()).getString();
            // retrieve each frame
            switch(stream_type.toLowerCase()) {
                case "color":
                    color_frame = frame;
                    break;
                case "depth":
                    depth_frame = frame;
                    break;
                default:
                    System.err.println("invalid stream data " + stream_type);
                    break;
            }
        }
        if (color_frame == null || depth_frame == null) {
            // release frames
            rs2_release_frames(color_frame, depth_frame, frames);
            continue;
        }
        BytePointer color_pointer = new BytePointer(realsense2.rs2_get_frame_data(color_frame, e));
        int color_data_size = realsense2.rs2_get_frame_data_size(color_frame, e);
        byte[] color_byte = new byte[color_data_size];
        color_pointer.get(color_byte, 0, color_data_size);
        color_pointer.close();
        BytePointer depth_pointer = new BytePointer(realsense2.rs2_get_frame_data(depth_frame, e));
        int depth_data_size = realsense2.rs2_get_frame_data_size(depth_frame, e);
        byte[] depth_byte = new byte[depth_data_size];
        depth_pointer.get(depth_byte, 0, depth_data_size);
        depth_pointer.close();
        byte[] color_depth_byte = toColorMap(depth_byte, width, height, -1, -1);
        // show rgb
        OpenCVFX.imshow("color", color_byte, width, height);
        // show depth
        OpenCVFX.imshow("depth", color_depth_byte, width, height);
        // release frames
        rs2_release_frames(color_frame, depth_frame, frames);
        if (OpenCVFX.waitKey() == 27) {
            break;
        }
    }
    OpenCVFX.destroyAllWindows();
    // Stop pipeline streaming
    realsense2.rs2_pipeline_stop(pipe, e);
    // Release resources
    realsense2.rs2_delete_pipeline_profile(selection);
    realsense2.rs2_delete_processing_block(align_to_color);
    realsense2.rs2_delete_frame_queue(align_queue);
    realsense2.rs2_delete_config(cfg);
    realsense2.rs2_delete_pipeline(pipe);
    realsense2.rs2_delete_device(rsdev);
    realsense2.rs2_delete_device_list(list);
    realsense2.rs2_delete_context(ctx);
}
Also used: BytePointer (org.bytedeco.javacpp.BytePointer), IntPointer (org.bytedeco.javacpp.IntPointer)
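
The capture loop wraps the raw pointer returned by rs2_get_frame_data in a BytePointer and copies exactly rs2_get_frame_data_size bytes out, since a wrapped foreign pointer carries no reliable size of its own. Below is a self-contained sketch of that wrap-and-copy step, with our own allocation standing in for the camera's frame data; all names are illustrative.

import org.bytedeco.javacpp.BytePointer;

public class WrapAndCopy {
    public static void main(String[] args) {
        // Stand-in for the buffer a C API such as rs2_get_frame_data would return
        try (BytePointer frameData = new BytePointer(16)) {
            frameData.zero();
            // Stand-in for rs2_get_frame_data_size
            int dataSize = 16;
            // Wraps the same address without copying the data
            BytePointer view = new BytePointer(frameData);
            byte[] bytes = new byte[dataSize];
            // Bulk copy into the Java heap
            view.get(bytes, 0, dataSize);
            System.out.println("copied " + bytes.length + " bytes");
        }
    }
}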

Aggregations

BytePointer (org.bytedeco.javacpp.BytePointer): 79
IntPointer (org.bytedeco.javacpp.IntPointer): 22
PointerPointer (org.bytedeco.javacpp.PointerPointer): 20
ByteBuffer (java.nio.ByteBuffer): 19
IOException (java.io.IOException): 16
Pointer (org.bytedeco.javacpp.Pointer): 16
PointerScope (org.bytedeco.javacpp.PointerScope): 13
DoublePointer (org.bytedeco.javacpp.DoublePointer): 12
FloatPointer (org.bytedeco.javacpp.FloatPointer): 12
CompressedDataBuffer (org.nd4j.linalg.compression.CompressedDataBuffer): 10
CompressionDescriptor (org.nd4j.linalg.compression.CompressionDescriptor): 10
ShortBuffer (java.nio.ShortBuffer): 9
ShortPointer (org.bytedeco.javacpp.ShortPointer): 9
IntBuffer (java.nio.IntBuffer): 7
DoubleBuffer (java.nio.DoubleBuffer): 6
FloatBuffer (java.nio.FloatBuffer): 6
Nonnull (javax.annotation.Nonnull): 5
LongPointer (org.bytedeco.javacpp.LongPointer): 5
TF_Status (org.tensorflow.internal.c_api.TF_Status): 4
ByteOrder (java.nio.ByteOrder): 3