Search in sources :

Example 11 with IntPointer

Use of org.bytedeco.javacpp.IntPointer in the javacv project by bytedeco.

From the class OpenCVFaceRecognizer, method main.

/**
 * Trains a face recognizer on a directory of labeled images and prints the
 * predicted label for a test image.
 *
 * <p>Usage: {@code OpenCVFaceRecognizer <training-dir> <test-image>}.
 * Training file names must start with an integer label followed by a dash,
 * e.g. {@code 3-subject.pgm}; the prefix is parsed as the label.
 */
public static void main(String[] args) {
    // Guard against missing arguments before indexing into args.
    if (args.length < 2) {
        System.err.println("Usage: OpenCVFaceRecognizer <training-dir> <test-image>");
        return;
    }
    String trainingDir = args[0];
    Mat testImage = imread(args[1], IMREAD_GRAYSCALE);
    File root = new File(trainingDir);
    // Accept common image extensions, case-insensitively.
    FilenameFilter imgFilter = new FilenameFilter() {

        public boolean accept(File dir, String name) {
            name = name.toLowerCase();
            return name.endsWith(".jpg") || name.endsWith(".pgm") || name.endsWith(".png");
        }
    };
    File[] imageFiles = root.listFiles(imgFilter);
    // listFiles() returns null when the path is not a readable directory;
    // previously this caused a NullPointerException below.
    if (imageFiles == null || imageFiles.length == 0) {
        System.err.println("No training images found in " + trainingDir);
        return;
    }
    MatVector images = new MatVector(imageFiles.length);
    // One 32-bit integer label per training image.
    Mat labels = new Mat(imageFiles.length, 1, CV_32SC1);
    IntBuffer labelsBuf = labels.createBuffer();
    int counter = 0;
    for (File image : imageFiles) {
        Mat img = imread(image.getAbsolutePath(), IMREAD_GRAYSCALE);
        // The label is the integer prefix of the file name, e.g. "3-subject.pgm" -> 3.
        int label = Integer.parseInt(image.getName().split("\\-")[0]);
        images.put(counter, img);
        labelsBuf.put(counter, label);
        counter++;
    }
    FaceRecognizer faceRecognizer = FisherFaceRecognizer.create();
    // FaceRecognizer faceRecognizer = EigenFaceRecognizer.create();
    // FaceRecognizer faceRecognizer = LBPHFaceRecognizer.create();
    faceRecognizer.train(images, labels);
    // predict() writes the predicted label and its confidence into these out-parameters.
    IntPointer label = new IntPointer(1);
    DoublePointer confidence = new DoublePointer(1);
    faceRecognizer.predict(testImage, label, confidence);
    int predictedLabel = label.get(0);
    System.out.println("Predicted label: " + predictedLabel);
}
Also used : FilenameFilter(java.io.FilenameFilter) IntBuffer(java.nio.IntBuffer) IntPointer(org.bytedeco.javacpp.IntPointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) File(java.io.File)

Example 12 with IntPointer

Use of org.bytedeco.javacpp.IntPointer in the javacv project by bytedeco.

From the class FaceRecognizerInVideo, method main.

/**
 * Runs face detection and LBPH face recognition over a video file, drawing a
 * green rectangle and the predicted label next to each detected face.
 * Press Escape in the display window to exit early.
 *
 * @param args args[0] = path of the video to analyze,
 *             args[1] = path of the trained recognizer model to load
 * @throws Exception if grabbing frames from the video fails
 */
public static void main(String[] args) throws Exception {
    OpenCVFrameConverter.ToMat converterToMat = new OpenCVFrameConverter.ToMat();
    if (args.length < 2) {
        System.out.println("Two parameters are required to run this program, first parameter is the analyzed video and second parameter is the trained result for fisher faces.");
        // Bug fix: previously execution fell through and crashed on args[0]/args[1].
        return;
    }
    String videoFileName = args[0];
    String trainedResult = args[1];
    CascadeClassifier face_cascade = new CascadeClassifier("data\\haarcascade_frontalface_default.xml");
    FaceRecognizer lbphFaceRecognizer = LBPHFaceRecognizer.create();
    lbphFaceRecognizer.read(trainedResult);
    File f = new File(videoFileName);
    OpenCVFrameGrabber grabber = null;
    try {
        grabber = OpenCVFrameGrabber.createDefault(f);
        grabber.start();
    } catch (Exception e) {
        System.err.println("Failed to start the grabber.");
        // Bug fix: without this return, the unusable grabber caused an NPE in the loop.
        return;
    }
    Frame videoFrame = null;
    Mat videoMat = new Mat();
    while (true) {
        videoFrame = grabber.grab();
        // grab() returns null at end of stream; stop instead of passing null on.
        if (videoFrame == null) {
            break;
        }
        videoMat = converterToMat.convert(videoFrame);
        // Skip frames that carry no image data (e.g. audio-only frames).
        if (videoMat == null) {
            continue;
        }
        Mat videoMatGray = new Mat();
        // Convert the current frame to grayscale:
        cvtColor(videoMat, videoMatGray, COLOR_BGRA2GRAY);
        equalizeHist(videoMatGray, videoMatGray);
        Point p = new Point();
        RectVector faces = new RectVector();
        // Find the faces in the frame:
        face_cascade.detectMultiScale(videoMatGray, faces);
        // annotate it in the video. Cool or what?
        for (int i = 0; i < faces.size(); i++) {
            Rect face_i = faces.get(i);
            Mat face = new Mat(videoMatGray, face_i);
            // If fisher face recognizer is used, the face need to be
            // resized.
            // resize(face, face_resized, new Size(im_width, im_height),
            // 1.0, 1.0, INTER_CUBIC);
            // Now perform the prediction, see how easy that is:
            IntPointer label = new IntPointer(1);
            DoublePointer confidence = new DoublePointer(1);
            lbphFaceRecognizer.predict(face, label, confidence);
            int prediction = label.get(0);
            // And finally write all we've found out to the original image!
            // First of all draw a green rectangle around the detected face:
            rectangle(videoMat, face_i, new Scalar(0, 255, 0, 1));
            // Create the text we will annotate the box with:
            String box_text = "Prediction = " + prediction;
            // Calculate the position for annotated text (make sure we don't
            // put illegal values in there):
            int pos_x = Math.max(face_i.tl().x() - 10, 0);
            int pos_y = Math.max(face_i.tl().y() - 10, 0);
            // And now put it into the image:
            putText(videoMat, box_text, new Point(pos_x, pos_y), FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0, 2.0));
        }
        // Show the result:
        imshow("face_recognizer", videoMat);
        char key = (char) waitKey(20);
        // Exit this loop on escape:
        if (key == 27) {
            destroyAllWindows();
            break;
        }
    }
    // Release the capture device/file now that we are done with it.
    grabber.stop();
}
Also used : Frame(org.bytedeco.javacv.Frame) DoublePointer(org.bytedeco.javacpp.DoublePointer) OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber) Exception(org.bytedeco.javacv.FrameGrabber.Exception) IntPointer(org.bytedeco.javacpp.IntPointer) OpenCVFrameConverter(org.bytedeco.javacv.OpenCVFrameConverter) File(java.io.File)

Example 13 with IntPointer

Use of org.bytedeco.javacpp.IntPointer in the javacv project by bytedeco.

From the class FFmpegFrameGrabber, method processSamples.

/**
 * Wraps the audio plane(s) of the decoded {@code samples_frame} into NIO buffers
 * published through {@code frame.samples}, and, when the decoder's channel count,
 * sample format, or sample rate differs from what was requested on this grabber,
 * additionally converts the samples with libswresample into {@code samples_buf_out}
 * and publishes those instead.
 *
 * @throws Exception if the swresample context cannot be allocated or initialized,
 *                   or if the sample conversion itself fails
 */
private void processSamples() throws Exception {
    int ret;
    int sample_format = samples_frame.format();
    // Planar formats keep one plane per channel; packed formats interleave all
    // channels in a single plane.
    int planes = av_sample_fmt_is_planar(sample_format) != 0 ? (int) samples_frame.channels() : 1;
    // Byte size of one plane: total buffer size for all channels divided by plane count.
    int data_size = av_samples_get_buffer_size((IntPointer) null, audio_c.channels(), samples_frame.nb_samples(), audio_c.sample_fmt(), 1) / planes;
    if (samples_buf == null || samples_buf.length != planes) {
        samples_ptr = new BytePointer[planes];
        samples_buf = new Buffer[planes];
    }
    frame.sampleRate = audio_c.sample_rate();
    frame.audioChannels = audio_c.channels();
    frame.samples = samples_buf;
    // Keep a reference to the native frame alongside the wrapped buffers.
    frame.opaque = samples_frame;
    int sample_size = data_size / av_get_bytes_per_sample(sample_format);
    for (int i = 0; i < planes; i++) {
        BytePointer p = samples_frame.data(i);
        // Re-wrap the native data only when the pointer moved or the previous
        // wrapper is too small; otherwise reuse the cached buffer view.
        if (!p.equals(samples_ptr[i]) || samples_ptr[i].capacity() < data_size) {
            samples_ptr[i] = p.capacity(data_size);
            ByteBuffer b = p.asBuffer();
            // Expose the raw bytes as the primitive buffer type matching the format.
            switch(sample_format) {
                case AV_SAMPLE_FMT_U8:
                case AV_SAMPLE_FMT_U8P:
                    samples_buf[i] = b;
                    break;
                case AV_SAMPLE_FMT_S16:
                case AV_SAMPLE_FMT_S16P:
                    samples_buf[i] = b.asShortBuffer();
                    break;
                case AV_SAMPLE_FMT_S32:
                case AV_SAMPLE_FMT_S32P:
                    samples_buf[i] = b.asIntBuffer();
                    break;
                case AV_SAMPLE_FMT_FLT:
                case AV_SAMPLE_FMT_FLTP:
                    samples_buf[i] = b.asFloatBuffer();
                    break;
                case AV_SAMPLE_FMT_DBL:
                case AV_SAMPLE_FMT_DBLP:
                    samples_buf[i] = b.asDoubleBuffer();
                    break;
                default:
                    // Unrecognized sample format: nothing sensible to publish.
                    assert false;
            }
        }
        samples_buf[i].position(0).limit(sample_size);
    }
    // Convert only when decoder output differs from the requested audio parameters.
    if (audio_c.channels() != getAudioChannels() || audio_c.sample_fmt() != getSampleFormat() || audio_c.sample_rate() != getSampleRate()) {
        // (Re)build the conversion context when the target parameters changed.
        if (samples_convert_ctx == null || samples_channels != getAudioChannels() || samples_format != getSampleFormat() || samples_rate != getSampleRate()) {
            samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, av_get_default_channel_layout(getAudioChannels()), getSampleFormat(), getSampleRate(), av_get_default_channel_layout(audio_c.channels()), audio_c.sample_fmt(), audio_c.sample_rate(), 0, null);
            if (samples_convert_ctx == null) {
                throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
            } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
                throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
            }
            samples_channels = getAudioChannels();
            samples_format = getSampleFormat();
            samples_rate = getSampleRate();
        }
        int sample_size_in = samples_frame.nb_samples();
        // NOTE(review): plane count for the OUTPUT is sized from the INPUT frame's
        // channel count — presumably fine when channel counts match; verify for
        // conversions that change the number of channels.
        int planes_out = av_sample_fmt_is_planar(samples_format) != 0 ? (int) samples_frame.channels() : 1;
        // Upper bound on output samples swr_convert may produce for this input.
        int sample_size_out = swr_get_out_samples(samples_convert_ctx, sample_size_in);
        int sample_bytes_out = av_get_bytes_per_sample(samples_format);
        // Packed output carries all channels in one plane, hence the channel multiplier.
        int buffer_size_out = sample_size_out * sample_bytes_out * (planes_out > 1 ? 1 : samples_channels);
        // NOTE(review): this compares samples_buf.length (input array), not
        // samples_buf_out.length — looks like it should check the output array; confirm.
        if (samples_buf_out == null || samples_buf.length != planes_out || samples_ptr_out[0].capacity() < buffer_size_out) {
            // Free any previously av_malloc'ed output planes before reallocating.
            for (int i = 0; samples_ptr_out != null && i < samples_ptr_out.length; i++) {
                av_free(samples_ptr_out[i].position(0));
            }
            samples_ptr_out = new BytePointer[planes_out];
            samples_buf_out = new Buffer[planes_out];
            for (int i = 0; i < planes_out; i++) {
                samples_ptr_out[i] = new BytePointer(av_malloc(buffer_size_out)).capacity(buffer_size_out);
                ByteBuffer b = samples_ptr_out[i].asBuffer();
                // Same format-to-buffer mapping as above, for the converted output.
                switch(samples_format) {
                    case AV_SAMPLE_FMT_U8:
                    case AV_SAMPLE_FMT_U8P:
                        samples_buf_out[i] = b;
                        break;
                    case AV_SAMPLE_FMT_S16:
                    case AV_SAMPLE_FMT_S16P:
                        samples_buf_out[i] = b.asShortBuffer();
                        break;
                    case AV_SAMPLE_FMT_S32:
                    case AV_SAMPLE_FMT_S32P:
                        samples_buf_out[i] = b.asIntBuffer();
                        break;
                    case AV_SAMPLE_FMT_FLT:
                    case AV_SAMPLE_FMT_FLTP:
                        samples_buf_out[i] = b.asFloatBuffer();
                        break;
                    case AV_SAMPLE_FMT_DBL:
                    case AV_SAMPLE_FMT_DBLP:
                        samples_buf_out[i] = b.asDoubleBuffer();
                        break;
                    default:
                        // Unrecognized target format: nothing sensible to publish.
                        assert false;
                }
            }
        }
        frame.sampleRate = samples_rate;
        frame.audioChannels = samples_channels;
        frame.samples = samples_buf_out;
        // swr_convert returns the number of samples converted per channel, or <0 on error.
        if ((ret = swr_convert(samples_convert_ctx, plane_ptr.put(samples_ptr_out), sample_size_out, plane_ptr2.put(samples_ptr), sample_size_in)) < 0) {
            throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
        }
        // Trim each output plane to the number of samples actually produced.
        for (int i = 0; i < planes_out; i++) {
            samples_ptr_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels));
            samples_buf_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels));
        }
    }
}
Also used : IntPointer(org.bytedeco.javacpp.IntPointer) BytePointer(org.bytedeco.javacpp.BytePointer) ByteBuffer(java.nio.ByteBuffer) IOException(java.io.IOException)

Example 14 with IntPointer

Use of org.bytedeco.javacpp.IntPointer in the javacv project by bytedeco.

From the class FFmpegFrameRecorder, method recordSamples.

/**
 * Records one batch of audio samples: copies/wraps the caller's NIO buffers into
 * native pointers, converts them with libswresample to the output codec's sample
 * format, rate, and channel layout, and writes full audio frames whenever the
 * internal output buffer fills up.
 *
 * <p>Passing {@code samples == null} flushes: any samples still buffered in
 * {@code samples_out} are written, then a null frame is sent to drain the encoder.
 *
 * @param sampleRate    input sample rate; if {@code <= 0}, the codec's rate is assumed
 * @param audioChannels input channel count; if {@code <= 0}, the codec's count is assumed
 * @param samples       one buffer (packed) or one buffer per channel (planar) of
 *                      byte/short/int/float/double samples, or null to flush
 * @return for non-null samples, whether the last encoded frame was a key frame;
 *         on flush, the result of writing the final null frame
 * @throws Exception if no audio stream exists, start() was not called, the buffer
 *                   type is unsupported, or conversion/encoding fails
 */
public synchronized boolean recordSamples(int sampleRate, int audioChannels, Buffer... samples) throws Exception {
    // PointerScope ensures any Pointers allocated inside are deallocated on exit.
    try (PointerScope scope = new PointerScope()) {
        if (audio_st == null) {
            throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
        }
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        // Flush path: write out whatever is still buffered, then drain the encoder.
        if (samples == null && samples_out[0].position() > 0) {
            // Typically samples_out[0].limit() is double the audio_input_frame_size --> sampleDivisor = 2
            double sampleDivisor = Math.floor((int) Math.min(samples_out[0].limit(), Integer.MAX_VALUE) / audio_input_frame_size);
            writeSamples((int) Math.floor((int) samples_out[0].position() / sampleDivisor));
            return writeFrame((AVFrame) null);
        }
        int ret;
        // Fall back to the codec's parameters when the caller passes non-positive values.
        if (sampleRate <= 0) {
            sampleRate = audio_c.sample_rate();
        }
        if (audioChannels <= 0) {
            audioChannels = audio_c.channels();
        }
        int inputSize = samples != null ? samples[0].limit() - samples[0].position() : 0;
        int inputFormat = samples_format;
        // More than one buffer means planar input: each plane holds a single channel.
        int inputChannels = samples != null && samples.length > 1 ? 1 : audioChannels;
        int inputDepth = 0;
        int outputFormat = audio_c.sample_fmt();
        int outputChannels = samples_out.length > 1 ? 1 : audio_c.channels();
        int outputDepth = av_get_bytes_per_sample(outputFormat);
        // Map the caller's NIO buffer type to the corresponding FFmpeg sample format
        // and copy (array-backed, reusing the cached pointer) or wrap (direct) each
        // plane into samples_in.
        if (samples != null && samples[0] instanceof ByteBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
            inputDepth = 1;
            for (int i = 0; i < samples.length; i++) {
                ByteBuffer b = (ByteBuffer) samples[i];
                if (samples_in[i] instanceof BytePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((BytePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        // Drop our reference to the previous wrapper before replacing it.
                        samples_in[i].releaseReference();
                    }
                    // retainReference keeps the wrapper alive past the PointerScope.
                    samples_in[i] = new BytePointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof ShortBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
            inputDepth = 2;
            for (int i = 0; i < samples.length; i++) {
                ShortBuffer b = (ShortBuffer) samples[i];
                if (samples_in[i] instanceof ShortPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((ShortPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new ShortPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof IntBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
            inputDepth = 4;
            for (int i = 0; i < samples.length; i++) {
                IntBuffer b = (IntBuffer) samples[i];
                if (samples_in[i] instanceof IntPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((IntPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new IntPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof FloatBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
            inputDepth = 4;
            for (int i = 0; i < samples.length; i++) {
                FloatBuffer b = (FloatBuffer) samples[i];
                if (samples_in[i] instanceof FloatPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((FloatPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new FloatPointer(b).retainReference();
                }
            }
        } else if (samples != null && samples[0] instanceof DoubleBuffer) {
            inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
            inputDepth = 8;
            for (int i = 0; i < samples.length; i++) {
                DoubleBuffer b = (DoubleBuffer) samples[i];
                if (samples_in[i] instanceof DoublePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                    ((DoublePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
                } else {
                    if (samples_in[i] != null) {
                        samples_in[i].releaseReference();
                    }
                    samples_in[i] = new DoublePointer(b).retainReference();
                }
            }
        } else if (samples != null) {
            throw new Exception("Audio samples Buffer has unsupported type: " + samples);
        }
        // (Re)build the swresample context when the input parameters changed.
        if (samples_convert_ctx == null || samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate) {
            samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, audio_c.channel_layout(), outputFormat, audio_c.sample_rate(), av_get_default_channel_layout(audioChannels), inputFormat, sampleRate, 0, null);
            if (samples_convert_ctx == null) {
                throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
            } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
                throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
            }
            samples_channels = audioChannels;
            samples_format = inputFormat;
            samples_rate = sampleRate;
        }
        // Switch the input pointers from element units to byte units for swr_convert.
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in[i].position(samples_in[i].position() * inputDepth).limit((samples_in[i].position() + inputSize) * inputDepth);
        }
        // Convert in chunks until swr_convert produces no more output, writing a
        // frame each time the output buffer fills.
        while (true) {
            int inputCount = (int) Math.min(samples != null ? (samples_in[0].limit() - samples_in[0].position()) / (inputChannels * inputDepth) : 0, Integer.MAX_VALUE);
            int outputCount = (int) Math.min((samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth), Integer.MAX_VALUE);
            // Do not feed more input than the remaining output space can absorb
            // after resampling (rounded up).
            inputCount = Math.min(inputCount, (outputCount * sampleRate + audio_c.sample_rate() - 1) / audio_c.sample_rate());
            for (int i = 0; samples != null && i < samples.length; i++) {
                plane_ptr.put(i, samples_in[i]);
            }
            for (int i = 0; i < samples_out.length; i++) {
                plane_ptr2.put(i, samples_out[i]);
            }
            // ret is the number of samples written per channel, 0 when drained, <0 on error.
            if ((ret = swr_convert(samples_convert_ctx, plane_ptr2, outputCount, plane_ptr, inputCount)) < 0) {
                throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
            } else if (ret == 0) {
                break;
            }
            // Advance input and output positions past the samples just consumed/produced.
            for (int i = 0; samples != null && i < samples.length; i++) {
                samples_in[i].position(samples_in[i].position() + inputCount * inputChannels * inputDepth);
            }
            for (int i = 0; i < samples_out.length; i++) {
                samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth);
            }
            // Write a full frame once the output buffer is full (or on a flush pass).
            if (samples == null || samples_out[0].position() >= samples_out[0].limit()) {
                writeSamples(audio_input_frame_size);
            }
        }
        return samples != null ? frame.key_frame() != 0 : writeFrame((AVFrame) null);
    }
}
Also used : DoubleBuffer(java.nio.DoubleBuffer) BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) FloatBuffer(java.nio.FloatBuffer) PointerScope(org.bytedeco.javacpp.PointerScope) ByteBuffer(java.nio.ByteBuffer) IOException(java.io.IOException) ShortPointer(org.bytedeco.javacpp.ShortPointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) IntBuffer(java.nio.IntBuffer) IntPointer(org.bytedeco.javacpp.IntPointer) ShortBuffer(java.nio.ShortBuffer)

Example 15 with IntPointer

Use of org.bytedeco.javacpp.IntPointer in the javacv project by bytedeco.

From the class Frame, method createIndexer.

/**
 * Returns an {@link Indexer} for the <i>i</i>th image plane.
 *
 * <p>The indexer is backed by the plane's heap array when the buffer has one;
 * otherwise by the NIO buffer itself when {@code direct} is true, or by a
 * JavaCPP pointer wrapping the buffer when {@code direct} is false.
 *
 * @param direct whether a non-array-backed buffer may be indexed directly
 *               (true) or must be wrapped in a Pointer first (false)
 * @param i      index of the image plane within {@code image}
 * @return an indexer of the subtype matching {@code imageDepth}; null for an
 *         unrecognized depth (asserts first when assertions are enabled)
 */
public <I extends Indexer> I createIndexer(boolean direct, int i) {
    // Index as height x width x channels; consecutive rows are imageStride
    // elements apart, pixels within a row are imageChannels elements apart.
    long[] sizes = { imageHeight, imageWidth, imageChannels };
    long[] strides = { imageStride, imageChannels, 1 };
    Buffer buffer = image[i];
    Object array = buffer.hasArray() ? buffer.array() : null;
    // Each case follows the same preference order: backing array, then the
    // buffer itself (direct), then a Pointer view of the buffer.
    switch(imageDepth) {
        case DEPTH_UBYTE:
            return array != null ? (I) UByteIndexer.create((byte[]) array, sizes, strides).indexable(this) : direct ? (I) UByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this) : (I) UByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_BYTE:
            return array != null ? (I) ByteIndexer.create((byte[]) array, sizes, strides).indexable(this) : direct ? (I) ByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this) : (I) ByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_USHORT:
            return array != null ? (I) UShortIndexer.create((short[]) array, sizes, strides).indexable(this) : direct ? (I) UShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this) : (I) UShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_SHORT:
            return array != null ? (I) ShortIndexer.create((short[]) array, sizes, strides).indexable(this) : direct ? (I) ShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this) : (I) ShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_INT:
            return array != null ? (I) IntIndexer.create((int[]) array, sizes, strides).indexable(this) : direct ? (I) IntIndexer.create((IntBuffer) buffer, sizes, strides).indexable(this) : (I) IntIndexer.create(new IntPointer((IntBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_LONG:
            return array != null ? (I) LongIndexer.create((long[]) array, sizes, strides).indexable(this) : direct ? (I) LongIndexer.create((LongBuffer) buffer, sizes, strides).indexable(this) : (I) LongIndexer.create(new LongPointer((LongBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_FLOAT:
            return array != null ? (I) FloatIndexer.create((float[]) array, sizes, strides).indexable(this) : direct ? (I) FloatIndexer.create((FloatBuffer) buffer, sizes, strides).indexable(this) : (I) FloatIndexer.create(new FloatPointer((FloatBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_DOUBLE:
            return array != null ? (I) DoubleIndexer.create((double[]) array, sizes, strides).indexable(this) : direct ? (I) DoubleIndexer.create((DoubleBuffer) buffer, sizes, strides).indexable(this) : (I) DoubleIndexer.create(new DoublePointer((DoubleBuffer) buffer), sizes, strides, false).indexable(this);
        default:
            // Unknown imageDepth: fall through to the null return below.
            assert false;
    }
    return null;
}
Also used : FloatBuffer(java.nio.FloatBuffer) ShortBuffer(java.nio.ShortBuffer) ByteBuffer(java.nio.ByteBuffer) IntBuffer(java.nio.IntBuffer) Buffer(java.nio.Buffer) DoubleBuffer(java.nio.DoubleBuffer) LongBuffer(java.nio.LongBuffer) DoubleBuffer(java.nio.DoubleBuffer) LongBuffer(java.nio.LongBuffer) BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) FloatBuffer(java.nio.FloatBuffer) ByteBuffer(java.nio.ByteBuffer) ShortPointer(org.bytedeco.javacpp.ShortPointer) LongPointer(org.bytedeco.javacpp.LongPointer) FloatPointer(org.bytedeco.javacpp.FloatPointer) IntPointer(org.bytedeco.javacpp.IntPointer) IntBuffer(java.nio.IntBuffer) ShortBuffer(java.nio.ShortBuffer)

Aggregations

IntPointer (org.bytedeco.javacpp.IntPointer)35 DoublePointer (org.bytedeco.javacpp.DoublePointer)19 FloatPointer (org.bytedeco.javacpp.FloatPointer)19 Pointer (org.bytedeco.javacpp.Pointer)14 DataBuffer (org.nd4j.linalg.api.buffer.DataBuffer)13 INDArray (org.nd4j.linalg.api.ndarray.INDArray)13 CUstream_st (org.bytedeco.javacpp.cuda.CUstream_st)12 CublasPointer (org.nd4j.linalg.jcublas.CublasPointer)12 CudaContext (org.nd4j.linalg.jcublas.context.CudaContext)12 BytePointer (org.bytedeco.javacpp.BytePointer)11 CudaPointer (org.nd4j.jita.allocator.pointers.CudaPointer)10 org.nd4j.jita.allocator.pointers.cuda.cusolverDnHandle_t (org.nd4j.jita.allocator.pointers.cuda.cusolverDnHandle_t)10 GridExecutioner (org.nd4j.linalg.api.ops.executioner.GridExecutioner)10 ByteBuffer (java.nio.ByteBuffer)8 BlasException (org.nd4j.linalg.api.blas.BlasException)8 IntBuffer (java.nio.IntBuffer)7 DoubleBuffer (java.nio.DoubleBuffer)6 FloatBuffer (java.nio.FloatBuffer)6 ShortBuffer (java.nio.ShortBuffer)6 ShortPointer (org.bytedeco.javacpp.ShortPointer)6