Example use of org.bytedeco.javacpp.Pointer in the javacv project (by bytedeco): class FFmpegFrameFilter, method pushImage.
/**
 * Pushes a raw image into this filter's source ("buffersrc") as a video frame.
 * When {@code pixelFormat} is {@code AV_PIX_FMT_NONE}, the format is guessed
 * from {@code depth} and {@code channels}.
 *
 * @param width image width in pixels
 * @param height image height in pixels
 * @param depth bits per sample (a Frame.DEPTH_* constant; may be negative for signed types — |depth| is used for sizing)
 * @param channels number of channels per pixel
 * @param stride row stride in samples (converted to bytes below)
 * @param pixelFormat an AV_PIX_FMT_* constant, or AV_PIX_FMT_NONE to auto-detect
 * @param image the pixel data; only image[0] is read
 * @throws Exception if the pixel format cannot be guessed, or if feeding the filtergraph fails
 */
public void pushImage(int width, int height, int depth, int channels, int stride, int pixelFormat, Buffer... image) throws Exception {
// Row size in bytes: stride is in samples, |depth| is bits per sample.
int step = stride * Math.abs(depth) / 8;
// Wrap the caller's buffer without copying; non-ByteBuffer types are viewed through a generic Pointer first.
BytePointer data = image[0] instanceof ByteBuffer ? new BytePointer((ByteBuffer) image[0].position(0)) : new BytePointer(new Pointer(image[0].position(0)));
if (pixelFormat == AV_PIX_FMT_NONE) {
// Map (depth, channels) to the pixel formats javacv conventionally produces.
if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 3) {
pixelFormat = AV_PIX_FMT_BGR24;
} else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 1) {
pixelFormat = AV_PIX_FMT_GRAY8;
} else if ((depth == Frame.DEPTH_USHORT || depth == Frame.DEPTH_SHORT) && channels == 1) {
// 16-bit grayscale: pick the variant matching the host byte order.
pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
} else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 4) {
pixelFormat = AV_PIX_FMT_RGBA;
} else if ((depth == Frame.DEPTH_UBYTE || depth == Frame.DEPTH_BYTE) && channels == 2) {
// Android's camera capture format
pixelFormat = AV_PIX_FMT_NV21;
} else {
throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
}
}
if (pixelFormat == AV_PIX_FMT_NV21) {
// NV21's first (luma) plane is 8-bit, so its line size is just the width in pixels;
// override the step computed from the 2-channel interleaved view above.
step = width;
}
// Populate image_frame's data pointers and linesizes from the single buffer (alignment = 1, i.e. no row padding assumed).
av_image_fill_arrays(new PointerPointer(image_frame), image_frame.linesize(), data, pixelFormat, width, height, 1);
// Force the caller-specified row step for plane 0 (it may differ from the tightly-packed default).
image_frame.linesize(0, step);
image_frame.format(pixelFormat);
image_frame.width(width);
image_frame.height(height);
/* push the decoded frame into the filtergraph */
// KEEP_REF: the filtergraph takes its own reference, leaving image_frame reusable for the next push.
if (av_buffersrc_add_frame_flags(buffersrc_ctx, image_frame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
throw new Exception("av_buffersrc_add_frame_flags(): Error while feeding the filtergraph.");
}
}
Example use of org.bytedeco.javacpp.Pointer in the javacv project (by bytedeco): class OpenCVFrameConverter, method convertToMat.
/**
 * Converts a {@link Frame} to an OpenCV {@link Mat} without copying pixel data.
 * Returns the cached Mat wrapper when the frame's geometry has not changed,
 * or null when the frame is empty or its depth has no Mat equivalent.
 */
public Mat convertToMat(Frame frame) {
    if (frame == null || frame.image == null) {
        return null;
    }
    if (frame.opaque instanceof Mat) {
        // The frame already carries its backing Mat; reuse it directly.
        return (Mat) frame.opaque;
    }
    if (!isEqual(frame, mat)) {
        // Geometry changed: rebuild the cached header around the frame's buffer.
        int matDepth = getMatDepth(frame.imageDepth);
        if (matDepth < 0) {
            mat = null;
        } else {
            int rowBytes = frame.imageStride * Math.abs(frame.imageDepth) / 8;
            mat = new Mat(frame.imageHeight, frame.imageWidth,
                    CV_MAKETYPE(matDepth, frame.imageChannels),
                    new Pointer(frame.image[0].position(0)), rowBytes);
        }
    }
    return mat;
}
Example use of org.bytedeco.javacpp.Pointer in the javacv project (by bytedeco): class OpenKinect2FrameGrabber, method grabIR.
/**
 * Grabs the latest infrared frame from the device and wraps it in
 * {@code rawIRImage} without copying. The device delivers 512x424
 * single-channel float data in the range [0.0, 65535.0].
 *
 * Fix: removed the unused local {@code bpp} (computed from
 * {@code bytes_per_pixel()} but never read).
 */
protected void grabIR() {
    freenect2.Frame IRImage = frames.get(freenect2.Frame.Ir);
    int channels = 1;
    int iplDepth = IPL_DEPTH_32F; // 32-bit float samples
    int deviceWidth = (int) IRImage.width();
    int deviceHeight = (int) IRImage.height();
    Pointer rawIRData = IRImage.data();
    // Allocate the IplImage header lazily, once; only the data pointer changes per frame.
    if (rawIRImage == null) {
        rawIRImage = IplImage.createHeader(deviceWidth, deviceHeight, iplDepth, channels);
    }
    // Point the header at the device buffer; row step = width * channels * bytesPerSample.
    cvSetData(rawIRImage, rawIRData, deviceWidth * channels * iplDepth / 8);
}
Example use of org.bytedeco.javacpp.Pointer in the nd4j project (by deeplearning4j): class Nd4j, method createNpyFromInputStream.
/**
 * Creates an {@link INDArray} from a numpy (.npy) byte stream.
 * The stream is fully read into a direct buffer so that native code
 * can parse it in place.
 *
 * @param is the input stream containing npy-formatted data
 * @return the loaded ndarray
 * @throws IOException if reading the stream fails
 */
public static INDArray createNpyFromInputStream(InputStream is) throws IOException {
    byte[] raw = IOUtils.toByteArray(is);
    // Native parsing requires off-heap memory, so copy into a direct buffer.
    ByteBuffer direct = ByteBuffer.allocateDirect(raw.length);
    direct.put(raw);
    direct.rewind();
    return createFromNpyPointer(new Pointer(direct));
}
Example use of org.bytedeco.javacpp.Pointer in the nd4j project (by deeplearning4j): class ProtectedCudaConstantHandler, method ensureMaps.
// Lazily initializes the per-device caches (buffers, constant offsets, locks, and the
// device's constant-space base address) for the given CUDA device id.
// Uses check-then-lock-then-recheck so the common path (maps already populated) avoids
// taking the monitor; NOTE(review): this is only safe if buffersCache is a concurrent
// map (e.g. ConcurrentHashMap) — confirm against the field declaration.
private void ensureMaps(Integer deviceId) {
if (!buffersCache.containsKey(deviceId)) {
// Cache the flow controller on first use.
if (flowController == null)
flowController = AtomicAllocator.getInstance().getFlowController();
try {
synchronized (this) {
// Re-check under the lock: another thread may have initialized this device meanwhile.
if (!buffersCache.containsKey(deviceId)) {
// TODO: this op call should be checked
// nativeOps.setDevice(new CudaPointer(deviceId));
buffersCache.put(deviceId, new ConcurrentHashMap<ArrayDescriptor, DataBuffer>());
constantOffsets.put(deviceId, new AtomicLong(0));
deviceLocks.put(deviceId, new Semaphore(1));
// Query the base address of the device's constant memory space once and cache it.
Pointer cAddr = NativeOpsHolder.getInstance().getDeviceNativeOps().getConstantSpace();
// logger.info("constant pointer: {}", cAddr.address() );
deviceAddresses.put(deviceId, cAddr);
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
Aggregations