Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.
The class Frame, method cloneBufferArray.
/**
 * This private method takes a buffer array as input and returns a deep copy.
 * It is assumed that all buffers in the input array are of the same subclass.
 *
 * @param srcBuffers - Buffer array to be cloned
 * @param clonedBuffers - Buffer array to fill with clones
 * @return Opaque object to store
 *
 * @author Extension proposed by Dragos Dutu
 */
private static Pointer cloneBufferArray(Buffer[] srcBuffers, Buffer[] clonedBuffers) {
    Pointer opaque = null;
    if (srcBuffers != null && srcBuffers.length > 0) {
        int totalCapacity = 0;
        for (int i = 0; i < srcBuffers.length; i++) {
            srcBuffers[i].rewind();
            totalCapacity += srcBuffers[i].capacity();
        }
        if (srcBuffers[0] instanceof ByteBuffer) {
            BytePointer pointer = new BytePointer(totalCapacity);
            for (int i = 0; i < srcBuffers.length; i++) {
                clonedBuffers[i] = pointer.limit(pointer.position() + srcBuffers[i].limit()).asBuffer().put((ByteBuffer) srcBuffers[i]);
                pointer.position(pointer.limit());
            }
            opaque = pointer;
        } else if (srcBuffers[0] instanceof ShortBuffer) {
            ShortPointer pointer = new ShortPointer(totalCapacity);
            for (int i = 0; i < srcBuffers.length; i++) {
                clonedBuffers[i] = pointer.limit(pointer.position() + srcBuffers[i].limit()).asBuffer().put((ShortBuffer) srcBuffers[i]);
                pointer.position(pointer.limit());
            }
            opaque = pointer;
        } else if (srcBuffers[0] instanceof IntBuffer) {
            IntPointer pointer = new IntPointer(totalCapacity);
            for (int i = 0; i < srcBuffers.length; i++) {
                clonedBuffers[i] = pointer.limit(pointer.position() + srcBuffers[i].limit()).asBuffer().put((IntBuffer) srcBuffers[i]);
                pointer.position(pointer.limit());
            }
            opaque = pointer;
        } else if (srcBuffers[0] instanceof LongBuffer) {
            LongPointer pointer = new LongPointer(totalCapacity);
            for (int i = 0; i < srcBuffers.length; i++) {
                clonedBuffers[i] = pointer.limit(pointer.position() + srcBuffers[i].limit()).asBuffer().put((LongBuffer) srcBuffers[i]);
                pointer.position(pointer.limit());
            }
            opaque = pointer;
        } else if (srcBuffers[0] instanceof FloatBuffer) {
            FloatPointer pointer = new FloatPointer(totalCapacity);
            for (int i = 0; i < srcBuffers.length; i++) {
                clonedBuffers[i] = pointer.limit(pointer.position() + srcBuffers[i].limit()).asBuffer().put((FloatBuffer) srcBuffers[i]);
                pointer.position(pointer.limit());
            }
            opaque = pointer;
        } else if (srcBuffers[0] instanceof DoubleBuffer) {
            DoublePointer pointer = new DoublePointer(totalCapacity);
            for (int i = 0; i < srcBuffers.length; i++) {
                clonedBuffers[i] = pointer.limit(pointer.position() + srcBuffers[i].limit()).asBuffer().put((DoubleBuffer) srcBuffers[i]);
                pointer.position(pointer.limit());
            }
            opaque = pointer;
        }
        for (int i = 0; i < srcBuffers.length; i++) {
            srcBuffers[i].rewind();
            clonedBuffers[i].rewind();
        }
    }
    if (opaque != null) {
        opaque.retainReference();
    }
    return opaque;
}
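For context, here is a minimal, self-contained sketch of the ByteBuffer branch of cloneBufferArray: a single BytePointer allocation backs all of the cloned buffers, and its position/limit window is advanced to carve out one slice per source buffer. The class name CloneBufferSketch and the sample data are illustrative, not part of javacv.

import java.nio.ByteBuffer;

import org.bytedeco.javacpp.BytePointer;

public class CloneBufferSketch {

    public static void main(String[] args) {
        // Two source buffers to be copied into a single contiguous native block.
        ByteBuffer[] src = {
            ByteBuffer.wrap(new byte[] { 1, 2, 3, 4 }),
            ByteBuffer.wrap(new byte[] { 5, 6 })
        };
        ByteBuffer[] cloned = new ByteBuffer[src.length];

        int totalCapacity = 0;
        for (ByteBuffer b : src) {
            b.rewind();
            totalCapacity += b.capacity();
        }

        // One native allocation backs all clones, exactly as in cloneBufferArray.
        BytePointer pointer = new BytePointer(totalCapacity);
        for (int i = 0; i < src.length; i++) {
            // Narrow the pointer's [position, limit) window to the next slice,
            // expose it as a ByteBuffer, and copy the source buffer into it.
            cloned[i] = pointer.limit(pointer.position() + src[i].limit())
                               .asBuffer()
                               .put(src[i]);
            pointer.position(pointer.limit());
        }
        for (int i = 0; i < src.length; i++) {
            src[i].rewind();
            cloned[i].rewind();
        }

        System.out.println("cloned[1].get(1) = " + cloned[1].get(1)); // prints 6
        pointer.deallocate(); // release the native memory when done
    }
}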
Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.
The class OpenCVFeatures2dSerialization, method serializeMemory.
private static String serializeMemory(Mat matrix) throws UnsupportedEncodingException {
    try (KeyPointVector keyPointVectorSerialize = new KeyPointVector();
            Mat objectDescriptorsSerialize = new Mat();
            AKAZE akaze = AKAZE.create();
            FileStorage fileStorage = new FileStorage(".xml", FileStorage.WRITE | FileStorage.MEMORY, StandardCharsets.UTF_8.name())) {
        akaze.detectAndCompute(matrix, new Mat(), keyPointVectorSerialize, objectDescriptorsSerialize, false);
        System.out.println("Vector size: " + keyPointVectorSerialize.size());
        System.out.println("Descriptor size: " + objectDescriptorsSerialize.cols());
        write(fileStorage, "keyPoints", keyPointVectorSerialize);
        write(fileStorage, "descriptors", objectDescriptorsSerialize);
        BytePointer bytePointer = fileStorage.releaseAndGetString();
        return bytePointer.getString(StandardCharsets.UTF_8.name());
    }
}
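The part of this snippet that touches BytePointer is the in-memory FileStorage round trip. Below is a minimal, self-contained sketch of just that round trip, with a plain 3x3 float matrix standing in for the AKAZE descriptors; the class name InMemoryFileStorageSketch is illustrative.

import static org.bytedeco.opencv.global.opencv_core.*;

import java.nio.charset.StandardCharsets;

import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.opencv.opencv_core.FileStorage;
import org.bytedeco.opencv.opencv_core.Mat;

public class InMemoryFileStorageSketch {

    public static void main(String[] args) throws Exception {
        // Write a small matrix into an in-memory XML FileStorage, then read the
        // document back as a Java String through a BytePointer, as serializeMemory does.
        try (Mat descriptors = new Mat(3, 3, CV_32F);
                FileStorage fileStorage = new FileStorage(".xml",
                        FileStorage.WRITE | FileStorage.MEMORY, StandardCharsets.UTF_8.name())) {
            write(fileStorage, "descriptors", descriptors);
            BytePointer bytePointer = fileStorage.releaseAndGetString();
            String xml = bytePointer.getString(StandardCharsets.UTF_8.name());
            System.out.println(xml);
        }
    }
}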
Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.
The class IPCameraFrameGrabber, method grab.
@Override
public Frame grab() throws Exception {
    try {
        final byte[] b = readImage();
        final Mat mat = new Mat(1, b.length, CV_8UC1, new BytePointer(b));
        releaseDecoded();
        return converter.convert(decoded = imdecode(mat, IMREAD_COLOR));
    } catch (IOException e) {
        throw new Exception(e.getMessage(), e);
    }
}
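A hedged usage sketch for the grabber above: start it, grab one Frame, and shut it down. The stream URL is a placeholder, and the four-argument constructor with connection and read timeouts is assumed to be available in the javacv version in use.

import java.util.concurrent.TimeUnit;

import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.IPCameraFrameGrabber;

public class IPCameraGrabUsage {

    public static void main(String[] args) throws Exception {
        // Placeholder URL of an MJPEG camera stream; 5-second connect and read timeouts.
        IPCameraFrameGrabber grabber =
                new IPCameraFrameGrabber("http://192.168.1.10/video.mjpg", 5, 5, TimeUnit.SECONDS);
        grabber.start();
        try {
            Frame frame = grabber.grab(); // decodes one JPEG into a Frame, as shown above
            System.out.println("Grabbed " + frame.imageWidth + "x" + frame.imageHeight);
        } finally {
            grabber.stop();
            grabber.release();
        }
    }
}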
Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.
The class DC1394FrameGrabber, method grab.
public Frame grab() throws Exception {
    enqueue();
    if (linux) {
        fds.events(POLLIN);
        if (poll(fds, 1, timeout) == 0) {
throw new Exception("poll() Error: Timeout occured. (Has start() been called?)");
        }
    }
    int i = 0;
    int err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, raw_image[i]);
    if (err != DC1394_SUCCESS) {
        throw new Exception("dc1394_capture_dequeue(WAIT) Error " + err + ": Could not capture a frame. (Has start() been called?)");
    }
    // try to poll for more images, to get the most recent one...
    int numDequeued = 0;
    while (!raw_image[i].isNull()) {
        enqueue();
        enqueue_image = raw_image[i];
        i = (i + 1) % 2;
        numDequeued++;
        err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, raw_image[i]);
        if (err != DC1394_SUCCESS) {
            throw new Exception("dc1394_capture_dequeue(POLL) Error " + err + ": Could not capture a frame.");
        }
    }
    frame = raw_image[(i + 1) % 2];
    int w = frame.size(0);
    int h = frame.size(1);
    int depth = frame.data_depth();
    int iplDepth = 0;
    switch (depth) {
        case 8:
            iplDepth = IPL_DEPTH_8U;
            break;
        case 16:
            iplDepth = IPL_DEPTH_16U;
            break;
        default:
            assert false;
    }
    int stride = frame.stride();
    int size = frame.image_bytes();
    int numChannels = stride / w * 8 / depth;
    ByteOrder frameEndian = frame.little_endian() != 0 ? ByteOrder.LITTLE_ENDIAN : ByteOrder.BIG_ENDIAN;
    boolean alreadySwapped = false;
    int color_coding = frame.color_coding();
    boolean colorbayer = color_coding == DC1394_COLOR_CODING_RAW8 || color_coding == DC1394_COLOR_CODING_RAW16;
    boolean colorrgb = color_coding == DC1394_COLOR_CODING_RGB8 || color_coding == DC1394_COLOR_CODING_RGB16;
    boolean coloryuv = color_coding == DC1394_COLOR_CODING_YUV411 || color_coding == DC1394_COLOR_CODING_YUV422 || color_coding == DC1394_COLOR_CODING_YUV444;
    BytePointer imageData = frame.image();
    if ((depth <= 8 || frameEndian.equals(ByteOrder.nativeOrder())) && !coloryuv && (imageMode == ImageMode.RAW || (imageMode == ImageMode.COLOR && numChannels == 3) || (imageMode == ImageMode.GRAY && numChannels == 1 && !colorbayer))) {
        if (return_image == null) {
            return_image = IplImage.createHeader(w, h, iplDepth, numChannels);
        }
        return_image.widthStep(stride);
        return_image.imageSize(size);
        return_image.imageData(imageData);
    } else {
        // in the padding, there's sometimes timeframe information and stuff
        // that libdc1394 will copy for us, so we need to allocate it
        int padding_bytes = frame.padding_bytes();
        int padding1 = (int) Math.ceil((double) padding_bytes / (w * depth / 8));
        int padding3 = (int) Math.ceil((double) padding_bytes / (w * 3 * depth / 8));
        if (return_image == null) {
            int c = imageMode == ImageMode.COLOR ? 3 : 1;
            int padding = imageMode == ImageMode.COLOR ? padding3 : padding1;
            return_image = IplImage.create(w, h + padding, iplDepth, c);
            return_image.height(return_image.height() - padding);
        }
        if (temp_image == null) {
            if (imageMode == ImageMode.COLOR && (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) {
                temp_image = IplImage.create(w, h + padding1, iplDepth, numChannels);
                temp_image.height(temp_image.height() - padding1);
            } else if (imageMode == ImageMode.GRAY && (coloryuv || colorbayer || (colorrgb && depth > 8))) {
                temp_image = IplImage.create(w, h + padding3, iplDepth, 3);
                temp_image.height(temp_image.height() - padding3);
            } else if (imageMode == ImageMode.GRAY && colorrgb) {
                temp_image = IplImage.createHeader(w, h, iplDepth, 3);
            } else if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
                temp_image = IplImage.createHeader(w, h, iplDepth, 1);
            } else {
                temp_image = return_image;
            }
        }
        conv_image.size(0, temp_image.width());
        conv_image.size(1, temp_image.height());
        if (depth > 8) {
            conv_image.color_coding(imageMode == ImageMode.RAW ? DC1394_COLOR_CODING_RAW16 : temp_image.nChannels() == 1 ? DC1394_COLOR_CODING_MONO16 : DC1394_COLOR_CODING_RGB16);
            conv_image.data_depth(16);
        } else {
            conv_image.color_coding(imageMode == ImageMode.RAW ? DC1394_COLOR_CODING_RAW8 : temp_image.nChannels() == 1 ? DC1394_COLOR_CODING_MONO8 : DC1394_COLOR_CODING_RGB8);
            conv_image.data_depth(8);
        }
        conv_image.stride(temp_image.widthStep());
        int temp_size = temp_image.imageSize();
        conv_image.allocated_image_bytes(temp_size).total_bytes(temp_size).image_bytes(temp_size);
        conv_image.image(temp_image.imageData());
        if (colorbayer) {
            // from raw Bayer... invert R and B to get BGR images
            // (like OpenCV wants them) instead of RGB
            int c = frame.color_filter();
            if (c == DC1394_COLOR_FILTER_RGGB) {
                frame.color_filter(DC1394_COLOR_FILTER_BGGR);
            } else if (c == DC1394_COLOR_FILTER_GBRG) {
                frame.color_filter(DC1394_COLOR_FILTER_GRBG);
            } else if (c == DC1394_COLOR_FILTER_GRBG) {
                frame.color_filter(DC1394_COLOR_FILTER_GBRG);
            } else if (c == DC1394_COLOR_FILTER_BGGR) {
                frame.color_filter(DC1394_COLOR_FILTER_RGGB);
            } else {
                assert false;
            }
            // other better methods than "simple" give garbage at 16 bits..
            err = dc1394_debayer_frames(frame, conv_image, DC1394_BAYER_METHOD_SIMPLE);
            frame.color_filter(c);
            if (err != DC1394_SUCCESS) {
                throw new Exception("dc1394_debayer_frames() Error " + err + ": Could not debayer frame.");
            }
        } else if (depth > 8 && frame.data_depth() == conv_image.data_depth() && frame.color_coding() == conv_image.color_coding() && frame.stride() == conv_image.stride()) {
            // we just need a copy to swap bytes..
            ShortBuffer in = frame.getByteBuffer().order(frameEndian).asShortBuffer();
            ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
            alreadySwapped = true;
        } else if ((imageMode == ImageMode.GRAY && colorrgb) || (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) {
            temp_image.widthStep(stride);
            temp_image.imageSize(size);
            temp_image.imageData(imageData);
        } else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) {
            // from YUV, etc.
            err = dc1394_convert_frames(frame, conv_image);
            if (err != DC1394_SUCCESS) {
                throw new Exception("dc1394_convert_frames() Error " + err + ": Could not convert frame.");
            }
        }
        if (!alreadySwapped && depth > 8 && !frameEndian.equals(ByteOrder.nativeOrder())) {
            // ack, the camera's endianness doesn't correspond to our machine ...
            // swap bytes of 16-bit images
            ByteBuffer bb = temp_image.getByteBuffer();
            ShortBuffer in = bb.order(frameEndian).asShortBuffer();
            ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
        }
        // should we copy the padding as well?
        if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
            cvCvtColor(temp_image, return_image, CV_GRAY2BGR);
        } else if (imageMode == ImageMode.GRAY && (colorbayer || colorrgb || coloryuv)) {
            cvCvtColor(temp_image, return_image, CV_BGR2GRAY);
        }
    }
    switch (frame.color_filter()) {
        case DC1394_COLOR_FILTER_RGGB:
            sensorPattern = SENSOR_PATTERN_RGGB;
            break;
        case DC1394_COLOR_FILTER_GBRG:
            sensorPattern = SENSOR_PATTERN_GBRG;
            break;
        case DC1394_COLOR_FILTER_GRBG:
            sensorPattern = SENSOR_PATTERN_GRBG;
            break;
        case DC1394_COLOR_FILTER_BGGR:
            sensorPattern = SENSOR_PATTERN_BGGR;
            break;
        default:
            sensorPattern = -1L;
    }
    enqueue_image = frame;
    timestamp = frame.timestamp();
    frameNumber += numDequeued;
    // System.out.println("frame age = " + (local_time[0] - timestamp));
    return converter.convert(return_image);
}
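A hedged sketch of how this grab() is typically driven, assuming a camera visible to libdc1394 as device 0; the device index, frame count, and class name are illustrative, not taken from the project.

import org.bytedeco.javacv.DC1394FrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber.ImageMode;

public class DC1394GrabUsage {

    public static void main(String[] args) throws Exception {
        // Device index 0 is an assumption; requires a camera supported by libdc1394.
        DC1394FrameGrabber grabber = new DC1394FrameGrabber(0);
        grabber.setImageMode(ImageMode.COLOR); // exercises the conversion paths in grab()
        grabber.start();
        try {
            for (int i = 0; i < 10; i++) {
                Frame frame = grabber.grab();
                System.out.println("Frame " + i + ": " + frame.imageWidth + "x" + frame.imageHeight
                        + " at timestamp " + grabber.getTimestamp());
            }
        } finally {
            grabber.stop();
            grabber.release();
        }
    }
}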
Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco.
The class FrameConverterTest, method testAndroidFrameConverter.
@Test
public void testAndroidFrameConverter() {
    System.out.println("AndroidFrameConverter");
    AndroidFrameConverter converter = new AndroidFrameConverter();
    int width = 512;
    int height = 1024;
    byte[] yuvData = new byte[3 * width * height / 2];
    for (int i = 0; i < yuvData.length; i++) {
        yuvData[i] = (byte) i;
    }
    Mat yuvImage = new Mat(3 * height / 2, width, CV_8UC1, new BytePointer(yuvData));
    Mat bgrImage = new Mat(height, width, CV_8UC3);
    cvtColor(yuvImage, bgrImage, CV_YUV2BGR_NV21);
    Frame bgrFrame = converter.convert(yuvData, width, height);
    UByteIndexer bgrImageIdx = bgrImage.createIndexer();
    UByteIndexer bgrFrameIdx = bgrFrame.createIndexer();
    assertEquals(bgrImageIdx.rows(), bgrFrameIdx.rows());
    assertEquals(bgrImageIdx.cols(), bgrFrameIdx.cols());
    assertEquals(bgrImageIdx.channels(), bgrFrameIdx.channels());
    for (int i = 0; i < bgrImageIdx.rows(); i++) {
        for (int j = 0; j < bgrImageIdx.cols(); j++) {
            for (int k = 0; k < bgrImageIdx.channels(); k++) {
                assertEquals((float) bgrImageIdx.get(i, j, k), (float) bgrFrameIdx.get(i, j, k), 1.0f);
            }
        }
    }
    bgrImageIdx.release();
    bgrFrameIdx.release();
    Frame grayFrame = new Frame(1024 + 1, 768, Frame.DEPTH_UBYTE, 1);
    Frame colorFrame = new Frame(640 + 1, 480, Frame.DEPTH_UBYTE, 3);
    UByteIndexer grayFrameIdx = grayFrame.createIndexer();
    for (int i = 0; i < grayFrameIdx.rows(); i++) {
        for (int j = 0; j < grayFrameIdx.cols(); j++) {
            grayFrameIdx.put(i, j, i + j);
        }
    }
    UByteIndexer colorFrameIdx = colorFrame.createIndexer();
    for (int i = 0; i < colorFrameIdx.rows(); i++) {
        for (int j = 0; j < colorFrameIdx.cols(); j++) {
            for (int k = 0; k < colorFrameIdx.channels(); k++) {
                colorFrameIdx.put(i, j, k, i + j + k);
            }
        }
    }
    width = grayFrame.imageWidth;
    height = grayFrame.imageHeight;
    int stride = grayFrame.imageStride;
    int rowBytes = width * 4;
    ByteBuffer in = (ByteBuffer) grayFrame.image[0];
    ByteBuffer buffer = converter.gray2rgba(in, width, height, stride, rowBytes);
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            // GRAY -> RGBA
            byte B = in.get(y * stride + x);
            assertEquals(buffer.get(y * rowBytes + 4 * x), B);
            assertEquals(buffer.get(y * rowBytes + 4 * x + 1), B);
            assertEquals(buffer.get(y * rowBytes + 4 * x + 2), B);
            assertEquals(buffer.get(y * rowBytes + 4 * x + 3), (byte) 0xFF);
        }
    }
    width = colorFrame.imageWidth;
    height = colorFrame.imageHeight;
    stride = colorFrame.imageStride;
    rowBytes = width * 4;
    in = (ByteBuffer) colorFrame.image[0];
    buffer = converter.bgr2rgba(in, width, height, stride, rowBytes);
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            // BGR -> RGBA
            byte B = in.get(y * stride + 3 * x);
            byte G = in.get(y * stride + 3 * x + 1);
            byte R = in.get(y * stride + 3 * x + 2);
            assertEquals(buffer.get(y * rowBytes + 4 * x), R);
            assertEquals(buffer.get(y * rowBytes + 4 * x + 1), G);
            assertEquals(buffer.get(y * rowBytes + 4 * x + 2), B);
            assertEquals(buffer.get(y * rowBytes + 4 * x + 3), (byte) 0xFF);
        }
    }
    colorFrameIdx.release();
    grayFrameIdx.release();
    converter.close();
    colorFrame.close();
    grayFrame.close();
}