Example use of org.bytedeco.javacpp.FloatPointer in the deeplearning4j project (by deeplearning4j): the readFromDataSet method of the NDArrayHDF5Reader class.
/**
 * Reads the full contents of an HDF5 data set into an ND4J {@link DataBuffer}.
 *
 * @param dataSet the HDF5 data set to read; expected to contain {@code total} native floats
 * @param total   the total number of float elements to read
 * @return a newly created ND4J buffer holding the data
 */
private DataBuffer readFromDataSet(hdf5.DataSet dataSet, int total) {
    float[] dataBuffer = new float[total];
    // FloatPointer copies the array into off-heap native memory so HDF5 can write into it.
    FloatPointer fp = new FloatPointer(dataBuffer);
    try {
        dataSet.read(fp, new hdf5.DataType(hdf5.PredType.NATIVE_FLOAT()));
        // Copy the natively written values back into the Java array.
        fp.get(dataBuffer);
    } finally {
        // FIX: release the native allocation eagerly. The GC does not see off-heap
        // memory pressure, so leaking these pointers can exhaust native memory long
        // before a collection is triggered.
        fp.deallocate();
    }
    return Nd4j.createBuffer(dataBuffer);
}
Example use of org.bytedeco.javacpp.FloatPointer in the bigbluebutton project (by bigbluebutton): the createIndexer method of the Frame class.
/**
 * Returns an {@link Indexer} for the <i>i</i>th image plane.
 *
 * <p>The backing store is chosen in order of preference: the plane's Java
 * array if the buffer has one, the NIO buffer itself when {@code direct} is
 * requested, and otherwise a JavaCPP pointer wrapping the buffer.
 *
 * @param direct whether an indexer backed directly by the NIO buffer is acceptable
 * @param i      index of the image plane to wrap
 * @return an indexer over the plane, or {@code null} for an unrecognized depth
 */
public <I extends Indexer> I createIndexer(boolean direct, int i) {
    long[] sizes = { imageHeight, imageWidth, imageChannels };
    long[] strides = { imageStride, imageChannels, 1 };
    Buffer buffer = image[i];
    Object array = buffer.hasArray() ? buffer.array() : null;
    switch (imageDepth) {
        case DEPTH_UBYTE:
            if (array != null) {
                return (I) UByteIndexer.create((byte[]) array, sizes, strides);
            } else if (direct) {
                return (I) UByteIndexer.create((ByteBuffer) buffer, sizes, strides);
            } else {
                return (I) UByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false);
            }
        case DEPTH_BYTE:
            if (array != null) {
                return (I) ByteIndexer.create((byte[]) array, sizes, strides);
            } else if (direct) {
                return (I) ByteIndexer.create((ByteBuffer) buffer, sizes, strides);
            } else {
                return (I) ByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false);
            }
        case DEPTH_USHORT:
            if (array != null) {
                return (I) UShortIndexer.create((short[]) array, sizes, strides);
            } else if (direct) {
                return (I) UShortIndexer.create((ShortBuffer) buffer, sizes, strides);
            } else {
                return (I) UShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false);
            }
        case DEPTH_SHORT:
            if (array != null) {
                return (I) ShortIndexer.create((short[]) array, sizes, strides);
            } else if (direct) {
                return (I) ShortIndexer.create((ShortBuffer) buffer, sizes, strides);
            } else {
                return (I) ShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false);
            }
        case DEPTH_INT:
            if (array != null) {
                return (I) IntIndexer.create((int[]) array, sizes, strides);
            } else if (direct) {
                return (I) IntIndexer.create((IntBuffer) buffer, sizes, strides);
            } else {
                return (I) IntIndexer.create(new IntPointer((IntBuffer) buffer), sizes, strides, false);
            }
        case DEPTH_LONG:
            if (array != null) {
                return (I) LongIndexer.create((long[]) array, sizes, strides);
            } else if (direct) {
                return (I) LongIndexer.create((LongBuffer) buffer, sizes, strides);
            } else {
                return (I) LongIndexer.create(new LongPointer((LongBuffer) buffer), sizes, strides, false);
            }
        case DEPTH_FLOAT:
            if (array != null) {
                return (I) FloatIndexer.create((float[]) array, sizes, strides);
            } else if (direct) {
                return (I) FloatIndexer.create((FloatBuffer) buffer, sizes, strides);
            } else {
                return (I) FloatIndexer.create(new FloatPointer((FloatBuffer) buffer), sizes, strides, false);
            }
        case DEPTH_DOUBLE:
            if (array != null) {
                return (I) DoubleIndexer.create((double[]) array, sizes, strides);
            } else if (direct) {
                return (I) DoubleIndexer.create((DoubleBuffer) buffer, sizes, strides);
            } else {
                return (I) DoubleIndexer.create(new DoublePointer((DoubleBuffer) buffer), sizes, strides, false);
            }
        default:
            assert false;
    }
    return null;
}
Example use of org.bytedeco.javacpp.FloatPointer in the javacv project (by bytedeco): the main method of the OpticalFlowTracker class.
/**
 * Tracks sparse features from image0.png to image1.png with the pyramidal
 * Lucas-Kanade optical flow algorithm and saves/displays the flow vectors
 * drawn on the first frame.
 */
public static void main(String[] args) {
    // Load two grayscale frames; flow is computed from imgA to imgB.
    IplImage imgA = cvLoadImage("image0.png", CV_LOAD_IMAGE_GRAYSCALE);
    IplImage imgB = cvLoadImage("image1.png", CV_LOAD_IMAGE_GRAYSCALE);
    // FIX: fail fast with a clear message instead of an opaque NPE inside
    // OpenCV when either input file is missing.
    if (imgA == null || imgB == null) {
        System.err.println("Could not load image0.png and/or image1.png");
        return;
    }
    CvSize img_sz = cvGetSize(imgA);
    int win_size = 15;
    // Unmodified copy of the first frame, used as the canvas for drawing.
    IplImage imgC = cvLoadImage("image0.png", CV_LOAD_IMAGE_UNCHANGED);
    // Find good features to track (Shi-Tomasi corners), then refine them
    // to sub-pixel accuracy.
    IplImage eig_image = cvCreateImage(img_sz, IPL_DEPTH_32F, 1);
    IplImage tmp_image = cvCreateImage(img_sz, IPL_DEPTH_32F, 1);
    IntPointer corner_count = new IntPointer(1).put(MAX_CORNERS);
    CvPoint2D32f cornersA = new CvPoint2D32f(MAX_CORNERS);
    CvArr mask = null;
    cvGoodFeaturesToTrack(imgA, eig_image, tmp_image, cornersA, corner_count, 0.05, 5.0, mask, 3, 0, 0.04);
    cvFindCornerSubPix(imgA, cornersA, corner_count.get(), cvSize(win_size, win_size), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));
    // Call the Lucas-Kanade algorithm; features_found/feature_errors are
    // per-corner output arrays filled by OpenCV.
    BytePointer features_found = new BytePointer(MAX_CORNERS);
    FloatPointer feature_errors = new FloatPointer(MAX_CORNERS);
    // Scratch pyramid size recommended by the cvCalcOpticalFlowPyrLK docs.
    CvSize pyr_sz = cvSize(imgA.width() + 8, imgB.height() / 3);
    IplImage pyrA = cvCreateImage(pyr_sz, IPL_DEPTH_32F, 1);
    IplImage pyrB = cvCreateImage(pyr_sz, IPL_DEPTH_32F, 1);
    CvPoint2D32f cornersB = new CvPoint2D32f(MAX_CORNERS);
    cvCalcOpticalFlowPyrLK(imgA, imgB, pyrA, pyrB, cornersA, cornersB, corner_count.get(), cvSize(win_size, win_size), 5, features_found, feature_errors, cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.3), 0);
    // Draw a line from each successfully tracked corner to its new position.
    for (int i = 0; i < corner_count.get(); i++) {
        if (features_found.get(i) == 0 || feature_errors.get(i) > 550) {
            // FIX: "/n" was a typo for the newline escape "\n".
            System.out.println("Error is " + feature_errors.get(i) + "\n");
            continue;
        }
        System.out.println("Got it\n");
        cornersA.position(i);
        cornersB.position(i);
        CvPoint p0 = cvPoint(Math.round(cornersA.x()), Math.round(cornersA.y()));
        CvPoint p1 = cvPoint(Math.round(cornersB.x()), Math.round(cornersB.y()));
        cvLine(imgC, p0, p1, CV_RGB(255, 0, 0), 2, 8, 0);
    }
    cvSaveImage("image0-1.png", imgC);
    cvNamedWindow("LKpyr_OpticalFlow", 0);
    cvShowImage("LKpyr_OpticalFlow", imgC);
    cvWaitKey(0);
}
Example use of org.bytedeco.javacpp.FloatPointer in the javacv project (by bytedeco): the recordSamples method of the FFmpegFrameRecorder class.
/**
 * Records a batch of audio samples, converting them with libswresample to the
 * codec's sample format and rate, and writing full frames as they fill up.
 *
 * <p>Passing {@code samples == null} flushes: any samples buffered in
 * {@code samples_out} are written out and the encoder is drained.
 *
 * <p>Planar audio is expressed as one Buffer per channel ({@code samples.length > 1});
 * interleaved audio as a single Buffer.
 *
 * @param sampleRate    input sample rate; {@code <= 0} means "use the codec's rate"
 * @param audioChannels input channel count; {@code <= 0} means "use the codec's count"
 * @param samples       input sample buffers (Byte/Short/Int/Float/DoubleBuffer), or null to flush
 * @return for non-null input, whether the last written frame was a key frame;
 *         for a flush, the result of draining the encoder
 * @throws Exception if no audio stream exists, the buffer type is unsupported,
 *                   or swresample allocation/initialization/conversion fails
 */
public boolean recordSamples(int sampleRate, int audioChannels, Buffer... samples) throws Exception {
    if (audio_st == null) {
        throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
    }
    // Flush path: write out whatever partial frame is buffered, then drain the encoder.
    if (samples == null && samples_out[0].position() > 0) {
        // Typically samples_out[0].limit() is double the audio_input_frame_size --> sampleDivisor = 2
        double sampleDivisor = Math.floor((int) Math.min(samples_out[0].limit(), Integer.MAX_VALUE) / audio_input_frame_size);
        writeSamples((int) Math.floor((int) samples_out[0].position() / sampleDivisor));
        return record((AVFrame) null);
    }
    int ret;
    // Fall back to the codec's configured rate/channels when the caller passes <= 0.
    if (sampleRate <= 0) {
        sampleRate = audio_c.sample_rate();
    }
    if (audioChannels <= 0) {
        audioChannels = audio_c.channels();
    }
    int inputSize = samples != null ? samples[0].limit() - samples[0].position() : 0;
    int inputFormat = samples_format;
    // Planar input (one buffer per channel) means each buffer carries 1 channel.
    int inputChannels = samples != null && samples.length > 1 ? 1 : audioChannels;
    int inputDepth = 0;  // bytes per sample of the input; set per buffer type below
    int outputFormat = audio_c.sample_fmt();
    int outputChannels = samples_out.length > 1 ? 1 : audio_c.channels();
    int outputDepth = av_get_bytes_per_sample(outputFormat);
    // Wrap each input NIO buffer in a matching JavaCPP pointer, reusing the
    // previously allocated pointer when it is large enough and the buffer is
    // array-backed (cheap bulk copy); otherwise wrap the buffer directly.
    if (samples != null && samples[0] instanceof ByteBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
        inputDepth = 1;
        for (int i = 0; i < samples.length; i++) {
            ByteBuffer b = (ByteBuffer) samples[i];
            if (samples_in[i] instanceof BytePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((BytePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new BytePointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof ShortBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
        inputDepth = 2;
        for (int i = 0; i < samples.length; i++) {
            ShortBuffer b = (ShortBuffer) samples[i];
            if (samples_in[i] instanceof ShortPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((ShortPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
            } else {
                samples_in[i] = new ShortPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof IntBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            IntBuffer b = (IntBuffer) samples[i];
            if (samples_in[i] instanceof IntPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((IntPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
            } else {
                samples_in[i] = new IntPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof FloatBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            FloatBuffer b = (FloatBuffer) samples[i];
            if (samples_in[i] instanceof FloatPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((FloatPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new FloatPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof DoubleBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
        inputDepth = 8;
        for (int i = 0; i < samples.length; i++) {
            DoubleBuffer b = (DoubleBuffer) samples[i];
            if (samples_in[i] instanceof DoublePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((DoublePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new DoublePointer(b);
            }
        }
    } else if (samples != null) {
        throw new Exception("Audio samples Buffer has unsupported type: " + samples);
    }
    // (Re)build the resampler context whenever the input layout, format, or rate changes.
    if (samples_convert_ctx == null || samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate) {
        samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, audio_c.channel_layout(), outputFormat, audio_c.sample_rate(), av_get_default_channel_layout(audioChannels), inputFormat, sampleRate, 0, null);
        if (samples_convert_ctx == null) {
            throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
        } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
            throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
        }
        samples_channels = audioChannels;
        samples_format = inputFormat;
        samples_rate = sampleRate;
    }
    // Convert pointer position/limit from sample units to byte units for swr_convert.
    for (int i = 0; samples != null && i < samples.length; i++) {
        samples_in[i].position(samples_in[i].position() * inputDepth).limit((samples_in[i].position() + inputSize) * inputDepth);
    }
    // Convert in chunks until swr_convert produces no more output.
    while (true) {
        // Available input/output counts in samples-per-channel.
        int inputCount = (int) Math.min(samples != null ? (samples_in[0].limit() - samples_in[0].position()) / (inputChannels * inputDepth) : 0, Integer.MAX_VALUE);
        int outputCount = (int) Math.min((samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth), Integer.MAX_VALUE);
        // Don't feed more input than the remaining output space can absorb after resampling.
        inputCount = Math.min(inputCount, (outputCount * sampleRate + audio_c.sample_rate() - 1) / audio_c.sample_rate());
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in_ptr.put(i, samples_in[i]);
        }
        for (int i = 0; i < samples_out.length; i++) {
            samples_out_ptr.put(i, samples_out[i]);
        }
        if ((ret = swr_convert(samples_convert_ctx, samples_out_ptr, outputCount, samples_in_ptr, inputCount)) < 0) {
            throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
        } else if (ret == 0) {
            // No output produced: either all input is consumed or more input is needed.
            break;
        }
        // Advance input positions by what was consumed and output positions by
        // what was produced (ret = output samples per channel).
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in[i].position(samples_in[i].position() + inputCount * inputChannels * inputDepth);
        }
        for (int i = 0; i < samples_out.length; i++) {
            samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth);
        }
        // Write a full frame once the output buffer is filled (or on flush).
        if (samples == null || samples_out[0].position() >= samples_out[0].limit()) {
            writeSamples(audio_input_frame_size);
        }
    }
    return samples != null ? frame.key_frame() != 0 : record((AVFrame) null);
}
Example use of org.bytedeco.javacpp.FloatPointer in the javacv project (by bytedeco): the createIndexer method of the Frame class.
/**
 * Returns an {@link Indexer} for the <i>i</i>th image plane.
 *
 * <p>The backing store is chosen in order of preference: the plane's Java
 * array when the NIO buffer has one, the NIO buffer itself when
 * {@code direct} is requested, and otherwise a JavaCPP pointer wrapping the
 * buffer. Every indexer is registered with this frame via
 * {@code indexable(this)}.
 *
 * @param direct whether an indexer backed directly by the NIO buffer is acceptable
 * @param i      index of the image plane to wrap
 * @return an indexer over the plane, or {@code null} for an unrecognized depth
 */
public <I extends Indexer> I createIndexer(boolean direct, int i) {
// Indexer geometry: rows x columns x channels, with the row stride taken
// from imageStride (which may exceed imageWidth * imageChannels for padded rows).
long[] sizes = { imageHeight, imageWidth, imageChannels };
long[] strides = { imageStride, imageChannels, 1 };
Buffer buffer = image[i];
Object array = buffer.hasArray() ? buffer.array() : null;
// Dispatch on pixel depth; each case follows the same array/direct/pointer preference.
switch(imageDepth) {
case DEPTH_UBYTE:
return array != null ? (I) UByteIndexer.create((byte[]) array, sizes, strides).indexable(this) : direct ? (I) UByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this) : (I) UByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
case DEPTH_BYTE:
return array != null ? (I) ByteIndexer.create((byte[]) array, sizes, strides).indexable(this) : direct ? (I) ByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this) : (I) ByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
case DEPTH_USHORT:
return array != null ? (I) UShortIndexer.create((short[]) array, sizes, strides).indexable(this) : direct ? (I) UShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this) : (I) UShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
case DEPTH_SHORT:
return array != null ? (I) ShortIndexer.create((short[]) array, sizes, strides).indexable(this) : direct ? (I) ShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this) : (I) ShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
case DEPTH_INT:
return array != null ? (I) IntIndexer.create((int[]) array, sizes, strides).indexable(this) : direct ? (I) IntIndexer.create((IntBuffer) buffer, sizes, strides).indexable(this) : (I) IntIndexer.create(new IntPointer((IntBuffer) buffer), sizes, strides, false).indexable(this);
case DEPTH_LONG:
return array != null ? (I) LongIndexer.create((long[]) array, sizes, strides).indexable(this) : direct ? (I) LongIndexer.create((LongBuffer) buffer, sizes, strides).indexable(this) : (I) LongIndexer.create(new LongPointer((LongBuffer) buffer), sizes, strides, false).indexable(this);
case DEPTH_FLOAT:
return array != null ? (I) FloatIndexer.create((float[]) array, sizes, strides).indexable(this) : direct ? (I) FloatIndexer.create((FloatBuffer) buffer, sizes, strides).indexable(this) : (I) FloatIndexer.create(new FloatPointer((FloatBuffer) buffer), sizes, strides, false).indexable(this);
case DEPTH_DOUBLE:
return array != null ? (I) DoubleIndexer.create((double[]) array, sizes, strides).indexable(this) : direct ? (I) DoubleIndexer.create((DoubleBuffer) buffer, sizes, strides).indexable(this) : (I) DoubleIndexer.create(new DoublePointer((DoubleBuffer) buffer), sizes, strides, false).indexable(this);
default:
// Should be unreachable: imageDepth is always one of the DEPTH_* constants.
assert false;
}
return null;
}
Aggregations