Use of java.nio.ShortBuffer in project javacv by bytedeco: the FFmpegFrameRecorder class, recordSamples method.
public boolean recordSamples(int sampleRate, int audioChannels, Buffer... samples) throws Exception {
    if (audio_st == null) {
        throw new Exception("No audio output stream (Is audioChannels > 0 and has start() been called?)");
    }
    if (samples == null && samples_out[0].position() > 0) {
        // Typically samples_out[0].limit() is double the audio_input_frame_size --> sampleDivisor = 2
        double sampleDivisor = Math.floor((int) Math.min(samples_out[0].limit(), Integer.MAX_VALUE) / audio_input_frame_size);
        writeSamples((int) Math.floor((int) samples_out[0].position() / sampleDivisor));
        return record((AVFrame) null);
    }
    int ret;
    if (sampleRate <= 0) {
        sampleRate = audio_c.sample_rate();
    }
    if (audioChannels <= 0) {
        audioChannels = audio_c.channels();
    }
    int inputSize = samples != null ? samples[0].limit() - samples[0].position() : 0;
    int inputFormat = samples_format;
    int inputChannels = samples != null && samples.length > 1 ? 1 : audioChannels;
    int inputDepth = 0;
    int outputFormat = audio_c.sample_fmt();
    int outputChannels = samples_out.length > 1 ? 1 : audio_c.channels();
    int outputDepth = av_get_bytes_per_sample(outputFormat);
    if (samples != null && samples[0] instanceof ByteBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_U8P : AV_SAMPLE_FMT_U8;
        inputDepth = 1;
        for (int i = 0; i < samples.length; i++) {
            ByteBuffer b = (ByteBuffer) samples[i];
            if (samples_in[i] instanceof BytePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((BytePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new BytePointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof ShortBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S16P : AV_SAMPLE_FMT_S16;
        inputDepth = 2;
        for (int i = 0; i < samples.length; i++) {
            ShortBuffer b = (ShortBuffer) samples[i];
            if (samples_in[i] instanceof ShortPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((ShortPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
            } else {
                samples_in[i] = new ShortPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof IntBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_S32P : AV_SAMPLE_FMT_S32;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            IntBuffer b = (IntBuffer) samples[i];
            if (samples_in[i] instanceof IntPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((IntPointer) samples_in[i]).position(0).put(b.array(), samples[i].position(), inputSize);
            } else {
                samples_in[i] = new IntPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof FloatBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_FLTP : AV_SAMPLE_FMT_FLT;
        inputDepth = 4;
        for (int i = 0; i < samples.length; i++) {
            FloatBuffer b = (FloatBuffer) samples[i];
            if (samples_in[i] instanceof FloatPointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((FloatPointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new FloatPointer(b);
            }
        }
    } else if (samples != null && samples[0] instanceof DoubleBuffer) {
        inputFormat = samples.length > 1 ? AV_SAMPLE_FMT_DBLP : AV_SAMPLE_FMT_DBL;
        inputDepth = 8;
        for (int i = 0; i < samples.length; i++) {
            DoubleBuffer b = (DoubleBuffer) samples[i];
            if (samples_in[i] instanceof DoublePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) {
                ((DoublePointer) samples_in[i]).position(0).put(b.array(), b.position(), inputSize);
            } else {
                samples_in[i] = new DoublePointer(b);
            }
        }
    } else if (samples != null) {
        throw new Exception("Audio samples Buffer has unsupported type: " + samples);
    }
    if (samples_convert_ctx == null || samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate) {
        samples_convert_ctx = swr_alloc_set_opts(samples_convert_ctx, audio_c.channel_layout(), outputFormat, audio_c.sample_rate(),
                av_get_default_channel_layout(audioChannels), inputFormat, sampleRate, 0, null);
        if (samples_convert_ctx == null) {
            throw new Exception("swr_alloc_set_opts() error: Cannot allocate the conversion context.");
        } else if ((ret = swr_init(samples_convert_ctx)) < 0) {
            throw new Exception("swr_init() error " + ret + ": Cannot initialize the conversion context.");
        }
        samples_channels = audioChannels;
        samples_format = inputFormat;
        samples_rate = sampleRate;
    }
    for (int i = 0; samples != null && i < samples.length; i++) {
        samples_in[i].position(samples_in[i].position() * inputDepth).limit((samples_in[i].position() + inputSize) * inputDepth);
    }
    while (true) {
        int inputCount = (int) Math.min(samples != null ? (samples_in[0].limit() - samples_in[0].position()) / (inputChannels * inputDepth) : 0, Integer.MAX_VALUE);
        int outputCount = (int) Math.min((samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth), Integer.MAX_VALUE);
        inputCount = Math.min(inputCount, (outputCount * sampleRate + audio_c.sample_rate() - 1) / audio_c.sample_rate());
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in_ptr.put(i, samples_in[i]);
        }
        for (int i = 0; i < samples_out.length; i++) {
            samples_out_ptr.put(i, samples_out[i]);
        }
        if ((ret = swr_convert(samples_convert_ctx, samples_out_ptr, outputCount, samples_in_ptr, inputCount)) < 0) {
            throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
        } else if (ret == 0) {
            break;
        }
        for (int i = 0; samples != null && i < samples.length; i++) {
            samples_in[i].position(samples_in[i].position() + inputCount * inputChannels * inputDepth);
        }
        for (int i = 0; i < samples_out.length; i++) {
            samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth);
        }
        if (samples == null || samples_out[0].position() >= samples_out[0].limit()) {
            writeSamples(audio_input_frame_size);
        }
    }
    return samples != null ? frame.key_frame() != 0 : record((AVFrame) null);
}
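For context, here is a minimal usage sketch, not taken from the javacv sources, of how recordSamples() might be fed 16-bit PCM held in a heap ShortBuffer, which the method maps to AV_SAMPLE_FMT_S16 as shown above. The file name "tone.wav", the "wav" container, and the reliance on that container's default PCM codec are assumptions for illustration.

import java.nio.ShortBuffer;
import org.bytedeco.javacv.FFmpegFrameRecorder;

public class RecordSamplesSketch {
    public static void main(String[] args) throws Exception {
        int sampleRate = 44100;
        int channels = 2;
        // Audio-only recorder; container and codec defaults are assumed suitable for PCM here.
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("tone.wav", channels);
        recorder.setFormat("wav");
        recorder.setSampleRate(sampleRate);
        recorder.start();

        // One second of an interleaved 440 Hz tone: each frame repeats the sample in both channels.
        ShortBuffer samples = ShortBuffer.allocate(sampleRate * channels);
        for (int i = 0; i < sampleRate; i++) {
            short s = (short) (Math.sin(2 * Math.PI * 440 * i / sampleRate) * Short.MAX_VALUE * 0.2);
            samples.put(s).put(s);
        }
        samples.flip();  // recordSamples() reads from position to limit

        recorder.recordSamples(sampleRate, channels, samples);
        recorder.stop();
        recorder.release();
    }
}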
Use of java.nio.ShortBuffer in project javacv by bytedeco: the FlyCapture2FrameGrabber class, grab method.
public Frame grab() throws FrameGrabber.Exception {
    Error error = camera.RetrieveBuffer(raw_image);
    if (error.notEquals(PGRERROR_OK)) {
        throw new FrameGrabber.Exception("flycaptureGrabImage2() Error " + error + " (Has start() been called?)");
    }
    int w = raw_image.GetCols();
    int h = raw_image.GetRows();
    int format = raw_image.GetPixelFormat();
    int depth = getDepth(format);
    int stride = raw_image.GetStride();
    int size = h * stride;
    int numChannels = getNumChannels(format);
    error = camera.ReadRegister(IMAGE_DATA_FORMAT, regOut);
    if (error.notEquals(PGRERROR_OK)) {
        throw new FrameGrabber.Exception("flycaptureGetCameraRegister() Error " + error);
    }
    ByteOrder frameEndian = (regOut[0] & 0x1) != 0 ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN;
    boolean alreadySwapped = false;
    boolean colorbayer = raw_image.GetBayerTileFormat() != NONE;
    boolean colorrgb = format == PIXEL_FORMAT_RGB8 || format == PIXEL_FORMAT_RGB16 || format == PIXEL_FORMAT_BGR || format == PIXEL_FORMAT_BGRU;
    boolean coloryuv = format == PIXEL_FORMAT_411YUV8 || format == PIXEL_FORMAT_422YUV8 || format == PIXEL_FORMAT_444YUV8;
    BytePointer imageData = raw_image.GetData().capacity(raw_image.GetDataSize());
    if ((depth == IPL_DEPTH_8U || frameEndian.equals(ByteOrder.nativeOrder()))
            && (imageMode == FrameGrabber.ImageMode.RAW
                || (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 3)
                || (imageMode == FrameGrabber.ImageMode.GRAY && numChannels == 1 && !colorbayer))) {
        if (return_image == null) {
            return_image = IplImage.createHeader(w, h, depth, numChannels);
        }
        return_image.widthStep(stride);
        return_image.imageSize(size);
        return_image.imageData(imageData);
    } else {
        if (return_image == null) {
            return_image = IplImage.create(w, h, depth, imageMode == FrameGrabber.ImageMode.COLOR ? 3 : 1);
        }
        if (temp_image == null) {
            if (imageMode == FrameGrabber.ImageMode.COLOR && (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) {
                temp_image = IplImage.create(w, h, depth, numChannels);
            } else if (imageMode == FrameGrabber.ImageMode.GRAY && colorbayer) {
                temp_image = IplImage.create(w, h, depth, 3);
            } else if (imageMode == FrameGrabber.ImageMode.GRAY && colorrgb) {
                temp_image = IplImage.createHeader(w, h, depth, 3);
            } else if (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
                temp_image = IplImage.createHeader(w, h, depth, 1);
            } else {
                temp_image = return_image;
            }
        }
        setStride(conv_image, temp_image.widthStep());
        conv_image.SetData(temp_image.imageData(), temp_image.width() * temp_image.height() * temp_image.depth());
        if (depth == IPL_DEPTH_8U) {
            setPixelFormat(conv_image, imageMode == FrameGrabber.ImageMode.RAW ? PIXEL_FORMAT_RAW8 : temp_image.nChannels() == 1 ? PIXEL_FORMAT_MONO8 : PIXEL_FORMAT_BGR);
        } else {
            setPixelFormat(conv_image, imageMode == FrameGrabber.ImageMode.RAW ? PIXEL_FORMAT_RAW16 : temp_image.nChannels() == 1 ? PIXEL_FORMAT_MONO16 : PIXEL_FORMAT_RGB16);
        }
        if (depth != IPL_DEPTH_8U && conv_image.GetPixelFormat() == format && conv_image.GetStride() == stride) {
            // we just need a copy to swap bytes..
            ShortBuffer in = imageData.asByteBuffer().order(frameEndian).asShortBuffer();
            ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
            alreadySwapped = true;
        } else if ((imageMode == FrameGrabber.ImageMode.GRAY && colorrgb)
                || (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) {
            temp_image.widthStep(stride);
            temp_image.imageSize(size);
            temp_image.imageData(imageData);
        } else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) {
            error = raw_image.Convert(conv_image);
            // error = flycaptureConvertImage(context, raw_image, conv_image);
            if (error.notEquals(PGRERROR_OK)) {
                PrintError(error);
                throw new FrameGrabber.Exception("raw_image.Convert Error " + error);
            }
        }
        if (!alreadySwapped && depth != IPL_DEPTH_8U && !frameEndian.equals(ByteOrder.nativeOrder())) {
            // ack, the camera's endianness doesn't correspond to our machine ...
            // swap bytes of 16-bit images
            ByteBuffer bb = temp_image.getByteBuffer();
            ShortBuffer in = bb.order(frameEndian).asShortBuffer();
            ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
        }
        if (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
            cvCvtColor(temp_image, return_image, CV_GRAY2BGR);
        } else if (imageMode == FrameGrabber.ImageMode.GRAY && (colorbayer || colorrgb)) {
            cvCvtColor(temp_image, return_image, CV_BGR2GRAY);
        }
    }
    int bayerFormat = cameraInfo.bayerTileFormat();
    switch (bayerFormat) {
        case BGGR:
            sensorPattern = SENSOR_PATTERN_BGGR;
            break;
        case GBRG:
            sensorPattern = SENSOR_PATTERN_GBRG;
            break;
        case GRBG:
            sensorPattern = SENSOR_PATTERN_GRBG;
            break;
        case RGGB:
            sensorPattern = SENSOR_PATTERN_RGGB;
            break;
        default:
            sensorPattern = -1L;
    }
    TimeStamp timeStamp = raw_image.GetTimeStamp();
    timestamp = timeStamp.seconds() * 1000000L + timeStamp.microSeconds();
    return converter.convert(return_image);
}
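The two branches above that copy one ShortBuffer view into another are what perform the endian swap: reading 16-bit values through a view with the camera's byte order and writing them through a view with the native order rewrites each value's byte layout while preserving its numeric value. A self-contained plain-NIO sketch of that idiom, independent of the FlyCapture API (the class name and sample bytes are made up):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

public class SwapBytesSketch {
    public static void main(String[] args) {
        // Pretend the camera delivered two big-endian 16-bit pixels: 0x1234 and 0xABCD.
        byte[] raw = { 0x12, 0x34, (byte) 0xAB, (byte) 0xCD };

        ShortBuffer in = ByteBuffer.wrap(raw).order(ByteOrder.BIG_ENDIAN).asShortBuffer();
        ByteBuffer dst = ByteBuffer.allocate(raw.length).order(ByteOrder.nativeOrder());
        ShortBuffer out = dst.asShortBuffer();
        out.put(in);  // values are copied as shorts, so dst stores them in native byte order

        // The short values are unchanged; on a little-endian machine the backing bytes are 34 12 CD AB.
        System.out.printf("shorts: 0x%04X 0x%04X%n", out.get(0) & 0xFFFF, out.get(1) & 0xFFFF);
        System.out.printf("bytes:  %02X %02X %02X %02X%n",
                dst.get(0) & 0xFF, dst.get(1) & 0xFF, dst.get(2) & 0xFF, dst.get(3) & 0xFF);
    }
}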
Use of java.nio.ShortBuffer in project javacv by bytedeco: the Frame class, createIndexer method.
/**
* Returns an {@link Indexer} for the <i>i</i>th image plane.
*/
public <I extends Indexer> I createIndexer(boolean direct, int i) {
    long[] sizes = { imageHeight, imageWidth, imageChannels };
    long[] strides = { imageStride, imageChannels, 1 };
    Buffer buffer = image[i];
    Object array = buffer.hasArray() ? buffer.array() : null;
    switch (imageDepth) {
        case DEPTH_UBYTE:
            return array != null ? (I) UByteIndexer.create((byte[]) array, sizes, strides).indexable(this)
                    : direct ? (I) UByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this)
                    : (I) UByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_BYTE:
            return array != null ? (I) ByteIndexer.create((byte[]) array, sizes, strides).indexable(this)
                    : direct ? (I) ByteIndexer.create((ByteBuffer) buffer, sizes, strides).indexable(this)
                    : (I) ByteIndexer.create(new BytePointer((ByteBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_USHORT:
            return array != null ? (I) UShortIndexer.create((short[]) array, sizes, strides).indexable(this)
                    : direct ? (I) UShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this)
                    : (I) UShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_SHORT:
            return array != null ? (I) ShortIndexer.create((short[]) array, sizes, strides).indexable(this)
                    : direct ? (I) ShortIndexer.create((ShortBuffer) buffer, sizes, strides).indexable(this)
                    : (I) ShortIndexer.create(new ShortPointer((ShortBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_INT:
            return array != null ? (I) IntIndexer.create((int[]) array, sizes, strides).indexable(this)
                    : direct ? (I) IntIndexer.create((IntBuffer) buffer, sizes, strides).indexable(this)
                    : (I) IntIndexer.create(new IntPointer((IntBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_LONG:
            return array != null ? (I) LongIndexer.create((long[]) array, sizes, strides).indexable(this)
                    : direct ? (I) LongIndexer.create((LongBuffer) buffer, sizes, strides).indexable(this)
                    : (I) LongIndexer.create(new LongPointer((LongBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_FLOAT:
            return array != null ? (I) FloatIndexer.create((float[]) array, sizes, strides).indexable(this)
                    : direct ? (I) FloatIndexer.create((FloatBuffer) buffer, sizes, strides).indexable(this)
                    : (I) FloatIndexer.create(new FloatPointer((FloatBuffer) buffer), sizes, strides, false).indexable(this);
        case DEPTH_DOUBLE:
            return array != null ? (I) DoubleIndexer.create((double[]) array, sizes, strides).indexable(this)
                    : direct ? (I) DoubleIndexer.create((DoubleBuffer) buffer, sizes, strides).indexable(this)
                    : (I) DoubleIndexer.create(new DoublePointer((DoubleBuffer) buffer), sizes, strides, false).indexable(this);
        default:
            assert false;
    }
    return null;
}
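A short sketch, not part of the Frame class itself, of how calling code might use this indexer. It takes the direct-buffer path (direct = true, plane 0), which for a 16-bit single-channel frame ends up in the UShortIndexer.create((ShortBuffer) buffer, ...) branch above; the class name, image dimensions, and gradient fill are arbitrary choices for illustration.

import org.bytedeco.javacpp.indexer.UShortIndexer;
import org.bytedeco.javacv.Frame;

public class FrameIndexerSketch {
    public static void main(String[] args) {
        // A single-channel 16-bit image backed by a direct NIO buffer.
        Frame frame = new Frame(640, 480, Frame.DEPTH_USHORT, 1);

        // Same entry point as shown above: direct buffer access, image plane 0.
        UShortIndexer idx = frame.createIndexer(true, 0);
        for (int y = 0; y < frame.imageHeight; y++) {
            for (int x = 0; x < frame.imageWidth; x++) {
                idx.put(y, x, 0, (x + y) & 0xFFFF);  // simple diagonal gradient
            }
        }
        System.out.println("center pixel = " + idx.get(240, 320, 0));  // 560 for this gradient
        idx.release();
    }
}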
Use of java.nio.ShortBuffer in project ShootAndRun by IonAgorria: the GameView class, createBatch method.
/**
* Creates a batch for the current drawables in the queue; this clears the queue.
*
* @param slot to set batch
* @param mode to use
* @param drawables to draw in the batch
* @param camera location when drawing
* @param rotation of camera when drawing
* @param color to set when drawing
* @return batch created
*/
public DrawableBatch createBatch(DrawableBatch.SLOT slot, DrawableBatch.MODE mode, List<Drawable> drawables, Vector3f camera, Vector3f rotation, Color color) {
    // Vertices calculation
    int vertexCount = 0;
    for (Drawable drawable : drawables) {
        vertexCount += drawable.getVertexCount();
    }
    // Create buffers
    FloatBuffer positionBuffer = Utilities.getDirectByteBuffer(vertexCount * Constants.STRIDE_POSITION).asFloatBuffer();
    FloatBuffer texCoordBuffer = Utilities.getDirectByteBuffer(vertexCount * Constants.STRIDE_TEXCOORD).asFloatBuffer();
    // Fill buffers with each drawable
    TextureAtlas.Atlas atlas = null;
    int i = 0;
    int indicesCount = 0;
    List<short[]> indicesList = new ArrayList<>();
    for (Drawable drawable : drawables) {
        float[] positions = drawable.getVertexPositions();
        positionBuffer.put(positions);
        texCoordBuffer.put(drawable.getVertexTexCoords());
        short[] indices = drawable.getVertexIndices(i);
        indicesCount += indices.length;
        indicesList.add(indices);
        // Increment indices index
        i += positions.length / Constants.FLOATS_PER_POSITION;
        // Check atlas is the same
        TextureAtlas.Atlas drawableAtlas = drawable.getAtlas();
        if (atlas == null) {
            atlas = drawableAtlas;
        } else if (atlas != drawableAtlas) {
            throw new RuntimeException("This batch uses " + atlas + " but drawable has " + drawableAtlas);
        }
    }
    // Create indices buffer
    ShortBuffer indicesBuffer = Utilities.getDirectByteBuffer(indicesCount * 2).asShortBuffer();
    for (short[] indices : indicesList) {
        indicesBuffer.put(indices);
    }
    // Reset positions for reading
    positionBuffer.position(0);
    texCoordBuffer.position(0);
    indicesBuffer.position(0);
    // Create and return batch
    return new DrawableBatch(slot, mode, camera, rotation, new Color(color), atlas, positionBuffer, texCoordBuffer, indicesBuffer, indicesCount);
}
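The project's Utilities.getDirectByteBuffer() helper is not shown here; the sketch below assumes it simply allocates a native-order direct ByteBuffer, and demonstrates the same pattern of packing 16-bit vertex indices (2 bytes each) and rewinding the buffer before handing it to a consumer such as glDrawElements. The class and method names in the sketch are illustrative only.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

public class IndexBufferSketch {
    // Plain-NIO stand-in for the project's Utilities.getDirectByteBuffer() helper (assumed behaviour).
    static ByteBuffer getDirectByteBuffer(int bytes) {
        return ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder());
    }

    public static void main(String[] args) {
        // Two triangles sharing the four corners of a quad.
        short[] quadIndices = { 0, 1, 2, 2, 1, 3 };
        ShortBuffer indices = getDirectByteBuffer(quadIndices.length * 2).asShortBuffer();
        indices.put(quadIndices);
        indices.position(0);  // rewind for reading, just as createBatch() does before returning

        System.out.println("indices ready for drawing: " + indices.remaining());
    }
}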
Use of java.nio.ShortBuffer in project ffx by mjschnie: the Unsigned16BitIntegerMatrixBuf class, sendItems method.
// Hidden operations.
/**
* {@inheritDoc}
*
* Send as many items as possible from this buffer to the given byte buffer.
* <P>
* The <TT>sendItems()</TT> method must not block the calling thread; if it
* does, all message I/O in MP will be blocked.
*/
protected int sendItems(int i, ByteBuffer buffer) {
    ShortBuffer shortbuffer = buffer.asShortBuffer();
    int n = 0;
    int r = i2r(i);
    int row = r * myRowStride + myLowerRow;
    int c = i2c(i);
    int col = c * myColStride + myLowerCol;
    int ncols = Math.min(myColCount - c, shortbuffer.remaining());
    while (r < myRowCount && ncols > 0) {
        int[] myMatrix_row = myMatrix[row];
        while (c < ncols) {
            shortbuffer.put((short) myMatrix_row[col]);
            ++c;
            col += myColStride;
        }
        n += ncols;
        ++r;
        row += myRowStride;
        c = 0;
        col = myLowerCol;
        ncols = Math.min(myColCount, shortbuffer.remaining());
    }
    buffer.position(buffer.position() + 2 * n);
    return n;
}
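A standalone sketch of the same packing idiom, outside the message-buffer classes: a ShortBuffer view is used to write ints as unsigned 16-bit values, and because the view keeps its own position, the underlying ByteBuffer's position has to be advanced by 2 * n afterwards, exactly as sendItems() does. The class and helper names below are made up for illustration.

import java.nio.ByteBuffer;
import java.nio.ShortBuffer;

public class Pack16BitSketch {
    // Packs as many ints as fit into the byte buffer as unsigned 16-bit values.
    // Returns the number of items written, mirroring the contract of sendItems() above.
    static int packUnsigned16(int[] values, int offset, ByteBuffer buffer) {
        ShortBuffer shorts = buffer.asShortBuffer();  // view shares content but not position
        int n = Math.min(values.length - offset, shorts.remaining());
        for (int k = 0; k < n; k++) {
            shorts.put((short) values[offset + k]);   // the narrowing cast keeps the low 16 bits
        }
        buffer.position(buffer.position() + 2 * n);   // advance the byte buffer ourselves
        return n;
    }

    public static void main(String[] args) {
        int[] data = { 0, 1, 65535, 70000 };          // 70000 is truncated to its low 16 bits
        ByteBuffer buf = ByteBuffer.allocate(6);      // room for only three 16-bit values
        int sent = packUnsigned16(data, 0, buf);
        System.out.println("sent " + sent + " items, buffer position " + buf.position());
    }
}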