Use of java.nio.ShortBuffer in project narchy by automenta.
The class SND_MIX, method TransferPaintBuffer.
/*
 * ===================
 * S_TransferPaintBuffer
 * ===================
 */
static void TransferPaintBuffer(int endtime) {
    int out_idx;
    int count;
    int out_mask;
    int p;
    int step;
    int val;
    // unsigned long *pbuf;
    ByteBuffer pbuf = ByteBuffer.wrap(dma.buffer);
    pbuf.order(ByteOrder.LITTLE_ENDIAN);
    if (SND_DMA.s_testsound.value != 0.0f) {
        int i;
        int count2;
        // write a fixed sine wave
        count2 = (endtime - paintedtime) * 2;
        int v;
        for (i = 0; i < count2; i += 2) {
            v = (int) (Math.sin((paintedtime + i) * 0.1) * 20000 * 256);
            paintbuffer.put(i, v);
            paintbuffer.put(i + 1, v);
        }
    }
    if (dma.samplebits == 16 && dma.channels == 2) {
        // optimized case
        TransferStereo16(pbuf, endtime);
    } else {
        // general case
        p = 0;
        count = (endtime - paintedtime) * dma.channels;
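        // dma.samples is assumed to be a power of two, so masking with
        // (dma.samples - 1) wraps indices around the output ring buffer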
        out_mask = dma.samples - 1;
        out_idx = paintedtime * dma.channels & out_mask;
        step = 3 - dma.channels;
        if (dma.samplebits == 16) {
            // short *out = (short *) pbuf;
            ShortBuffer out = pbuf.asShortBuffer();
            while (count-- > 0) {
                val = paintbuffer.get(p) >> 8;
                p += step;
                if (val > 0x7fff)
                    val = 0x7fff;
                else if (val < (short) 0x8000)
                    val = (short) 0x8000;
                out.put(out_idx, (short) val);
                // System.out.println(out_idx + " " + val);
                out_idx = (out_idx + 1) & out_mask;
            }
        } else if (dma.samplebits == 8) {
            // unsigned char *out = (unsigned char *) pbuf;
            while (count-- > 0) {
                val = paintbuffer.get(p) >> 8;
                p += step;
                if (val > 0x7fff)
                    val = 0x7fff;
                else if (val < (short) 0x8000)
                    val = (short) 0x8000;
                pbuf.put(out_idx, (byte) (val >>> 8));
                out_idx = (out_idx + 1) & out_mask;
            }
        }
    }
}
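A note on the idiom above: the mixer writes clamped 16-bit samples through a little-endian ShortBuffer view of the raw DMA byte array. A minimal standalone sketch of that pattern (the names ClampToDma, mix, and dmaBuffer are illustrative, not from jake2):
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
public class ClampToDma {
    public static void main(String[] args) {
        byte[] dmaBuffer = new byte[8];
        // view the raw output bytes as little-endian 16-bit samples
        ShortBuffer out = ByteBuffer.wrap(dmaBuffer).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
        int[] mix = { 40000, -50000, 1234, -1 }; // mixed values that may overflow a short
        for (int i = 0; i < mix.length; i++) {
            int val = mix[i];
            // clamp to the signed 16-bit range, as TransferPaintBuffer does
            if (val > 0x7fff)
                val = 0x7fff;
            else if (val < (short) 0x8000)
                val = (short) 0x8000;
            out.put(i, (short) val);
        }
        System.out.println(java.util.Arrays.toString(dmaBuffer));
    }
}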
Use of java.nio.ShortBuffer in project narchy by automenta.
The class Mesh, method GL_DrawAliasFrameLerp.
/**
 * GL_DrawAliasFrameLerp
 * <p>
 * Interpolates between two frames and origins.
 * FIXME: batch lerp all vertexes.
 */
void GL_DrawAliasFrameLerp(qfiles.dmdl_t paliashdr, float backlerp) {
    qfiles.daliasframe_t frame = paliashdr.aliasFrames[currententity.frame];
    int[] verts = frame.verts;
    qfiles.daliasframe_t oldframe = paliashdr.aliasFrames[currententity.oldframe];
    int[] ov = oldframe.verts;
    float alpha;
    if ((currententity.flags & Defines.RF_TRANSLUCENT) != 0)
        alpha = currententity.alpha;
    else
        alpha = 1.0f;
    // PMM - added double shell
    if ((currententity.flags & (Defines.RF_SHELL_RED | Defines.RF_SHELL_GREEN | Defines.RF_SHELL_BLUE | Defines.RF_SHELL_DOUBLE | Defines.RF_SHELL_HALF_DAM)) != 0)
        gl.glDisable(GL_TEXTURE_2D);
    float frontlerp = 1.0f - backlerp;
    float[] frontv = this.frontv;
    // move should be the delta back to the previous frame * backlerp
    Math3D.VectorSubtract(currententity.oldorigin, currententity.origin, frontv);
    float[][] vectors = this.vectors;
    Math3D.AngleVectors(currententity.angles, vectors[0], vectors[1], vectors[2]);
    float[] move = this.move;
    // forward
    move[0] = Math3D.DotProduct(frontv, vectors[0]);
    // left
    move[1] = -Math3D.DotProduct(frontv, vectors[1]);
    // up
    move[2] = Math3D.DotProduct(frontv, vectors[2]);
    Math3D.VectorAdd(move, oldframe.translate, move);
    float[] backv = this.backv;
    float[] translate = frame.translate;
    float[] scale = frame.scale;
    float[] oldScale = oldframe.scale;
    for (int i = 0; i < 3; i++) {
        move[i] = backlerp * move[i] + frontlerp * translate[i];
        frontv[i] = frontlerp * scale[i];
        backv[i] = backlerp * oldScale[i];
    }
    // optimization starts here ("ab hier wird optimiert")
    GL_LerpVerts(paliashdr.num_xyz, ov, verts, move, frontv, backv);
    gl.glEnableClientState(GL_VERTEX_ARRAY);
    gl.glVertexPointer(3, 0, vertexArrayBuf);
    // PMM - added double damage shell
    float[] shadelight = this.shadelight;
    if ((currententity.flags & (Defines.RF_SHELL_RED | Defines.RF_SHELL_GREEN | Defines.RF_SHELL_BLUE | Defines.RF_SHELL_DOUBLE | Defines.RF_SHELL_HALF_DAM)) != 0) {
        gl.glColor4f(shadelight[0], shadelight[1], shadelight[2], alpha);
    } else {
        gl.glEnableClientState(GL_COLOR_ARRAY);
        gl.glColorPointer(4, 0, colorArrayBuf);
        //
        // pre light everything
        //
        FloatBuffer color = colorArrayBuf;
        float l;
        int size = paliashdr.num_xyz;
        int j = 0;
        float[] shadedots = this.shadedots;
        for (int i = 0; i < size; i++) {
            l = shadedots[(verts[i] >>> 24) & 0xFF];
            color.put(j, l * shadelight[0]);
            color.put(j + 1, l * shadelight[1]);
            color.put(j + 2, l * shadelight[2]);
            color.put(j + 3, alpha);
            j += 4;
        }
    }
    gl.glClientActiveTextureARB(TEXTURE0);
    gl.glTexCoordPointer(2, 0, textureArrayBuf);
    gl.glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    int pos = 0;
    int[] counts = paliashdr.counts;
    ShortBuffer srcIndexBuf = null;
    FloatBuffer dstTextureCoords = textureArrayBuf;
    FloatBuffer srcTextureCoords = paliashdr.textureCoordBuf;
    int dstIndex = 0;
    int srcIndex = 0;
    int count;
    int mode;
    int size = counts.length;
    for (int j = 0; j < size; j++) {
        // get the vertex count and primitive type
        count = counts[j];
        if (count == 0)
            // done
            break;
        srcIndexBuf = paliashdr.indexElements[j];
        mode = GL_TRIANGLE_STRIP;
        if (count < 0) {
            mode = GL_TRIANGLE_FAN;
            count = -count;
        }
        srcIndex = pos << 1;
        srcIndex--;
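        // srcIndex now sits one slot before this strip's first s coordinate,
        // so the pre-incremented reads below consume (s, t) pairs in order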
        for (int k = 0; k < count; k++) {
            dstIndex = srcIndexBuf.get(k) << 1;
            dstTextureCoords.put(dstIndex, srcTextureCoords.get(++srcIndex));
            dstTextureCoords.put(++dstIndex, srcTextureCoords.get(++srcIndex));
        }
        gl.glDrawElements(mode, srcIndexBuf);
        pos += count;
    }
    // PMM - added double damage shell
    if ((currententity.flags & (Defines.RF_SHELL_RED | Defines.RF_SHELL_GREEN | Defines.RF_SHELL_BLUE | Defines.RF_SHELL_DOUBLE | Defines.RF_SHELL_HALF_DAM)) != 0)
        gl.glEnable(GL_TEXTURE_2D);
    gl.glDisableClientState(GL_COLOR_ARRAY);
}
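The texture-coordinate copy above gathers (s, t) pairs through a ShortBuffer of vertex indices. A self-contained sketch of that gather step, with hypothetical names (GatherTexCoords, gather, srcBase) standing in for the renderer's locals:
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
public class GatherTexCoords {
    // Copy the (s, t) pair selected by each index into a destination buffer
    // laid out by vertex index, two floats per vertex.
    static void gather(ShortBuffer indices, int count, FloatBuffer src, int srcBase, FloatBuffer dst) {
        int srcIndex = (srcBase << 1) - 1; // one slot before the first s coordinate
        for (int k = 0; k < count; k++) {
            int dstIndex = indices.get(k) << 1;
            dst.put(dstIndex, src.get(++srcIndex));     // s
            dst.put(dstIndex + 1, src.get(++srcIndex)); // t
        }
    }
    public static void main(String[] args) {
        ShortBuffer idx = ShortBuffer.wrap(new short[] { 2, 0, 1 });
        FloatBuffer src = FloatBuffer.wrap(new float[] { 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f });
        FloatBuffer dst = FloatBuffer.allocate(6);
        gather(idx, 3, src, 0, dst);
        for (int i = 0; i < dst.capacity(); i++)
            System.out.println(dst.get(i));
    }
}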
Use of java.nio.ShortBuffer in project javacv by bytedeco.
The class FrameGrabberTest, method testFFmpegFrameGrabberLockingTest.
@Test
public void testFFmpegFrameGrabberLockingTest() {
    final boolean[] failed = { false };
    final int numberOfInstances = 20;
    System.out.println("FFmpegFrameGrabberLocking");
    Runnable[] runnables = new Runnable[numberOfInstances];
    Thread[] threads = new Thread[numberOfInstances];
    final boolean[] finish = new boolean[numberOfInstances];
    for (int instance = 0; instance < numberOfInstances; instance++) {
        final int instance_final = instance;
        Runnable r = new Runnable() {
            public void run() {
                File tempFile = new File(Loader.getTempDir(), "test" + instance_final + ".mkv");
                try {
                    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(new FileOutputStream(tempFile), 640, 480, 2);
                    // mp4 doesn't support streaming
                    recorder.setFormat("matroska");
                    recorder.setPixelFormat(AV_PIX_FMT_BGR24);
                    recorder.setVideoCodecName("libx264rgb");
                    // lossless
                    recorder.setVideoQuality(0);
                    recorder.setSampleFormat(AV_SAMPLE_FMT_S16);
                    recorder.setSampleRate(44100);
                    recorder.setAudioCodecName("pcm_s16le");
                    recorder.start();
                    Frame[] frames = new Frame[10];
                    for (int n = 0; n < frames.length; n++) {
                        Frame frame = new Frame(640, 480, Frame.DEPTH_UBYTE, 3);
                        UByteIndexer frameIdx = frame.createIndexer();
                        for (int i = 0; i < frameIdx.rows(); i++) {
                            for (int j = 0; j < frameIdx.cols(); j++) {
                                for (int k = 0; k < frameIdx.channels(); k++) {
                                    frameIdx.put(i, j, k, n + i + j + k);
                                }
                            }
                        }
                        recorder.record(frame);
                        frames[n] = frame;
                    }
                    Frame audioFrame = new Frame();
                    ShortBuffer audioBuffer = ShortBuffer.allocate(64 * 1024);
                    audioFrame.sampleRate = 44100;
                    audioFrame.audioChannels = 2;
                    audioFrame.samples = new ShortBuffer[] { audioBuffer };
                    for (int i = 0; i < audioBuffer.capacity(); i++) {
                        audioBuffer.put(i, (short) i);
                    }
                    recorder.record(audioFrame);
                    recorder.stop();
                    recorder.release();
                    Thread.sleep(1000);
                    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(new FileInputStream(tempFile));
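                    // FLOAT sample mode makes the grabber return the recorded s16
                    // samples rescaled to floats (divided by 32768), which the
                    // audio assertion below depends on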
                    grabber.setSampleMode(FrameGrabber.SampleMode.FLOAT);
                    grabber.start();
                    int n = 0, m = 0;
                    Frame frame2;
                    while ((frame2 = grabber.grab()) != null) {
                        if (frame2.image != null) {
                            Frame frame = frames[n++];
                            assertEquals(frame.imageWidth, frame2.imageWidth);
                            assertEquals(frame.imageHeight, frame2.imageHeight);
                            assertEquals(frame.imageChannels, frame2.imageChannels);
                            UByteIndexer frameIdx = frame.createIndexer();
                            UByteIndexer frame2Idx = frame2.createIndexer();
                            for (int i = 0; i < frameIdx.rows(); i++) {
                                for (int j = 0; j < frameIdx.cols(); j++) {
                                    for (int k = 0; k < frameIdx.channels(); k++) {
                                        int b = frameIdx.get(i, j, k);
                                        assertEquals(b, frame2Idx.get(i, j, k));
                                    }
                                }
                            }
                        } else {
                            FloatBuffer audioBuffer2 = (FloatBuffer) frame2.samples[0];
                            while (audioBuffer2.hasRemaining()) {
                                assertEquals((float) audioBuffer.get(m++) / (Short.MAX_VALUE + 1), audioBuffer2.get(), 0);
                            }
                        }
                    }
                    assertEquals(frames.length, n);
                    assertEquals(null, grabber.grab());
                    grabber.restart();
                    grabber.stop();
                    grabber.release();
                } catch (Error | Exception e) {
                    failed[0] = true;
                    fail("Exception should not have been thrown: " + e);
                } finally {
                    tempFile.delete();
                    finish[instance_final] = true;
                }
            }
        };
        runnables[instance_final] = r;
    }
    for (int instance = 0; instance < numberOfInstances; instance++) {
        threads[instance] = new Thread(runnables[instance]);
        threads[instance].setName("Testthread-" + instance);
    }
    for (int instance = 0; instance < numberOfInstances; instance++) {
        threads[instance].start();
    }
    while (true) {
        boolean finished = true;
        for (int instance = 0; instance < numberOfInstances; instance++) {
            if (!finish[instance]) {
                finished = false;
                break;
            }
        }
        if (!finished) {
            System.out.println("Still waiting...");
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        } else {
            break;
        }
    }
    assertFalse(failed[0]);
}
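The audio half of the test fills a heap ShortBuffer with 16-bit PCM and hands it to the recorder inside a Frame. A condensed sketch of just that step, assuming only the javacv Frame fields used in the test above (the ramp signal and one-second length are illustrative):
import java.nio.ShortBuffer;
import org.bytedeco.javacv.Frame;
public class PcmFrame {
    public static void main(String[] args) {
        ShortBuffer samples = ShortBuffer.allocate(44100 * 2); // one second of stereo, interleaved
        for (int i = 0; i < samples.capacity(); i++) {
            samples.put(i, (short) i); // ramp test signal, as in the test above
        }
        Frame audioFrame = new Frame();
        audioFrame.sampleRate = 44100;
        audioFrame.audioChannels = 2;
        audioFrame.samples = new ShortBuffer[] { samples };
        // a recorder started with AV_SAMPLE_FMT_S16 could now consume it:
        // recorder.record(audioFrame);
    }
}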
Use of java.nio.ShortBuffer in project javacv by bytedeco.
The class JavaFxPlayVideoAndAudio, method start.
@Override
public void start(Stage primaryStage) throws Exception {
    StackPane root = new StackPane();
    ImageView imageView = new ImageView();
    root.getChildren().add(imageView);
    imageView.fitWidthProperty().bind(primaryStage.widthProperty());
    imageView.fitHeightProperty().bind(primaryStage.heightProperty());
    Scene scene = new Scene(root, 640, 480);
    primaryStage.setTitle("Video + audio");
    primaryStage.setScene(scene);
    primaryStage.show();
    playThread = new Thread(() -> {
        try {
            String videoFilename = getParameters().getRaw().get(0);
            FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(videoFilename);
            grabber.start();
            primaryStage.setWidth(grabber.getImageWidth());
            primaryStage.setHeight(grabber.getImageHeight());
            AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), 16, grabber.getAudioChannels(), true, true);
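            // 16-bit signed big-endian PCM; this matches the default (big-endian)
            // byte order of the ByteBuffer.putShort calls below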
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
            SourceDataLine soundLine = (SourceDataLine) AudioSystem.getLine(info);
            soundLine.open(audioFormat);
            soundLine.start();
            Java2DFrameConverter converter = new Java2DFrameConverter();
            ExecutorService executor = Executors.newSingleThreadExecutor();
            while (!Thread.interrupted()) {
                Frame frame = grabber.grab();
                if (frame == null) {
                    break;
                }
                if (frame.image != null) {
                    Image image = SwingFXUtils.toFXImage(converter.convert(frame), null);
                    Platform.runLater(() -> {
                        imageView.setImage(image);
                    });
                } else if (frame.samples != null) {
                    ShortBuffer channelSamplesShortBuffer = (ShortBuffer) frame.samples[0];
                    channelSamplesShortBuffer.rewind();
                    ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesShortBuffer.capacity() * 2);
                    for (int i = 0; i < channelSamplesShortBuffer.capacity(); i++) {
                        short val = channelSamplesShortBuffer.get(i);
                        outBuffer.putShort(val);
                    }
                    /*
                     * We need this because soundLine.write ignores
                     * interruptions during writing.
                     */
                    try {
                        executor.submit(() -> {
                            soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
                            outBuffer.clear();
                        }).get();
                    } catch (InterruptedException interruptedException) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
            executor.shutdownNow();
            executor.awaitTermination(10, TimeUnit.SECONDS);
            soundLine.stop();
            grabber.stop();
            grabber.release();
            Platform.exit();
        } catch (Exception exception) {
            LOG.log(Level.SEVERE, null, exception);
            System.exit(1);
        }
    });
    playThread.start();
}
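The sample copy above writes one short at a time through ByteBuffer.putShort. The same conversion can be done with a bulk put through a ShortBuffer view; a minimal sketch (big-endian output to match the AudioFormat above; SamplesToBytes and toBytes are illustrative names):
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
public class SamplesToBytes {
    // Convert a ShortBuffer of PCM samples into a big-endian byte[]
    // suitable for SourceDataLine.write().
    static byte[] toBytes(ShortBuffer samples) {
        samples.rewind();
        ByteBuffer out = ByteBuffer.allocate(samples.remaining() * 2).order(ByteOrder.BIG_ENDIAN);
        out.asShortBuffer().put(samples); // bulk copy through the view; out's position stays at 0
        return out.array();
    }
    public static void main(String[] args) {
        ShortBuffer samples = ShortBuffer.wrap(new short[] { 1, -1, 256 });
        for (byte b : toBytes(samples))
            System.out.printf("%02x ", b);
    }
}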
Use of java.nio.ShortBuffer in project javacv by bytedeco.
The class DC1394FrameGrabber, method grab.
public Frame grab() throws Exception {
    enqueue();
    if (linux) {
        fds.events(POLLIN);
        if (poll(fds, 1, timeout) == 0) {
            throw new Exception("poll() Error: Timeout occurred. (Has start() been called?)");
        }
    }
    int i = 0;
    int err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, raw_image[i]);
    if (err != DC1394_SUCCESS) {
        throw new Exception("dc1394_capture_dequeue(WAIT) Error " + err + ": Could not capture a frame. (Has start() been called?)");
    }
    // try to poll for more images, to get the most recent one...
    int numDequeued = 0;
    while (!raw_image[i].isNull()) {
        enqueue();
        enqueue_image = raw_image[i];
        i = (i + 1) % 2;
        numDequeued++;
        err = dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_POLL, raw_image[i]);
        if (err != DC1394_SUCCESS) {
            throw new Exception("dc1394_capture_dequeue(POLL) Error " + err + ": Could not capture a frame.");
        }
    }
    frame = raw_image[(i + 1) % 2];
    int w = frame.size(0);
    int h = frame.size(1);
    int depth = frame.data_depth();
    int iplDepth = 0;
    switch (depth) {
        case 8:
            iplDepth = IPL_DEPTH_8U;
            break;
        case 16:
            iplDepth = IPL_DEPTH_16U;
            break;
        default:
            assert false;
    }
    int stride = frame.stride();
    int size = frame.image_bytes();
    int numChannels = stride / w * 8 / depth;
    ByteOrder frameEndian = frame.little_endian() != 0 ? ByteOrder.LITTLE_ENDIAN : ByteOrder.BIG_ENDIAN;
    boolean alreadySwapped = false;
    int color_coding = frame.color_coding();
    boolean colorbayer = color_coding == DC1394_COLOR_CODING_RAW8 || color_coding == DC1394_COLOR_CODING_RAW16;
    boolean colorrgb = color_coding == DC1394_COLOR_CODING_RGB8 || color_coding == DC1394_COLOR_CODING_RGB16;
    boolean coloryuv = color_coding == DC1394_COLOR_CODING_YUV411 || color_coding == DC1394_COLOR_CODING_YUV422 || color_coding == DC1394_COLOR_CODING_YUV444;
    BytePointer imageData = frame.image();
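    // fast path: hand the camera buffer to the IplImage header in place when
    // no color conversion or byte swap is needed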
    if ((depth <= 8 || frameEndian.equals(ByteOrder.nativeOrder())) && !coloryuv && (imageMode == ImageMode.RAW || (imageMode == ImageMode.COLOR && numChannels == 3) || (imageMode == ImageMode.GRAY && numChannels == 1 && !colorbayer))) {
        if (return_image == null) {
            return_image = IplImage.createHeader(w, h, iplDepth, numChannels);
        }
        return_image.widthStep(stride);
        return_image.imageSize(size);
        return_image.imageData(imageData);
    } else {
        // in the padding, there's sometimes timeframe information and stuff
        // that libdc1394 will copy for us, so we need to allocate it
        int padding_bytes = frame.padding_bytes();
        int padding1 = (int) Math.ceil((double) padding_bytes / (w * depth / 8));
        int padding3 = (int) Math.ceil((double) padding_bytes / (w * 3 * depth / 8));
        if (return_image == null) {
            int c = imageMode == ImageMode.COLOR ? 3 : 1;
            int padding = imageMode == ImageMode.COLOR ? padding3 : padding1;
            return_image = IplImage.create(w, h + padding, iplDepth, c);
            return_image.height(return_image.height() - padding);
        }
        if (temp_image == null) {
            if (imageMode == ImageMode.COLOR && (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) {
                temp_image = IplImage.create(w, h + padding1, iplDepth, numChannels);
                temp_image.height(temp_image.height() - padding1);
            } else if (imageMode == ImageMode.GRAY && (coloryuv || colorbayer || (colorrgb && depth > 8))) {
                temp_image = IplImage.create(w, h + padding3, iplDepth, 3);
                temp_image.height(temp_image.height() - padding3);
            } else if (imageMode == ImageMode.GRAY && colorrgb) {
                temp_image = IplImage.createHeader(w, h, iplDepth, 3);
            } else if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
                temp_image = IplImage.createHeader(w, h, iplDepth, 1);
            } else {
                temp_image = return_image;
            }
        }
        conv_image.size(0, temp_image.width());
        conv_image.size(1, temp_image.height());
        if (depth > 8) {
            conv_image.color_coding(imageMode == ImageMode.RAW ? DC1394_COLOR_CODING_RAW16 : temp_image.nChannels() == 1 ? DC1394_COLOR_CODING_MONO16 : DC1394_COLOR_CODING_RGB16);
            conv_image.data_depth(16);
        } else {
            conv_image.color_coding(imageMode == ImageMode.RAW ? DC1394_COLOR_CODING_RAW8 : temp_image.nChannels() == 1 ? DC1394_COLOR_CODING_MONO8 : DC1394_COLOR_CODING_RGB8);
            conv_image.data_depth(8);
        }
        conv_image.stride(temp_image.widthStep());
        int temp_size = temp_image.imageSize();
        conv_image.allocated_image_bytes(temp_size).total_bytes(temp_size).image_bytes(temp_size);
        conv_image.image(temp_image.imageData());
        if (colorbayer) {
            // from raw Bayer... invert R and B to get BGR images
            // (like OpenCV wants them) instead of RGB
            int c = frame.color_filter();
            if (c == DC1394_COLOR_FILTER_RGGB) {
                frame.color_filter(DC1394_COLOR_FILTER_BGGR);
            } else if (c == DC1394_COLOR_FILTER_GBRG) {
                frame.color_filter(DC1394_COLOR_FILTER_GRBG);
            } else if (c == DC1394_COLOR_FILTER_GRBG) {
                frame.color_filter(DC1394_COLOR_FILTER_GBRG);
            } else if (c == DC1394_COLOR_FILTER_BGGR) {
                frame.color_filter(DC1394_COLOR_FILTER_RGGB);
            } else {
                assert false;
            }
            // methods other than "simple" give garbage at 16 bits...
            err = dc1394_debayer_frames(frame, conv_image, DC1394_BAYER_METHOD_SIMPLE);
            frame.color_filter(c);
            if (err != DC1394_SUCCESS) {
                throw new Exception("dc1394_debayer_frames() Error " + err + ": Could not debayer frame.");
            }
        } else if (depth > 8 && frame.data_depth() == conv_image.data_depth() && frame.color_coding() == conv_image.color_coding() && frame.stride() == conv_image.stride()) {
            // we just need a copy to swap bytes...
            ShortBuffer in = frame.getByteBuffer().order(frameEndian).asShortBuffer();
            ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
            alreadySwapped = true;
        } else if ((imageMode == ImageMode.GRAY && colorrgb) || (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) {
            temp_image.widthStep(stride);
            temp_image.imageSize(size);
            temp_image.imageData(imageData);
        } else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) {
            // from YUV, etc.
            err = dc1394_convert_frames(frame, conv_image);
            if (err != DC1394_SUCCESS) {
                throw new Exception("dc1394_convert_frames() Error " + err + ": Could not convert frame.");
            }
        }
        if (!alreadySwapped && depth > 8 && !frameEndian.equals(ByteOrder.nativeOrder())) {
            // ack, the camera's endianness doesn't correspond to our machine...
            // swap bytes of 16-bit images
            ByteBuffer bb = temp_image.getByteBuffer();
            ShortBuffer in = bb.order(frameEndian).asShortBuffer();
            ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer();
            out.put(in);
        }
        // should we copy the padding as well?
        if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
            cvCvtColor(temp_image, return_image, CV_GRAY2BGR);
        } else if (imageMode == ImageMode.GRAY && (colorbayer || colorrgb || coloryuv)) {
            cvCvtColor(temp_image, return_image, CV_BGR2GRAY);
        }
    }
    switch (frame.color_filter()) {
        case DC1394_COLOR_FILTER_RGGB:
            sensorPattern = SENSOR_PATTERN_RGGB;
            break;
        case DC1394_COLOR_FILTER_GBRG:
            sensorPattern = SENSOR_PATTERN_GBRG;
            break;
        case DC1394_COLOR_FILTER_GRBG:
            sensorPattern = SENSOR_PATTERN_GRBG;
            break;
        case DC1394_COLOR_FILTER_BGGR:
            sensorPattern = SENSOR_PATTERN_BGGR;
            break;
        default:
            sensorPattern = -1L;
    }
    enqueue_image = frame;
    timestamp = frame.timestamp();
    frameNumber += numDequeued;
    // System.out.println("frame age = " + (local_time[0] - timestamp));
    return converter.convert(return_image);
}
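The byte-swap step near the end is a reusable ShortBuffer idiom: view one ByteBuffer through two short views created with different byte orders and bulk-copy between them, which swaps every 16-bit word in place (the grabber above does exactly this with temp_image's buffer). A standalone sketch with illustrative names:
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
public class SwapShorts {
    public static void main(String[] args) {
        ByteBuffer bb = ByteBuffer.wrap(new byte[] { 0x12, 0x34, 0x56, 0x78 });
        // each view captures bb's byte order at creation time
        ShortBuffer in = bb.order(ByteOrder.BIG_ENDIAN).asShortBuffer();
        ShortBuffer out = bb.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
        // read in source order, write back in the other order:
        // every 16-bit word is byte-swapped in place
        out.put(in);
        for (byte b : bb.array())
            System.out.printf("%02x ", b); // prints: 34 12 78 56
    }
}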