Use of org.bytedeco.javacpp.indexer.UByteIndexer in project javacv by bytedeco.
From the class FrameGrabberTest, method testFFmpegFrameGrabberSeeking:
// Verifies FFmpegFrameGrabber seeking against three self-recorded MP4 files:
// seektestnum 0 = video+audio ("testAV.mp4"), 1 = video only ("testV.mp4"),
// 2 = audio only ("testA.mp4"). For each file, random seeks are performed in
// three modes (any/video/audio frame) and the resulting timestamps must land
// within a frame-rate-derived tolerance of the requested position.
@Test
public void testFFmpegFrameGrabberSeeking() throws IOException {
System.out.println("FFmpegFrameGrabberSeeking");
for (int seektestnum = 0; seektestnum < 3; seektestnum++) {
String fileName = seektestnum == 0 ? "testAV.mp4" : seektestnum == 1 ? "testV.mp4" : "testA.mp4";
File tempFile = new File(Loader.getTempDir(), fileName);
tempFile.deleteOnExit();
// Recorder geometry/channel count selects which streams exist:
// (640x480, 2 audio ch) = A+V, (640x480, 0 ch) = video only, (0x0, 2 ch) = audio only.
FFmpegFrameRecorder recorder = seektestnum == 0 ? new FFmpegFrameRecorder(tempFile, 640, 480, 2) : seektestnum == 1 ? new FFmpegFrameRecorder(tempFile, 640, 480, 0) : new FFmpegFrameRecorder(tempFile, 0, 0, 2);
recorder.setFormat("mp4");
recorder.setFrameRate(30);
recorder.setPixelFormat(AV_PIX_FMT_YUV420P);
recorder.setVideoCodec(AV_CODEC_ID_H264);
recorder.setVideoQuality(10);
recorder.setSampleRate(48000);
recorder.setSampleFormat(AV_SAMPLE_FMT_FLTP);
recorder.setAudioCodec(AV_CODEC_ID_AAC);
recorder.setAudioQuality(0);
recorder.start();
if (seektestnum != 2) {
// Record 10000 synthetic video frames whose pixel values form a
// moving gradient (n + row + col + channel).
for (int n = 0; n < 10000; n++) {
Frame frame = new Frame(640, 480, Frame.DEPTH_UBYTE, 3);
UByteIndexer frameIdx = frame.createIndexer();
for (int i = 0; i < frameIdx.rows(); i++) {
for (int j = 0; j < frameIdx.cols(); j++) {
for (int k = 0; k < frameIdx.channels(); k++) {
frameIdx.put(i, j, k, n + i + j + k);
}
}
}
recorder.record(frame);
// For the A+V case, inject one large audio buffer halfway through
// (at video frame 5000); sized to span the whole 10000/30 s clip.
if (n == 5000 && seektestnum != 1) {
Frame audioFrame = new Frame();
ShortBuffer audioBuffer = ShortBuffer.allocate(48000 * 2 * 10000 / 30);
audioFrame.sampleRate = 48000;
audioFrame.audioChannels = 2;
audioFrame.samples = new ShortBuffer[] { audioBuffer };
for (int i = 0; i < audioBuffer.capacity(); i++) {
audioBuffer.put(i, (short) i);
}
recorder.record(audioFrame);
}
}
} else {
// Audio-only case: a single ramp-valued audio buffer is the entire file.
Frame audioFrame = new Frame();
ShortBuffer audioBuffer = ShortBuffer.allocate(48000 * 2 * 10000 / 30);
audioFrame.sampleRate = 48000;
audioFrame.audioChannels = 2;
audioFrame.samples = new ShortBuffer[] { audioBuffer };
for (int i = 0; i < audioBuffer.capacity(); i++) {
audioBuffer.put(i, (short) i);
}
recorder.record(audioFrame);
}
recorder.stop();
recorder.release();
FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(tempFile);
grabber.start();
// Random seek targets stay 1 s short of the end, presumably so a grab after
// the seek cannot run off the end of the file — NOTE(review): confirm intent.
int length = (int) (grabber.getLengthInTime() - 1000000L);
System.out.println();
System.out.println("Seek in file containing " + (seektestnum == 0 ? "video and audio" : seektestnum == 1 ? "video only" : "audio only"));
System.out.println("============================================");
System.out.println("Testing file " + tempFile.getName());
System.out.println("Length = " + grabber.getLengthInTime());
System.out.println("Framerate = " + grabber.getFrameRate());
System.out.println();
System.out.println("has video stream = " + (grabber.hasVideo() ? "YES" : "NO") + ", has audio stream = " + (grabber.hasAudio() ? "YES" : "NO"));
// Tolerance in microseconds: 1 s plus ~5 frame durations when a frame rate
// is known, otherwise a flat extra 0.5 s.
long tolerance = 1000000L + (grabber.getFrameRate() > 0.0 ? (long) (5000000 / grabber.getFrameRate()) : 500000L);
Random random = new Random();
// frametypenum selects the seek API: 0 = setTimestamp (any frame),
// 1 = setVideoTimestamp, 2 = setAudioTimestamp.
for (int frametypenum = 0; frametypenum < 3; frametypenum++) {
long mindelta = Long.MAX_VALUE;
long maxdelta = Long.MIN_VALUE;
System.out.println();
System.out.println("Seek by " + (frametypenum == 0 ? "any" : frametypenum == 1 ? "video" : "audio") + " frames");
System.out.println("--------------------");
for (int i = 0; i < 200; i++) {
long timestamp = random.nextInt(length);
switch(frametypenum) {
case 0:
grabber.setTimestamp(timestamp);
break;
case 1:
grabber.setVideoTimestamp(timestamp);
break;
case 2:
grabber.setAudioTimestamp(timestamp);
break;
}
Frame frame = grabber.grab();
long timestamp2 = grabber.getTimestamp();
long delta = timestamp2 - timestamp;
if (delta > maxdelta)
maxdelta = delta;
if (delta < mindelta)
mindelta = delta;
// A grabbed frame carries exactly one payload: image XOR samples.
assertTrue(frame.image != null ^ frame.samples != null);
System.out.println(timestamp2 + " - " + timestamp + " = " + delta + " type: " + frame.getTypes());
assertTrue(Math.abs(delta) < tolerance);
if (seektestnum == 0) {
// A+V file: after a seek, keep grabbing until the *other* stream
// type appears, and check its timestamp is also within tolerance.
boolean wasVideo = frame.image != null;
boolean wasAudio = frame.samples != null;
Frame frame2 = grabber.grab();
while ((wasVideo && frame2.image != null) || (wasAudio && frame2.samples != null)) {
frame2 = grabber.grab();
}
assertTrue(wasVideo ^ frame2.image != null);
assertTrue(wasAudio ^ frame2.samples != null);
long timestamp3 = grabber.getTimestamp();
System.out.println(timestamp3 + " - " + timestamp + " = " + (timestamp3 - timestamp));
assertTrue(timestamp3 >= timestamp - tolerance && timestamp3 < timestamp + tolerance);
}
}
System.out.println();
System.out.println("------------------------------------");
System.out.println("delta from " + mindelta + " to " + maxdelta);
System.out.println();
}
if (seektestnum == 2) {
// Audio-only file: additionally exercise frame-number-based seeking.
long count1 = 0;
long duration = grabber.getLengthInTime();
System.out.println();
System.out.println("======== Check seeking in audio ========");
System.out.println("FrameRate = " + grabber.getFrameRate() + " AudioFrameRate = " + grabber.getAudioFrameRate() + ", duration = " + duration + " audio frames = " + grabber.getLengthInAudioFrames());
double deltaTimeStamp = 0.0;
if (grabber.hasAudio() && grabber.getAudioFrameRate() > 0) {
deltaTimeStamp = 1000000.0 / grabber.getAudioFrameRate();
}
System.out.println("AudioFrameDuration = " + deltaTimeStamp);
System.out.println();
System.out.println("======== Check setAudioFrameNumber ========");
count1 = 0;
// 1000 random audio-frame seeks; the timestamp mapped back to a frame
// number must be within 10 frames of the requested one.
while (count1++ < 1000) {
int audioFrameToSeek = random.nextInt(grabber.getLengthInAudioFrames() - 100);
grabber.setAudioFrameNumber(audioFrameToSeek);
Frame setFrame = grabber.grabSamples();
if (setFrame == null) {
System.out.println("null frame after seek to audio frame");
} else {
long audioTs = grabber.getTimestamp();
System.out.println("audioFrame # " + audioFrameToSeek + ", timeStamp = " + audioTs + ", difference = " + Math.round(audioTs * grabber.getAudioFrameRate() / 1000000 - audioFrameToSeek));
assertTrue(Math.abs(audioTs * grabber.getAudioFrameRate() / 1000000 - audioFrameToSeek) < 10);
}
}
}
grabber.stop();
System.out.println();
System.out.println("======= seek in " + fileName + " is finished ===========");
}
}
Use of org.bytedeco.javacpp.indexer.UByteIndexer in project javacv by bytedeco.
From the class FrameConverterTest, method testOpenCVFrameConverter:
// Exercises OpenCVFrameConverter in both directions (Frame <-> IplImage and
// Frame <-> Mat): depth-constant round trips, pixel-exact data recovery after
// conversion, and indexer bounds checking on the converted frames.
@Test
public void testOpenCVFrameConverter() {
    System.out.println("OpenCVFrameConverter");
    // Every supported depth must survive a round trip through both native
    // representations; signed (negative) variants exist only below 64 bits.
    for (int depth : new int[] { 8, 16, 32, 64 }) {
        assertEquals(depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getIplImageDepth(depth)));
        assertEquals(depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getMatDepth(depth)));
        if (depth != 64) {
            assertEquals(-depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getIplImageDepth(-depth)));
            assertEquals(-depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getMatDepth(-depth)));
        }
    }
    // Odd width (640 + 1), presumably to exercise row stride/padding handling
    // in the converters — NOTE(review): confirm intent.
    Frame src = new Frame(640 + 1, 480, Frame.DEPTH_UBYTE, 3);
    OpenCVFrameConverter.ToIplImage toIplImage = new OpenCVFrameConverter.ToIplImage();
    OpenCVFrameConverter.ToMat toMat = new OpenCVFrameConverter.ToMat();
    // Fill the source with a deterministic gradient: value = row + col + channel.
    UByteIndexer srcIdx = src.createIndexer();
    int rows = srcIdx.rows(), cols = srcIdx.cols(), channels = srcIdx.channels();
    for (int y = 0; y < rows; y++) {
        for (int x = 0; x < cols; x++) {
            for (int c = 0; c < channels; c++) {
                srcIdx.put(y, x, c, y + x + c);
            }
        }
    }
    IplImage iplImage = toIplImage.convert(src);
    Mat mat = toMat.convert(src);
    // Clear the converters' cached frames so the reverse conversions below
    // produce fresh Frame objects instead of returning the cache.
    toIplImage.frame = null;
    toMat.frame = null;
    Frame fromIpl = toIplImage.convert(iplImage);
    Frame fromMat = toMat.convert(mat);
    assertEquals(fromMat.opaque, mat);
    // A second Mat over the same pixel buffer is a distinct object, and the
    // converter must track it as the new opaque backing.
    Mat mat2 = new Mat(mat.rows(), mat.cols(), mat.type(), mat.data(), mat.step());
    assertNotEquals(mat, mat2);
    fromMat = toMat.convert(mat2);
    assertEquals(fromMat.opaque, mat2);
    // Both converted frames must reproduce the source gradient exactly.
    UByteIndexer iplIdx = fromIpl.createIndexer();
    UByteIndexer matIdx = fromMat.createIndexer();
    for (int y = 0; y < rows; y++) {
        for (int x = 0; x < cols; x++) {
            for (int c = 0; c < channels; c++) {
                int expected = srcIdx.get(y, x, c);
                assertEquals(expected, iplIdx.get(y, x, c));
                assertEquals(expected, matIdx.get(y, x, c));
            }
        }
    }
    // Out-of-range access must throw on both converted indexers.
    try {
        iplIdx.get(rows + 1, cols + 1);
        fail("IndexOutOfBoundsException should have been thrown.");
    } catch (IndexOutOfBoundsException e) {
    }
    try {
        matIdx.get(rows + 1, cols + 1);
        fail("IndexOutOfBoundsException should have been thrown.");
    } catch (IndexOutOfBoundsException e) {
    }
    srcIdx.release();
    iplIdx.release();
    matIdx.release();
}
Use of org.bytedeco.javacpp.indexer.UByteIndexer in project javacv by bytedeco.
From the class FrameGrabberTest, method testFFmpegFrameGrabber:
// Round-trips synthetic video and audio through FFmpegFrameRecorder and
// FFmpegFrameGrabber using stream-based I/O (FileOutputStream/FileInputStream)
// and verifies that every recorded pixel and sample is recovered exactly,
// which requires lossless codecs (libx264rgb at quality 0, pcm_s16le).
@Test
public void testFFmpegFrameGrabber() {
System.out.println("FFmpegFrameGrabber");
File tempFile = new File(Loader.getTempDir(), "test.mkv");
try {
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(new FileOutputStream(tempFile), 640, 480, 2);
// mp4 doesn't support streaming
recorder.setFormat("matroska");
recorder.setPixelFormat(AV_PIX_FMT_BGR24);
recorder.setVideoCodecName("libx264rgb");
// lossless
recorder.setVideoQuality(0);
recorder.setSampleFormat(AV_SAMPLE_FMT_S16);
recorder.setSampleRate(44100);
recorder.setAudioCodecName("pcm_s16le");
recorder.start();
// Record 1000 gradient-valued frames (pixel = n + row + col + channel)
// and keep them for the bit-exact comparison after playback.
Frame[] frames = new Frame[1000];
for (int n = 0; n < frames.length; n++) {
Frame frame = new Frame(640, 480, Frame.DEPTH_UBYTE, 3);
UByteIndexer frameIdx = frame.createIndexer();
for (int i = 0; i < frameIdx.rows(); i++) {
for (int j = 0; j < frameIdx.cols(); j++) {
for (int k = 0; k < frameIdx.channels(); k++) {
frameIdx.put(i, j, k, n + i + j + k);
}
}
}
recorder.record(frame);
frames[n] = frame;
}
// One audio frame with a ramp of sample values 0..65535 (as shorts).
Frame audioFrame = new Frame();
ShortBuffer audioBuffer = ShortBuffer.allocate(64 * 1024);
audioFrame.sampleRate = 44100;
audioFrame.audioChannels = 2;
audioFrame.samples = new ShortBuffer[] { audioBuffer };
for (int i = 0; i < audioBuffer.capacity(); i++) {
audioBuffer.put(i, (short) i);
}
recorder.record(audioFrame);
recorder.stop();
recorder.release();
// Read back through an InputStream; FLOAT sample mode converts the
// recorded 16-bit samples to floats on the grabber side.
FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(new FileInputStream(tempFile));
grabber.setSampleMode(FrameGrabber.SampleMode.FLOAT);
grabber.start();
// n counts video frames matched, m counts audio samples matched.
int n = 0, m = 0;
Frame frame2;
while ((frame2 = grabber.grab()) != null) {
if (frame2.image != null) {
Frame frame = frames[n++];
assertEquals(frame.imageWidth, frame2.imageWidth);
assertEquals(frame.imageHeight, frame2.imageHeight);
assertEquals(frame.imageChannels, frame2.imageChannels);
UByteIndexer frameIdx = frame.createIndexer();
UByteIndexer frame2Idx = frame2.createIndexer();
for (int i = 0; i < frameIdx.rows(); i++) {
for (int j = 0; j < frameIdx.cols(); j++) {
for (int k = 0; k < frameIdx.channels(); k++) {
int b = frameIdx.get(i, j, k);
assertEquals(b, frame2Idx.get(i, j, k));
}
}
}
} else {
// Grabbed float samples must equal the recorded shorts scaled by
// 1 / (Short.MAX_VALUE + 1), with zero tolerance (lossless PCM).
FloatBuffer audioBuffer2 = (FloatBuffer) frame2.samples[0];
while (audioBuffer2.hasRemaining()) {
assertEquals((float) audioBuffer.get(m++) / (Short.MAX_VALUE + 1), audioBuffer2.get(), 0);
}
}
}
// All video frames consumed, and the stream is exhausted.
assertEquals(frames.length, n);
assertEquals(null, grabber.grab());
grabber.restart();
grabber.stop();
grabber.release();
} catch (Exception e) {
fail("Exception should not have been thrown: " + e);
} finally {
tempFile.delete();
}
}
Aggregations