Example usage of org.bytedeco.javacv.FFmpegFrameRecorder from the bigbluebutton project (by bigbluebutton).
Taken from the class FfmpegScreenshare, method setupLinuxRecorder.
/**
 * Builds and configures an FFmpegFrameRecorder for screen sharing on Linux.
 * The recorder streams FLV to {@code url}, using H264 when {@code useH264} is
 * true and Flash Screen Video v2 otherwise.
 *
 * NOTE(review): the width/height parameters are not used; the recorder is
 * sized from the grabber's capture dimensions — presumably intentional, verify
 * against callers.
 */
private FFmpegFrameRecorder setupLinuxRecorder(String url, int width, int height, Map<String, String> codecOptions, Boolean useH264) {
    FFmpegFrameRecorder linuxRecorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight());

    Double frameRate = parseFrameRate(codecOptions.get(FRAMERATE_KEY));
    linuxRecorder.setFrameRate(frameRate);

    // GOP size = frames per second * seconds between keyframes.
    int keyFrameInterval = parseKeyFrameInterval(codecOptions.get(KEYFRAMEINTERVAL_KEY));
    linuxRecorder.setGopSize(frameRate.intValue() * keyFrameInterval);

    System.out.println("==== CODEC OPTIONS =====");
    for (Map.Entry<String, String> option : codecOptions.entrySet()) {
        String key = option.getKey();
        System.out.println("Key = " + key + ", Value = " + option.getValue());
        // Frame rate and keyframe interval were consumed above; forward everything else.
        if (!key.equals(FRAMERATE_KEY) && !key.equals(KEYFRAMEINTERVAL_KEY)) {
            linuxRecorder.setVideoOption(key, option.getValue());
        }
    }
    System.out.println("==== END CODEC OPTIONS =====");

    linuxRecorder.setFormat("flv");
    if (useH264) {
        // H264 tuned for low-latency screen streaming.
        linuxRecorder.setVideoCodec(AV_CODEC_ID_H264);
        linuxRecorder.setPixelFormat(AV_PIX_FMT_YUV420P);
        linuxRecorder.setVideoOption("crf", "38");
        linuxRecorder.setVideoOption("preset", "veryfast");
        linuxRecorder.setVideoOption("tune", "zerolatency");
        linuxRecorder.setVideoOption("intra-refresh", "1");
    } else {
        // Flash Screen Video v2 fallback.
        linuxRecorder.setVideoCodec(AV_CODEC_ID_FLASHSV2);
        linuxRecorder.setPixelFormat(AV_PIX_FMT_BGR24);
    }
    return linuxRecorder;
}
Example usage of org.bytedeco.javacv.FFmpegFrameRecorder from the bigbluebutton project (by bigbluebutton).
Taken from the class FfmpegScreenshare, method setupWindowsRecorder.
//==============================================
// RECORDERS
//==============================================
/**
 * Builds and configures an FFmpegFrameRecorder for screen sharing on Windows.
 * Streams FLV to {@code url}; H264 when {@code useH264} is true, otherwise
 * Flash Screen Video v2.
 *
 * NOTE(review): the width/height parameters are not used; the recorder is
 * sized from the grabber's capture dimensions — presumably intentional, verify
 * against callers.
 */
private FFmpegFrameRecorder setupWindowsRecorder(String url, int width, int height, Map<String, String> codecOptions, Boolean useH264) {
    FFmpegFrameRecorder winRecorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight());

    Double frameRate = parseFrameRate(codecOptions.get(FRAMERATE_KEY));
    winRecorder.setFrameRate(frameRate);

    // GOP size = frames per second * seconds between keyframes.
    int keyFrameInterval = parseKeyFrameInterval(codecOptions.get(KEYFRAMEINTERVAL_KEY));
    winRecorder.setGopSize(frameRate.intValue() * keyFrameInterval);

    System.out.println("==== CODEC OPTIONS =====");
    for (Map.Entry<String, String> option : codecOptions.entrySet()) {
        String key = option.getKey();
        System.out.println("Key = " + key + ", Value = " + option.getValue());
        // Frame rate and keyframe interval were consumed above; forward everything else.
        if (!key.equals(FRAMERATE_KEY) && !key.equals(KEYFRAMEINTERVAL_KEY)) {
            winRecorder.setVideoOption(key, option.getValue());
        }
    }
    System.out.println("==== END CODEC OPTIONS =====");

    winRecorder.setFormat("flv");
    if (useH264) {
        System.out.println("Using H264 codec");
        // H264 tuned for low-latency screen streaming.
        winRecorder.setVideoCodec(AV_CODEC_ID_H264);
        winRecorder.setPixelFormat(AV_PIX_FMT_YUV420P);
        winRecorder.setVideoOption("crf", "38");
        winRecorder.setVideoOption("preset", "veryfast");
        winRecorder.setVideoOption("tune", "zerolatency");
        winRecorder.setVideoOption("intra-refresh", "1");
    } else {
        System.out.println("Using SVC2 codec");
        // Flash Screen Video v2 fallback.
        winRecorder.setVideoCodec(AV_CODEC_ID_FLASHSV2);
        winRecorder.setPixelFormat(AV_PIX_FMT_BGR24);
    }
    return winRecorder;
}
Example usage of org.bytedeco.javacv.FFmpegFrameRecorder from the bigbluebutton project (by bigbluebutton).
Taken from the class FfmpegScreenshare, method setupMacOsXRecorder.
/**
 * Builds and configures an FFmpegFrameRecorder for screen sharing on Mac OS X.
 * Streams FLV to {@code url}; H264 when {@code useH264} is true, otherwise
 * Flash Screen Video v2. Unlike the Linux/Windows variants, the Mac path uses
 * crf 34 and omits the "tune"/"intra-refresh" options (unsupported on Mac).
 *
 * NOTE(review): the width/height parameters are not used; the recorder is
 * sized from the grabber's capture dimensions — presumably intentional, verify
 * against callers.
 */
private FFmpegFrameRecorder setupMacOsXRecorder(String url, int width, int height, Map<String, String> codecOptions, Boolean useH264) {
    FFmpegFrameRecorder macRecorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight());

    Double frameRate = parseFrameRate(codecOptions.get(FRAMERATE_KEY));
    macRecorder.setFrameRate(frameRate);

    // GOP size = frames per second * seconds between keyframes.
    int keyFrameInterval = parseKeyFrameInterval(codecOptions.get(KEYFRAMEINTERVAL_KEY));
    macRecorder.setGopSize(frameRate.intValue() * keyFrameInterval);

    System.out.println("==== CODEC OPTIONS =====");
    for (Map.Entry<String, String> option : codecOptions.entrySet()) {
        String key = option.getKey();
        System.out.println("Key = " + key + ", Value = " + option.getValue());
        // Frame rate and keyframe interval were consumed above; forward everything else.
        if (!key.equals(FRAMERATE_KEY) && !key.equals(KEYFRAMEINTERVAL_KEY)) {
            macRecorder.setVideoOption(key, option.getValue());
        }
    }
    System.out.println("==== END CODEC OPTIONS =====");

    macRecorder.setFormat("flv");
    if (useH264) {
        // H264; gentler crf than the Linux/Windows paths.
        macRecorder.setVideoCodec(AV_CODEC_ID_H264);
        macRecorder.setPixelFormat(AV_PIX_FMT_YUV420P);
        macRecorder.setVideoOption("crf", "34");
        macRecorder.setVideoOption("preset", "veryfast");
        // Mac doesn't support the options below.
        // macRecorder.setVideoOption("tune", "zerolatency");
        // macRecorder.setVideoOption("intra-refresh", "1");
    } else {
        // Flash Screen Video v2 fallback.
        macRecorder.setVideoCodec(AV_CODEC_ID_FLASHSV2);
        macRecorder.setPixelFormat(AV_PIX_FMT_BGR24);
    }
    return macRecorder;
}
Example usage of org.bytedeco.javacv.FFmpegFrameRecorder from the javacv project (by bytedeco).
Taken from the class RecordActivity, method initRecorder.
// ---------------------------------------
// initialize ffmpeg_recorder
// ---------------------------------------
/**
 * Initializes the FFmpeg recorder, frame filter, frame buffers, and the audio
 * capture thread for this activity. Sizes come from the instance fields
 * imageWidth/imageHeight; the recorder is mono FLV at sampleAudioRateInHz.
 */
private void initRecorder() {
    Log.w(LOG_TAG, "init recorder");
    Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);

    // Mono audio (1 channel), video sized to the camera preview.
    recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
    recorder.setFormat("flv");
    recorder.setSampleRate(sampleAudioRateInHz);
    // Set in the surface changed method
    recorder.setFrameRate(frameRate);

    // The filterString is any ffmpeg filter.
    // Here is the link for a list: https://ffmpeg.org/ffmpeg-filters.html
    filterString = "transpose=2,crop=w=200:h=200:x=0:y=0";
    filter = new FFmpegFrameFilter(filterString, imageWidth, imageHeight);
    // default format on android
    filter.setPixelFormat(avutil.AV_PIX_FMT_NV21);

    if (RECORD_LENGTH > 0) {
        // Fixed-length recording: preallocate one Frame per expected frame,
        // with timestamps marked -1 (unused) until filled.
        imagesIndex = 0;
        int frameCount = RECORD_LENGTH * frameRate;
        images = new Frame[frameCount];
        timestamps = new long[frameCount];
        int i = 0;
        while (i < frameCount) {
            images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
            timestamps[i] = -1;
            i++;
        }
    } else if (yuvImage == null) {
        // Streaming mode: one reusable frame buffer.
        yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
        Log.i(LOG_TAG, "create yuvImage");
    }

    Log.i(LOG_TAG, "recorder initialize success");
    audioRecordRunnable = new AudioRecordRunnable();
    audioThread = new Thread(audioRecordRunnable);
    runAudioThread = true;
}
Example usage of org.bytedeco.javacv.FFmpegFrameRecorder from the javacv project (by bytedeco).
Taken from the class WebcamAndMicrophoneCapture, method main.
/**
 * Captures webcam video and microphone audio and streams both to an RTMP
 * endpoint as FLV/H264/AAC, while showing a live preview window.
 *
 * Order matters throughout: every recorder option must be set before
 * recorder.start(), and the grabber must be started before its first grab().
 *
 * NOTE(review): the ScheduledThreadPoolExecutor and the audio thread are never
 * shut down, and recorder.stop()/grabber.stop() are not in a finally block —
 * an exception mid-loop leaks them. Left as-is (sample code).
 */
public static void main(String[] args) throws Exception, org.bytedeco.javacv.FrameGrabber.Exception {
final int captureWidth = 1280;
final int captureHeight = 720;
// The available FrameGrabber classes include OpenCVFrameGrabber (opencv_videoio),
// DC1394FrameGrabber, FlyCapture2FrameGrabber, OpenKinectFrameGrabber,
// PS3EyeFrameGrabber, VideoInputFrameGrabber, and FFmpegFrameGrabber.
final OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(WEBCAM_DEVICE_INDEX);
grabber.setImageWidth(captureWidth);
grabber.setImageHeight(captureHeight);
grabber.start();
// org.bytedeco.javacv.FFmpegFrameRecorder.FFmpegFrameRecorder(String
// filename, int imageWidth, int imageHeight, int audioChannels)
// For each param, we're passing in...
// filename = either a path to a local file we wish to create, or an
// RTMP url to an FMS / Wowza server
// imageWidth = width we specified for the grabber
// imageHeight = height we specified for the grabber
// audioChannels = 2, because we like stereo
final FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("rtmp://my-streaming-server/app_name_here/instance_name/stream_name", captureWidth, captureHeight, 2);
recorder.setInterleaved(true);
// decrease "startup" latency in FFMPEG (see:
// https://trac.ffmpeg.org/wiki/StreamingGuide)
recorder.setVideoOption("tune", "zerolatency");
// tradeoff between quality and encode speed
// possible values are ultrafast,superfast, veryfast, faster, fast,
// medium, slow, slower, veryslow
// ultrafast offers us the least amount of compression (lower encoder
// CPU) at the cost of a larger stream size
// at the other end, veryslow provides the best compression (high
// encoder CPU) while lowering the stream size
// (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
recorder.setVideoOption("preset", "ultrafast");
// Constant Rate Factor (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
recorder.setVideoOption("crf", "28");
// 2000 kb/s, reasonable "sane" area for 720
recorder.setVideoBitrate(2000000);
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
recorder.setFormat("flv");
// FPS (frames per second)
recorder.setFrameRate(FRAME_RATE);
// Key frame interval, in our case every 2 seconds -> 30 (fps) * 2 = 60
// (gop length)
recorder.setGopSize(GOP_LENGTH_IN_FRAMES);
// We don't want variable bitrate audio
recorder.setAudioOption("crf", "0");
// Highest quality
recorder.setAudioQuality(0);
// 192 Kbps
recorder.setAudioBitrate(192000);
recorder.setSampleRate(44100);
recorder.setAudioChannels(2);
recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
// Jack 'n coke... do it...
recorder.start();
// Thread for audio capture, this could be in a nested private class if you prefer...
// The anonymous Runnable captures `recorder` and records audio samples on a
// fixed schedule, in parallel with the video loop below.
new Thread(new Runnable() {
@Override
public void run() {
// Pick a format...
// NOTE: It is better to enumerate the formats that the system supports,
// because getLine() can error out with any particular format...
// For us: 44.1 sample rate, 16 bits, stereo, signed, little endian
AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
// Get TargetDataLine with that format
Mixer.Info[] minfoSet = AudioSystem.getMixerInfo();
Mixer mixer = AudioSystem.getMixer(minfoSet[AUDIO_DEVICE_INDEX]);
DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
try {
// Open and start capturing audio
// It's possible to have more control over the chosen audio device with this line:
// TargetDataLine line = (TargetDataLine)mixer.getLine(dataLineInfo);
final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
line.open(audioFormat);
line.start();
final int sampleRate = (int) audioFormat.getSampleRate();
final int numChannels = audioFormat.getChannels();
// Let's initialize our audio buffer...
// One second of samples: sampleRate frames * numChannels per frame.
final int audioBufferSize = sampleRate * numChannels;
final byte[] audioBytes = new byte[audioBufferSize];
// Using a ScheduledThreadPoolExecutor vs a while loop with
// a Thread.sleep will allow
// us to get around some OS specific timing issues, and keep
// to a more precise
// clock as the fixed rate accounts for garbage collection
// time, etc
// a similar approach could be used for the webcam capture
// as well, if you wish
ScheduledThreadPoolExecutor exec = new ScheduledThreadPoolExecutor(1);
exec.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
// Read from the line... non-blocking
// NOTE(review): this spins (busy-waits) until at least one byte is
// available, since line.read with line.available() returns immediately.
int nBytesRead = 0;
while (nBytesRead == 0) {
nBytesRead = line.read(audioBytes, 0, line.available());
}
// Since we specified 16 bits in the AudioFormat,
// we need to convert our read byte[] to short[]
// (see source from FFmpegFrameRecorder.recordSamples for AV_SAMPLE_FMT_S16)
// Let's initialize our short[] array
int nSamplesRead = nBytesRead / 2;
short[] samples = new short[nSamplesRead];
// Let's wrap our short[] into a ShortBuffer and
// pass it to recordSamples
ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
// recorder is instance of
// org.bytedeco.javacv.FFmpegFrameRecorder
recorder.recordSamples(sampleRate, numChannels, sBuff);
} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
e.printStackTrace();
}
}
// Fire once per video frame period (1000/FRAME_RATE ms).
}, 0, (long) 1000 / FRAME_RATE, TimeUnit.MILLISECONDS);
} catch (LineUnavailableException e1) {
e1.printStackTrace();
}
}
}).start();
// A really nice hardware accelerated component for our preview...
final CanvasFrame cFrame = new CanvasFrame("Capture Preview", CanvasFrame.getDefaultGamma() / grabber.getGamma());
Frame capturedFrame = null;
// While we are capturing...
// grab() returns null when the capture source ends, terminating the loop.
while ((capturedFrame = grabber.grab()) != null) {
if (cFrame.isVisible()) {
// Show our frame in the preview
cFrame.showImage(capturedFrame);
}
// Lazily initialize the stream start time on the first grabbed frame,
// as the delta from assignment to computed time could be too high
if (startTime == 0)
startTime = System.currentTimeMillis();
// Create timestamp for this frame
// Microseconds: 1000 * elapsed milliseconds.
videoTS = 1000 * (System.currentTimeMillis() - startTime);
// Check for AV drift
if (videoTS > recorder.getTimestamp()) {
System.out.println("Lip-flap correction: " + videoTS + " : " + recorder.getTimestamp() + " -> " + (videoTS - recorder.getTimestamp()));
// We tell the recorder to write this frame at this timestamp
recorder.setTimestamp(videoTS);
}
// Send the frame to the org.bytedeco.javacv.FFmpegFrameRecorder
recorder.record(capturedFrame);
}
cFrame.dispose();
recorder.stop();
grabber.stop();
}
Aggregations