Use of org.bytedeco.javacv.Frame in project bigbluebutton by bigbluebutton:
class FfmpegScreenshare, method captureScreen.
/**
 * Grabs one screen image, stamps it with the elapsed capture time, records it,
 * and then sleeps the remainder of the frame interval so capture approximates
 * the configured frame rate. Intended to be called repeatedly from the capture loop.
 */
private void captureScreen() {
    long now = System.currentTimeMillis();
    Frame frame;
    try {
        frame = grabber.grabImage();
        if (frame != null) {
            try {
                // Override timestamp from system screen grabber. Otherwise, we will have skewed recorded file.
                // FfmpegFrameRecorder needs to propagate this timestamp into the avpacket sent to the server.
                // ralam - Sept. 14, 2016
                frame.timestamp = now - startTime;
                mainRecorder.record(frame);
            } catch (Exception e) {
                // Include the exception detail instead of swallowing it silently.
                System.out.println("CaptureScreen Exception 1: " + e.getMessage());
                if (!ignoreDisconnect) {
                    listener.networkConnectionException(ExitCode.INTERNAL_ERROR, null);
                }
            }
        }
    } catch (Exception e1) {
        System.out.println("Exception grabbing image: " + e1.getMessage());
        listener.networkConnectionException(ExitCode.INTERNAL_ERROR, null);
    }
    // Floating-point division avoids truncating the frame interval when
    // frameRate is an integral type (no-op if frameRate is already a double).
    long sleepFramerate = (long) (1000.0 / frameRate);
    mainRecorder.setFrameNumber(frameNumber);
    frameNumber++;
    // Sleep only for whatever portion of the frame interval is left after
    // the grab/record work above, never a negative duration.
    long execDuration = (System.currentTimeMillis() - now);
    long sleepDuration = Math.max(sleepFramerate - execDuration, 0);
    pause(sleepDuration);
}
Use of org.bytedeco.javacv.Frame in project javacv by bytedeco:
class FaceApplet, method paint.
@Override
public void paint(Graphics g) {
    // Render the most recently grabbed camera frame, overlaying detected face boxes.
    if (grabbedImage != null) {
        Frame currentFrame = grabberConverter.convert(grabbedImage);
        BufferedImage canvas = paintConverter.getBufferedImage(currentFrame, 2.2 / grabber.getGamma());
        Graphics2D overlay = canvas.createGraphics();
        if (faces != null) {
            overlay.setColor(Color.RED);
            overlay.setStroke(new BasicStroke(2));
            // NOTE(review): boxes are scaled up by 4 — presumably detection ran
            // on a quarter-scale image; confirm against the detector setup.
            for (int idx = 0, count = faces.total(); idx < count; idx++) {
                CvRect rect = new CvRect(cvGetSeqElem(faces, idx));
                overlay.drawRect(rect.x() * 4, rect.y() * 4, rect.width() * 4, rect.height() * 4);
            }
            // Consume the detection result so each result is drawn only once.
            faces = null;
        }
        g.drawImage(canvas, 0, 0, null);
    }
    // If the grabber thread recorded an error, paint its stack trace line by line.
    if (exception != null) {
        int lineHeight = g.getFontMetrics().getHeight();
        int baseline = 0;
        g.drawString(exception.toString(), 5, baseline += lineHeight);
        for (StackTraceElement element : exception.getStackTrace()) {
            g.drawString("        at " + element.toString(), 5, baseline += lineHeight);
        }
    }
}
Use of org.bytedeco.javacv.Frame in project javacv by bytedeco:
class JavaFxPlayVideoAndAudio, method start.
// Builds the JavaFX scene and starts a background thread that decodes the
// video file given as the first program argument, playing audio through a
// SourceDataLine and pushing video frames to the ImageView, with frame
// presentation scheduled against a playback clock.
@Override
public void start(final Stage primaryStage) throws Exception {
final StackPane root = new StackPane();
final ImageView imageView = new ImageView();
root.getChildren().add(imageView);
// Let the video view track the window size.
imageView.fitWidthProperty().bind(primaryStage.widthProperty());
imageView.fitHeightProperty().bind(primaryStage.heightProperty());
final Scene scene = new Scene(root, 640, 480);
primaryStage.setTitle("Video + audio");
primaryStage.setScene(scene);
primaryStage.show();
// All decoding and scheduling happens off the JavaFX application thread.
playThread = new Thread(new Runnable() {
public void run() {
try {
final String videoFilename = getParameters().getRaw().get(0);
final FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(videoFilename);
grabber.start();
// Resize the window to the native video dimensions.
primaryStage.setWidth(grabber.getImageWidth());
primaryStage.setHeight(grabber.getImageHeight());
final PlaybackTimer playbackTimer;
final SourceDataLine soundLine;
if (grabber.getAudioChannels() > 0) {
// 16-bit signed big-endian PCM; the byte order must match the
// ByteBuffer.putShort() copy below (big-endian by default).
final AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), 16, grabber.getAudioChannels(), true, true);
final DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
soundLine = (SourceDataLine) AudioSystem.getLine(info);
soundLine.open(audioFormat);
soundLine.start();
// With audio present, the sound line drives the playback clock.
playbackTimer = new PlaybackTimer(soundLine);
} else {
soundLine = null;
// No audio: fall back to the timer's default clock source.
playbackTimer = new PlaybackTimer();
}
final JavaFXFrameConverter converter = new JavaFXFrameConverter();
// Single-threaded executors keep audio writes and image updates in decode order.
final ExecutorService audioExecutor = Executors.newSingleThreadExecutor();
final ExecutorService imageExecutor = Executors.newSingleThreadExecutor();
// Don't let decoding run more than one second ahead of playback.
final long maxReadAheadBufferMicros = 1000 * 1000L;
long lastTimeStamp = -1L;
while (!Thread.interrupted()) {
final Frame frame = grabber.grab();
if (frame == null) {
break;
}
// Start the playback clock on the first decoded frame.
if (lastTimeStamp < 0) {
playbackTimer.start();
}
lastTimeStamp = frame.timestamp;
if (frame.image != null) {
// Clone before handing off to the executor — NOTE(review): this looks
// like it guards against the grabber reusing its internal frame
// buffer on the next grab(); confirm against FFmpegFrameGrabber docs.
final Frame imageFrame = frame.clone();
imageExecutor.submit(new Runnable() {
public void run() {
final Image image = converter.convert(imageFrame);
// Sleep until the frame's timestamp catches up with the playback clock.
final long timeStampDeltaMicros = imageFrame.timestamp - playbackTimer.elapsedMicros();
imageFrame.close();
if (timeStampDeltaMicros > 0) {
final long delayMillis = timeStampDeltaMicros / 1000L;
try {
Thread.sleep(delayMillis);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
// UI updates must happen on the JavaFX application thread.
Platform.runLater(new Runnable() {
public void run() {
imageView.setImage(image);
}
});
}
});
} else if (frame.samples != null) {
if (soundLine == null) {
throw new IllegalStateException("Internal error: sound playback not initialized");
}
// Convert the 16-bit samples to big-endian bytes, matching the AudioFormat above.
final ShortBuffer channelSamplesShortBuffer = (ShortBuffer) frame.samples[0];
channelSamplesShortBuffer.rewind();
final ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesShortBuffer.capacity() * 2);
for (int i = 0; i < channelSamplesShortBuffer.capacity(); i++) {
short val = channelSamplesShortBuffer.get(i);
outBuffer.putShort(val);
}
// The blocking soundLine.write() runs off the decode loop so grabbing
// can continue while audio drains.
audioExecutor.submit(new Runnable() {
public void run() {
soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
outBuffer.clear();
}
});
}
// Throttle decoding if it has run too far ahead of playback.
final long timeStampDeltaMicros = frame.timestamp - playbackTimer.elapsedMicros();
if (timeStampDeltaMicros > maxReadAheadBufferMicros) {
Thread.sleep((timeStampDeltaMicros - maxReadAheadBufferMicros) / 1000);
}
}
// Let the last frame play out (plus roughly one frame period) before tearing down.
if (!Thread.interrupted()) {
long delay = (lastTimeStamp - playbackTimer.elapsedMicros()) / 1000 + Math.round(1 / grabber.getFrameRate() * 1000);
Thread.sleep(Math.max(0, delay));
}
grabber.stop();
grabber.release();
if (soundLine != null) {
soundLine.stop();
}
audioExecutor.shutdownNow();
audioExecutor.awaitTermination(10, TimeUnit.SECONDS);
imageExecutor.shutdownNow();
imageExecutor.awaitTermination(10, TimeUnit.SECONDS);
Platform.exit();
} catch (Exception exception) {
LOG.log(Level.SEVERE, null, exception);
System.exit(1);
}
}
});
playThread.start();
}
Use of org.bytedeco.javacv.Frame in project javacv by bytedeco:
class RealSense2DepthMeasuring, method main.
public static void main(String[] args) throws FrameGrabber.Exception {
    final RealSense2FrameGrabber rs2 = new RealSense2FrameGrabber();

    // Print every RealSense device attached to the machine.
    for (RealSense2FrameGrabber.RealSense2DeviceInfo deviceInfo : rs2.getDeviceInfos()) {
        System.out.printf("Device: %s %s %s Locked: %b\n",
                deviceInfo.getName(), deviceInfo.getFirmware(),
                deviceInfo.getSerialNumber(), deviceInfo.isLocked());
    }

    // Request matched 640x480 @ 30 fps color and depth streams.
    rs2.enableColorStream(640, 480, 30);
    rs2.enableDepthStream(640, 480, 30);

    // More stream examples:
    /*
    rs2.enableColorStream(640, 480, 30); // color stream
    rs2.enableIRStream(640, 480, 90); // ir stream
    rs2.enableStream(new RealSense2FrameGrabber.RealSenseStream(
    RS2_STREAM_INFRARED,
    2,
    new Size(640, 480),
    30,
    RS2_FORMAT_Y8
    )); // second ir stream
    */

    // Start the camera, then open a window sized to its images.
    rs2.start();
    CanvasFrame canvasFrame = new CanvasFrame("RealSense");
    canvasFrame.setCanvasSize(rs2.getImageWidth(), rs2.getImageHeight());

    // On every click, report the measured depth at the clicked pixel.
    canvasFrame.getCanvas().addMouseListener(new MouseAdapter() {
        @Override
        public void mousePressed(MouseEvent e) {
            try {
                System.out.println("Depth: " + rs2.getDistance(e.getX(), e.getY()));
            } catch (FrameGrabber.Exception ex) {
                ex.printStackTrace();
            }
        }
    });

    // Capture/display loop: trigger a capture, then show the first enabled stream.
    // Use rs2.grabDepth(), rs2.grabColor() and rs2.grabIR() for the other streams.
    while (canvasFrame.isVisible()) {
        rs2.trigger();
        Frame grabbed = rs2.grab();
        if (grabbed == null) {
            System.err.println("Frame is null!");
            break;
        }
        canvasFrame.showImage(grabbed);
    }

    // Tear down the camera and the window.
    rs2.stop();
    rs2.release();
    canvasFrame.dispose();
}
Use of org.bytedeco.javacv.Frame in project javacv by bytedeco:
class DeinterlacedVideoPlayer, method start.
/**
 * Grabs frames from the configured grabber, runs each one through a lazily
 * created FFmpeg deinterlacing filter, and releases both resources when done.
 * Any failure is rethrown as a RuntimeException with its cause preserved.
 */
public void start() {
    FrameFilter filter = null;
    try {
        startFrameGrabber();
        Frame frame;
        while ((frame = grabber.grab()) != null) {
            if (filter == null) {
                // The filter needs the frame dimensions, so it can only be
                // created once the first frame has been grabbed.
                filter = new FFmpegFrameFilter(ffmpegString, frame.imageWidth, frame.imageHeight);
                filter.setPixelFormat(PIXEL_FORMAT);
                filter.start();
            }
            filter.push(frame);
            frame = filter.pull();
            // do something with the filtered frame
        }
    } catch (Exception e) {
        // A single catch of Exception replaces the original
        // `catch (Exception | FrameFilter.Exception e)`: multi-catch
        // alternatives must not be related by subclassing (JLS 14.20), and
        // FrameFilter.Exception is a subclass of Exception, so the original
        // form does not compile. Exception alone covers both.
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        // Always release native resources, even on failure.
        releaseGrabberAndFilter(this.grabber, filter);
    }
}
Aggregations