use of org.bytedeco.javacv.FFmpegFrameGrabber in project instagram4j by brunocvcunha.
the class InstagramUploadVideoRequest method configureThumbnail.
/**
* Configures the thumbnail for the given uploadId
* @param uploadId The upload session id
* @return the StatusResult of the configure request
* @throws Exception
* @throws IOException
* @throws ClientProtocolException
*/
protected StatusResult configureThumbnail(String uploadId) throws Exception, IOException, ClientProtocolException {
    try (FFmpegFrameGrabber frameGrabber = new FFmpegFrameGrabber(videoFile)) {
        frameGrabber.start();
        Java2DFrameConverter converter = new Java2DFrameConverter();
        int width = frameGrabber.getImageWidth();
        int height = frameGrabber.getImageHeight();
        long length = frameGrabber.getLengthInTime();
        BufferedImage bufferedImage;
        if (thumbnailFile == null) {
            bufferedImage = MyImageUtils.deepCopy(converter.convert(frameGrabber.grabImage()));
            thumbnailFile = File.createTempFile("insta", ".jpg");
            log.info("Generated thumbnail: " + thumbnailFile.getAbsolutePath());
            ImageIO.write(bufferedImage, "JPG", thumbnailFile);
        } else {
            bufferedImage = ImageIO.read(thumbnailFile);
        }
        holdOn();
        StatusResult thumbnailResult = api.sendRequest(new InstagramUploadPhotoRequest(thumbnailFile, caption, uploadId));
        log.info("Thumbnail result: " + thumbnailResult);
        StatusResult configureResult = api.sendRequest(InstagramConfigureVideoRequest.builder().uploadId(uploadId).caption(caption).duration(length).width(width).height(height).build());
        log.info("Video configure result: " + configureResult);
        return configureResult;
    }
}
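Java2DFrameConverter typically reuses its backing image between calls, which is presumably why the snippet deep-copies the converted frame before writing it out. The MyImageUtils.deepCopy helper is not shown above; a minimal sketch of such a helper, which may differ from the actual instagram4j implementation, could look like this:

import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.WritableRaster;

public final class MyImageUtils {

    // Returns a standalone copy of the image so it remains valid after the
    // grabber is closed and the converter's shared buffer is reused.
    public static BufferedImage deepCopy(BufferedImage source) {
        ColorModel colorModel = source.getColorModel();
        boolean isAlphaPremultiplied = colorModel.isAlphaPremultiplied();
        WritableRaster raster = source.copyData(null);
        return new BufferedImage(colorModel, raster, isAlphaPremultiplied, null);
    }
}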
use of org.bytedeco.javacv.FFmpegFrameGrabber in project javacv by bytedeco.
the class JavaFxPlayVideoAndAudio method start.
@Override
public void start(final Stage primaryStage) throws Exception {
    final StackPane root = new StackPane();
    final ImageView imageView = new ImageView();
    root.getChildren().add(imageView);
    imageView.fitWidthProperty().bind(primaryStage.widthProperty());
    imageView.fitHeightProperty().bind(primaryStage.heightProperty());
    final Scene scene = new Scene(root, 640, 480);
    primaryStage.setTitle("Video + audio");
    primaryStage.setScene(scene);
    primaryStage.show();
    playThread = new Thread(new Runnable() {
        public void run() {
            try {
                final String videoFilename = getParameters().getRaw().get(0);
                final FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(videoFilename);
                grabber.start();
                primaryStage.setWidth(grabber.getImageWidth());
                primaryStage.setHeight(grabber.getImageHeight());
                final PlaybackTimer playbackTimer;
                final SourceDataLine soundLine;
                if (grabber.getAudioChannels() > 0) {
                    final AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), 16, grabber.getAudioChannels(), true, true);
                    final DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
                    soundLine = (SourceDataLine) AudioSystem.getLine(info);
                    soundLine.open(audioFormat);
                    soundLine.start();
                    playbackTimer = new PlaybackTimer(soundLine);
                } else {
                    soundLine = null;
                    playbackTimer = new PlaybackTimer();
                }
                final JavaFXFrameConverter converter = new JavaFXFrameConverter();
                final ExecutorService audioExecutor = Executors.newSingleThreadExecutor();
                final ExecutorService imageExecutor = Executors.newSingleThreadExecutor();
                final long maxReadAheadBufferMicros = 1000 * 1000L;
                long lastTimeStamp = -1L;
                while (!Thread.interrupted()) {
                    final Frame frame = grabber.grab();
                    if (frame == null) {
                        break;
                    }
                    if (lastTimeStamp < 0) {
                        playbackTimer.start();
                    }
                    lastTimeStamp = frame.timestamp;
                    if (frame.image != null) {
                        final Frame imageFrame = frame.clone();
                        imageExecutor.submit(new Runnable() {
                            public void run() {
                                final Image image = converter.convert(imageFrame);
                                final long timeStampDeltaMicros = imageFrame.timestamp - playbackTimer.elapsedMicros();
                                imageFrame.close();
                                if (timeStampDeltaMicros > 0) {
                                    final long delayMillis = timeStampDeltaMicros / 1000L;
                                    try {
                                        Thread.sleep(delayMillis);
                                    } catch (InterruptedException e) {
                                        Thread.currentThread().interrupt();
                                    }
                                }
                                Platform.runLater(new Runnable() {
                                    public void run() {
                                        imageView.setImage(image);
                                    }
                                });
                            }
                        });
                    } else if (frame.samples != null) {
                        if (soundLine == null) {
                            throw new IllegalStateException("Internal error: sound playback not initialized");
                        }
                        final ShortBuffer channelSamplesShortBuffer = (ShortBuffer) frame.samples[0];
                        channelSamplesShortBuffer.rewind();
                        final ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesShortBuffer.capacity() * 2);
                        for (int i = 0; i < channelSamplesShortBuffer.capacity(); i++) {
                            short val = channelSamplesShortBuffer.get(i);
                            outBuffer.putShort(val);
                        }
                        audioExecutor.submit(new Runnable() {
                            public void run() {
                                soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
                                outBuffer.clear();
                            }
                        });
                    }
                    final long timeStampDeltaMicros = frame.timestamp - playbackTimer.elapsedMicros();
                    if (timeStampDeltaMicros > maxReadAheadBufferMicros) {
                        Thread.sleep((timeStampDeltaMicros - maxReadAheadBufferMicros) / 1000);
                    }
                }
                if (!Thread.interrupted()) {
                    long delay = (lastTimeStamp - playbackTimer.elapsedMicros()) / 1000 + Math.round(1 / grabber.getFrameRate() * 1000);
                    Thread.sleep(Math.max(0, delay));
                }
                grabber.stop();
                grabber.release();
                if (soundLine != null) {
                    soundLine.stop();
                }
                audioExecutor.shutdownNow();
                audioExecutor.awaitTermination(10, TimeUnit.SECONDS);
                imageExecutor.shutdownNow();
                imageExecutor.awaitTermination(10, TimeUnit.SECONDS);
                Platform.exit();
            } catch (Exception exception) {
                LOG.log(Level.SEVERE, null, exception);
                System.exit(1);
            }
        }
    });
    playThread.start();
}
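The sample depends on a PlaybackTimer helper class that is not included above; it supplies the start() and elapsedMicros() calls used for audio/video synchronization. A minimal sketch, assuming it reads the clock from the audio line when one exists and falls back to the system clock otherwise (the actual class in the javacv sample may differ), is:

import javax.sound.sampled.SourceDataLine;

// Minimal playback clock: prefer the audio line's own position so video
// stays in sync with audio; fall back to the wall clock when there is no audio.
class PlaybackTimer {
    private Long startTimeNanos = null;
    private final SourceDataLine soundLine;

    PlaybackTimer(SourceDataLine soundLine) {
        this.soundLine = soundLine;
    }

    PlaybackTimer() {
        this.soundLine = null;
    }

    void start() {
        if (soundLine == null) {
            startTimeNanos = System.nanoTime();
        }
    }

    long elapsedMicros() {
        if (soundLine == null) {
            if (startTimeNanos == null) {
                throw new IllegalStateException("PlaybackTimer not initialized");
            }
            return (System.nanoTime() - startTimeNanos) / 1000;
        }
        return soundLine.getMicrosecondPosition();
    }
}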
use of org.bytedeco.javacv.FFmpegFrameGrabber in project javacv by bytedeco.
the class FFmpegStreamingTimeout method rtspStreamingTest.
private static void rtspStreamingTest() {
    try {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(SOURCE_RTSP);
        /**
         * "rw_timeout" - IS IGNORED when the network cable has been
         * unplugged before a connection is made, but the option takes
         * effect once a connection has been established.
         *
         * "timeout" - works fine.
         */
        grabber.setOption(TimeoutOption.TIMEOUT.getKey(), String.valueOf(TIMEOUT * 1000000)); // In microseconds.
        grabber.start();
        Frame frame = null;
        /**
         * When the network is disabled before the grabber is started, the
         * grabber throws an exception: "org.bytedeco.javacv.FrameGrabber$Exception:
         * avformat_open_input() error -138: Could not open input...".
         *
         * When the connection is lost after a few frames have been grabbed,
         * grabber.grab() returns null without an exception.
         */
        while ((frame = grabber.grab()) != null) {
            System.out.println("frame grabbed at " + grabber.getTimestamp());
        }
        System.out.println("loop end with frame: " + frame);
    } catch (FrameGrabber.Exception ex) {
        System.out.println("exception: " + ex);
    }
    System.out.println("end");
}
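TimeoutOption is a small enum defined elsewhere in FFmpegStreamingTimeout; grabber.setOption expects the lower-case FFmpeg option name ("timeout" or "rw_timeout") as its key. A sketch of what that enum presumably looks like:

// Maps the enum constants to the lower-case FFmpeg option names
// ("timeout", "rw_timeout") expected by grabber.setOption(...).
private static enum TimeoutOption {
    TIMEOUT,
    RW_TIMEOUT;

    public String getKey() {
        return name().toLowerCase();
    }
}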
use of org.bytedeco.javacv.FFmpegFrameGrabber in project javacv by bytedeco.
the class FFmpegStreamingTimeout method testWithCallback.
private static void testWithCallback() {
    try {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(SOURCE_RTSP);
        /**
         * grabber.getFormatContext() is null before grabber.start().
         *
         * But if the network is disabled, grabber.start() will never return.
         *
         * That is why interrupt_callback is not suitable for the "network
         * disabled" case.
         */
        grabber.start();
        final AtomicBoolean interruptFlag = new AtomicBoolean(false);
        AVIOInterruptCB.Callback_Pointer cp = new AVIOInterruptCB.Callback_Pointer() {
            @Override
            public int call(Pointer pointer) {
                // 0 - continue, 1 - exit
                int interruptFlagInt = interruptFlag.get() ? 1 : 0;
                System.out.println("callback, interrupt flag == " + interruptFlagInt);
                return interruptFlagInt;
            }
        };
        AVFormatContext oc = grabber.getFormatContext();
        AVIOInterruptCB cb = new AVIOInterruptCB();
        cb.callback(cp);
        oc.interrupt_callback(cb);
        new Thread(new Runnable() {
            public void run() {
                try {
                    TimeUnit.SECONDS.sleep(TIMEOUT);
                    interruptFlag.set(true);
                    System.out.println("interrupt flag was changed");
                } catch (InterruptedException ex) {
                    System.out.println("exception in interruption thread: " + ex);
                }
            }
        }).start();
        Frame frame = null;
        /**
         * On one of my RTSP cams the grabber stops calling the callback when
         * the connection is lost. I think it has something to do with the
         * message: "[swscaler @ 0000000029af49e0] deprecated pixel format
         * used, make sure you did set range correctly".
         *
         * So there is at least one case in which the grabber stops calling
         * the callback.
         */
        while ((frame = grabber.grab()) != null) {
            System.out.println("frame grabbed at " + grabber.getTimestamp());
        }
        System.out.println("loop end with frame: " + frame);
    } catch (FrameGrabber.Exception ex) {
        System.out.println("exception: " + ex);
    }
    System.out.println("end");
}
use of org.bytedeco.javacv.FFmpegFrameGrabber in project bigbluebutton by bigbluebutton.
the class FfmpegScreenshare method setupWindowsGrabber.
//==============================================
// GRABBERS
//==============================================
// Need to construct our grabber depending on which
// platform the user is using.
// https://trac.ffmpeg.org/wiki/Capture/Desktop
//
private FFmpegFrameGrabber setupWindowsGrabber(int width, int height, int x, int y) {
System.out.println("Setting up grabber for windows.");
FFmpegFrameGrabber winGrabber = new FFmpegFrameGrabber("desktop");
winGrabber.setImageWidth(width);
winGrabber.setImageHeight(height);
if (ssi.fullScreen) {
winGrabber.setOption("offset_x", new Integer(0).toString());
winGrabber.setOption("offset_y", new Integer(0).toString());
} else {
winGrabber.setOption("offset_x", new Integer(x).toString());
winGrabber.setOption("offset_y", new Integer(y).toString());
}
winGrabber.setFormat("gdigrab");
return winGrabber;
}
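As the comment block notes, the grabber has to be constructed differently per platform. Purely as an illustrative sketch (this is not the BigBlueButton code; the device string and format follow the FFmpeg desktop-capture wiki linked above), an X11-based counterpart for Linux might look like this:

// Hypothetical Linux counterpart: capture the X11 display via x11grab instead of
// gdigrab's "desktop". ":0.0+x,y" selects the display and the capture offset;
// the width/height set the captured region size.
private FFmpegFrameGrabber setupLinuxGrabber(int width, int height, int x, int y) {
    FFmpegFrameGrabber x11Grabber = new FFmpegFrameGrabber(":0.0+" + x + "," + y);
    x11Grabber.setFormat("x11grab");
    x11Grabber.setImageWidth(width);
    x11Grabber.setImageHeight(height);
    return x11Grabber;
}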