Use of com.googlecode.javacpp.BytePointer in project VideoRecorder by qdrzwd:
the record method of the NewFFmpegFrameRecorder class. The method wraps an OpenCV IplImage's native pixel buffer in a BytePointer, converts it to the codec's pixel format when necessary, encodes it, and writes the resulting packet to the output media file; it returns true when the written frame is a key frame.
public boolean record(IplImage image, int pixelFormat) throws Exception {
    if (videoSt == null) {
        throw new Exception("No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
    }
    if (image == null) {
        /* no more frames to compress. The codec has a latency of a few
           frames if using B-frames, so we retrieve the remaining frames
           by calling the encoder again with a null picture */
    } else {
        //image = rotate(image, 90);
        int width = image.width();
        int step = image.widthStep();
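        // When no explicit pixel format is given, infer one below from the
        // IplImage depth and channel count.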
        if (pixelFormat == AV_PIX_FMT_NONE) {
            int depth = image.depth();
            int channels = image.nChannels();
            if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 3) {
                pixelFormat = AV_PIX_FMT_BGR24;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 1) {
                pixelFormat = AV_PIX_FMT_GRAY8;
            } else if ((depth == IPL_DEPTH_16U || depth == IPL_DEPTH_16S) && channels == 1) {
                pixelFormat = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN) ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 4) {
                pixelFormat = AV_PIX_FMT_RGBA;
            } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 2) {
                // Android's camera capture format
                pixelFormat = AV_PIX_FMT_NV21;
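                // NV21 is 8-bit data (a full-size Y plane followed by interleaved
                // VU) delivered tightly packed, so the stride is the plain width
                // rather than IplImage's widthStep.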
                step = width;
            } else {
                throw new Exception("Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
            }
        }
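        // imageData() exposes the native pixel buffer as a BytePointer, so the
        // frame data can be handed to FFmpeg without copying.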
        BytePointer data = image.imageData();
        int height = image.height();
        if (videoC.pix_fmt() != pixelFormat || videoC.width() != width || videoC.height() != height) {
            /* convert to the codec pixel format if needed */
            imgConvertCtx = sws_getCachedContext(imgConvertCtx, width, height, pixelFormat,
                    videoC.width(), videoC.height(), videoC.pix_fmt(), SWS_BILINEAR, null, null, (DoublePointer) null);
            if (imgConvertCtx == null) {
                throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
            }
            avpicture_fill(new AVPicture(tmpPicture), data, pixelFormat, width, height);
            avpicture_fill(new AVPicture(picture), pictureBuf, videoC.pix_fmt(), videoC.width(), videoC.height());
            tmpPicture.linesize(0, step);
            sws_scale(imgConvertCtx, new PointerPointer(tmpPicture), tmpPicture.linesize(),
                    0, height, new PointerPointer(picture), picture.linesize());
        } else {
            avpicture_fill(new AVPicture(picture), data, pixelFormat, width, height);
            picture.linesize(0, step);
        }
    }
    int ret;
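    // Two paths: hand the raw picture straight to the muxer when the output
    // format accepts AVFMT_RAWPICTURE; otherwise run it through the encoder.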
    if ((oformat.flags() & AVFMT_RAWPICTURE) != 0) {
        if (image == null) {
            return false;
        }
        /* raw video case: the packet simply points at the AVPicture struct */
        av_init_packet(videoPkt);
        videoPkt.flags(videoPkt.flags() | AV_PKT_FLAG_KEY);
        videoPkt.stream_index(videoSt.index());
        videoPkt.data(new BytePointer(picture));
        videoPkt.size(Loader.sizeof(AVPicture.class));
    } else {
        /* encode the image */
        av_init_packet(videoPkt);
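        // Point the packet at the preallocated output buffer;
        // avcodec_encode_video2() writes the compressed data into it.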
        videoPkt.data(videoOutbuf);
        videoPkt.size(videoOutbufSize);
        picture.quality(videoC.global_quality());
        if ((ret = avcodec_encode_video2(videoC, videoPkt, image == null ? null : picture, gotVideoPacket)) < 0) {
            throw new Exception("avcodec_encode_video2() error " + ret + ": Could not encode video packet.");
        }
        // magic required by libx264
        picture.pts(picture.pts() + 1);
        /* if no packet came out, the frame was buffered by the encoder */
        if (gotVideoPacket[0] != 0) {
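            // av_rescale_q() converts timestamps between time bases: e.g. with a
            // codec time base of 1/25 and a stream time base of 1/12800, a pts
            // of 3 becomes 3 * (12800 / 25) = 1536.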
            if (videoPkt.pts() != AV_NOPTS_VALUE) {
                videoPkt.pts(av_rescale_q(videoPkt.pts(), videoC.time_base(), videoSt.time_base()));
            }
            if (videoPkt.dts() != AV_NOPTS_VALUE) {
                videoPkt.dts(av_rescale_q(videoPkt.dts(), videoC.time_base(), videoSt.time_base()));
            }
            videoPkt.stream_index(videoSt.index());
        } else {
            return false;
        }
    }
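    // The format context is shared with the audio stream's writer, so access
    // to it must be synchronized.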
    synchronized (oc) {
        /* write the compressed frame in the media file */
        if (interleaved && audioSt != null) {
            if ((ret = av_interleaved_write_frame(oc, videoPkt)) < 0) {
                throw new Exception("av_interleaved_write_frame() error " + ret + " while writing interleaved video frame.");
            }
        } else {
            if ((ret = av_write_frame(oc, videoPkt)) < 0) {
                throw new Exception("av_write_frame() error " + ret + " while writing video frame.");
            }
        }
    }
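    // Report whether the encoder marked the written frame as a key frame.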
    return picture.key_frame() != 0;
}
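For context, here is a minimal usage sketch of this method. It is hypothetical: it assumes NewFFmpegFrameRecorder keeps the constructor and lifecycle (setFrameRate, start, record, stop, release) of the JavaCV FFmpegFrameRecorder it is based on, and the import paths, output path, frame rate, and frame count are illustrative rather than taken from the VideoRecorder project.

import com.googlecode.javacv.cpp.opencv_core.IplImage;

import static com.googlecode.javacv.cpp.avutil.AV_PIX_FMT_NONE;
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;

public class RecordExample {
    public static void main(String[] args) throws Exception {
        int width = 640, height = 480;
        // Assumed constructor, mirroring FFmpegFrameRecorder(filename, width, height).
        NewFFmpegFrameRecorder recorder = new NewFFmpegFrameRecorder("/tmp/out.mp4", width, height);
        recorder.setFrameRate(25);
        recorder.start();

        // An 8-bit, 3-channel image makes record() guess AV_PIX_FMT_BGR24 above.
        IplImage image = IplImage.create(width, height, IPL_DEPTH_8U, 3);
        for (int i = 0; i < 100; i++) {
            // ...fill image with pixel data here...
            recorder.record(image, AV_PIX_FMT_NONE);
        }

        // A null image drains frames still buffered by the encoder (see the
        // image == null branch above); repeat as needed for B-frame latency.
        recorder.record(null, AV_PIX_FMT_NONE);

        recorder.stop();
        recorder.release();
    }
}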