
Example 1 with AVStream

use of org.bytedeco.ffmpeg.avformat.AVStream in project cineast by vitrivr.

the class TechnicalVideoMetadataExtractor method extract.

/**
 * Extracts the technical video metadata from the specified path and returns a List of {@link MediaObjectMetadataDescriptor} objects (one for each metadata entry).
 *
 * @param objectId ID of the multimedia object for which metadata will be generated.
 * @param path     Path to the file for which metadata should be extracted.
 * @return List of {@link MediaObjectMetadataDescriptor}s or an empty list, if extracting metadata fails.
 */
@Override
public List<MediaObjectMetadataDescriptor> extract(String objectId, Path path) {
    final ArrayList<MediaObjectMetadataDescriptor> metadata = new ArrayList<>();
    if (!Files.exists(path)) {
        LOGGER.warn("File does not exist, returning empty metadata");
        return metadata;
    }
    /* We assume that everything that can be handled by the FFMpegVideoDecoder can also be handled here. Without this safety guard, extraction would crash with a core dump. */
    if (!FFMpegVideoDecoder.supportedFiles.contains(MimeTypeHelper.getContentType(path.toString()))) {
        LOGGER.warn("File is not a video, returning empty metadata");
        return metadata;
    }
    /* Initialize the AVFormatContext. */
    final AVFormatContext pFormatContext = avformat.avformat_alloc_context();
    /* Open the input file. */
    if (avformat.avformat_open_input(pFormatContext, path.toString(), null, null) != 0) {
        LOGGER.error("Error while accessing file {}. Failed to obtain technical video metadata.", path.toString());
        return metadata;
    }
    /* Retrieve stream information. */
    if (avformat.avformat_find_stream_info(pFormatContext, (PointerPointer<?>) null) < 0) {
        LOGGER.error("Error: Couldn't find stream information. Failed to obtain technical video metadata.");
        return metadata;
    }
    final AVCodec codec = avcodec.av_codec_iterate(new Pointer());
    final int videoStreamIdx = avformat.av_find_best_stream(pFormatContext, avutil.AVMEDIA_TYPE_VIDEO, -1, -1, codec, 0);
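    /* av_find_best_stream() returns the index of the best matching video stream, or a negative AVERROR code if no such stream is found. */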
    final AVStream videoStream = pFormatContext.streams(videoStreamIdx);
    final AVRational timebase = videoStream.time_base();
    /* Allocate new codec-context for codec returned by av_find_best_stream(). */
    final AVCodecContext videoCodecContext = avcodec.avcodec_alloc_context3(codec);
    avcodec.avcodec_parameters_to_context(videoCodecContext, videoStream.codecpar());
    /* Open the codec context. */
    if (avcodec.avcodec_open2(videoCodecContext, codec, (AVDictionary) null) < 0) {
        LOGGER.error("Error: Could not open video codec. Failed to obtain technical video metadata.");
        return metadata;
    }
    /* Extract and add the video metadata to the list. */
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_FPS, ((float) videoStream.avg_frame_rate().num() / (float) videoStream.avg_frame_rate().den()), false));
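    /* duration() is expressed in the stream's time-base units; multiplying by timebase.num() * 1000 and dividing by timebase.den() converts it to milliseconds. */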
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_DURATION, Math.floorDiv(videoStream.duration() * timebase.num() * 1000, timebase.den()), false));
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_WIDTH, videoCodecContext.width(), false));
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_HEIGHT, videoCodecContext.height(), false));
    /* Close all the resources. */
    avcodec.avcodec_free_context(videoCodecContext);
    avformat.avformat_close_input(pFormatContext);
    /* Return list of results. */
    return metadata;
}
Also used : PointerPointer(org.bytedeco.javacpp.PointerPointer) AVFormatContext(org.bytedeco.ffmpeg.avformat.AVFormatContext) ArrayList(java.util.ArrayList) AVCodec(org.bytedeco.ffmpeg.avcodec.AVCodec) MediaObjectMetadataDescriptor(org.vitrivr.cineast.core.data.entities.MediaObjectMetadataDescriptor) Pointer(org.bytedeco.javacpp.Pointer) AVStream(org.bytedeco.ffmpeg.avformat.AVStream) AVDictionary(org.bytedeco.ffmpeg.avutil.AVDictionary) AVRational(org.bytedeco.ffmpeg.avutil.AVRational) AVCodecContext(org.bytedeco.ffmpeg.avcodec.AVCodecContext)
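
For context, a minimal usage sketch of this extractor (not part of the Cineast sources): it assumes TechnicalVideoMetadataExtractor has a no-argument constructor and lives in the package shown below; the object id and video path are placeholders.

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import org.vitrivr.cineast.core.data.entities.MediaObjectMetadataDescriptor;
// The extractor's package is an assumption; adjust it to the actual Cineast module layout.
import org.vitrivr.cineast.core.extraction.metadata.TechnicalVideoMetadataExtractor;

public class TechnicalMetadataSketch {
    public static void main(String[] args) {
        // Placeholder object id and video path, not taken from the Cineast sources.
        final String objectId = "v_00001";
        final Path videoPath = Paths.get("/tmp/example.mp4");
        final TechnicalVideoMetadataExtractor extractor = new TechnicalVideoMetadataExtractor();
        final List<MediaObjectMetadataDescriptor> metadata = extractor.extract(objectId, videoPath);
        // Each descriptor carries one value: fps, duration (ms), width, or height.
        metadata.forEach(System.out::println);
    }
}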

Example 2 with AVStream

use of org.bytedeco.ffmpeg.avformat.AVStream in project blog_demos by zq2599.

the class PushMp4 method grabAndPush.

/**
 * Reads the specified MP4 file and pushes it to the SRS server.
 * @param sourceFilePath absolute path of the video file
 * @param PUSH_ADDRESS   push (streaming) address
 * @throws Exception
 */
private static void grabAndPush(String sourceFilePath, String PUSH_ADDRESS) throws Exception {
    // FFmpeg log level
    avutil.av_log_set_level(avutil.AV_LOG_INFO);
    FFmpegLogCallback.set();
    // Instantiate the frame grabber with the source file path
    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(sourceFilePath);
    long startTime = System.currentTimeMillis();
    log.info("Starting frame grabber initialization");
    // Initialize the frame grabber, i.e. its internal data structures (timestamps, codec contexts, frame objects, etc.).
    // If the argument is true, it also calls avformat_find_stream_info to collect stream information
    // and stores it in the AVFormatContext member variable oc.
    grabber.start(true);
    log.info("Frame grabber initialized, took [{}] ms", System.currentTimeMillis() - startTime);
    // The decoder information initialized in grabber.start() is kept in the grabber's member variable oc
    AVFormatContext avFormatContext = grabber.getFormatContext();
    // Number of media streams in the file (usually a video stream plus an audio stream)
    int streamNum = avFormatContext.nb_streams();
    // Nothing more to do if there are no media streams
    if (streamNum < 1) {
        log.error("No media streams found in the file");
        return;
    }
    // Get the video frame rate
    int frameRate = (int) grabber.getVideoFrameRate();
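    // avFormatContext.duration() is expressed in AV_TIME_BASE units (microseconds), hence the division by 1000000 below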
    log.info("Video frame rate [{}], video duration [{}] s, number of media streams [{}]", frameRate, avFormatContext.duration() / 1000000, avFormatContext.nb_streams());
    // Iterate over every stream and inspect its type
    for (int i = 0; i < streamNum; i++) {
        AVStream avStream = avFormatContext.streams(i);
        AVCodecParameters avCodecParameters = avStream.codecpar();
        log.info("Stream index [{}], codec type [{}], codec id [{}]", i, avCodecParameters.codec_type(), avCodecParameters.codec_id());
    }
    // Video width
    int frameWidth = grabber.getImageWidth();
    // Video height
    int frameHeight = grabber.getImageHeight();
    // Number of audio channels
    int audioChannels = grabber.getAudioChannels();
    log.info("Video width [{}], video height [{}], audio channels [{}]", frameWidth, frameHeight, audioChannels);
    // Instantiate FFmpegFrameRecorder with the SRS push address
    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(PUSH_ADDRESS, frameWidth, frameHeight, audioChannels);
    // Set the video codec (H.264)
    recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
    // Set the container format
    recorder.setFormat("flv");
    // Frames per second
    recorder.setFrameRate(frameRate);
    // Number of frames between two key frames (the GOP size)
    recorder.setGopSize(frameRate);
    // Set the number of audio channels to match the source
    recorder.setAudioChannels(grabber.getAudioChannels());
    startTime = System.currentTimeMillis();
    log.info("Starting frame recorder initialization");
    // Initialize the frame recorder: its internal data structures (audio and video stream pointers, encoders),
    // a call to av_guess_format to determine the output container format,
    // allocation of the media context,
    // and configuration of the encoder parameters
    recorder.start();
    log.info("Frame recorder initialized, took [{}] ms", System.currentTimeMillis() - startTime);
    Frame frame;
    startTime = System.currentTimeMillis();
    log.info("开始推流");
    long videoTS = 0;
    int videoFrameNum = 0;
    int audioFrameNum = 0;
    int dataFrameNum = 0;
    // At 15 frames per second, for example, the interval between two frames is (1000/15) ms
    int interVal = 1000 / frameRate;
    // The sleep after sending a frame must be somewhat shorter than (1000/frameRate),
    // otherwise playback stutters; here one eighth of that value is used
    interVal /= 8;
    // Keep pulling frames from the video source
    while (null != (frame = grabber.grab())) {
        videoTS = 1000 * (System.currentTimeMillis() - startTime);
        // Timestamp (in microseconds)
        recorder.setTimestamp(videoTS);
        // If the frame contains an image, count a video frame
        if (null != frame.image) {
            videoFrameNum++;
        }
        // If the frame contains audio samples, count an audio frame
        if (null != frame.samples) {
            audioFrameNum++;
        }
        // If the frame contains data, count a data frame
        if (null != frame.data) {
            dataFrameNum++;
        }
        // Push every grabbed frame to SRS
        recorder.record(frame);
        // Pause briefly before pushing the next frame
        Thread.sleep(interVal);
    }
    log.info("Push finished: video frames [{}], audio frames [{}], data frames [{}], took [{}] s", videoFrameNum, audioFrameNum, dataFrameNum, (System.currentTimeMillis() - startTime) / 1000);
    // Close the frame recorder
    recorder.close();
    // Close the frame grabber
    grabber.close();
}
Also used : FFmpegFrameGrabber(org.bytedeco.javacv.FFmpegFrameGrabber) Frame(org.bytedeco.javacv.Frame) AVCodecParameters(org.bytedeco.ffmpeg.avcodec.AVCodecParameters) AVFormatContext(org.bytedeco.ffmpeg.avformat.AVFormatContext) FFmpegFrameRecorder(org.bytedeco.javacv.FFmpegFrameRecorder) AVStream(org.bytedeco.ffmpeg.avformat.AVStream)
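
For context, a sketch of how grabAndPush might be driven: because the method is private, the entry point below is assumed to sit inside the same PushMp4 class, and both the file path and the RTMP push address are placeholders rather than values from the blog_demos project.

// Hypothetical entry point inside PushMp4 (grabAndPush is private to that class).
public static void main(String[] args) throws Exception {
    // Placeholder values; replace them with a real MP4 file and a reachable SRS push address.
    String sourceFilePath = "/tmp/sample.mp4";
    String pushAddress = "rtmp://127.0.0.1/live/livestream";
    grabAndPush(sourceFilePath, pushAddress);
}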

Aggregations

AVFormatContext (org.bytedeco.ffmpeg.avformat.AVFormatContext) 2
AVStream (org.bytedeco.ffmpeg.avformat.AVStream) 2
ArrayList (java.util.ArrayList) 1
AVCodec (org.bytedeco.ffmpeg.avcodec.AVCodec) 1
AVCodecContext (org.bytedeco.ffmpeg.avcodec.AVCodecContext) 1
AVCodecParameters (org.bytedeco.ffmpeg.avcodec.AVCodecParameters) 1
AVDictionary (org.bytedeco.ffmpeg.avutil.AVDictionary) 1
AVRational (org.bytedeco.ffmpeg.avutil.AVRational) 1
Pointer (org.bytedeco.javacpp.Pointer) 1
PointerPointer (org.bytedeco.javacpp.PointerPointer) 1
FFmpegFrameGrabber (org.bytedeco.javacv.FFmpegFrameGrabber) 1
FFmpegFrameRecorder (org.bytedeco.javacv.FFmpegFrameRecorder) 1
Frame (org.bytedeco.javacv.Frame) 1
MediaObjectMetadataDescriptor (org.vitrivr.cineast.core.data.entities.MediaObjectMetadataDescriptor) 1