
Example 1 with AVCodec

Use of org.bytedeco.ffmpeg.avcodec.AVCodec in project cineast by vitrivr.

From the class TechnicalVideoMetadataExtractor, method extract.

/**
 * Extracts the technical video metadata from the specified path and returns a List of {@link MediaObjectMetadataDescriptor} objects (one for each metadata entry).
 *
 * @param objectId ID of the multimedia object for which metadata will be generated.
 * @param path     Path to the file for which metadata should be extracted.
 * @return List of {@link MediaObjectMetadataDescriptor}s or an empty list, if extracting metadata fails.
 */
@Override
public List<MediaObjectMetadataDescriptor> extract(String objectId, Path path) {
    final ArrayList<MediaObjectMetadataDescriptor> metadata = new ArrayList<>();
    if (!Files.exists(path)) {
        LOGGER.warn("File does not exist, returning empty metadata");
        return metadata;
    }
    /* We assume that everything the FFMpegVideoDecoder can handle can also be handled here. Without this safety guard, extraction would crash with a core dump. */
    if (!FFMpegVideoDecoder.supportedFiles.contains(MimeTypeHelper.getContentType(path.toString()))) {
        LOGGER.warn("File is not a video, returning empty metadata");
        return metadata;
    }
    /* Initialize the AVFormatContext. */
    final AVFormatContext pFormatContext = avformat.avformat_alloc_context();
    /* Open the input file. */
    if (avformat.avformat_open_input(pFormatContext, path.toString(), null, null) != 0) {
        LOGGER.error("Error while accessing file {}. Failed to obtain technical video metadata.", path.toString());
        return metadata;
    }
    /* Retrieve stream information. */
    if (avformat.avformat_find_stream_info(pFormatContext, (PointerPointer<?>) null) < 0) {
        LOGGER.error("Error, Ccouldn't find stream information. Failed to obtain technical video metadata.");
        return metadata;
    }
    final AVCodec codec = avcodec.av_codec_iterate(new Pointer());
    final int videoStreamIdx = avformat.av_find_best_stream(pFormatContext, avutil.AVMEDIA_TYPE_VIDEO, -1, -1, codec, 0);
    final AVStream videoStream = pFormatContext.streams(videoStreamIdx);
    final AVRational timebase = videoStream.time_base();
    /* Allocate new codec-context for codec returned by av_find_best_stream(). */
    final AVCodecContext videoCodecContext = avcodec.avcodec_alloc_context3(codec);
    avcodec.avcodec_parameters_to_context(videoCodecContext, videoStream.codecpar());
    /* Open the codec context. */
    if (avcodec.avcodec_open2(videoCodecContext, codec, (AVDictionary) null) < 0) {
        LOGGER.error("Error, Could not open video codec.  Failed to obtain technical video metadata.");
        return metadata;
    }
    /* Extract and add the video metadata to the list. */
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_FPS, ((float) videoStream.avg_frame_rate().num() / (float) videoStream.avg_frame_rate().den()), false));
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_DURATION, Math.floorDiv(videoStream.duration() * timebase.num() * 1000, timebase.den()), false));
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_WIDTH, videoCodecContext.width(), false));
    metadata.add(new MediaObjectMetadataDescriptor(objectId, this.domain(), KEY_VIDEO_HEIGHT, videoCodecContext.height(), false));
    /* Close all the resources. */
    avcodec.avcodec_free_context(videoCodecContext);
    avformat.avformat_close_input(pFormatContext);
    /* Return list of results. */
    return metadata;
}
Also used: PointerPointer(org.bytedeco.javacpp.PointerPointer) AVFormatContext(org.bytedeco.ffmpeg.avformat.AVFormatContext) ArrayList(java.util.ArrayList) AVCodec(org.bytedeco.ffmpeg.avcodec.AVCodec) MediaObjectMetadataDescriptor(org.vitrivr.cineast.core.data.entities.MediaObjectMetadataDescriptor) Pointer(org.bytedeco.javacpp.Pointer) AVStream(org.bytedeco.ffmpeg.avformat.AVStream) AVDictionary(org.bytedeco.ffmpeg.avutil.AVDictionary) AVRational(org.bytedeco.ffmpeg.avutil.AVRational) AVCodecContext(org.bytedeco.ffmpeg.avcodec.AVCodecContext)
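The KEY_VIDEO_DURATION value above is derived from the stream's time base: AVStream.duration() is expressed in time-base units, so multiplying by timebase.num() * 1000 and dividing by timebase.den() yields milliseconds. Below is a minimal standalone sketch of that conversion (class and helper names are hypothetical, not part of cineast); the same pattern reappears in Example 3 for the audio duration.

/* Hypothetical helper (not part of cineast) showing the timebase-to-milliseconds conversion. */
final class TimebaseSketch {

    /** duration is given in time-base units; num/den is the stream's time_base. */
    static long toMillis(long durationInTimebaseUnits, int timebaseNum, int timebaseDen) {
        /* units * (num / den) seconds per unit * 1000 ms per second, floor division as in the extractor. */
        return Math.floorDiv(durationInTimebaseUnits * timebaseNum * 1000L, timebaseDen);
    }

    public static void main(String[] args) {
        /* A stream with time_base 1/90000 and a duration of 5_400_000 units lasts 60_000 ms. */
        System.out.println(toMillis(5_400_000L, 1, 90_000));
    }
}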

Example 2 with AVCodec

Use of org.bytedeco.ffmpeg.avcodec.AVCodec in project cineast by vitrivr.

From the class FFMpegVideoDecoder, method initAudio.

/**
 * Initializes the audio decoding part of FFMPEG.
 *
 * @param config The {@link DecoderConfig} used for configuring the {@link FFMpegVideoDecoder}.
 * @return True if a) the audio decoder was initialized, b) the number of channels is less than or equal to zero (no audio), or c) audio is unavailable or unsupported; false if initialization failed for technical reasons.
 */
private boolean initAudio(DecoderConfig config) {
    /* Read decoder configuration. */
    int samplerate = config.namedAsInt(CONFIG_SAMPLERATE_PROPERTY, CONFIG_SAMPLERATE_DEFAULT);
    int channels = config.namedAsInt(CONFIG_CHANNELS_PROPERTY, CONFIG_CHANNELS_DEFAULT);
    long channellayout = avutil.av_get_default_channel_layout(channels);
    /* If the number of channels is less than or equal to zero, return true (no audio will be decoded). */
    if (channels <= 0) {
        LOGGER.info("Channel setting is smaller than zero. Continuing without audio!");
        this.audioComplete.set(true);
        return true;
    }
    /* Find the best audio stream. */
    final AVCodec codec = avcodec.av_codec_iterate(new Pointer());
    this.audioStream = avformat.av_find_best_stream(this.pFormatCtx, avutil.AVMEDIA_TYPE_AUDIO, -1, -1, codec, 0);
    if (this.audioStream < 0) {
        LOGGER.warn("Couldn't find a supported audio stream. Continuing without audio!");
        this.audioComplete.set(true);
        return true;
    }
    /* Allocate new codec-context. */
    this.pCodecCtxAudio = avcodec.avcodec_alloc_context3(codec);
    avcodec.avcodec_parameters_to_context(this.pCodecCtxAudio, this.pFormatCtx.streams(this.audioStream).codecpar());
    /* Open the codec context. */
    if (avcodec.avcodec_open2(this.pCodecCtxAudio, codec, (AVDictionary) null) < 0) {
        LOGGER.error("Could not open audio codec. Continuing without audio!");
        this.audioComplete.set(true);
        return true;
    }
    /* Allocate the re-sample context. */
    this.swr_ctx = swresample.swr_alloc_set_opts(null, channellayout, TARGET_FORMAT, samplerate, this.pCodecCtxAudio.channel_layout(), this.pCodecCtxAudio.sample_fmt(), this.pCodecCtxAudio.sample_rate(), 0, null);
    if (swresample.swr_init(this.swr_ctx) < 0) {
        this.swr_ctx = null;
        LOGGER.warn("Could not open re-sample context - original format will be kept!");
    }
    /* Initialize the re-sampled out-frame. */
    this.resampledFrame = avutil.av_frame_alloc();
    if (this.resampledFrame == null) {
        LOGGER.error("Could not allocate frame data structure for re-sampled data.");
        return false;
    }
    this.resampledFrame.channel_layout(channellayout);
    this.resampledFrame.sample_rate(samplerate);
    this.resampledFrame.channels(channels);
    this.resampledFrame.format(TARGET_FORMAT);
    /* Initialize the AudioDescriptor. */
    final AVRational timebase = this.pFormatCtx.streams(this.audioStream).time_base();
    final long duration = (1000L * timebase.num() * this.pFormatCtx.streams(this.audioStream).duration() / timebase.den());
    if (this.swr_ctx == null) {
        this.audioDescriptor = new AudioDescriptor(this.pCodecCtxAudio.sample_rate(), this.pCodecCtxAudio.channels(), duration);
    } else {
        this.audioDescriptor = new AudioDescriptor(this.resampledFrame.sample_rate(), this.resampledFrame.channels(), duration);
    }
    /* Completed initialization. */
    return true;
}
Also used: AVCodec(org.bytedeco.ffmpeg.avcodec.AVCodec) DoublePointer(org.bytedeco.javacpp.DoublePointer) IntPointer(org.bytedeco.javacpp.IntPointer) BytePointer(org.bytedeco.javacpp.BytePointer) Pointer(org.bytedeco.javacpp.Pointer) PointerPointer(org.bytedeco.javacpp.PointerPointer) AudioDescriptor(org.vitrivr.cineast.core.data.frames.AudioDescriptor) AVDictionary(org.bytedeco.ffmpeg.avutil.AVDictionary) AVRational(org.bytedeco.ffmpeg.avutil.AVRational)
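Once swr_init() above succeeds, each decoded audio frame can be converted into the pre-configured target layout before its samples are consumed. The following is a minimal sketch of that step, assuming fields equivalent to the swr_ctx and resampledFrame set up in initAudio() and relying on libswresample's swr_convert_frame(); the class and method are hypothetical and not part of the cineast source.

import org.bytedeco.ffmpeg.avutil.AVFrame;
import org.bytedeco.ffmpeg.global.swresample;
import org.bytedeco.ffmpeg.swresample.SwrContext;

/* Hypothetical sketch (not cineast source): resample one decoded frame into the
 * format, rate and layout that were pre-configured on the output frame in initAudio(). */
final class ResampleSketch {

    static boolean resample(SwrContext swrCtx, AVFrame decoded, AVFrame resampled) {
        if (swrCtx == null) {
            /* No re-sample context was created; the caller keeps the original format. */
            return false;
        }
        /* swr_convert_frame() writes converted samples into 'resampled' and returns 0 on success. */
        return swresample.swr_convert_frame(swrCtx, resampled, decoded) == 0;
    }
}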

Example 3 with AVCodec

Use of org.bytedeco.ffmpeg.avcodec.AVCodec in project cineast by vitrivr.

From the class FFMpegAudioDecoder, method init.

/**
 * Initializes the decoder with a file. This is a necessary step before content can be retrieved from the decoder by means of the getNext() method.
 *
 * @param path          Path to the file that should be decoded.
 * @param decoderConfig {@link DecoderConfig} used by this {@link Decoder}.
 * @param cacheConfig   The {@link CacheConfig} used by this {@link Decoder}
 * @return True if initialization was successful, false otherwise.
 */
@Override
public boolean init(Path path, DecoderConfig decoderConfig, CacheConfig cacheConfig) {
    if (!Files.exists(path)) {
        LOGGER.error("File does not exist {}", path.toString());
        return false;
    }
    /* Read decoder configuration. */
    int samplerate = decoderConfig.namedAsInt(CONFIG_SAMPLERATE_PROPERTY, CONFIG_SAMPLERATE_DEFAULT);
    int channels = decoderConfig.namedAsInt(CONFIG_CHANNELS_PROPERTY, CONFIG_CHANNELS_DEFAULT);
    long channellayout = avutil.av_get_default_channel_layout(channels);
    /* Initialize the AVFormatContext. */
    this.pFormatCtx = avformat.avformat_alloc_context();
    /* Open the file (pure audio or video + audio). */
    if (avformat.avformat_open_input(this.pFormatCtx, path.toString(), null, null) != 0) {
        LOGGER.error("Error while accessing file {}.", path.toString());
        return false;
    }
    /* Retrieve stream information. */
    if (avformat.avformat_find_stream_info(this.pFormatCtx, (PointerPointer<?>) null) < 0) {
        LOGGER.error("Couldn't find stream information.");
        return false;
    }
    /* Find the best stream. */
    final AVCodec codec = avcodec.av_codec_iterate(new Pointer());
    this.audioStream = avformat.av_find_best_stream(this.pFormatCtx, avutil.AVMEDIA_TYPE_AUDIO, -1, -1, codec, 0);
    if (this.audioStream == -1) {
        LOGGER.error("Couldn't find a supported audio stream.");
        return false;
    }
    /* Allocate new codec-context. */
    this.pCodecCtx = avcodec.avcodec_alloc_context3(codec);
    avcodec.avcodec_parameters_to_context(this.pCodecCtx, this.pFormatCtx.streams(this.audioStream).codecpar());
    /* Initialize context with stream's codec settings. */
    this.pCodecCtx.sample_rate(this.pFormatCtx.streams(this.audioStream).codecpar().sample_rate());
    this.pCodecCtx.channels(this.pFormatCtx.streams(this.audioStream).codecpar().channels());
    this.pCodecCtx.channel_layout(this.pFormatCtx.streams(this.audioStream).codecpar().channel_layout());
    this.pCodecCtx.sample_fmt(this.pFormatCtx.streams(this.audioStream).codecpar().format());
    /* Open the codec context. */
    if (avcodec.avcodec_open2(this.pCodecCtx, codec, (AVDictionary) null) < 0) {
        LOGGER.error("Could not open audio codec.");
        return false;
    }
    /* Allocate the re-sample context. */
    this.swr_ctx = swresample.swr_alloc_set_opts(null, channellayout, TARGET_FORMAT, samplerate, this.pCodecCtx.channel_layout(), this.pCodecCtx.sample_fmt(), this.pCodecCtx.sample_rate(), 0, null);
    if (swresample.swr_init(this.swr_ctx) < 0) {
        this.swr_ctx = null;
        LOGGER.warn("Could not open re-sample context - original format will be kept!");
    }
    /* Initialize the packet. */
    this.packet = avcodec.av_packet_alloc();
    if (this.packet == null) {
        LOGGER.error("Could not allocate packet data structure for decoded data.");
        return false;
    }
    /* Allocate frame that holds decoded frame information. */
    this.decodedFrame = avutil.av_frame_alloc();
    if (this.decodedFrame == null) {
        LOGGER.error("Could not allocate frame data structure for decoded data.");
        return false;
    }
    /* Initialize out-frame. */
    this.resampledFrame = avutil.av_frame_alloc();
    if (this.resampledFrame == null) {
        LOGGER.error("Could not allocate frame data structure for re-sampled data.");
        return false;
    }
    this.resampledFrame.channel_layout(channellayout);
    this.resampledFrame.sample_rate(samplerate);
    this.resampledFrame.channels(channels);
    this.resampledFrame.format(TARGET_FORMAT);
    /* Initialize the AudioDescriptor. */
    AVRational timebase = this.pFormatCtx.streams(this.audioStream).time_base();
    long duration = Math.floorDiv(1000L * timebase.num() * this.pFormatCtx.streams(this.audioStream).duration(), timebase.den());
    if (this.swr_ctx == null) {
        this.descriptor = new AudioDescriptor(this.pCodecCtx.sample_rate(), this.pCodecCtx.channels(), duration);
    } else {
        this.descriptor = new AudioDescriptor(this.resampledFrame.sample_rate(), this.resampledFrame.channels(), duration);
    }
    /* Completed initialization. */
    LOGGER.debug("{} was initialized successfully.", this.getClass().getName());
    return true;
}
Also used: PointerPointer(org.bytedeco.javacpp.PointerPointer) AVCodec(org.bytedeco.ffmpeg.avcodec.AVCodec) IntPointer(org.bytedeco.javacpp.IntPointer) Pointer(org.bytedeco.javacpp.Pointer) AudioDescriptor(org.vitrivr.cineast.core.data.frames.AudioDescriptor) AVDictionary(org.bytedeco.ffmpeg.avutil.AVDictionary) AVRational(org.bytedeco.ffmpeg.avutil.AVRational)
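After init() returns true, the allocated packet and frames feed FFmpeg's standard send/receive decoding loop. The sketch below is a hedged, simplified illustration of that loop and is not taken from cineast: error handling is reduced to "negative means stop", whereas a production loop would distinguish EAGAIN and EOF return codes and flush the decoder at end of stream.

import org.bytedeco.ffmpeg.avcodec.AVCodecContext;
import org.bytedeco.ffmpeg.avcodec.AVPacket;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.avutil.AVFrame;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avformat;

/* Hypothetical sketch (not cineast source): the send/receive decode loop enabled by an init()
 * like the one above. Flushing the decoder at EOF is omitted for brevity. */
final class DecodeLoopSketch {

    static void drain(AVFormatContext fmtCtx, int streamIdx, AVCodecContext codecCtx,
                      AVPacket packet, AVFrame frame) {
        /* Read packets until the demuxer signals the end of the file. */
        while (avformat.av_read_frame(fmtCtx, packet) >= 0) {
            if (packet.stream_index() == streamIdx) {
                if (avcodec.avcodec_send_packet(codecCtx, packet) < 0) {
                    avcodec.av_packet_unref(packet);
                    break;
                }
                /* One packet may yield zero or more decoded frames. */
                while (avcodec.avcodec_receive_frame(codecCtx, frame) >= 0) {
                    /* ... hand 'frame' to resampling / downstream processing here ... */
                }
            }
            avcodec.av_packet_unref(packet);
        }
    }
}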

Example 4 with AVCodec

Use of org.bytedeco.ffmpeg.avcodec.AVCodec in project cineast by vitrivr.

From the class FFMpegVideoDecoder, method initVideo.

/**
 * Initializes the video decoding part of FFMPEG.
 *
 * @param config The {@link DecoderConfig} used for configuring the {@link FFMpegVideoDecoder}.
 * @return True if the video decoder was initialized, false otherwise.
 */
private boolean initVideo(DecoderConfig config) {
    /* Read decoder config (VIDEO). */
    int maxWidth = config.namedAsInt(CONFIG_MAXWIDTH_PROPERTY, CONFIG_MAXWIDTH_DEFAULT);
    int maxHeight = config.namedAsInt(CONFIG_HEIGHT_PROPERTY, CONFIG_MAXHEIGHT_DEFAULT);
    /* Find the best video stream. */
    final AVCodec codec = avcodec.av_codec_iterate(new Pointer());
    this.videoStream = avformat.av_find_best_stream(this.pFormatCtx, avutil.AVMEDIA_TYPE_VIDEO, -1, -1, codec, 0);
    if (this.videoStream == -1) {
        LOGGER.error("Couldn't find a video stream.");
        return false;
    }
    /* Allocate new codec-context for codec returned by av_find_best_stream(). */
    this.pCodecCtxVideo = avcodec.avcodec_alloc_context3(codec);
    avcodec.avcodec_parameters_to_context(this.pCodecCtxVideo, this.pFormatCtx.streams(this.videoStream).codecpar());
    /* Open the codec context. */
    if (avcodec.avcodec_open2(this.pCodecCtxVideo, codec, (AVDictionary) null) < 0) {
        LOGGER.error("Error, Could not open video codec.");
        return false;
    }
    /* Allocate an AVFrame structure that will hold the resized video. */
    this.pFrameRGB = avutil.av_frame_alloc();
    if (pFrameRGB == null) {
        LOGGER.error("Error. Could not allocate frame for resized video.");
        return false;
    }
    int originalWidth = pCodecCtxVideo.width();
    int originalHeight = pCodecCtxVideo.height();
    int width = originalWidth;
    int height = originalHeight;
    if (originalWidth > maxWidth || originalHeight > maxHeight) {
        float scaleDown = Math.min((float) maxWidth / (float) originalWidth, (float) maxHeight / (float) originalHeight);
        width = Math.round(originalWidth * scaleDown);
        height = Math.round(originalHeight * scaleDown);
        LOGGER.debug("scaling input video down by a factor of {} from {}x{} to {}x{}", scaleDown, originalWidth, originalHeight, width, height);
    }
    bytes = new byte[width * height * 3];
    pixels = new int[width * height];
    /* Initialize data-structures used for resized image. */
    int numBytes = avutil.av_image_get_buffer_size(avutil.AV_PIX_FMT_RGB24, pCodecCtxVideo.width(), pCodecCtxVideo.height(), 1);
    this.buffer = new BytePointer(avutil.av_malloc(numBytes));
    avutil.av_image_fill_arrays(this.pFrameRGB.data(), this.pFrameRGB.linesize(), this.buffer, avutil.AV_PIX_FMT_RGB24, width, height, 1);
    /* Initialize SWS Context. */
    this.sws_ctx = swscale.sws_getContext(this.pCodecCtxVideo.width(), this.pCodecCtxVideo.height(), this.pCodecCtxVideo.pix_fmt(), width, height, avutil.AV_PIX_FMT_RGB24, swscale.SWS_BILINEAR, null, null, (DoublePointer) null);
    /* Initialize VideoDescriptor. */
    AVRational timebase = this.pFormatCtx.streams(this.videoStream).time_base();
    long duration = (1000L * timebase.num() * this.pFormatCtx.streams(this.videoStream).duration() / timebase.den());
    AVRational framerate = this.pFormatCtx.streams(this.videoStream).avg_frame_rate();
    float fps = ((float) framerate.num()) / ((float) framerate.den());
    this.videoDescriptor = new VideoDescriptor(fps, duration, width, height);
    /* Return true (success). */
    return true;
}
Also used: AVCodec(org.bytedeco.ffmpeg.avcodec.AVCodec) BytePointer(org.bytedeco.javacpp.BytePointer) DoublePointer(org.bytedeco.javacpp.DoublePointer) VideoDescriptor(org.vitrivr.cineast.core.data.frames.VideoDescriptor) IntPointer(org.bytedeco.javacpp.IntPointer) Pointer(org.bytedeco.javacpp.Pointer) PointerPointer(org.bytedeco.javacpp.PointerPointer) AVDictionary(org.bytedeco.ffmpeg.avutil.AVDictionary) AVRational(org.bytedeco.ffmpeg.avutil.AVRational)
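The SwsContext and RGB frame prepared in initVideo() are what later turn each decoded video frame into the resized RGB image. The following is a minimal sketch of that conversion step, assuming fields equivalent to sws_ctx, pCodecCtxVideo and pFrameRGB above; the class and method are hypothetical and not part of the cineast source.

import org.bytedeco.ffmpeg.avcodec.AVCodecContext;
import org.bytedeco.ffmpeg.avutil.AVFrame;
import org.bytedeco.ffmpeg.global.swscale;
import org.bytedeco.ffmpeg.swscale.SwsContext;

/* Hypothetical sketch (not cineast source): apply the pre-built SwsContext to one decoded
 * frame, writing the scaled RGB24 image into the pre-allocated RGB frame. */
final class RescaleSketch {

    static void toRgb(SwsContext swsCtx, AVCodecContext codecCtx, AVFrame decoded, AVFrame rgb) {
        /* sws_scale() converts and resizes the full height of the decoded frame in one call. */
        swscale.sws_scale(swsCtx, decoded.data(), decoded.linesize(),
                0, codecCtx.height(), rgb.data(), rgb.linesize());
    }
}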

Aggregations

AVCodec (org.bytedeco.ffmpeg.avcodec.AVCodec): 4 examples
AVDictionary (org.bytedeco.ffmpeg.avutil.AVDictionary): 4 examples
AVRational (org.bytedeco.ffmpeg.avutil.AVRational): 4 examples
Pointer (org.bytedeco.javacpp.Pointer): 4 examples
PointerPointer (org.bytedeco.javacpp.PointerPointer): 4 examples
IntPointer (org.bytedeco.javacpp.IntPointer): 3 examples
BytePointer (org.bytedeco.javacpp.BytePointer): 2 examples
DoublePointer (org.bytedeco.javacpp.DoublePointer): 2 examples
AudioDescriptor (org.vitrivr.cineast.core.data.frames.AudioDescriptor): 2 examples
ArrayList (java.util.ArrayList): 1 example
AVCodecContext (org.bytedeco.ffmpeg.avcodec.AVCodecContext): 1 example
AVFormatContext (org.bytedeco.ffmpeg.avformat.AVFormatContext): 1 example
AVStream (org.bytedeco.ffmpeg.avformat.AVStream): 1 example
MediaObjectMetadataDescriptor (org.vitrivr.cineast.core.data.entities.MediaObjectMetadataDescriptor): 1 example
VideoDescriptor (org.vitrivr.cineast.core.data.frames.VideoDescriptor): 1 example