Example 61 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project MindsEye by SimiaCryptus: class Hdf5Archive, method getString.

@Nonnull
private CharSequence getString(@Nonnull Attribute attribute, DataType dataType, @Nonnull byte[] buffer) {
    @Nonnull BytePointer pointer = new BytePointer(buffer);
    attribute.read(dataType, pointer);
    pointer.get(buffer);
    @Nonnull String str = new String(buffer);
    if (str.indexOf('\0') >= 0) {
        return str.substring(0, str.indexOf('\0'));
    } else {
        return str;
    }
}
Also used : Nonnull(javax.annotation.Nonnull) BytePointer(org.bytedeco.javacpp.BytePointer)
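
The pattern above is plain JavaCPP rather than anything HDF5-specific: wrap a Java byte[] in a native BytePointer, let native code fill the pointer, copy the bytes back, and trim at the first NUL terminator. A minimal sketch under that assumption, with putString() standing in for attribute.read() (NulTrimExample is a hypothetical name):

import org.bytedeco.javacpp.BytePointer;

public class NulTrimExample {

    public static void main(String[] args) {
        byte[] buffer = new byte[8];
        // BytePointer(byte...) copies the array into freshly allocated native memory
        BytePointer pointer = new BytePointer(buffer);
        // stand-in for a native reader such as attribute.read(dataType, pointer)
        pointer.putString("abc");
        // copy the native bytes back into the Java array
        pointer.get(buffer);
        String str = new String(buffer);
        // C strings are NUL-terminated, so drop everything from the first '\0'
        int nul = str.indexOf('\0');
        System.out.println(nul >= 0 ? str.substring(0, nul) : str); // prints "abc"
    }
}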

Example 62 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco: class FrameConverterTest, method testOpenCVFrameConverter.

@Test
public void testOpenCVFrameConverter() {
    System.out.println("OpenCVFrameConverter");
    Loader.load(org.bytedeco.opencv.opencv_java.class);
    for (int depth = 8; depth <= 64; depth *= 2) {
        assertEquals(depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getIplImageDepth(depth)));
        assertEquals(depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getMatDepth(depth)));
        if (depth < 64) {
            assertEquals(-depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getIplImageDepth(-depth)));
            assertEquals(-depth, OpenCVFrameConverter.getFrameDepth(OpenCVFrameConverter.getMatDepth(-depth)));
        }
    }
    // use an odd width to exercise stride/padding handling in the converters
    Frame frame = new Frame(640 + 1, 480, Frame.DEPTH_UBYTE, 3);
    OpenCVFrameConverter.ToIplImage converter1 = new OpenCVFrameConverter.ToIplImage();
    OpenCVFrameConverter.ToMat converter2 = new OpenCVFrameConverter.ToMat();
    OpenCVFrameConverter.ToOrgOpenCvCoreMat converter3 = new OpenCVFrameConverter.ToOrgOpenCvCoreMat();
    UByteIndexer frameIdx = frame.createIndexer();
    for (int i = 0; i < frameIdx.rows(); i++) {
        for (int j = 0; j < frameIdx.cols(); j++) {
            for (int k = 0; k < frameIdx.channels(); k++) {
                frameIdx.put(i, j, k, i + j + k);
            }
        }
    }
    IplImage image = converter1.convert(frame);
    Mat mat = converter2.convert(frame);
    final org.opencv.core.Mat cvmat = converter3.convert(frame);
    // clear the converters' cached Frame so the next convert() calls allocate fresh ones
    converter1.frame = null;
    converter2.frame = null;
    converter3.frame = null;
    Frame frame1 = converter1.convert(image);
    Frame frame2 = converter2.convert(mat);
    Frame frame3 = converter3.convert(cvmat);
    assertEquals(frame2.opaque, mat);
    assertEquals(frame3.opaque, cvmat);
    // build new Mat headers over the same native pixel data; they are distinct
    // objects, so they must not compare equal to the originals below
    Mat mat2 = new Mat(mat.rows(), mat.cols(), mat.type(), mat.data(), mat.step());
    org.opencv.core.Mat cvmat2 = new org.opencv.core.Mat(cvmat.rows(), cvmat.cols(), cvmat.type(), new BytePointer() {

        {
            address = cvmat.dataAddr();
        }
    }.capacity(cvmat.rows() * cvmat.cols() * cvmat.elemSize()).asByteBuffer(), cvmat.step1() * cvmat.elemSize1());
    assertNotEquals(mat, mat2);
    assertNotEquals(cvmat, cvmat2);
    frame2 = converter2.convert(mat2);
    frame3 = converter3.convert(cvmat2);
    assertEquals(frame2.opaque, mat2);
    assertEquals(frame3.opaque, cvmat2);
    assertEquals(frame3.imageStride, cvmat2.step1() * cvmat2.elemSize1());
    UByteIndexer frame1Idx = frame1.createIndexer();
    UByteIndexer frame2Idx = frame2.createIndexer();
    UByteIndexer frame3Idx = frame3.createIndexer();
    for (int i = 0; i < frameIdx.rows(); i++) {
        for (int j = 0; j < frameIdx.cols(); j++) {
            for (int k = 0; k < frameIdx.channels(); k++) {
                int b = frameIdx.get(i, j, k);
                assertEquals(b, frame1Idx.get(i, j, k));
                assertEquals(b, frame2Idx.get(i, j, k));
                assertEquals(b, frame3Idx.get(i, j, k));
            }
        }
    }
    try {
        frame1Idx.get(frameIdx.rows() + 1, frameIdx.cols() + 1);
        fail("IndexOutOfBoundsException should have been thrown.");
    } catch (IndexOutOfBoundsException e) {
    }
    try {
        frame2Idx.get(frameIdx.rows() + 1, frameIdx.cols() + 1);
        fail("IndexOutOfBoundsException should have been thrown.");
    } catch (IndexOutOfBoundsException e) {
    }
    try {
        frame3Idx.get(frameIdx.rows() + 1, frameIdx.cols() + 1);
        fail("IndexOutOfBoundsException should have been thrown.");
    } catch (IndexOutOfBoundsException e) {
    }
    frameIdx.release();
    frame1Idx.release();
    frame2Idx.release();
    frame3Idx.release();
    converter1.close();
    converter2.close();
    converter3.close();
    frame.close();
}
Also used : BytePointer(org.bytedeco.javacpp.BytePointer) UByteIndexer(org.bytedeco.javacpp.indexer.UByteIndexer) org.bytedeco.opencv.global.opencv_core(org.bytedeco.opencv.global.opencv_core) org.bytedeco.opencv.opencv_core(org.bytedeco.opencv.opencv_core) Test(org.junit.Test)
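
The converters above are zero-copy: convert(Frame) wraps the Frame's pixel buffer in an IplImage/Mat header rather than copying, which is why the test can null out the cached frame and compare the opaque references. A minimal round-trip sketch, assuming the javacv and opencv presets are on the classpath (RoundTripExample is a hypothetical name):

import org.bytedeco.javacpp.indexer.UByteIndexer;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Mat;

public class RoundTripExample {

    public static void main(String[] args) {
        Frame frame = new Frame(64, 48, Frame.DEPTH_UBYTE, 3);
        OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
        Mat mat = converter.convert(frame);  // wraps the Frame's buffer, no copy
        Frame back = converter.convert(mat); // wraps the Mat's data, no copy
        UByteIndexer idx = back.createIndexer();
        idx.put(0, 0, 0, 255);               // write is visible through frame and mat alike
        idx.release();
        converter.close();
        frame.close();
    }
}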

Example 63 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco: class FFmpegFrameFilter, method pullSamples.

public synchronized Frame pullSamples() throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        av_frame_unref(filt_frame);
        /* pull a filtered frame from the filtergraph */
        int ret = av_buffersink_get_frame(abuffersink_ctx, filt_frame);
        if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
            return null;
        } else if (ret < 0) {
            throw new Exception("av_buffersink_get_frame(): Error occurred: " + av_make_error_string(new BytePointer(256), 256, ret).getString());
        }
        int sample_format = filt_frame.format();
        int planes = av_sample_fmt_is_planar(sample_format) != 0 ? (int) filt_frame.channels() : 1;
        int data_size = av_samples_get_buffer_size((IntPointer) null, filt_frame.channels(), filt_frame.nb_samples(), filt_frame.format(), 1) / planes;
        if (samples_buf == null || samples_buf.length != planes) {
            samples_ptr = new BytePointer[planes];
            samples_buf = new Buffer[planes];
        }
        frame.audioChannels = filt_frame.channels();
        frame.sampleRate = filt_frame.sample_rate();
        frame.samples = samples_buf;
        frame.opaque = filt_frame;
        int sample_size = data_size / av_get_bytes_per_sample(sample_format);
        for (int i = 0; i < planes; i++) {
            BytePointer p = filt_frame.data(i);
            if (!p.equals(samples_ptr[i]) || samples_ptr[i].capacity() < data_size) {
                samples_ptr[i] = p.capacity(data_size);
                ByteBuffer b = p.asBuffer();
                switch(sample_format) {
                    case AV_SAMPLE_FMT_U8:
                    case AV_SAMPLE_FMT_U8P:
                        samples_buf[i] = b;
                        break;
                    case AV_SAMPLE_FMT_S16:
                    case AV_SAMPLE_FMT_S16P:
                        samples_buf[i] = b.asShortBuffer();
                        break;
                    case AV_SAMPLE_FMT_S32:
                    case AV_SAMPLE_FMT_S32P:
                        samples_buf[i] = b.asIntBuffer();
                        break;
                    case AV_SAMPLE_FMT_FLT:
                    case AV_SAMPLE_FMT_FLTP:
                        samples_buf[i] = b.asFloatBuffer();
                        break;
                    case AV_SAMPLE_FMT_DBL:
                    case AV_SAMPLE_FMT_DBLP:
                        samples_buf[i] = b.asDoubleBuffer();
                        break;
                    default:
                        assert false;
                }
            }
            samples_buf[i].position(0).limit(sample_size);
        }
        frame.timestamp = 1000000L * filt_frame.pts() * atime_base.num() / atime_base.den();
        return frame;
    }
}
Also used : IntPointer(org.bytedeco.javacpp.IntPointer) BytePointer(org.bytedeco.javacpp.BytePointer) PointerScope(org.bytedeco.javacpp.PointerScope) ByteBuffer(java.nio.ByteBuffer)
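
The switch above only selects a typed NIO view: capacity() bounds the plane's BytePointer at data_size bytes, asBuffer() exposes that native memory as a ByteBuffer without copying, and asShortBuffer()/asFloatBuffer()/etc. reinterpret it to match the sample format. A minimal sketch of that trick in isolation, assuming only JavaCPP and substituting byte widths for the AV_SAMPLE_FMT_* constants (viewAsSamples is a hypothetical helper):

import java.nio.Buffer;
import java.nio.ByteBuffer;
import org.bytedeco.javacpp.BytePointer;

public class SampleViewExample {

    // bytesPerSample plays the role of av_get_bytes_per_sample(sample_format)
    static Buffer viewAsSamples(BytePointer plane, int dataSize, int bytesPerSample) {
        ByteBuffer b = plane.capacity(dataSize).asBuffer(); // zero-copy view of native memory
        switch (bytesPerSample) {
            case 1: return b;                  // AV_SAMPLE_FMT_U8 / U8P
            case 2: return b.asShortBuffer();  // AV_SAMPLE_FMT_S16 / S16P
            case 4: return b.asIntBuffer();    // S32/S32P, or asFloatBuffer() for FLT/FLTP
            case 8: return b.asDoubleBuffer(); // AV_SAMPLE_FMT_DBL / DBLP
            default: throw new IllegalArgumentException("unsupported sample size");
        }
    }

    public static void main(String[] args) {
        BytePointer plane = new BytePointer(16);     // 16 bytes of native memory
        Buffer samples = viewAsSamples(plane, 16, 2);
        System.out.println(samples.limit());         // 8 shorts over 16 bytes
        plane.deallocate();
    }
}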

Example 64 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco: class FFmpegFrameFilter, method pullImage.

public synchronized Frame pullImage() throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (!started) {
            throw new Exception("start() was not called successfully!");
        }
        av_frame_unref(filt_frame);
        /* pull a filtered frame from the filtergraph */
        int ret = av_buffersink_get_frame(buffersink_ctx, filt_frame);
        if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) {
            return null;
        } else if (ret < 0) {
            throw new Exception("av_buffersink_get_frame(): Error occurred: " + av_make_error_string(new BytePointer(256), 256, ret).getString());
        }
        frame.imageWidth = filt_frame.width();
        frame.imageHeight = filt_frame.height();
        frame.imageDepth = Frame.DEPTH_UBYTE;
        if (filt_frame.data(1) == null) {
            frame.imageStride = filt_frame.linesize(0);
            BytePointer ptr = filt_frame.data(0);
            // see https://github.com/bytedeco/javacv/issues/975
            if (ptr != null && !ptr.equals(image_ptr[0])) {
                image_ptr[0] = ptr.capacity(frame.imageHeight * Math.abs(frame.imageStride));
                image_buf[0] = ptr.asBuffer();
            }
            frame.image = image_buf;
            frame.image[0].position(0).limit(frame.imageHeight * Math.abs(frame.imageStride));
            frame.imageChannels = Math.abs(frame.imageStride) / frame.imageWidth;
            frame.opaque = filt_frame;
        } else {
            frame.imageStride = frame.imageWidth;
            int size = av_image_get_buffer_size(filt_frame.format(), frame.imageWidth, frame.imageHeight, 1);
            if (image_ptr2[0] == null || image_ptr2[0].capacity() < size) {
                av_free(image_ptr2[0]);
                image_ptr2[0] = new BytePointer(av_malloc(size)).capacity(size);
                image_buf2[0] = image_ptr2[0].asBuffer();
            }
            frame.image = image_buf2;
            frame.image[0].position(0).limit(size);
            frame.imageChannels = (size + frame.imageWidth * frame.imageHeight - 1) / (frame.imageWidth * frame.imageHeight);
            ret = av_image_copy_to_buffer(image_ptr2[0].position(0), (int) image_ptr2[0].capacity(), new PointerPointer(filt_frame), filt_frame.linesize(), filt_frame.format(), frame.imageWidth, frame.imageHeight, 1);
            if (ret < 0) {
                throw new Exception("av_image_copy_to_buffer() error " + ret + ": Cannot pull image.");
            }
            frame.opaque = image_ptr2[0];
        }
        frame.timestamp = 1000000L * filt_frame.pts() * time_base.num() / time_base.den();
        return frame;
    }
}
Also used : PointerPointer(org.bytedeco.javacpp.PointerPointer) BytePointer(org.bytedeco.javacpp.BytePointer) PointerScope(org.bytedeco.javacpp.PointerScope)
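
The second branch above copies planar data into a single packed buffer that it allocates itself: av_malloc() provides the memory, and wrapping it in a BytePointer with an explicit capacity() makes the region bounds-checked and viewable as a ByteBuffer. A minimal sketch of that allocation pattern, assuming only JavaCPP and using Pointer.malloc()/Pointer.free() in place of av_malloc()/av_free() (NativeImageBufferExample is a hypothetical name):

import java.nio.ByteBuffer;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.Pointer;

public class NativeImageBufferExample {

    public static void main(String[] args) {
        int size = 320 * 240 * 3; // width * height * channels, tightly packed
        // wrap the raw allocation and set an explicit capacity so accesses are bounds-checked
        BytePointer imagePtr = new BytePointer(Pointer.malloc(size)).capacity(size);
        ByteBuffer imageBuf = imagePtr.asBuffer(); // zero-copy view, like frame.image[0]
        imageBuf.position(0).limit(size);
        // ... a native copy such as av_image_copy_to_buffer() would fill imagePtr here ...
        Pointer.free(imagePtr); // pairs with Pointer.malloc(); av_free() in the original
    }
}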

Example 65 with BytePointer

Use of org.bytedeco.javacpp.BytePointer in project javacv by bytedeco: class FFmpegFrameRecorder, method startUnsafe.

public synchronized void startUnsafe() throws Exception {
    try (PointerScope scope = new PointerScope()) {
        if (oc != null && !oc.isNull()) {
            throw new Exception("start() has already been called: Call stop() before calling start() again.");
        }
        int ret;
        picture = null;
        tmp_picture = null;
        picture_buf = null;
        frame = null;
        video_outbuf = null;
        audio_outbuf = null;
        oc = new AVFormatContext(null);
        video_c = null;
        audio_c = null;
        video_st = null;
        audio_st = null;
        plane_ptr = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
        plane_ptr2 = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
        video_pkt = new AVPacket().retainReference();
        audio_pkt = new AVPacket().retainReference();
        got_video_packet = new int[1];
        got_audio_packet = new int[1];
        /* auto detect the output format from the name. */
        String format_name = format == null || format.length() == 0 ? null : format;
        if ((oformat = av_guess_format(format_name, filename, null)) == null) {
            int proto = filename.indexOf("://");
            if (proto > 0) {
                format_name = filename.substring(0, proto);
            }
            if ((oformat = av_guess_format(format_name, filename, null)) == null) {
                throw new Exception("av_guess_format() error: Could not guess output format for \"" + filename + "\" and " + format + " format.");
            }
        }
        format_name = oformat.name().getString();
        /* allocate the output media context */
        if (avformat_alloc_output_context2(oc, null, format_name, filename) < 0) {
            throw new Exception("avformat_alloc_context2() error:\tCould not allocate format context");
        }
        if (outputStream != null) {
            avio = avio_alloc_context(new BytePointer(av_malloc(4096)), 4096, 1, oc, null, writeCallback, outputStream instanceof Seekable ? seekCallback : null);
            oc.pb(avio);
            filename = outputStream.toString();
            outputStreams.put(oc, outputStream);
        }
        oc.oformat(oformat);
        oc.url(new BytePointer(av_malloc(filename.getBytes().length + 1)).putString(filename));
        oc.max_delay(maxDelay);
        /* add the audio and video streams using the format codecs
           and initialize the codecs */
        AVStream inpVideoStream = null, inpAudioStream = null;
        if (ifmt_ctx != null) {
            // get input video and audio stream indices from ifmt_ctx
            for (int idx = 0; idx < ifmt_ctx.nb_streams(); idx++) {
                AVStream inputStream = ifmt_ctx.streams(idx);
                if (inputStream.codecpar().codec_type() == AVMEDIA_TYPE_VIDEO) {
                    inpVideoStream = inputStream;
                    videoCodec = inpVideoStream.codecpar().codec_id();
                    if (inpVideoStream.r_frame_rate().num() != AV_NOPTS_VALUE && inpVideoStream.r_frame_rate().den() != 0) {
                        frameRate = (inpVideoStream.r_frame_rate().num()) * 1.0d / (inpVideoStream.r_frame_rate().den());
                    }
                } else if (inputStream.codecpar().codec_type() == AVMEDIA_TYPE_AUDIO) {
                    inpAudioStream = inputStream;
                    audioCodec = inpAudioStream.codecpar().codec_id();
                }
            }
        }
        if (imageWidth > 0 && imageHeight > 0) {
            if (videoCodec == AV_CODEC_ID_NONE) {
                videoCodec = oformat.video_codec();
            }
            /* find the video encoder */
            if ((video_codec = avcodec_find_encoder_by_name(videoCodecName)) == null && (video_codec = avcodec_find_encoder(videoCodec)) == null) {
                releaseUnsafe();
                throw new Exception("avcodec_find_encoder() error: Video codec not found.");
            }
            // oformat.video_codec(video_codec.id());
            AVRational frame_rate = av_d2q(frameRate, 1001000);
            AVRational supported_framerates = video_codec.supported_framerates();
            if (supported_framerates != null) {
                int idx = av_find_nearest_q_idx(frame_rate, supported_framerates);
                frame_rate = supported_framerates.position(idx);
            }
            /* add a video output stream */
            if ((video_st = avformat_new_stream(oc, null)) == null) {
                releaseUnsafe();
                throw new Exception("avformat_new_stream() error: Could not allocate video stream.");
            }
            if ((video_c = avcodec_alloc_context3(video_codec)) == null) {
                releaseUnsafe();
                throw new Exception("avcodec_alloc_context3() error: Could not allocate video encoding context.");
            }
            if (inpVideoStream != null) {
                if ((ret = avcodec_parameters_copy(video_st.codecpar(), inpVideoStream.codecpar())) < 0) {
                    releaseUnsafe();
                    throw new Exception("avcodec_parameters_copy() error " + ret + ": Failed to copy video stream codec parameters from input to output");
                }
                videoBitrate = (int) inpVideoStream.codecpar().bit_rate();
                pixelFormat = inpVideoStream.codecpar().format();
                aspectRatio = inpVideoStream.codecpar().sample_aspect_ratio().num() * 1.0d / inpVideoStream.codecpar().sample_aspect_ratio().den();
                // videoQuality = inpVideoStream.codecpar().global_quality();
                video_c.codec_tag(0);
            }
            video_c.codec_id(video_codec.id());
            video_c.codec_type(AVMEDIA_TYPE_VIDEO);
            /* put sample parameters */
            video_c.bit_rate(videoBitrate);
            /* resolution must be a multiple of two. Scale height to maintain the aspect ratio. */
            if (imageWidth % 2 == 1) {
                int roundedWidth = imageWidth + 1;
                imageHeight = (roundedWidth * imageHeight + imageWidth / 2) / imageWidth;
                imageWidth = roundedWidth;
            }
            video_c.width(imageWidth);
            video_c.height(imageHeight);
            if (aspectRatio > 0) {
                AVRational r = av_d2q(aspectRatio, 255);
                video_c.sample_aspect_ratio(r);
                video_st.sample_aspect_ratio(r);
            }
            /* time base: this is the fundamental unit of time (in seconds) in terms
               of which frame timestamps are represented. for fixed-fps content,
               timebase should be 1/framerate and timestamp increments should be
               identically 1. */
            AVRational time_base = av_inv_q(frame_rate);
            video_c.time_base(time_base);
            video_st.time_base(time_base);
            video_st.avg_frame_rate(frame_rate);
            // video_st.codec().time_base(time_base); // "deprecated", but this is actually required
            if (gopSize >= 0) {
                video_c.gop_size(gopSize);
            /* emit one intra frame every gopSize frames at most */
            }
            if (videoQuality >= 0) {
                video_c.flags(video_c.flags() | AV_CODEC_FLAG_QSCALE);
                video_c.global_quality((int) Math.round(FF_QP2LAMBDA * videoQuality));
            }
            if (pixelFormat != AV_PIX_FMT_NONE) {
                video_c.pix_fmt(pixelFormat);
            } else if (video_c.codec_id() == AV_CODEC_ID_RAWVIDEO || video_c.codec_id() == AV_CODEC_ID_PNG || video_c.codec_id() == AV_CODEC_ID_HUFFYUV || video_c.codec_id() == AV_CODEC_ID_FFV1) {
                // appropriate for common lossless formats
                video_c.pix_fmt(AV_PIX_FMT_RGB32);
            } else if (video_c.codec_id() == AV_CODEC_ID_JPEGLS) {
                video_c.pix_fmt(AV_PIX_FMT_BGR24);
            } else if (video_c.codec_id() == AV_CODEC_ID_MJPEG || video_c.codec_id() == AV_CODEC_ID_MJPEGB) {
                video_c.pix_fmt(AV_PIX_FMT_YUVJ420P);
            } else {
                // lossy, but works with about everything
                video_c.pix_fmt(AV_PIX_FMT_YUV420P);
            }
            if (video_c.codec_id() == AV_CODEC_ID_MPEG2VIDEO) {
                /* just for testing, we also add B frames */
                video_c.max_b_frames(2);
            } else if (video_c.codec_id() == AV_CODEC_ID_MPEG1VIDEO) {
                /* Needed to avoid using macroblocks in which some coeffs overflow.
                   This does not happen with normal video, it just happens here as
                   the motion of the chroma plane does not match the luma plane. */
                video_c.mb_decision(2);
            } else if (video_c.codec_id() == AV_CODEC_ID_H263) {
                // H.263 does not support any other resolution than the following
                if (imageWidth <= 128 && imageHeight <= 96) {
                    video_c.width(128).height(96);
                } else if (imageWidth <= 176 && imageHeight <= 144) {
                    video_c.width(176).height(144);
                } else if (imageWidth <= 352 && imageHeight <= 288) {
                    video_c.width(352).height(288);
                } else if (imageWidth <= 704 && imageHeight <= 576) {
                    video_c.width(704).height(576);
                } else {
                    video_c.width(1408).height(1152);
                }
            } else if (video_c.codec_id() == AV_CODEC_ID_H264) {
                // default to constrained baseline to produce content that plays back on anything,
                // without any significant tradeoffs for most use cases
                video_c.profile(AVCodecContext.FF_PROFILE_H264_CONSTRAINED_BASELINE);
            }
            // some formats want stream headers to be separate
            if ((oformat.flags() & AVFMT_GLOBALHEADER) != 0) {
                video_c.flags(video_c.flags() | AV_CODEC_FLAG_GLOBAL_HEADER);
            }
            if ((video_codec.capabilities() & AV_CODEC_CAP_EXPERIMENTAL) != 0) {
                video_c.strict_std_compliance(AVCodecContext.FF_COMPLIANCE_EXPERIMENTAL);
            }
            if (maxBFrames >= 0) {
                video_c.max_b_frames(maxBFrames);
                video_c.has_b_frames(maxBFrames == 0 ? 0 : 1);
            }
            if (trellis >= 0) {
                video_c.trellis(trellis);
            }
        }
        /*
         * add an audio output stream
         */
        if (audioChannels > 0 && audioBitrate > 0 && sampleRate > 0) {
            if (audioCodec == AV_CODEC_ID_NONE) {
                audioCodec = oformat.audio_codec();
            }
            /* find the audio encoder */
            if ((audio_codec = avcodec_find_encoder_by_name(audioCodecName)) == null && (audio_codec = avcodec_find_encoder(audioCodec)) == null) {
                releaseUnsafe();
                throw new Exception("avcodec_find_encoder() error: Audio codec not found.");
            }
            // oformat.audio_codec(audio_codec.id());
            AVRational sample_rate = av_d2q(sampleRate, 1001000);
            if ((audio_st = avformat_new_stream(oc, null)) == null) {
                releaseUnsafe();
                throw new Exception("avformat_new_stream() error: Could not allocate audio stream.");
            }
            if ((audio_c = avcodec_alloc_context3(audio_codec)) == null) {
                releaseUnsafe();
                throw new Exception("avcodec_alloc_context3() error: Could not allocate audio encoding context.");
            }
            if (inpAudioStream != null && audioChannels > 0) {
                if ((ret = avcodec_parameters_copy(audio_st.codecpar(), inpAudioStream.codecpar())) < 0) {
                    throw new Exception("avcodec_parameters_copy() error " + ret + ": Failed to copy audio stream codec parameters from input to output");
                }
                audioBitrate = (int) inpAudioStream.codecpar().bit_rate();
                sampleRate = inpAudioStream.codecpar().sample_rate();
                audioChannels = inpAudioStream.codecpar().channels();
                sampleFormat = inpAudioStream.codecpar().format();
                // audioQuality = inpAudioStream.codecpar().global_quality();
                audio_c.codec_tag(0);
                // audio_st.pts(inpAudioStream.pts());
                audio_st.duration(inpAudioStream.duration());
                audio_st.time_base().num(inpAudioStream.time_base().num());
                audio_st.time_base().den(inpAudioStream.time_base().den());
            }
            audio_c.codec_id(audio_codec.id());
            audio_c.codec_type(AVMEDIA_TYPE_AUDIO);
            /* put sample parameters */
            audio_c.bit_rate(audioBitrate);
            audio_c.sample_rate(sampleRate);
            audio_c.channels(audioChannels);
            audio_c.channel_layout(av_get_default_channel_layout(audioChannels));
            if (sampleFormat != AV_SAMPLE_FMT_NONE) {
                audio_c.sample_fmt(sampleFormat);
            } else {
                // default to AV_SAMPLE_FMT_FLTP, but prefer AV_SAMPLE_FMT_S16 when the encoder supports it
                audio_c.sample_fmt(AV_SAMPLE_FMT_FLTP);
                IntPointer formats = audio_c.codec().sample_fmts();
                for (int i = 0; formats.get(i) != -1; i++) {
                    if (formats.get(i) == AV_SAMPLE_FMT_S16) {
                        audio_c.sample_fmt(AV_SAMPLE_FMT_S16);
                        break;
                    }
                }
            }
            AVRational time_base = av_inv_q(sample_rate);
            audio_c.time_base(time_base);
            audio_st.time_base(time_base);
            // audio_st.codec().time_base(time_base); // "deprecated", but this is actually required
            switch(audio_c.sample_fmt()) {
                case AV_SAMPLE_FMT_U8:
                case AV_SAMPLE_FMT_U8P:
                    audio_c.bits_per_raw_sample(8);
                    break;
                case AV_SAMPLE_FMT_S16:
                case AV_SAMPLE_FMT_S16P:
                    audio_c.bits_per_raw_sample(16);
                    break;
                case AV_SAMPLE_FMT_S32:
                case AV_SAMPLE_FMT_S32P:
                    audio_c.bits_per_raw_sample(32);
                    break;
                case AV_SAMPLE_FMT_FLT:
                case AV_SAMPLE_FMT_FLTP:
                    audio_c.bits_per_raw_sample(32);
                    break;
                case AV_SAMPLE_FMT_DBL:
                case AV_SAMPLE_FMT_DBLP:
                    audio_c.bits_per_raw_sample(64);
                    break;
                default:
                    assert false;
            }
            if (audioQuality >= 0) {
                audio_c.flags(audio_c.flags() | AV_CODEC_FLAG_QSCALE);
                audio_c.global_quality((int) Math.round(FF_QP2LAMBDA * audioQuality));
            }
            // some formats want stream headers to be separate
            if ((oformat.flags() & AVFMT_GLOBALHEADER) != 0) {
                audio_c.flags(audio_c.flags() | AV_CODEC_FLAG_GLOBAL_HEADER);
            }
            if ((audio_codec.capabilities() & AV_CODEC_CAP_EXPERIMENTAL) != 0) {
                audio_c.strict_std_compliance(AVCodecContext.FF_COMPLIANCE_EXPERIMENTAL);
            }
        }
        /* now that all the parameters are set, we can open the audio and
           video codecs and allocate the necessary encode buffers */
        if (video_st != null && inpVideoStream == null) {
            AVDictionary options = new AVDictionary(null);
            if (videoQuality >= 0) {
                av_dict_set(options, "crf", "" + videoQuality, 0);
            }
            for (Entry<String, String> e : videoOptions.entrySet()) {
                av_dict_set(options, e.getKey(), e.getValue(), 0);
            }
            // Enable multithreading when available
            video_c.thread_count(0);
            /* open the codec */
            if ((ret = avcodec_open2(video_c, video_codec, options)) < 0) {
                releaseUnsafe();
                av_dict_free(options);
                throw new Exception("avcodec_open2() error " + ret + ": Could not open video codec.");
            }
            av_dict_free(options);
            video_outbuf = null;
            /* allocate the encoded raw picture */
            if ((picture = av_frame_alloc()) == null) {
                releaseUnsafe();
                throw new Exception("av_frame_alloc() error: Could not allocate picture.");
            }
            // magic required by libx264
            picture.pts(0);
            int size = av_image_get_buffer_size(video_c.pix_fmt(), video_c.width(), video_c.height(), 1);
            if ((picture_buf = new BytePointer(av_malloc(size))).isNull()) {
                releaseUnsafe();
                throw new Exception("av_malloc() error: Could not allocate picture buffer.");
            }
            /* if the output format is not equal to the image format, then a temporary
               picture is needed too. It is then converted to the required output format */
            if ((tmp_picture = av_frame_alloc()) == null) {
                releaseUnsafe();
                throw new Exception("av_frame_alloc() error: Could not allocate temporary picture.");
            }
            /* copy the stream parameters to the muxer */
            if ((ret = avcodec_parameters_from_context(video_st.codecpar(), video_c)) < 0) {
                releaseUnsafe();
                throw new Exception("avcodec_parameters_from_context() error " + ret + ": Could not copy the video stream parameters.");
            }
            AVDictionary metadata = new AVDictionary(null);
            for (Entry<String, String> e : videoMetadata.entrySet()) {
                av_dict_set(metadata, new BytePointer(e.getKey(), charset), new BytePointer(e.getValue(), charset), 0);
            }
            video_st.metadata(metadata);
        }
        if (audio_st != null && inpAudioStream == null) {
            AVDictionary options = new AVDictionary(null);
            if (audioQuality >= 0) {
                av_dict_set(options, "crf", "" + audioQuality, 0);
            }
            for (Entry<String, String> e : audioOptions.entrySet()) {
                av_dict_set(options, e.getKey(), e.getValue(), 0);
            }
            // Enable multithreading when available
            audio_c.thread_count(0);
            /* open the codec */
            if ((ret = avcodec_open2(audio_c, audio_codec, options)) < 0) {
                releaseUnsafe();
                av_dict_free(options);
                throw new Exception("avcodec_open2() error " + ret + ": Could not open audio codec.");
            }
            av_dict_free(options);
            audio_outbuf_size = 256 * 1024;
            audio_outbuf = new BytePointer(av_malloc(audio_outbuf_size));
            /* ugly hack for PCM codecs (will be removed ASAP with new PCM
               support to compute the input frame size in samples) */
            if (audio_c.frame_size() <= 1) {
                audio_outbuf_size = AV_INPUT_BUFFER_MIN_SIZE;
                audio_input_frame_size = audio_outbuf_size / audio_c.channels();
                switch(audio_c.codec_id()) {
                    case AV_CODEC_ID_PCM_S16LE:
                    case AV_CODEC_ID_PCM_S16BE:
                    case AV_CODEC_ID_PCM_U16LE:
                    case AV_CODEC_ID_PCM_U16BE:
                        audio_input_frame_size >>= 1;
                        break;
                    default:
                        break;
                }
            } else {
                audio_input_frame_size = audio_c.frame_size();
            }
            // int bufferSize = audio_input_frame_size * audio_c.bits_per_raw_sample()/8 * audio_c.channels();
            int planes = av_sample_fmt_is_planar(audio_c.sample_fmt()) != 0 ? (int) audio_c.channels() : 1;
            int data_size = av_samples_get_buffer_size((IntPointer) null, audio_c.channels(), audio_input_frame_size, audio_c.sample_fmt(), 1) / planes;
            samples_out = new BytePointer[planes];
            for (int i = 0; i < samples_out.length; i++) {
                samples_out[i] = new BytePointer(av_malloc(data_size)).capacity(data_size);
            }
            samples_in = new Pointer[AVFrame.AV_NUM_DATA_POINTERS];
            /* allocate the audio frame */
            if ((frame = av_frame_alloc()) == null) {
                releaseUnsafe();
                throw new Exception("av_frame_alloc() error: Could not allocate audio frame.");
            }
            // magic required by libvorbis and webm
            frame.pts(0);
            /* copy the stream parameters to the muxer */
            if ((ret = avcodec_parameters_from_context(audio_st.codecpar(), audio_c)) < 0) {
                releaseUnsafe();
                throw new Exception("avcodec_parameters_from_context() error " + ret + ": Could not copy the audio stream parameters.");
            }
            AVDictionary metadata = new AVDictionary(null);
            for (Entry<String, String> e : audioMetadata.entrySet()) {
                av_dict_set(metadata, new BytePointer(e.getKey(), charset), new BytePointer(e.getValue(), charset), 0);
            }
            audio_st.metadata(metadata);
        }
        AVDictionary options = new AVDictionary(null);
        for (Entry<String, String> e : this.options.entrySet()) {
            av_dict_set(options, e.getKey(), e.getValue(), 0);
        }
        /* open the output file, if needed */
        if (outputStream == null && (oformat.flags() & AVFMT_NOFILE) == 0) {
            AVIOContext pb = new AVIOContext(null);
            if ((ret = avio_open2(pb, filename, AVIO_FLAG_WRITE, null, options)) < 0) {
                String errorMsg = "avio_open2 error() error " + ret + ": Could not open '" + filename + "'";
                releaseUnsafe();
                av_dict_free(options);
                throw new Exception(errorMsg);
            }
            oc.pb(pb);
        }
        AVDictionary metadata = new AVDictionary(null);
        for (Entry<String, String> e : this.metadata.entrySet()) {
            av_dict_set(metadata, new BytePointer(e.getKey(), charset), new BytePointer(e.getValue(), charset), 0);
        }
        /* write the stream header, if any */
        if ((ret = avformat_write_header(oc.metadata(metadata), options)) < 0) {
            String errorMsg = "avformat_write_header error() error " + ret + ": Could not write header to '" + filename + "'";
            releaseUnsafe();
            av_dict_free(options);
            throw new Exception(errorMsg);
        }
        av_dict_free(options);
        if (av_log_get_level() >= AV_LOG_INFO) {
            av_dump_format(oc, 0, filename, 1);
        }
        started = true;
    }
}
Also used : PointerPointer(org.bytedeco.javacpp.PointerPointer) BytePointer(org.bytedeco.javacpp.BytePointer) PointerScope(org.bytedeco.javacpp.PointerScope) IOException(java.io.IOException) IntPointer(org.bytedeco.javacpp.IntPointer)
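
Among all the FFmpeg setup, the BytePointer-specific moves are small: oc.url(...) needs a NUL-terminated C string in native memory, which putString() produces, and the metadata keys and values go through charset-aware BytePointer constructors. A minimal sketch of the putString() round trip, assuming only JavaCPP and letting BytePointer's own allocator stand in for av_malloc() (NativeStringExample is a hypothetical name):

import org.bytedeco.javacpp.BytePointer;

public class NativeStringExample {

    public static void main(String[] args) {
        String filename = "out.mp4";
        // length + 1 leaves room for the NUL terminator, as in oc.url(...) above
        BytePointer url = new BytePointer(filename.getBytes().length + 1);
        url.putString(filename);             // writes the bytes plus a trailing '\0'
        System.out.println(url.getString()); // reads back "out.mp4"
        url.deallocate();                    // native memory is freed explicitly
    }
}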

Aggregations

BytePointer (org.bytedeco.javacpp.BytePointer): 84
IntPointer (org.bytedeco.javacpp.IntPointer): 23
ByteBuffer (java.nio.ByteBuffer): 20
PointerPointer (org.bytedeco.javacpp.PointerPointer): 20
IOException (java.io.IOException): 16
Pointer (org.bytedeco.javacpp.Pointer): 16
PointerScope (org.bytedeco.javacpp.PointerScope): 13
DoublePointer (org.bytedeco.javacpp.DoublePointer): 12
FloatPointer (org.bytedeco.javacpp.FloatPointer): 12
CompressedDataBuffer (org.nd4j.linalg.compression.CompressedDataBuffer): 10
CompressionDescriptor (org.nd4j.linalg.compression.CompressionDescriptor): 10
ShortBuffer (java.nio.ShortBuffer): 9
ShortPointer (org.bytedeco.javacpp.ShortPointer): 9
IntBuffer (java.nio.IntBuffer): 7
DoubleBuffer (java.nio.DoubleBuffer): 6
FloatBuffer (java.nio.FloatBuffer): 6
Nonnull (javax.annotation.Nonnull): 5
LongPointer (org.bytedeco.javacpp.LongPointer): 5
TF_Status (org.tensorflow.internal.c_api.TF_Status): 4
ByteOrder (java.nio.ByteOrder): 3