
Example 1 with Format

use of androidx.media3.common.Format in project media by androidx.

the class FormatTest method createTestFormat.

private static Format createTestFormat() {
    byte[] initData1 = new byte[] { 1, 2, 3 };
    byte[] initData2 = new byte[] { 4, 5, 6 };
    List<byte[]> initializationData = new ArrayList<>();
    initializationData.add(initData1);
    initializationData.add(initData2);
    DrmInitData.SchemeData drmData1 = new DrmInitData.SchemeData(WIDEVINE_UUID, VIDEO_MP4, buildTestData(128, 1));
    DrmInitData.SchemeData drmData2 = new DrmInitData.SchemeData(C.UUID_NIL, VIDEO_WEBM, buildTestData(128, 1));
    DrmInitData drmInitData = new DrmInitData(drmData1, drmData2);
    byte[] projectionData = new byte[] { 1, 2, 3 };
    Metadata metadata = new Metadata(new FakeMetadataEntry("id1"), new FakeMetadataEntry("id2"));
    ColorInfo colorInfo = new ColorInfo(C.COLOR_SPACE_BT709, C.COLOR_RANGE_LIMITED, C.COLOR_TRANSFER_SDR, new byte[] { 1, 2, 3, 4, 5, 6, 7 });
    return new Format.Builder()
        .setId("id").setLabel("label").setLanguage("language")
        .setSelectionFlags(C.SELECTION_FLAG_DEFAULT).setRoleFlags(C.ROLE_FLAG_MAIN)
        .setAverageBitrate(1024).setPeakBitrate(2048).setCodecs("codec").setMetadata(metadata)
        .setContainerMimeType(VIDEO_MP4).setSampleMimeType(MimeTypes.VIDEO_H264)
        .setMaxInputSize(5000).setInitializationData(initializationData)
        .setDrmInitData(drmInitData).setSubsampleOffsetUs(Format.OFFSET_SAMPLE_RELATIVE)
        .setWidth(1920).setHeight(1080).setFrameRate(24).setRotationDegrees(90)
        .setPixelWidthHeightRatio(4).setProjectionData(projectionData)
        .setStereoMode(C.STEREO_MODE_TOP_BOTTOM).setColorInfo(colorInfo)
        .setChannelCount(6).setSampleRate(44100).setPcmEncoding(C.ENCODING_PCM_24BIT)
        .setEncoderDelay(1001).setEncoderPadding(1002).setAccessibilityChannel(2)
        .setCryptoType(C.CRYPTO_TYPE_CUSTOM_BASE)
        .build();
}
Also used : FakeMetadataEntry(androidx.media3.test.utils.FakeMetadataEntry) ArrayList(java.util.ArrayList)
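
A hedged sketch (not part of the project's test) of how a Format built by a helper like createTestFormat might be reused: Format.buildUpon() copies every field into a new Format.Builder, so individual values can be overridden without re-specifying the rest.

Format original = createTestFormat();
Format downscaled = original.buildUpon()
        .setWidth(1280)
        .setHeight(720)
        .setAverageBitrate(512)
        .build();
// Fields that aren't overridden (id, codecs, DRM data, color info, ...) carry over unchanged.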

Example 2 with Format

use of androidx.media3.common.Format in project media by androidx.

the class MediaFormatUtilTest method createMediaFormatFromFormat_withPcmEncoding_setsCustomPcmEncodingEntry.

@Test
public void createMediaFormatFromFormat_withPcmEncoding_setsCustomPcmEncodingEntry() {
    Format format = new Format.Builder().setPcmEncoding(C.ENCODING_PCM_16BIT_BIG_ENDIAN).build();
    MediaFormat mediaFormat = MediaFormatUtil.createMediaFormatFromFormat(format);
    assertThat(mediaFormat.getInteger(MediaFormatUtil.KEY_PCM_ENCODING_EXTENDED)).isEqualTo(C.ENCODING_PCM_16BIT_BIG_ENDIAN);
    assertThat(mediaFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)).isFalse();
}
Also used : MediaFormat(android.media.MediaFormat) Format(androidx.media3.common.Format) Test(org.junit.Test)
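
A hedged usage sketch (not part of the test above) of the same conversion for a typical audio Format; the exact keys written to the MediaFormat are assumed to be the standard android.media.MediaFormat ones.

Format audioFormat = new Format.Builder()
        .setSampleMimeType(MimeTypes.AUDIO_AAC)
        .setChannelCount(2)
        .setSampleRate(44100)
        .build();
MediaFormat mediaFormat = MediaFormatUtil.createMediaFormatFromFormat(audioFormat);
// Common fields such as the MIME type, channel count and sample rate are expected to be
// mirrored onto the framework MediaFormat under their standard keys.
String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
int channelCount = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int sampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);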

Example 3 with Format

use of androidx.media3.common.Format in project media by androidx.

the class LibflacAudioRenderer method supportsFormatInternal.

@Override
@C.FormatSupport
protected int supportsFormatInternal(Format format) {
    if (!FlacLibrary.isAvailable() || !MimeTypes.AUDIO_FLAC.equalsIgnoreCase(format.sampleMimeType)) {
        return C.FORMAT_UNSUPPORTED_TYPE;
    }
    // Compute the format that the FLAC decoder will output.
    Format outputFormat;
    if (format.initializationData.isEmpty()) {
        // The initialization data might not be set if the format was obtained from a manifest (e.g.
        // for DASH playbacks) rather than directly from the media. In this case we assume
        // ENCODING_PCM_16BIT. If the actual encoding is different then playback will still succeed as
        // long as the AudioSink supports it, which will always be true when using DefaultAudioSink.
        outputFormat = Util.getPcmFormat(C.ENCODING_PCM_16BIT, format.channelCount, format.sampleRate);
    } else {
        int streamMetadataOffset = STREAM_MARKER_SIZE + METADATA_BLOCK_HEADER_SIZE;
        FlacStreamMetadata streamMetadata = new FlacStreamMetadata(format.initializationData.get(0), streamMetadataOffset);
        outputFormat = getOutputFormat(streamMetadata);
    }
    if (!sinkSupportsFormat(outputFormat)) {
        return C.FORMAT_UNSUPPORTED_SUBTYPE;
    } else if (format.cryptoType != C.CRYPTO_TYPE_NONE) {
        return C.FORMAT_UNSUPPORTED_DRM;
    } else {
        return C.FORMAT_HANDLED;
    }
}
Also used : Format(androidx.media3.common.Format) FlacStreamMetadata(androidx.media3.extractor.FlacStreamMetadata)
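
An illustrative, hedged sketch of how the @C.FormatSupport value returned by supportsFormatInternal might be interpreted by a caller; the flacFormat built here is an assumption, not taken from the project.

Format flacFormat = new Format.Builder()
        .setSampleMimeType(MimeTypes.AUDIO_FLAC)
        .setChannelCount(2)
        .setSampleRate(48000)
        .build();
@C.FormatSupport int formatSupport = supportsFormatInternal(flacFormat);
switch (formatSupport) {
    case C.FORMAT_HANDLED:
        // Native FLAC decoding is available and the AudioSink supports the decoded PCM output.
        break;
    case C.FORMAT_UNSUPPORTED_SUBTYPE:
        // FLAC is recognized, but the AudioSink can't handle the decoder's output format.
        break;
    case C.FORMAT_UNSUPPORTED_DRM:
        // DRM-protected FLAC isn't supported by this renderer.
        break;
    default:
        // C.FORMAT_UNSUPPORTED_TYPE: not FLAC, or the native library isn't available.
        break;
}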

Example 4 with Format

use of androidx.media3.common.Format in project media by androidx.

the class MediaCodecRenderer method drmNeedsCodecReinitialization.

/**
 * Returns whether it's necessary to re-initialize the codec to handle a DRM change. If {@code
 * false} is returned then either {@code oldSession == newSession} (i.e., there was no change), or
 * it's possible to update the existing codec using MediaCrypto.setMediaDrmSession.
 */
private boolean drmNeedsCodecReinitialization(MediaCodecInfo codecInfo, Format newFormat, @Nullable DrmSession oldSession, @Nullable DrmSession newSession) throws ExoPlaybackException {
    if (oldSession == newSession) {
        // No need to re-initialize if the old and new sessions are the same.
        return false;
    }
    if (newSession == null || oldSession == null) {
        // Changing from DRM to no DRM and vice-versa always requires re-initialization.
        return true;
    }
    if (Util.SDK_INT < 23) {
        // MediaCrypto.setMediaDrmSession is only available from API level 23, so re-initialization
        // is required to switch to newSession on older API levels.
        return true;
    }
    if (C.PLAYREADY_UUID.equals(oldSession.getSchemeUuid()) || C.PLAYREADY_UUID.equals(newSession.getSchemeUuid())) {
        // TODO: Add an API check once [Internal ref: b/128835874] is fixed.
        return true;
    }
    @Nullable FrameworkCryptoConfig newCryptoConfig = getFrameworkCryptoConfig(newSession);
    if (newCryptoConfig == null) {
        // We'd only expect this if the CDM backing newSession still needs provisioning, which is
        // unlikely. Handling it without codec re-initialization isn't worth the extra complexity
        // given how unlikely the case is to occur, so we re-initialize in this case.
        return true;
    }
    boolean requiresSecureDecoder;
    if (newCryptoConfig.forceAllowInsecureDecoderComponents) {
        requiresSecureDecoder = false;
    } else {
        requiresSecureDecoder = newSession.requiresSecureDecoder(newFormat.sampleMimeType);
    }
    if (!codecInfo.secure && requiresSecureDecoder) {
        // Re-initialization is required because newSession might require switching to the secure
        // output path.
        return true;
    }
    return false;
}
Also used : FrameworkCryptoConfig(androidx.media3.exoplayer.drm.FrameworkCryptoConfig) Nullable(androidx.annotation.Nullable)
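
A minimal, hedged sketch of the alternative path the Javadoc mentions: when this method returns false for two distinct sessions on API 23 and above, the existing codec can be kept and its MediaCrypto updated in place. The mediaCrypto and drmSessionId variables are assumed placeholders, not taken from MediaCodecRenderer.

if (Util.SDK_INT >= 23) {
    try {
        // Switches the codec's crypto session without re-initializing the codec itself.
        mediaCrypto.setMediaDrmSession(drmSessionId);
    } catch (MediaCryptoException e) {
        // If the in-place update fails, falling back to codec re-initialization is the safe option.
    }
}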

Example 5 with Format

use of androidx.media3.common.Format in project media by androidx.

the class MediaCodecRenderer method feedInputBuffer.

/**
 * @return Whether it may be possible to feed more input data.
 * @throws ExoPlaybackException If an error occurs feeding the input buffer.
 */
private boolean feedInputBuffer() throws ExoPlaybackException {
    if (codec == null || codecDrainState == DRAIN_STATE_WAIT_END_OF_STREAM || inputStreamEnded) {
        return false;
    }
    if (codecDrainState == DRAIN_STATE_NONE && shouldReinitCodec()) {
        drainAndReinitializeCodec();
    }
    if (inputIndex < 0) {
        inputIndex = codec.dequeueInputBufferIndex();
        if (inputIndex < 0) {
            return false;
        }
        buffer.data = codec.getInputBuffer(inputIndex);
        buffer.clear();
    }
    if (codecDrainState == DRAIN_STATE_SIGNAL_END_OF_STREAM) {
        // We need to re-initialize the codec. Send an end of stream signal to the existing codec
        // so that it outputs any remaining buffers before we release it.
        if (codecNeedsEosPropagation) {
            // Do nothing.
        } else {
            codecReceivedEos = true;
            codec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            resetInputBuffer();
        }
        codecDrainState = DRAIN_STATE_WAIT_END_OF_STREAM;
        return false;
    }
    if (codecNeedsAdaptationWorkaroundBuffer) {
        codecNeedsAdaptationWorkaroundBuffer = false;
        buffer.data.put(ADAPTATION_WORKAROUND_BUFFER);
        codec.queueInputBuffer(inputIndex, 0, ADAPTATION_WORKAROUND_BUFFER.length, 0, 0);
        resetInputBuffer();
        codecReceivedBuffers = true;
        return true;
    }
    // For adaptive reconfiguration, decoders expect all reconfiguration data to be supplied at
    // the start of the buffer that also contains the first frame in the new format.
    if (codecReconfigurationState == RECONFIGURATION_STATE_WRITE_PENDING) {
        for (int i = 0; i < codecInputFormat.initializationData.size(); i++) {
            byte[] data = codecInputFormat.initializationData.get(i);
            buffer.data.put(data);
        }
        codecReconfigurationState = RECONFIGURATION_STATE_QUEUE_PENDING;
    }
    int adaptiveReconfigurationBytes = buffer.data.position();
    FormatHolder formatHolder = getFormatHolder();
    @SampleStream.ReadDataResult int result;
    try {
        result = readSource(formatHolder, buffer, /* readFlags= */ 0);
    } catch (InsufficientCapacityException e) {
        onCodecError(e);
        // Skip the sample that's too large by reading it without its data. Then flush the codec so
        // that rendering will resume from the next key frame.
        readSourceOmittingSampleData(/* readFlags= */ 0);
        flushCodec();
        return true;
    }
    if (hasReadStreamToEnd()) {
        // Notify output queue of the last buffer's timestamp.
        lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs;
    }
    if (result == C.RESULT_NOTHING_READ) {
        return false;
    }
    if (result == C.RESULT_FORMAT_READ) {
        if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) {
            // We received two formats in a row. Clear the current buffer of any reconfiguration data
            // associated with the first format.
            buffer.clear();
            codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING;
        }
        onInputFormatChanged(formatHolder);
        return true;
    }
    // We've read a buffer.
    if (buffer.isEndOfStream()) {
        if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) {
            // We received a new format immediately before the end of the stream. We need to clear
            // the corresponding reconfiguration data from the current buffer, but re-write it into
            // a subsequent buffer if there are any (for example, if the user seeks backwards).
            buffer.clear();
            codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING;
        }
        inputStreamEnded = true;
        if (!codecReceivedBuffers) {
            processEndOfStream();
            return false;
        }
        try {
            if (codecNeedsEosPropagation) {
                // Do nothing.
            } else {
                codecReceivedEos = true;
                codec.queueInputBuffer(inputIndex, /* offset= */ 0, /* size= */ 0,
                        /* presentationTimeUs= */ 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                resetInputBuffer();
            }
        } catch (CryptoException e) {
            throw createRendererException(e, inputFormat, Util.getErrorCodeForMediaDrmErrorCode(e.getErrorCode()));
        }
        return false;
    }
    // Non-keyframes are dropped until a keyframe has been queued. This is required when the codec
    // is flushed or re-instantiated mid-stream, for example when switching the output Surface on
    // API levels prior to MediaCodec.setOutputSurface, or when it's necessary to skip past a
    // sample that's too large to be held in one of the decoder's input buffers.
    if (!codecReceivedBuffers && !buffer.isKeyFrame()) {
        buffer.clear();
        if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) {
            // The buffer we just cleared contained reconfiguration data. We need to re-write this data
            // into a subsequent buffer (if there is one).
            codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING;
        }
        return true;
    }
    boolean bufferEncrypted = buffer.isEncrypted();
    if (bufferEncrypted) {
        buffer.cryptoInfo.increaseClearDataFirstSubSampleBy(adaptiveReconfigurationBytes);
    }
    if (codecNeedsDiscardToSpsWorkaround && !bufferEncrypted) {
        NalUnitUtil.discardToSps(buffer.data);
        if (buffer.data.position() == 0) {
            return true;
        }
        codecNeedsDiscardToSpsWorkaround = false;
    }
    long presentationTimeUs = buffer.timeUs;
    if (c2Mp3TimestampTracker != null) {
        presentationTimeUs = c2Mp3TimestampTracker.updateAndGetPresentationTimeUs(inputFormat, buffer);
        // When draining the C2 MP3 decoder it produces an extra non-empty buffer with a timestamp
        // after all queued input buffer timestamps (unlike other decoders, which generally propagate
        // the input timestamps to output buffers 1:1). To detect the end of the stream when this
        // buffer is dequeued we override the largest queued timestamp accordingly.
        largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, c2Mp3TimestampTracker.getLastOutputBufferPresentationTimeUs(inputFormat));
    }
    if (buffer.isDecodeOnly()) {
        decodeOnlyPresentationTimestamps.add(presentationTimeUs);
    }
    if (waitingForFirstSampleInFormat) {
        formatQueue.add(presentationTimeUs, inputFormat);
        waitingForFirstSampleInFormat = false;
    }
    largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, presentationTimeUs);
    buffer.flip();
    if (buffer.hasSupplementalData()) {
        handleInputBufferSupplementalData(buffer);
    }
    onQueueInputBuffer(buffer);
    try {
        if (bufferEncrypted) {
            codec.queueSecureInputBuffer(inputIndex, /* offset= */ 0, buffer.cryptoInfo,
                    presentationTimeUs, /* flags= */ 0);
        } else {
            codec.queueInputBuffer(inputIndex, /* offset= */ 0, buffer.data.limit(),
                    presentationTimeUs, /* flags= */ 0);
        }
    } catch (CryptoException e) {
        throw createRendererException(e, inputFormat, Util.getErrorCodeForMediaDrmErrorCode(e.getErrorCode()));
    }
    resetInputBuffer();
    codecReceivedBuffers = true;
    codecReconfigurationState = RECONFIGURATION_STATE_NONE;
    decoderCounters.queuedInputBufferCount++;
    return true;
}
Also used : FormatHolder(androidx.media3.exoplayer.FormatHolder) ReadDataResult(androidx.media3.exoplayer.source.SampleStream.ReadDataResult) InsufficientCapacityException(androidx.media3.decoder.DecoderInputBuffer.InsufficientCapacityException) MediaCryptoException(android.media.MediaCryptoException) CryptoException(android.media.MediaCodec.CryptoException)
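
For orientation, a heavily simplified, hedged sketch of the raw MediaCodec input loop that feedInputBuffer() wraps, with the reconfiguration, DRM, workaround and end-of-stream handling above stripped away; mediaCodec, presentationTimeUs and readSampleInto(...) are illustrative placeholders.

int inputIndex = mediaCodec.dequeueInputBuffer(/* timeoutUs= */ 0);
if (inputIndex >= 0) {
    ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputIndex);
    int size = readSampleInto(inputBuffer); // hypothetical helper that fills the buffer
    mediaCodec.queueInputBuffer(inputIndex, /* offset= */ 0, size, presentationTimeUs, /* flags= */ 0);
}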

Aggregations

Format (androidx.media3.common.Format) 222
Test (org.junit.Test) 176
Nullable (androidx.annotation.Nullable) 64
TrackGroupArray (androidx.media3.common.TrackGroupArray) 59
TrackGroup (androidx.media3.common.TrackGroup) 49
RendererCapabilities (androidx.media3.exoplayer.RendererCapabilities) 35
DrmSessionEventListener (androidx.media3.exoplayer.drm.DrmSessionEventListener) 26
FakeSampleStream (androidx.media3.test.utils.FakeSampleStream) 26
ArrayList (java.util.ArrayList) 26
DefaultAllocator (androidx.media3.exoplayer.upstream.DefaultAllocator) 22
MediaFormat (android.media.MediaFormat) 19
FakeTimeline (androidx.media3.test.utils.FakeTimeline) 18
Metadata (androidx.media3.common.Metadata) 17
SuppressLint (android.annotation.SuppressLint) 16
FakeMediaSource (androidx.media3.test.utils.FakeMediaSource) 16
ImmutableList (com.google.common.collect.ImmutableList) 16
Timeline (androidx.media3.common.Timeline) 15
AndroidJUnit4 (androidx.test.ext.junit.runners.AndroidJUnit4) 15
Truth.assertThat (com.google.common.truth.Truth.assertThat) 15
RunWith (org.junit.runner.RunWith) 15