
Example 1 with ColorInfo

Use of androidx.media3.common.ColorInfo in the media project by androidx.

From the Format class, method toBundle().

@UnstableApi
@Override
public Bundle toBundle() {
    Bundle bundle = new Bundle();
    bundle.putString(keyForField(FIELD_ID), id);
    bundle.putString(keyForField(FIELD_LABEL), label);
    bundle.putString(keyForField(FIELD_LANGUAGE), language);
    bundle.putInt(keyForField(FIELD_SELECTION_FLAGS), selectionFlags);
    bundle.putInt(keyForField(FIELD_ROLE_FLAGS), roleFlags);
    bundle.putInt(keyForField(FIELD_AVERAGE_BITRATE), averageBitrate);
    bundle.putInt(keyForField(FIELD_PEAK_BITRATE), peakBitrate);
    bundle.putString(keyForField(FIELD_CODECS), codecs);
    // Metadata is currently not Bundleable because Metadata.Entry is an Interface,
    // which would be difficult to unbundle in a backward compatible way.
    // The entries are additionally of limited usefulness to remote processes.
    bundle.putParcelable(keyForField(FIELD_METADATA), metadata);
    // Container specific.
    bundle.putString(keyForField(FIELD_CONTAINER_MIME_TYPE), containerMimeType);
    // Sample specific.
    bundle.putString(keyForField(FIELD_SAMPLE_MIME_TYPE), sampleMimeType);
    bundle.putInt(keyForField(FIELD_MAX_INPUT_SIZE), maxInputSize);
    for (int i = 0; i < initializationData.size(); i++) {
        bundle.putByteArray(keyForInitializationData(i), initializationData.get(i));
    }
    // DrmInitData doesn't need to be Bundleable as it's only used in the playing process to
    // initialize the decoder.
    bundle.putParcelable(keyForField(FIELD_DRM_INIT_DATA), drmInitData);
    bundle.putLong(keyForField(FIELD_SUBSAMPLE_OFFSET_US), subsampleOffsetUs);
    // Video specific.
    bundle.putInt(keyForField(FIELD_WIDTH), width);
    bundle.putInt(keyForField(FIELD_HEIGHT), height);
    bundle.putFloat(keyForField(FIELD_FRAME_RATE), frameRate);
    bundle.putInt(keyForField(FIELD_ROTATION_DEGREES), rotationDegrees);
    bundle.putFloat(keyForField(FIELD_PIXEL_WIDTH_HEIGHT_RATIO), pixelWidthHeightRatio);
    bundle.putByteArray(keyForField(FIELD_PROJECTION_DATA), projectionData);
    bundle.putInt(keyForField(FIELD_STEREO_MODE), stereoMode);
    bundle.putBundle(keyForField(FIELD_COLOR_INFO), BundleableUtil.toNullableBundle(colorInfo));
    // Audio specific.
    bundle.putInt(keyForField(FIELD_CHANNEL_COUNT), channelCount);
    bundle.putInt(keyForField(FIELD_SAMPLE_RATE), sampleRate);
    bundle.putInt(keyForField(FIELD_PCM_ENCODING), pcmEncoding);
    bundle.putInt(keyForField(FIELD_ENCODER_DELAY), encoderDelay);
    bundle.putInt(keyForField(FIELD_ENCODER_PADDING), encoderPadding);
    // Text specific.
    bundle.putInt(keyForField(FIELD_ACCESSIBILITY_CHANNEL), accessibilityChannel);
    // Source specific.
    bundle.putInt(keyForField(FIELD_CRYPTO_TYPE), cryptoType);
    return bundle;
}
Also used: Bundle (android.os.Bundle), UnstableApi (androidx.media3.common.util.UnstableApi)
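
A minimal round-trip sketch for the bundle produced above, assuming this media3 version pairs toBundle() with a Bundleable Format.CREATOR and that format is an arbitrary Format instance (both are assumptions, not shown in the source):

// Hedged sketch: serialize a Format and restore it from the Bundle.
Bundle bundle = format.toBundle();
Format restored = Format.CREATOR.fromBundle(bundle);
// ColorInfo travels as a nested Bundle under FIELD_COLOR_INFO, so the restored Format
// carries an equivalent ColorInfo whenever one was set on the original.
@Nullable ColorInfo restoredColorInfo = restored.colorInfo;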

Example 2 with ColorInfo

Use of androidx.media3.common.ColorInfo in the media project by androidx.

From the FormatTest class, method createTestFormat().

private static Format createTestFormat() {
    byte[] initData1 = new byte[] { 1, 2, 3 };
    byte[] initData2 = new byte[] { 4, 5, 6 };
    List<byte[]> initializationData = new ArrayList<>();
    initializationData.add(initData1);
    initializationData.add(initData2);
    DrmInitData.SchemeData drmData1 = new DrmInitData.SchemeData(WIDEVINE_UUID, VIDEO_MP4, buildTestData(128, 1));
    DrmInitData.SchemeData drmData2 = new DrmInitData.SchemeData(C.UUID_NIL, VIDEO_WEBM, buildTestData(128, 1));
    DrmInitData drmInitData = new DrmInitData(drmData1, drmData2);
    byte[] projectionData = new byte[] { 1, 2, 3 };
    Metadata metadata = new Metadata(new FakeMetadataEntry("id1"), new FakeMetadataEntry("id2"));
    ColorInfo colorInfo = new ColorInfo(C.COLOR_SPACE_BT709, C.COLOR_RANGE_LIMITED, C.COLOR_TRANSFER_SDR, new byte[] { 1, 2, 3, 4, 5, 6, 7 });
    return new Format.Builder()
        .setId("id")
        .setLabel("label")
        .setLanguage("language")
        .setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
        .setRoleFlags(C.ROLE_FLAG_MAIN)
        .setAverageBitrate(1024)
        .setPeakBitrate(2048)
        .setCodecs("codec")
        .setMetadata(metadata)
        .setContainerMimeType(VIDEO_MP4)
        .setSampleMimeType(MimeTypes.VIDEO_H264)
        .setMaxInputSize(5000)
        .setInitializationData(initializationData)
        .setDrmInitData(drmInitData)
        .setSubsampleOffsetUs(Format.OFFSET_SAMPLE_RELATIVE)
        .setWidth(1920)
        .setHeight(1080)
        .setFrameRate(24)
        .setRotationDegrees(90)
        .setPixelWidthHeightRatio(4)
        .setProjectionData(projectionData)
        .setStereoMode(C.STEREO_MODE_TOP_BOTTOM)
        .setColorInfo(colorInfo)
        .setChannelCount(6)
        .setSampleRate(44100)
        .setPcmEncoding(C.ENCODING_PCM_24BIT)
        .setEncoderDelay(1001)
        .setEncoderPadding(1002)
        .setAccessibilityChannel(2)
        .setCryptoType(C.CRYPTO_TYPE_CUSTOM_BASE)
        .build();
}
Also used: FakeMetadataEntry (androidx.media3.test.utils.FakeMetadataEntry), ArrayList (java.util.ArrayList)
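
The buildTestData(length, seed) calls above are assumed to come from the project's test utilities and to produce deterministic pseudo-random bytes; a hypothetical stand-in with the same shape:

// Hypothetical stand-in for buildTestData: deterministic bytes derived from a seed.
private static byte[] buildTestData(int length, int seed) {
    byte[] data = new byte[length];
    new java.util.Random(seed).nextBytes(data);
    return data;
}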

Example 3 with ColorInfo

Use of androidx.media3.common.ColorInfo in the media project by androidx.

From the OutputConsumerAdapterV30 class, method getColorInfo().

@Nullable
private static ColorInfo getColorInfo(MediaFormat mediaFormat) {
    @Nullable ByteBuffer hdrStaticInfoByteBuffer = mediaFormat.getByteBuffer(MediaFormat.KEY_HDR_STATIC_INFO);
    @Nullable byte[] hdrStaticInfo = hdrStaticInfoByteBuffer != null ? getArray(hdrStaticInfoByteBuffer) : null;
    int colorTransfer = mediaFormat.getInteger(MediaFormat.KEY_COLOR_TRANSFER, /* defaultValue= */ Format.NO_VALUE);
    int colorRange = mediaFormat.getInteger(MediaFormat.KEY_COLOR_RANGE, /* defaultValue= */ Format.NO_VALUE);
    int colorStandard = mediaFormat.getInteger(MediaFormat.KEY_COLOR_STANDARD, /* defaultValue= */ Format.NO_VALUE);
    if (hdrStaticInfo != null || colorTransfer != Format.NO_VALUE || colorRange != Format.NO_VALUE || colorStandard != Format.NO_VALUE) {
        return new ColorInfo(colorStandard, colorRange, colorTransfer, hdrStaticInfo);
    }
    return null;
}
Also used: ByteBuffer (java.nio.ByteBuffer), Nullable (androidx.annotation.Nullable), SeekPoint (androidx.media3.extractor.SeekPoint), SuppressLint (android.annotation.SuppressLint), ColorInfo (androidx.media3.common.ColorInfo)
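
getColorInfo is private to OutputConsumerAdapterV30, but a sketch of the kind of input it handles may help; the MediaFormat keys and constants below are standard android.media.MediaFormat APIs, while the surrounding scenario is illustrative:

// Hypothetical input: an HDR10 video MediaFormat as a decoder might report it.
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MimeTypes.VIDEO_H265, 3840, 2160);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT2020);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_ST2084);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
// With any of these keys (or HDR static info) present, getColorInfo returns a non-null
// ColorInfo; with none of them set it returns null, so SDR streams are not misreported.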

Example 4 with ColorInfo

Use of androidx.media3.common.ColorInfo in the media project by androidx.

From the AtomParsers class, method parseVideoSampleEntry().

// hdrStaticInfo is allocated using allocate() in allocateHdrStaticInfo().
@SuppressWarnings("ByteBufferBackingArray")
private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType, int position, int size, int trackId, int rotationDegrees, @Nullable DrmInitData drmInitData, StsdData out, int entryIndex) throws ParserException {
    parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE);
    parent.skipBytes(16);
    int width = parent.readUnsignedShort();
    int height = parent.readUnsignedShort();
    boolean pixelWidthHeightRatioFromPasp = false;
    float pixelWidthHeightRatio = 1;
    parent.skipBytes(50);
    int childPosition = parent.getPosition();
    if (atomType == Atom.TYPE_encv) {
        @Nullable Pair<Integer, TrackEncryptionBox> sampleEntryEncryptionData = parseSampleEntryEncryptionData(parent, position, size);
        if (sampleEntryEncryptionData != null) {
            atomType = sampleEntryEncryptionData.first;
            drmInitData = drmInitData == null ? null : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType);
            out.trackEncryptionBoxes[entryIndex] = sampleEntryEncryptionData.second;
        }
        parent.setPosition(childPosition);
    }
    // TODO: Uncomment when [Internal: b/63092960] is fixed.
    // else {
    // drmInitData = null;
    // }
    @Nullable String mimeType = null;
    if (atomType == Atom.TYPE_m1v_) {
        mimeType = MimeTypes.VIDEO_MPEG;
    } else if (atomType == Atom.TYPE_H263) {
        mimeType = MimeTypes.VIDEO_H263;
    }
    @Nullable List<byte[]> initializationData = null;
    @Nullable String codecs = null;
    @Nullable byte[] projectionData = null;
    @C.StereoMode int stereoMode = Format.NO_VALUE;
    // HDR related metadata.
    @C.ColorSpace int colorSpace = Format.NO_VALUE;
    @C.ColorRange int colorRange = Format.NO_VALUE;
    @C.ColorTransfer int colorTransfer = Format.NO_VALUE;
    // The format of HDR static info is defined in CTA-861-G:2017, Table 45.
    @Nullable ByteBuffer hdrStaticInfo = null;
    while (childPosition - position < size) {
        parent.setPosition(childPosition);
        int childStartPosition = parent.getPosition();
        int childAtomSize = parent.readInt();
        if (childAtomSize == 0 && parent.getPosition() - position == size) {
            // Handle optional terminating four zero bytes in MOV files.
            break;
        }
        ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive");
        int childAtomType = parent.readInt();
        if (childAtomType == Atom.TYPE_avcC) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_H264;
            parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
            AvcConfig avcConfig = AvcConfig.parse(parent);
            initializationData = avcConfig.initializationData;
            out.nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength;
            if (!pixelWidthHeightRatioFromPasp) {
                pixelWidthHeightRatio = avcConfig.pixelWidthHeightRatio;
            }
            codecs = avcConfig.codecs;
        } else if (childAtomType == Atom.TYPE_hvcC) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_H265;
            parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
            HevcConfig hevcConfig = HevcConfig.parse(parent);
            initializationData = hevcConfig.initializationData;
            out.nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength;
            if (!pixelWidthHeightRatioFromPasp) {
                pixelWidthHeightRatio = hevcConfig.pixelWidthHeightRatio;
            }
            codecs = hevcConfig.codecs;
        } else if (childAtomType == Atom.TYPE_dvcC || childAtomType == Atom.TYPE_dvvC) {
            @Nullable DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(parent);
            if (dolbyVisionConfig != null) {
                codecs = dolbyVisionConfig.codecs;
                mimeType = MimeTypes.VIDEO_DOLBY_VISION;
            }
        } else if (childAtomType == Atom.TYPE_vpcC) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = (atomType == Atom.TYPE_vp08) ? MimeTypes.VIDEO_VP8 : MimeTypes.VIDEO_VP9;
        } else if (childAtomType == Atom.TYPE_av1C) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_AV1;
        } else if (childAtomType == Atom.TYPE_clli) {
            if (hdrStaticInfo == null) {
                hdrStaticInfo = allocateHdrStaticInfo();
            }
            // The contents of the clli box occupy the last 4 bytes of the HDR static info array. Note
            // that each field is read in big endian and written in little endian.
            hdrStaticInfo.position(21);
            // max_content_light_level.
            hdrStaticInfo.putShort(parent.readShort());
            // max_pic_average_light_level.
            hdrStaticInfo.putShort(parent.readShort());
        } else if (childAtomType == Atom.TYPE_mdcv) {
            if (hdrStaticInfo == null) {
                hdrStaticInfo = allocateHdrStaticInfo();
            }
            // The contents of the mdcv box occupy 20 bytes after the first byte of the HDR static info
            // array. Note that each field is read in big endian and written in little endian.
            short displayPrimariesGX = parent.readShort();
            short displayPrimariesGY = parent.readShort();
            short displayPrimariesBX = parent.readShort();
            short displayPrimariesBY = parent.readShort();
            short displayPrimariesRX = parent.readShort();
            short displayPrimariesRY = parent.readShort();
            short whitePointX = parent.readShort();
            short whitePointY = parent.readShort();
            long maxDisplayMasteringLuminance = parent.readUnsignedInt();
            long minDisplayMasteringLuminance = parent.readUnsignedInt();
            hdrStaticInfo.position(1);
            hdrStaticInfo.putShort(displayPrimariesRX);
            hdrStaticInfo.putShort(displayPrimariesRY);
            hdrStaticInfo.putShort(displayPrimariesGX);
            hdrStaticInfo.putShort(displayPrimariesGY);
            hdrStaticInfo.putShort(displayPrimariesBX);
            hdrStaticInfo.putShort(displayPrimariesBY);
            hdrStaticInfo.putShort(whitePointX);
            hdrStaticInfo.putShort(whitePointY);
            hdrStaticInfo.putShort((short) (maxDisplayMasteringLuminance / 10000));
            hdrStaticInfo.putShort((short) (minDisplayMasteringLuminance / 10000));
        } else if (childAtomType == Atom.TYPE_d263) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_H263;
        } else if (childAtomType == Atom.TYPE_esds) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            Pair<@NullableType String, byte @NullableType []> mimeTypeAndInitializationDataBytes = parseEsdsFromParent(parent, childStartPosition);
            mimeType = mimeTypeAndInitializationDataBytes.first;
            @Nullable byte[] initializationDataBytes = mimeTypeAndInitializationDataBytes.second;
            if (initializationDataBytes != null) {
                initializationData = ImmutableList.of(initializationDataBytes);
            }
        } else if (childAtomType == Atom.TYPE_pasp) {
            pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
            pixelWidthHeightRatioFromPasp = true;
        } else if (childAtomType == Atom.TYPE_sv3d) {
            projectionData = parseProjFromParent(parent, childStartPosition, childAtomSize);
        } else if (childAtomType == Atom.TYPE_st3d) {
            int version = parent.readUnsignedByte();
            // Flags.
            parent.skipBytes(3);
            if (version == 0) {
                int layout = parent.readUnsignedByte();
                switch(layout) {
                    case 0:
                        stereoMode = C.STEREO_MODE_MONO;
                        break;
                    case 1:
                        stereoMode = C.STEREO_MODE_TOP_BOTTOM;
                        break;
                    case 2:
                        stereoMode = C.STEREO_MODE_LEFT_RIGHT;
                        break;
                    case 3:
                        stereoMode = C.STEREO_MODE_STEREO_MESH;
                        break;
                    default:
                        break;
                }
            }
        } else if (childAtomType == Atom.TYPE_colr) {
            int colorType = parent.readInt();
            if (colorType == TYPE_nclx || colorType == TYPE_nclc) {
                // For more info on syntax, see Section 8.5.2.2 in ISO/IEC 14496-12:2012(E) and
                // https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFChap3/qtff3.html.
                int colorPrimaries = parent.readUnsignedShort();
                int transferCharacteristics = parent.readUnsignedShort();
                // matrix_coefficients.
                parent.skipBytes(2);
                // Only try and read full_range_flag if the box is long enough. It should be present in
                // all colr boxes with type=nclx (Section 8.5.2.2 in ISO/IEC 14496-12:2012(E)) but some
                // device cameras record videos with type=nclx without this final flag (and therefore
                // size=18): https://github.com/google/ExoPlayer/issues/9332
                boolean fullRangeFlag = childAtomSize == 19 && (parent.readUnsignedByte() & 0b10000000) != 0;
                colorSpace = ColorInfo.isoColorPrimariesToColorSpace(colorPrimaries);
                colorRange = fullRangeFlag ? C.COLOR_RANGE_FULL : C.COLOR_RANGE_LIMITED;
                colorTransfer = ColorInfo.isoTransferCharacteristicsToColorTransfer(transferCharacteristics);
            } else {
                Log.w(TAG, "Unsupported color type: " + Atom.getAtomTypeString(colorType));
            }
        }
        childPosition += childAtomSize;
    }
    // If the media type was not recognized, ignore the track.
    if (mimeType == null) {
        return;
    }
    Format.Builder formatBuilder =
        new Format.Builder()
            .setId(trackId)
            .setSampleMimeType(mimeType)
            .setCodecs(codecs)
            .setWidth(width)
            .setHeight(height)
            .setPixelWidthHeightRatio(pixelWidthHeightRatio)
            .setRotationDegrees(rotationDegrees)
            .setProjectionData(projectionData)
            .setStereoMode(stereoMode)
            .setInitializationData(initializationData)
            .setDrmInitData(drmInitData);
    if (colorSpace != Format.NO_VALUE || colorRange != Format.NO_VALUE || colorTransfer != Format.NO_VALUE || hdrStaticInfo != null) {
        // Note that if either mdcv or clli are missing, we leave the corresponding HDR static
        // metadata bytes with value zero. See [Internal ref: b/194535665].
        formatBuilder.setColorInfo(new ColorInfo(colorSpace, colorRange, colorTransfer, hdrStaticInfo != null ? hdrStaticInfo.array() : null));
    }
    out.format = formatBuilder.build();
}
Also used: DolbyVisionConfig (androidx.media3.extractor.DolbyVisionConfig), AvcConfig (androidx.media3.extractor.AvcConfig), ByteBuffer (java.nio.ByteBuffer), ColorInfo (androidx.media3.common.ColorInfo), HevcConfig (androidx.media3.extractor.HevcConfig), Format (androidx.media3.common.Format), Nullable (androidx.annotation.Nullable)
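
allocateHdrStaticInfo() is not shown in this excerpt; a plausible sketch, inferred only from the comments and buffer offsets above (25 bytes in the CTA-861-G Table 45 layout, written little endian via java.nio.ByteOrder), would be:

// Plausible sketch: byte 0 is left zero, bytes 1-20 receive the mdcv mastering-display
// values and bytes 21-24 receive the clli light-level values written above.
private static ByteBuffer allocateHdrStaticInfo() {
    return ByteBuffer.allocate(25).order(ByteOrder.LITTLE_ENDIAN);
}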

Example 5 with ColorInfo

Use of androidx.media3.common.ColorInfo in the media project by androidx.

From the DumpableFormat class, method dump().

@Override
public void dump(Dumper dumper) {
    dumper.startBlock("format " + index);
    addIfNonDefault(dumper, "averageBitrate", format -> format.averageBitrate);
    addIfNonDefault(dumper, "peakBitrate", format -> format.peakBitrate);
    addIfNonDefault(dumper, "id", format -> format.id);
    addIfNonDefault(dumper, "containerMimeType", format -> format.containerMimeType);
    addIfNonDefault(dumper, "sampleMimeType", format -> format.sampleMimeType);
    addIfNonDefault(dumper, "codecs", format -> format.codecs);
    addIfNonDefault(dumper, "maxInputSize", format -> format.maxInputSize);
    addIfNonDefault(dumper, "width", format -> format.width);
    addIfNonDefault(dumper, "height", format -> format.height);
    addIfNonDefault(dumper, "frameRate", format -> format.frameRate);
    addIfNonDefault(dumper, "rotationDegrees", format -> format.rotationDegrees);
    addIfNonDefault(dumper, "pixelWidthHeightRatio", format -> format.pixelWidthHeightRatio);
    @Nullable ColorInfo colorInfo = format.colorInfo;
    if (colorInfo != null) {
        dumper.startBlock("colorInfo");
        dumper.add("colorSpace", colorInfo.colorSpace);
        dumper.add("colorRange", colorInfo.colorRange);
        dumper.add("colorTransfer", colorInfo.colorTransfer);
        dumper.add("hdrStaticInfo", colorInfo.hdrStaticInfo);
        dumper.endBlock();
    }
    addIfNonDefault(dumper, "channelCount", format -> format.channelCount);
    addIfNonDefault(dumper, "sampleRate", format -> format.sampleRate);
    addIfNonDefault(dumper, "pcmEncoding", format -> format.pcmEncoding);
    addIfNonDefault(dumper, "encoderDelay", format -> format.encoderDelay);
    addIfNonDefault(dumper, "encoderPadding", format -> format.encoderPadding);
    addIfNonDefault(dumper, "subsampleOffsetUs", format -> format.subsampleOffsetUs);
    addIfNonDefault(dumper, "selectionFlags", format -> format.selectionFlags);
    addIfNonDefault(dumper, "language", format -> format.language);
    addIfNonDefault(dumper, "label", format -> format.label);
    if (format.drmInitData != null) {
        dumper.add("drmInitData", format.drmInitData.hashCode());
    }
    addIfNonDefault(dumper, "metadata", format -> format.metadata);
    if (!format.initializationData.isEmpty()) {
        dumper.startBlock("initializationData");
        for (int i = 0; i < format.initializationData.size(); i++) {
            dumper.add("data", format.initializationData.get(i));
        }
        dumper.endBlock();
    }
    dumper.endBlock();
}
Also used: Nullable (androidx.annotation.Nullable), ColorInfo (androidx.media3.common.ColorInfo)
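
The addIfNonDefault helper is not shown in this excerpt. A hedged sketch of what it might look like, comparing each field against a default-built Format; java.util.function.Function, Util.areEqual (androidx.media3.common.util.Util) and an Object overload of Dumper.add are assumptions here, not taken from the source:

// Hedged sketch: dump a field only when its value differs from a default-built Format.
private static final Format DEFAULT_FORMAT = new Format.Builder().build();

private void addIfNonDefault(Dumper dumper, String field, Function<Format, Object> getter) {
    Object actual = getter.apply(format);
    if (!Util.areEqual(actual, getter.apply(DEFAULT_FORMAT))) {
        dumper.add(field, actual);
    }
}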

Aggregations

ColorInfo (androidx.media3.common.ColorInfo): 6
Format (androidx.media3.common.Format): 4
Nullable (androidx.annotation.Nullable): 3
Test (org.junit.Test): 3
ByteBuffer (java.nio.ByteBuffer): 2
SuppressLint (android.annotation.SuppressLint): 1
MediaFormat (android.media.MediaFormat): 1
Bundle (android.os.Bundle): 1
UnstableApi (androidx.media3.common.util.UnstableApi): 1
AvcConfig (androidx.media3.extractor.AvcConfig): 1
DolbyVisionConfig (androidx.media3.extractor.DolbyVisionConfig): 1
HevcConfig (androidx.media3.extractor.HevcConfig): 1
SeekPoint (androidx.media3.extractor.SeekPoint): 1
FakeMetadataEntry (androidx.media3.test.utils.FakeMetadataEntry): 1
ArrayList (java.util.ArrayList): 1