Use of com.google.android.exoplayer2.util.ParsableBitArray in the ExoPlayer project by Google.
Example: the ProjectionDecoder class, method parseMesh.
/**
 * Parses a projection mesh from {@code input}: a table of float coordinates, a table of vertices
 * whose components are zig-zag delta-coded indices into the coordinate table, and a list of
 * sub-meshes built from delta-coded vertex indices.
 *
 * @param input The byte stream, positioned at the start of the mesh.
 * @return The parsed mesh, or null if a count exceeds its sanity limit or an index delta walks
 *     out of range (treated as corrupt input).
 */
@Nullable
private static Mesh parseMesh(ParsableByteArray input) {
// Read the coordinates.
int coordinateCount = input.readInt();
if (coordinateCount > MAX_COORDINATE_COUNT) {
return null;
}
float[] coordinates = new float[coordinateCount];
for (int coordinate = 0; coordinate < coordinateCount; coordinate++) {
coordinates[coordinate] = input.readFloat();
}
// Read the vertices.
int vertexCount = input.readInt();
if (vertexCount > MAX_VERTEX_COUNT) {
return null;
}
// Bits needed to encode a zig-zagged coordinate index delta: ceil(log2(2 * coordinateCount)).
// NOTE(review): assumes coordinateCount > 0 — Math.log(0) would yield a negative bit count;
// TODO confirm upstream data guarantees a non-empty coordinate table.
final double log2 = Math.log(2.0);
int coordinateCountSizeBits = (int) Math.ceil(Math.log(2.0 * coordinateCount) / log2);
// Switch to bit-level reading, continuing from the current byte position of the input.
ParsableBitArray bitInput = new ParsableBitArray(input.getData());
bitInput.setPosition(input.getPosition() * 8);
// Each vertex is 5 floats; components 0-2 are used as triangle vertices and 3-4 as texture
// coordinates in the sub-mesh loop below.
float[] vertices = new float[vertexCount * 5];
// Per-component running index: each delta is relative to the previous vertex's component.
int[] coordinateIndices = new int[5];
int vertexIndex = 0;
for (int vertex = 0; vertex < vertexCount; vertex++) {
for (int i = 0; i < 5; i++) {
int coordinateIndex = coordinateIndices[i] + decodeZigZag(bitInput.readBits(coordinateCountSizeBits));
if (coordinateIndex >= coordinateCount || coordinateIndex < 0) {
// Corrupt input: the delta walked outside the coordinate table.
return null;
}
vertices[vertexIndex++] = coordinates[coordinateIndex];
coordinateIndices[i] = coordinateIndex;
}
}
// Pad to next byte boundary
bitInput.setPosition(((bitInput.getPosition() + 7) & ~7));
int subMeshCount = bitInput.readBits(32);
SubMesh[] subMeshes = new SubMesh[subMeshCount];
for (int i = 0; i < subMeshCount; i++) {
int textureId = bitInput.readBits(8);
int drawMode = bitInput.readBits(8);
int triangleIndexCount = bitInput.readBits(32);
if (triangleIndexCount > MAX_TRIANGLE_INDICES) {
return null;
}
// Bits needed for a zig-zagged vertex index delta (same assumption: vertexCount > 0).
int vertexCountSizeBits = (int) Math.ceil(Math.log(2.0 * vertexCount) / log2);
int index = 0;
float[] triangleVertices = new float[triangleIndexCount * 3];
float[] textureCoords = new float[triangleIndexCount * 2];
for (int counter = 0; counter < triangleIndexCount; counter++) {
// Vertex indices are also delta-coded, relative to the previously decoded index.
index += decodeZigZag(bitInput.readBits(vertexCountSizeBits));
if (index < 0 || index >= vertexCount) {
return null;
}
// First three floats of the vertex are the position, last two the texture coordinates.
triangleVertices[counter * 3] = vertices[index * 5];
triangleVertices[counter * 3 + 1] = vertices[index * 5 + 1];
triangleVertices[counter * 3 + 2] = vertices[index * 5 + 2];
textureCoords[counter * 2] = vertices[index * 5 + 3];
textureCoords[counter * 2 + 1] = vertices[index * 5 + 4];
}
subMeshes[i] = new SubMesh(textureId, triangleVertices, textureCoords, drawMode);
}
return new Mesh(subMeshes);
}
Use of com.google.android.exoplayer2.util.ParsableBitArray in the ExoPlayer project by Google.
Example: the DtsUtil class, method parseDtsFormat.
/**
 * Derives a {@link Format} from the header of a DTS frame, as specified by ETSI TS 102 114
 * subsections 5.3/5.4.
 *
 * @param frame The DTS frame to parse.
 * @param trackId The track identifier to set on the format, or null.
 * @param language The language to set on the format, or null.
 * @param drmInitData {@link DrmInitData} to be included in the format, or null.
 * @return The format of the DTS frame.
 */
public static Format parseDtsFormat(byte[] frame, @Nullable String trackId, @Nullable String language, @Nullable DrmInitData drmInitData) {
  ParsableBitArray header = getNormalizedFrameHeader(frame);
  // Skip SYNC (32), FTYPE (1), SHORT (5), CPF (1), NBLKS (7) and FSIZE (14): 60 bits total.
  header.skipBits(60);
  // AMODE gives the base channel count; SFREQ indexes the sample rate table.
  int channelCount = CHANNELS_BY_AMODE[header.readBits(6)];
  int sampleRate = SAMPLE_RATE_BY_SFREQ[header.readBits(4)];
  // RATE indexes a table holding twice the bitrate in kbps; out-of-range values fall back to
  // Format.NO_VALUE.
  int rateIndex = header.readBits(5);
  int bitrate =
      rateIndex < TWICE_BITRATE_KBPS_BY_RATE.length
          ? TWICE_BITRATE_KBPS_BY_RATE[rateIndex] * 1000 / 2
          : Format.NO_VALUE;
  // Skip MIX, DYNF, TIMEF, AUXF, HDCD, EXT_AUDIO_ID, EXT_AUDIO and ASPF.
  header.skipBits(10);
  // A non-zero LFF value adds a low-frequency effects channel.
  if (header.readBits(2) > 0) {
    channelCount++;
  }
  return new Format.Builder()
      .setId(trackId)
      .setSampleMimeType(MimeTypes.AUDIO_DTS)
      .setAverageBitrate(bitrate)
      .setChannelCount(channelCount)
      .setSampleRate(sampleRate)
      .setDrmInitData(drmInitData)
      .setLanguage(language)
      .build();
}
Use of com.google.android.exoplayer2.util.ParsableBitArray in the ExoPlayer project by Google.
Example: the H263Reader class, method parseCsdBuffer.
/**
 * Parses a codec-specific data buffer, returning the {@link Format} of the media.
 *
 * <p>Reads the MPEG-4 Visual video object layer (ISO/IEC 14496-2 (2001) subsection 6.2.3) to
 * extract the frame dimensions and pixel aspect ratio. The full CSD is attached to the returned
 * format as initialization data.
 *
 * @param csdBuffer The buffer to parse.
 * @param volStartPosition The byte offset of the start of the video object layer in the buffer.
 * @param formatId The ID for the generated format.
 * @return The {@link Format} of the media represented in the buffer.
 */
private static Format parseCsdBuffer(CsdBuffer csdBuffer, int volStartPosition, String formatId) {
byte[] csdData = Arrays.copyOf(csdBuffer.data, csdBuffer.length);
ParsableBitArray buffer = new ParsableBitArray(csdData);
buffer.skipBytes(volStartPosition);
// Parse the video object layer defined in ISO 14496-2 (2001) subsection 6.2.3.
// video_object_layer_start_code
buffer.skipBytes(4);
// random_accessible_vol
buffer.skipBit();
// video_object_type_indication
buffer.skipBits(8);
// is_object_layer_identifier
if (buffer.readBit()) {
// video_object_layer_verid
buffer.skipBits(4);
// video_object_layer_priority
buffer.skipBits(3);
}
float pixelWidthHeightRatio;
int aspectRatioInfo = buffer.readBits(4);
if (aspectRatioInfo == 0x0F) {
// extended_PAR: an explicit numerator and denominator follow.
int parWidth = buffer.readBits(8);
int parHeight = buffer.readBits(8);
if (parHeight == 0) {
// A zero denominator is invalid; fall back to square pixels.
Log.w(TAG, "Invalid aspect ratio");
pixelWidthHeightRatio = 1f;
} else {
pixelWidthHeightRatio = (float) parWidth / parHeight;
}
} else if (aspectRatioInfo < PIXEL_WIDTH_HEIGHT_RATIO_BY_ASPECT_RATIO_INFO.length) {
// Standard aspect_ratio_info values map to predefined ratios.
pixelWidthHeightRatio = PIXEL_WIDTH_HEIGHT_RATIO_BY_ASPECT_RATIO_INFO[aspectRatioInfo];
} else {
Log.w(TAG, "Invalid aspect ratio");
pixelWidthHeightRatio = 1f;
}
// vol_control_parameters
if (buffer.readBit()) {
// chroma_format
buffer.skipBits(2);
// low_delay
buffer.skipBits(1);
// vbv_parameters
if (buffer.readBit()) {
// first_half_bit_rate
buffer.skipBits(15);
// marker_bit
buffer.skipBit();
// latter_half_bit_rate
buffer.skipBits(15);
// marker_bit
buffer.skipBit();
// first_half_vbv_buffer_size
buffer.skipBits(15);
// marker_bit
buffer.skipBit();
// latter_half_vbv_buffer_size
buffer.skipBits(3);
// first_half_vbv_occupancy
buffer.skipBits(11);
// marker_bit
buffer.skipBit();
// latter_half_vbv_occupancy
buffer.skipBits(15);
// marker_bit
buffer.skipBit();
}
}
int videoObjectLayerShape = buffer.readBits(2);
if (videoObjectLayerShape != VIDEO_OBJECT_LAYER_SHAPE_RECTANGULAR) {
// Non-rectangular shapes are not supported; parsing continues assuming rectangular layout.
Log.w(TAG, "Unhandled video object layer shape");
}
// marker_bit
buffer.skipBit();
int vopTimeIncrementResolution = buffer.readBits(16);
// marker_bit
buffer.skipBit();
// fixed_vop_rate
if (buffer.readBit()) {
if (vopTimeIncrementResolution == 0) {
Log.w(TAG, "Invalid vop_increment_time_resolution");
} else {
// fixed_vop_time_increment is coded in as many bits as needed to represent
// vop_time_increment_resolution - 1.
vopTimeIncrementResolution--;
int numBits = 0;
while (vopTimeIncrementResolution > 0) {
++numBits;
vopTimeIncrementResolution >>= 1;
}
// fixed_vop_time_increment
buffer.skipBits(numBits);
}
}
// marker_bit
buffer.skipBit();
int videoObjectLayerWidth = buffer.readBits(13);
// marker_bit
buffer.skipBit();
int videoObjectLayerHeight = buffer.readBits(13);
// marker_bit
buffer.skipBit();
// interlaced
buffer.skipBit();
return new Format.Builder().setId(formatId).setSampleMimeType(MimeTypes.VIDEO_MP4V).setWidth(videoObjectLayerWidth).setHeight(videoObjectLayerHeight).setPixelWidthHeightRatio(pixelWidthHeightRatio).setInitializationData(Collections.singletonList(csdData)).build();
}
Use of com.google.android.exoplayer2.util.ParsableBitArray in the ExoPlayer project by Google.
Example: the Id3Decoder class, method decodeMlltFrame.
/**
 * Decodes an MPEG location lookup table (MLLT) frame; see ID3v2.4.0 native frames subsection 4.6.
 *
 * @param id3Data The frame data, positioned at the start of the frame body.
 * @param frameSize The size of the frame body in bytes.
 * @return The decoded {@link MlltFrame}.
 */
private static MlltFrame decodeMlltFrame(ParsableByteArray id3Data, int frameSize) {
  // See ID3v2.4.0 native frames subsection 4.6.
  int mpegFramesBetweenReference = id3Data.readUnsignedShort();
  int bytesBetweenReference = id3Data.readUnsignedInt24();
  int millisecondsBetweenReference = id3Data.readUnsignedInt24();
  int bitsForBytesDeviation = id3Data.readUnsignedByte();
  int bitsForMillisecondsDeviation = id3Data.readUnsignedByte();
  ParsableBitArray references = new ParsableBitArray();
  references.reset(id3Data);
  // 10 bytes of fixed-size fields were consumed above; the remainder of the frame holds the
  // packed (bytesDeviation, millisecondsDeviation) reference pairs.
  int referencesBits = 8 * (frameSize - 10);
  int bitsPerReference = bitsForBytesDeviation + bitsForMillisecondsDeviation;
  // Guard against malformed frames where both deviation field widths are zero, which would
  // otherwise cause a divide-by-zero when computing the reference count.
  int referencesCount = bitsPerReference > 0 ? referencesBits / bitsPerReference : 0;
  int[] bytesDeviations = new int[referencesCount];
  int[] millisecondsDeviations = new int[referencesCount];
  for (int i = 0; i < referencesCount; i++) {
    bytesDeviations[i] = references.readBits(bitsForBytesDeviation);
    millisecondsDeviations[i] = references.readBits(bitsForMillisecondsDeviation);
  }
  return new MlltFrame(mpegFramesBetweenReference, bytesBetweenReference, millisecondsBetweenReference, bytesDeviations, millisecondsDeviations);
}
Use of com.google.android.exoplayer2.util.ParsableBitArray in the ExoPlayer project by Google.
Example: the DvbParser class, method decode.
/**
 * Decodes a subtitling packet, returning a list of parsed {@link Cue}s.
 *
 * <p>NOTE(review): this method mutates instance state ({@code subtitleService}, {@code bitmap},
 * {@code canvas}), so concurrent calls are presumably unsafe — confirm callers serialize access.
 *
 * @param data The subtitling packet data to decode.
 * @param limit The limit in {@code data} at which to stop decoding.
 * @return The parsed {@link Cue}s.
 */
public List<Cue> decode(byte[] data, int limit) {
// Parse the input data.
ParsableBitArray dataBitArray = new ParsableBitArray(data, limit);
while (// sync_byte (8) + segment header (40)
dataBitArray.bitsLeft() >= 48 && dataBitArray.readBits(8) == 0x0F) {
parseSubtitlingSegment(dataBitArray, subtitleService);
}
// Without a page composition there is nothing to display yet.
@Nullable PageComposition pageComposition = subtitleService.pageComposition;
if (pageComposition == null) {
return Collections.emptyList();
}
// Update the canvas bitmap if necessary.
DisplayDefinition displayDefinition = subtitleService.displayDefinition != null ? subtitleService.displayDefinition : defaultDisplayDefinition;
if (bitmap == null || displayDefinition.width + 1 != bitmap.getWidth() || displayDefinition.height + 1 != bitmap.getHeight()) {
bitmap = Bitmap.createBitmap(displayDefinition.width + 1, displayDefinition.height + 1, Bitmap.Config.ARGB_8888);
canvas.setBitmap(bitmap);
}
// Build the cues.
List<Cue> cues = new ArrayList<>();
SparseArray<PageRegion> pageRegions = pageComposition.regions;
for (int i = 0; i < pageRegions.size(); i++) {
// Save clean clipping state.
canvas.save();
PageRegion pageRegion = pageRegions.valueAt(i);
int regionId = pageRegions.keyAt(i);
RegionComposition regionComposition = subtitleService.regions.get(regionId);
// Clip drawing to the current region and display definition window.
int baseHorizontalAddress = pageRegion.horizontalAddress + displayDefinition.horizontalPositionMinimum;
int baseVerticalAddress = pageRegion.verticalAddress + displayDefinition.verticalPositionMinimum;
int clipRight = min(baseHorizontalAddress + regionComposition.width, displayDefinition.horizontalPositionMaximum);
int clipBottom = min(baseVerticalAddress + regionComposition.height, displayDefinition.verticalPositionMaximum);
canvas.clipRect(baseHorizontalAddress, baseVerticalAddress, clipRight, clipBottom);
// Fall back to the ancillary CLUT, and finally the default CLUT, if the region's CLUT is
// undefined.
ClutDefinition clutDefinition = subtitleService.cluts.get(regionComposition.clutId);
if (clutDefinition == null) {
clutDefinition = subtitleService.ancillaryCluts.get(regionComposition.clutId);
if (clutDefinition == null) {
clutDefinition = defaultClutDefinition;
}
}
// Paint each of the region's objects onto the shared canvas.
SparseArray<RegionObject> regionObjects = regionComposition.regionObjects;
for (int j = 0; j < regionObjects.size(); j++) {
int objectId = regionObjects.keyAt(j);
RegionObject regionObject = regionObjects.valueAt(j);
ObjectData objectData = subtitleService.objects.get(objectId);
if (objectData == null) {
objectData = subtitleService.ancillaryObjects.get(objectId);
}
if (objectData != null) {
// nonModifyingColorFlag suppresses the default paint — presumably the object is drawn
// without color modification; verify against paintPixelDataSubBlocks.
@Nullable Paint paint = objectData.nonModifyingColorFlag ? null : defaultPaint;
paintPixelDataSubBlocks(objectData, clutDefinition, regionComposition.depth, baseHorizontalAddress + regionObject.horizontalPosition, baseVerticalAddress + regionObject.verticalPosition, paint, canvas);
}
}
if (regionComposition.fillFlag) {
// Fill the region with the pixel code that matches its declared bit depth.
int color;
if (regionComposition.depth == REGION_DEPTH_8_BIT) {
color = clutDefinition.clutEntries8Bit[regionComposition.pixelCode8Bit];
} else if (regionComposition.depth == REGION_DEPTH_4_BIT) {
color = clutDefinition.clutEntries4Bit[regionComposition.pixelCode4Bit];
} else {
color = clutDefinition.clutEntries2Bit[regionComposition.pixelCode2Bit];
}
fillRegionPaint.setColor(color);
canvas.drawRect(baseHorizontalAddress, baseVerticalAddress, baseHorizontalAddress + regionComposition.width, baseVerticalAddress + regionComposition.height, fillRegionPaint);
}
// Snapshot the painted region into a cue, positioned and sized as fractions of the display.
cues.add(new Cue.Builder().setBitmap(Bitmap.createBitmap(bitmap, baseHorizontalAddress, baseVerticalAddress, regionComposition.width, regionComposition.height)).setPosition((float) baseHorizontalAddress / displayDefinition.width).setPositionAnchor(Cue.ANCHOR_TYPE_START).setLine((float) baseVerticalAddress / displayDefinition.height, Cue.LINE_TYPE_FRACTION).setLineAnchor(Cue.ANCHOR_TYPE_START).setSize((float) regionComposition.width / displayDefinition.width).setBitmapHeight((float) regionComposition.height / displayDefinition.height).build());
// Clear the shared bitmap before the next region is painted.
canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
// Restore clean clipping state.
canvas.restore();
}
return Collections.unmodifiableList(cues);
}
Aggregations