Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.
The class SampleQueue, method format.
// TrackOutput implementation. Called by the loading thread.
@Override
public final void format(Format unadjustedUpstreamFormat) {
  Format adjustedUpstreamFormat = getAdjustedUpstreamFormat(unadjustedUpstreamFormat);
  upstreamFormatAdjustmentRequired = false;
  this.unadjustedUpstreamFormat = unadjustedUpstreamFormat;
  boolean upstreamFormatChanged = setUpstreamFormat(adjustedUpstreamFormat);
  if (upstreamFormatChangeListener != null && upstreamFormatChanged) {
    upstreamFormatChangeListener.onUpstreamFormatChanged(adjustedUpstreamFormat);
  }
}
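For context, a minimal caller-side sketch of how a loading thread might publish a Format to a TrackOutput such as the SampleQueue above. The track id, track type, and MIME type are illustrative assumptions, not values taken from the ExoPlayer source.

// Hypothetical extractor-side helper: obtain a TrackOutput and publish its Format.
// The id 0 and the AAC MIME type below are placeholder assumptions.
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.extractor.ExtractorOutput;
import com.google.android.exoplayer2.extractor.TrackOutput;
import com.google.android.exoplayer2.util.MimeTypes;

final class TrackPublisher {
  static TrackOutput publishAudioTrack(ExtractorOutput extractorOutput) {
    TrackOutput trackOutput = extractorOutput.track(/* id= */ 0, C.TRACK_TYPE_AUDIO);
    Format format =
        new Format.Builder()
            .setSampleMimeType(MimeTypes.AUDIO_AAC)
            .setChannelCount(2)
            .setSampleRate(44_100)
            .build();
    // When the TrackOutput is backed by a SampleQueue, this call reaches the
    // format(Format) implementation shown above.
    trackOutput.format(format);
    return trackOutput;
  }
}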
Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.
The class SampleQueue, method peekSampleMetadata.
// See comments in setUpstreamFormat
@SuppressWarnings("ReferenceEquality")
private synchronized int peekSampleMetadata(
    FormatHolder formatHolder,
    DecoderInputBuffer buffer,
    boolean formatRequired,
    boolean loadingFinished,
    SampleExtrasHolder extrasHolder) {
  buffer.waitingForKeys = false;
  if (!hasNextSample()) {
    if (loadingFinished || isLastSampleQueued) {
      buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
      return C.RESULT_BUFFER_READ;
    } else if (upstreamFormat != null && (formatRequired || upstreamFormat != downstreamFormat)) {
      onFormatResult(Assertions.checkNotNull(upstreamFormat), formatHolder);
      return C.RESULT_FORMAT_READ;
    } else {
      return C.RESULT_NOTHING_READ;
    }
  }
  Format format = sharedSampleMetadata.get(getReadIndex()).format;
  if (formatRequired || format != downstreamFormat) {
    onFormatResult(format, formatHolder);
    return C.RESULT_FORMAT_READ;
  }
  int relativeReadIndex = getRelativeIndex(readPosition);
  if (!mayReadSample(relativeReadIndex)) {
    buffer.waitingForKeys = true;
    return C.RESULT_NOTHING_READ;
  }
  buffer.setFlags(flags[relativeReadIndex]);
  buffer.timeUs = timesUs[relativeReadIndex];
  if (buffer.timeUs < startTimeUs) {
    buffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
  }
  extrasHolder.size = sizes[relativeReadIndex];
  extrasHolder.offset = offsets[relativeReadIndex];
  extrasHolder.cryptoData = cryptoDatas[relativeReadIndex];
  return C.RESULT_BUFFER_READ;
}
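The three C.RESULT_* codes returned above are what a reading component has to dispatch on. A hedged sketch of that dispatch, assuming the read result has already been obtained from the queue (the exact SampleQueue read signature varies across ExoPlayer versions, so the call itself is not shown):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;

final class ReadResultExample {
  // Dispatches on a result code produced by the metadata/read path shown above.
  static void handleReadResult(int result, FormatHolder formatHolder, DecoderInputBuffer buffer) {
    switch (result) {
      case C.RESULT_FORMAT_READ:
        // formatHolder.format now carries the new downstream format.
        break;
      case C.RESULT_BUFFER_READ:
        // buffer has been populated, or flagged end-of-stream, and is ready for the decoder.
        break;
      case C.RESULT_NOTHING_READ:
      default:
        // Nothing available yet; try again on the next render pass.
        break;
    }
  }
}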
Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.
The class AdaptiveTrackSelection, method evaluateQueueSize.
@Override
public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
  long nowMs = clock.elapsedRealtime();
  if (!shouldEvaluateQueueSize(nowMs, queue)) {
    return queue.size();
  }
  lastBufferEvaluationMs = nowMs;
  lastBufferEvaluationMediaChunk = queue.isEmpty() ? null : Iterables.getLast(queue);
  if (queue.isEmpty()) {
    return 0;
  }
  int queueSize = queue.size();
  MediaChunk lastChunk = queue.get(queueSize - 1);
  long playoutBufferedDurationBeforeLastChunkUs =
      Util.getPlayoutDurationForMediaDuration(lastChunk.startTimeUs - playbackPositionUs, playbackSpeed);
  long minDurationToRetainAfterDiscardUs = getMinDurationToRetainAfterDiscardUs();
  if (playoutBufferedDurationBeforeLastChunkUs < minDurationToRetainAfterDiscardUs) {
    return queueSize;
  }
  int idealSelectedIndex = determineIdealSelectedIndex(nowMs, getLastChunkDurationUs(queue));
  Format idealFormat = getFormat(idealSelectedIndex);
  // Discard from the first chunk beyond minDurationToRetainAfterDiscardUs whose resolution and
  // bitrate are both lower than the ideal track, and whose width and height are less than or
  // equal to maxWidthToDiscard and maxHeightToDiscard respectively.
  for (int i = 0; i < queueSize; i++) {
    MediaChunk chunk = queue.get(i);
    Format format = chunk.trackFormat;
    long mediaDurationBeforeThisChunkUs = chunk.startTimeUs - playbackPositionUs;
    long playoutDurationBeforeThisChunkUs =
        Util.getPlayoutDurationForMediaDuration(mediaDurationBeforeThisChunkUs, playbackSpeed);
    if (playoutDurationBeforeThisChunkUs >= minDurationToRetainAfterDiscardUs
        && format.bitrate < idealFormat.bitrate
        && format.height != Format.NO_VALUE
        && format.height <= maxHeightToDiscard
        && format.width != Format.NO_VALUE
        && format.width <= maxWidthToDiscard
        && format.height < idealFormat.height) {
      return i;
    }
  }
  return queueSize;
}
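For orientation, a hedged sketch of how a loading component might act on the returned index by evicting queued chunks; the helper below is illustrative and is not part of AdaptiveTrackSelection or its callers.

import java.util.ArrayList;
import com.google.android.exoplayer2.source.chunk.MediaChunk;
import com.google.android.exoplayer2.trackselection.ExoTrackSelection;

final class QueueEvictionExample {
  // Drops queued chunks beyond the size preferred by evaluateQueueSize so they can be
  // re-fetched at the newly selected quality.
  static void discardBeyondPreferredSize(
      ExoTrackSelection trackSelection, long playbackPositionUs, ArrayList<MediaChunk> mediaChunks) {
    int preferredQueueSize = trackSelection.evaluateQueueSize(playbackPositionUs, mediaChunks);
    while (mediaChunks.size() > preferredQueueSize) {
      mediaChunks.remove(mediaChunks.size() - 1);
    }
  }
}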
Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.
The class TextRenderer, method render.
@Override
public void render(long positionUs, long elapsedRealtimeUs) {
  if (isCurrentStreamFinal()
      && finalStreamEndPositionUs != C.TIME_UNSET
      && positionUs >= finalStreamEndPositionUs) {
    releaseBuffers();
    outputStreamEnded = true;
  }
  if (outputStreamEnded) {
    return;
  }
  if (nextSubtitle == null) {
    checkNotNull(decoder).setPositionUs(positionUs);
    try {
      nextSubtitle = checkNotNull(decoder).dequeueOutputBuffer();
    } catch (SubtitleDecoderException e) {
      handleDecoderError(e);
      return;
    }
  }
  if (getState() != STATE_STARTED) {
    return;
  }
  boolean textRendererNeedsUpdate = false;
  if (subtitle != null) {
    // We're iterating through the events in a subtitle. Set textRendererNeedsUpdate if we
    // advance to the next event.
    long subtitleNextEventTimeUs = getNextEventTime();
    while (subtitleNextEventTimeUs <= positionUs) {
      nextSubtitleEventIndex++;
      subtitleNextEventTimeUs = getNextEventTime();
      textRendererNeedsUpdate = true;
    }
  }
  if (nextSubtitle != null) {
    SubtitleOutputBuffer nextSubtitle = this.nextSubtitle;
    if (nextSubtitle.isEndOfStream()) {
      if (!textRendererNeedsUpdate && getNextEventTime() == Long.MAX_VALUE) {
        if (decoderReplacementState == REPLACEMENT_STATE_WAIT_END_OF_STREAM) {
          replaceDecoder();
        } else {
          releaseBuffers();
          outputStreamEnded = true;
        }
      }
    } else if (nextSubtitle.timeUs <= positionUs) {
      // Advance to the next subtitle. Sync the next event index and trigger an update.
      if (subtitle != null) {
        subtitle.release();
      }
      nextSubtitleEventIndex = nextSubtitle.getNextEventTimeIndex(positionUs);
      subtitle = nextSubtitle;
      this.nextSubtitle = null;
      textRendererNeedsUpdate = true;
    }
  }
  if (textRendererNeedsUpdate) {
    // If textRendererNeedsUpdate then subtitle must be non-null.
    checkNotNull(subtitle);
    // textRendererNeedsUpdate is set and we're playing. Update the renderer.
    updateOutput(subtitle.getCues(positionUs));
  }
  if (decoderReplacementState == REPLACEMENT_STATE_WAIT_END_OF_STREAM) {
    return;
  }
  try {
    while (!inputStreamEnded) {
      @Nullable SubtitleInputBuffer nextInputBuffer = this.nextInputBuffer;
      if (nextInputBuffer == null) {
        nextInputBuffer = checkNotNull(decoder).dequeueInputBuffer();
        if (nextInputBuffer == null) {
          return;
        }
        this.nextInputBuffer = nextInputBuffer;
      }
      if (decoderReplacementState == REPLACEMENT_STATE_SIGNAL_END_OF_STREAM) {
        nextInputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
        checkNotNull(decoder).queueInputBuffer(nextInputBuffer);
        this.nextInputBuffer = null;
        decoderReplacementState = REPLACEMENT_STATE_WAIT_END_OF_STREAM;
        return;
      }
      // Try and read the next subtitle from the source.
      @ReadDataResult int result = readSource(formatHolder, nextInputBuffer, /* readFlags= */ 0);
      if (result == C.RESULT_BUFFER_READ) {
        if (nextInputBuffer.isEndOfStream()) {
          inputStreamEnded = true;
          waitingForKeyFrame = false;
        } else {
          @Nullable Format format = formatHolder.format;
          if (format == null) {
            // We haven't received a format yet.
            return;
          }
          nextInputBuffer.subsampleOffsetUs = format.subsampleOffsetUs;
          nextInputBuffer.flip();
          waitingForKeyFrame &= !nextInputBuffer.isKeyFrame();
        }
        if (!waitingForKeyFrame) {
          checkNotNull(decoder).queueInputBuffer(nextInputBuffer);
          this.nextInputBuffer = null;
        }
      } else if (result == C.RESULT_NOTHING_READ) {
        return;
      }
    }
  } catch (SubtitleDecoderException e) {
    handleDecoderError(e);
  }
}
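Downstream, the cues that updateOutput forwards eventually reach application code through the player's text output. A minimal sketch of that listener side, assuming a 2.1x ExoPlayer release where Player.Listener exposes onCues(List<Cue>) and a SubtitleView is available for display:

import java.util.List;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.ui.SubtitleView;

final class CueListenerExample {
  // Forwards the cues emitted via TextRenderer#updateOutput to a SubtitleView.
  static void attachSubtitleOutput(ExoPlayer player, SubtitleView subtitleView) {
    player.addListener(
        new Player.Listener() {
          @Override
          public void onCues(List<Cue> cues) {
            subtitleView.setCues(cues);
          }
        });
  }
}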
Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.
The class MediaCodecVideoRenderer, method getCodecMaxValues.
/**
 * Returns {@link CodecMaxValues} suitable for configuring a codec for {@code format} in a way
 * that will allow possible adaptation to other compatible formats in {@code streamFormats}.
 *
 * @param codecInfo Information about the {@link MediaCodec} being configured.
 * @param format The {@link Format} for which the codec is being configured.
 * @param streamFormats The possible stream formats.
 * @return Suitable {@link CodecMaxValues}.
 */
protected CodecMaxValues getCodecMaxValues(
    MediaCodecInfo codecInfo, Format format, Format[] streamFormats) {
  int maxWidth = format.width;
  int maxHeight = format.height;
  int maxInputSize = getMaxInputSize(codecInfo, format);
  if (streamFormats.length == 1) {
    // The single entry in streamFormats must correspond to the format for which the codec is
    // being configured.
    if (maxInputSize != Format.NO_VALUE) {
      int codecMaxInputSize = getCodecMaxInputSize(codecInfo, format);
      if (codecMaxInputSize != Format.NO_VALUE) {
        // Scale up the initial video decoder maximum input size so playlist item transitions with
        // small increases in maximum sample size don't require reinitialization. This only makes
        // a difference if the exact maximum sample sizes are known from the container.
        int scaledMaxInputSize = (int) (maxInputSize * INITIAL_FORMAT_MAX_INPUT_SIZE_SCALE_FACTOR);
        // Avoid exceeding the maximum expected for the codec.
        maxInputSize = min(scaledMaxInputSize, codecMaxInputSize);
      }
    }
    return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
  }
  boolean haveUnknownDimensions = false;
  for (Format streamFormat : streamFormats) {
    if (format.colorInfo != null && streamFormat.colorInfo == null) {
      // streamFormat likely has incomplete color information. Copy the complete color information
      // from format to avoid codec re-use being ruled out for only this reason.
      streamFormat = streamFormat.buildUpon().setColorInfo(format.colorInfo).build();
    }
    if (codecInfo.canReuseCodec(format, streamFormat).result != REUSE_RESULT_NO) {
      haveUnknownDimensions |=
          (streamFormat.width == Format.NO_VALUE || streamFormat.height == Format.NO_VALUE);
      maxWidth = max(maxWidth, streamFormat.width);
      maxHeight = max(maxHeight, streamFormat.height);
      maxInputSize = max(maxInputSize, getMaxInputSize(codecInfo, streamFormat));
    }
  }
  if (haveUnknownDimensions) {
    Log.w(TAG, "Resolutions unknown. Codec max resolution: " + maxWidth + "x" + maxHeight);
    Point codecMaxSize = getCodecMaxSize(codecInfo, format);
    if (codecMaxSize != null) {
      maxWidth = max(maxWidth, codecMaxSize.x);
      maxHeight = max(maxHeight, codecMaxSize.y);
      maxInputSize =
          max(maxInputSize, getCodecMaxInputSize(codecInfo, format.buildUpon().setWidth(maxWidth).setHeight(maxHeight).build()));
      Log.w(TAG, "Codec max resolution adjusted to: " + maxWidth + "x" + maxHeight);
    }
  }
  return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
}
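To show where these values typically end up, a hedged fragment written as if inside a MediaCodecVideoRenderer subclass (CodecMaxValues has protected visibility): the computed maxima are copied into the MediaFormat handed to MediaCodec#configure. The surrounding variables codecInfo, format, and streamFormats, and the bare MediaFormat construction, are assumed context rather than code from the source above.

// Hypothetical subclass fragment: apply the computed maxima to the codec's MediaFormat.
MediaFormat mediaFormat = new MediaFormat();
CodecMaxValues codecMaxValues = getCodecMaxValues(codecInfo, format, streamFormats);
mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, codecMaxValues.width);
mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, codecMaxValues.height);
if (codecMaxValues.inputSize != Format.NO_VALUE) {
  mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxValues.inputSize);
}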