Use of android.media.MediaFormat in project LanSoEditor_common by LanSoSdk:
class AudioEncodeDecode, method encodePcmData.
/**
 * Compresses raw PCM audio data into an AAC (.m4a) file. Blocks until the whole
 * requested range has been encoded and written.
 *
 * Note: the entire encoded stream is buffered in memory before it is written,
 * so a very long clip may exhaust memory.
 *
 * @param srcPath    path of the source raw PCM file
 * @param dstPath    path of the destination .m4a file
 * @param startTime  start position, in seconds
 * @param endTime    end position, in seconds
 * @param sampleRate sample rate of the PCM data (e.g. 44100), must match how the PCM was produced
 * @param channels   channel count of the PCM data (e.g. 2), must match how the PCM was produced
 * @param bitRate    per-channel bitrate, e.g. 64000
 * @return 0 on success, -1 on failure
 */
public static int encodePcmData(String srcPath, String dstPath, float startTime, float endTime, int sampleRate, int channels, int bitRate) {
    File tmpFile = new File(srcPath);
    if (!tmpFile.exists())
        return -1;
    // Byte offset of the start position: seconds * sampleRate * 2 bytes per sample * channels.
    int startOffset = (int) (startTime * sampleRate) * 2 * channels;
    // Number of samples (per channel) in the requested range.
    int numSamples = (int) ((endTime - startTime) * sampleRate);
    // Some devices have problems reading mono AAC files (e.g. Samsung S3). Making it stereo.
    int numChannels = (channels == 1) ? 2 : channels;
    String mimeType = "audio/mp4a-latm";
    // Total bitrate = per-channel bitrate * channel count
    // (rule of thumb for good quality: 64 kbps per channel).
    int bitrate = bitRate * numChannels;
    // Estimate of the encoded size based on the bitrate, plus 10% headroom.
    int estimatedEncodedSize = (int) ((endTime - startTime) * (bitrate / 8) * 1.1);
    // All encoded bytes are accumulated here; grown by 20% if the estimate is short.
    ByteBuffer encodedBytes = ByteBuffer.allocate(estimatedEncodedSize);
    MediaCodec codec = null;
    try {
        codec = MediaCodec.createEncoderByType(mimeType);
        MediaFormat format = MediaFormat.createAudioFormat(mimeType, sampleRate, numChannels);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();
    } catch (IOException e1) {
        Log.e("lansoeidtor", e1.toString());
        // BUGFIX: release the codec if configure/start failed after creation.
        if (codec != null) {
            codec.release();
        }
        return -1;
    }
    ByteBuffer[] inputBuffers = codec.getInputBuffers();
    ByteBuffer[] outputBuffers = codec.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean done_reading = false;
    long presentation_time = 0;
    // AAC encodes 1024 samples per frame per channel.
    int frame_size = 1024;
    // Each sample is coded as a 16-bit short, hence the factor of 2.
    byte[] buffer = new byte[frame_size * numChannels * 2];
    // Adding 2 frames, cf. priming frames for AAC.
    numSamples += (2 * frame_size);
    // Total frame count; first AAC frame = 2 bytes.
    int tot_num_frames = 1 + (numSamples / frame_size);
    if (numSamples % frame_size != 0) {
        tot_num_frames++;
    }
    // Per-frame encoded sizes, needed later to build the MP4 header.
    int[] frame_sizes = new int[tot_num_frames];
    int num_out_frames = 0;
    int num_frames = 0;
    int num_samples_left = numSamples;
    // Size of the scratch buffer holding one encoded frame.
    int encodedSamplesSize = 0;
    byte[] encodedSamples = null;
    InputStream is = null;
    try {
        is = new FileInputStream(srcPath);
        is.skip(startOffset);
    } catch (IOException e1) {
        // Covers both FileNotFoundException and a failed skip().
        Log.e("lansongeditor", e1.toString());
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
                Log.e("lansongeditor", e.toString());
            }
        }
        // BUGFIX: the original leaked the codec on this path.
        codec.release();
        return -1;
    }
    try {
        while (true) {
            // Step 1: feed PCM samples into the encoder.
            int inputBufferIndex = codec.dequeueInputBuffer(100);
            if (!done_reading && inputBufferIndex >= 0) {
                if (num_samples_left <= 0) {
                    // All samples have been read; signal end of stream.
                    codec.queueInputBuffer(inputBufferIndex, 0, 0, -1, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    done_reading = true;
                } else {
                    inputBuffers[inputBufferIndex].clear();
                    if (buffer.length > inputBuffers[inputBufferIndex].remaining()) {
                        // Input buffer is smaller than one frame. This should never happen.
                        continue;
                    }
                    // For a mono source only half a buffer is read from disk; it is
                    // duplicated into stereo in place below.
                    int bufferSize = (channels == 1) ? (buffer.length / 2) : buffer.length;
                    int getsize = 0;
                    try {
                        getsize = is.read(buffer, 0, bufferSize);
                    } catch (IOException e) {
                        Log.e("lansongeditor", e.toString());
                        // Treat a read failure like end of data instead of queueing
                        // whatever stale bytes the buffer happened to contain.
                        getsize = 0;
                    }
                    if (getsize < 0) {
                        // BUGFIX: read() returns -1 at EOF; the original used it as a
                        // loop start index over the buffer. Treat it as "no data read".
                        getsize = 0;
                    }
                    // Zero-pad a short read so a full frame is always submitted.
                    for (int i = getsize; i < bufferSize; i++) {
                        buffer[i] = 0;
                    }
                    if (channels == 1) {
                        // Expand mono to stereo in place, walking backwards so no
                        // not-yet-copied sample is overwritten.
                        for (int i = bufferSize - 1; i >= 1; i -= 2) {
                            buffer[2 * i + 1] = buffer[i];
                            buffer[2 * i] = buffer[i - 1];
                            buffer[2 * i - 1] = buffer[2 * i + 1];
                            buffer[2 * i - 2] = buffer[2 * i];
                        }
                    }
                    num_samples_left -= frame_size;
                    inputBuffers[inputBufferIndex].put(buffer);
                    // Presentation time of this frame in microseconds.
                    presentation_time = (long) (((num_frames++) * frame_size * 1e6) / sampleRate);
                    codec.queueInputBuffer(inputBufferIndex, 0, buffer.length, presentation_time, 0);
                }
            }
            // Step 2: drain encoded AAC frames from the encoder.
            int outputBufferIndex = codec.dequeueOutputBuffer(info, 100);
            if (outputBufferIndex >= 0 && info.size > 0 && info.presentationTimeUs >= 0) {
                if (num_out_frames < frame_sizes.length) {
                    frame_sizes[num_out_frames++] = info.size;
                }
                if (encodedSamplesSize < info.size) {
                    encodedSamplesSize = info.size;
                    encodedSamples = new byte[encodedSamplesSize];
                }
                outputBuffers[outputBufferIndex].get(encodedSamples, 0, info.size);
                outputBuffers[outputBufferIndex].clear();
                codec.releaseOutputBuffer(outputBufferIndex, false);
                if (encodedBytes.remaining() < info.size) {
                    // The size estimate was too small; grow the buffer by 20%.
                    estimatedEncodedSize = (int) (estimatedEncodedSize * 1.2);
                    ByteBuffer newEncodedBytes = ByteBuffer.allocate(estimatedEncodedSize);
                    int position = encodedBytes.position();
                    encodedBytes.rewind();
                    newEncodedBytes.put(encodedBytes);
                    encodedBytes = newEncodedBytes;
                    encodedBytes.position(position);
                }
                encodedBytes.put(encodedSamples, 0, info.size);
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                outputBuffers = codec.getOutputBuffers();
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Subsequent data will conform to the new format. We could check
                // codec.getOutputFormat() here if we wanted to validate it.
            }
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                // All encoded data has been drained from the encoder.
                break;
            }
        }
    } finally {
        // BUGFIX: the original leaked both the stream and the codec if the
        // encode loop threw; always release them.
        try {
            is.close();
        } catch (IOException e) {
            Log.e("lansongeditor", e.toString());
        }
        try {
            codec.stop();
        } catch (IllegalStateException e) {
            Log.e("lansongeditor", e.toString());
        }
        codec.release();
    }
    int encoded_size = encodedBytes.position();
    encodedBytes.rewind();
    // Write the encoded stream to the file, 4kB at a time.
    buffer = new byte[4096];
    FileOutputStream outputStream = null;
    try {
        outputStream = new FileOutputStream(dstPath);
        // Write the MP4/AAC header first.
        outputStream.write(MP4Header.getMP4Header(sampleRate, numChannels, frame_sizes, bitrate));
        // Then the encoded payload.
        while (encoded_size - encodedBytes.position() > buffer.length) {
            encodedBytes.get(buffer);
            outputStream.write(buffer);
        }
        int remaining = encoded_size - encodedBytes.position();
        if (remaining > 0) {
            encodedBytes.get(buffer, 0, remaining);
            outputStream.write(buffer, 0, remaining);
        }
    } catch (IOException e) {
        Log.e("Ringdroid", "Failed to create the .m4a file.");
        StringWriter writer = new StringWriter();
        e.printStackTrace(new PrintWriter(writer));
        Log.e("Ringdroid", writer.toString());
        return -1;
    } finally {
        // BUGFIX: the original leaked the output stream when a write failed.
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException e) {
                Log.e("Ringdroid", e.toString());
            }
        }
    }
    return 0;
}
Use of android.media.MediaFormat in project EasyPlayer-RTMP-Android by EasyDSS:
class EasyPlayerClient, method startCodec.
/**
 * Starts the video consumer thread: pulls encoded frames off mQueue and decodes
 * them, preferring a hardware MediaCodec decoder and falling back to the
 * software VideoCodec.VideoDecoderLite if hardware init fails (or the user
 * forced software decoding / requested raw I420 callbacks).
 */
private void startCodec() {
    // User-configured extra delay in microseconds (read once at thread start).
    // NOTE(review): delayUS is read here but never used below — presumably legacy; confirm.
    final int delayUS = PreferenceManager.getDefaultSharedPreferences(mContext).getInt("delayUs", 0);
    mThread = new Thread("VIDEO_CONSUMER") {

        @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
        @Override
        public void run() {
            Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
            // Exactly one of mCodec (hardware) or mDecoder (software) is non-null
            // once initialization below has run.
            MediaCodec mCodec = null;
            VideoCodec.VideoDecoderLite mDecoder = null;
            try {
                boolean pushBlankBuffersOnStop = true;
                int index = 0;
                // Timestamp (us) of the previously displayed frame; 0 means "no frame shown yet".
                long previewStampUs = 0l;
                long previewTickUs = 0l;
                long differ = 0;
                // Timestamp of the previous frame taken from the queue (for logging only).
                long previewStampUs1 = 0;
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                // Loop until stop() nulls out mThread.
                while (mThread != null) {
                    Client.FrameInfo frameInfo;
                    if (mCodec == null && mDecoder == null) {
                        // No decoder yet: block for the first frame, then initialize.
                        frameInfo = mQueue.takeVideoFrame();
                        try {
                            if (PreferenceManager.getDefaultSharedPreferences(mContext).getBoolean("use-sw-codec", false) || i420callback != null) {
                                // Force the software path; caught below to trigger the fallback.
                                throw new IllegalStateException("user set sw codec");
                            }
                            final String mime = frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264 ? "video/avc" : "video/hevc";
                            MediaFormat format = MediaFormat.createVideoFormat(mime, mWidth, mHeight);
                            format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
                            format.setInteger(MediaFormat.KEY_PUSH_BLANK_BUFFERS_ON_STOP, pushBlankBuffersOnStop ? 1 : 0);
                            // csd-0 (SPS) is mandatory; csd-1 (PPS) only for H.264.
                            if (mCSD0 != null) {
                                format.setByteBuffer("csd-0", mCSD0);
                            } else {
                                throw new InvalidParameterException("csd-0 is invalid.");
                            }
                            if (mCSD1 != null) {
                                format.setByteBuffer("csd-1", mCSD1);
                            } else {
                                if (frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264)
                                    throw new InvalidParameterException("csd-1 is invalid.");
                            }
                            MediaCodec codec = MediaCodec.createDecoderByType(mime);
                            Log.i(TAG, String.format("config codec:%s", format));
                            codec.configure(format, mSurface, null, 0);
                            codec.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
                            codec.start();
                            mCodec = codec;
                        } catch (Throwable e) {
                            // Any hardware-init failure falls back to the software decoder.
                            Log.e(TAG, String.format("init codec error due to %s", e.getMessage()));
                            e.printStackTrace();
                            final VideoCodec.VideoDecoderLite decoder = new VideoCodec.VideoDecoderLite();
                            decoder.create(mSurface, frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264);
                            mDecoder = decoder;
                        }
                        // previewTickUs = mTexture.getTimestamp();
                        // differ = previewTickUs - frameInfo.stamp;
                        // index = mCodec.dequeueInputBuffer(0);
                        // if (index >= 0) {
                        // ByteBuffer buffer = mCodec.getInputBuffers()[index];
                        // buffer.clear();
                        // mCSD0.clear();
                        // mCSD1.clear();
                        // buffer.put(mCSD0.array(), 0, mCSD0.remaining());
                        // buffer.put(mCSD1.array(), 0, mCSD1.remaining());
                        // mCodec.queueInputBuffer(index, 0, buffer.position(), 0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
                        // }
                    } else {
                        // Decoder exists: poll with a short timeout so output can still drain
                        // even when no new frame arrives.
                        frameInfo = mQueue.takeVideoFrame(5);
                    }
                    if (frameInfo != null) {
                        Log.d(TAG, "video " + frameInfo.stamp + " take[" + (frameInfo.stamp - previewStampUs1) + "]");
                        previewStampUs1 = frameInfo.stamp;
                        pumpVideoSample(frameInfo);
                    }
                    if (mDecoder != null) {
                        // ---- Software decode path ----
                        if (frameInfo != null) {
                            long decodeBegin = System.currentTimeMillis();
                            int[] size = new int[2];
                            // mDecoder.decodeFrame(frameInfo, size);
                            ByteBuffer buf = mDecoder.decodeFrameYUV(frameInfo, size);
                            if (i420callback != null && buf != null)
                                i420callback.onI420Data(buf);
                            if (buf != null)
                                mDecoder.releaseBuffer(buf);
                            long decodeSpend = System.currentTimeMillis() - decodeBegin;
                            boolean firstFrame = previewStampUs == 0l;
                            if (firstFrame) {
                                // Notify the client that the first frame is on screen.
                                Log.i(TAG, String.format("POST VIDEO_DISPLAYED!!!"));
                                ResultReceiver rr = mRR;
                                if (rr != null)
                                    rr.send(RESULT_VIDEO_DISPLAYED, null);
                            }
                            long current = frameInfo.stamp;
                            if (previewStampUs != 0l) {
                                // Pace playback: inter-frame gap minus the time decoding took.
                                long sleepTime = current - previewStampUs - decodeSpend * 1000;
                                if (sleepTime > 50000) {
                                    // Clamp abnormal gaps (likely dropped frames upstream).
                                    Log.w(TAG, "sleep time.too long:" + sleepTime);
                                    sleepTime = 50000;
                                }
                                if (sleepTime > 0) {
                                    sleepTime %= 100000;
                                    // How far behind the newest received frame we are.
                                    long cache = mNewestStample - frameInfo.stamp;
                                    sleepTime = fixSleepTime(sleepTime, cache, 100000);
                                    if (sleepTime > 0) {
                                        Thread.sleep(sleepTime / 1000);
                                    }
                                    Log.d(TAG, "cache:" + cache);
                                }
                            }
                            previewStampUs = current;
                        }
                    } else {
                        // ---- Hardware decode path ----
                        do {
                            if (frameInfo != null) {
                                byte[] pBuf = frameInfo.buffer;
                                index = mCodec.dequeueInputBuffer(10);
                                if (index >= 0) {
                                    ByteBuffer buffer = mCodec.getInputBuffers()[index];
                                    buffer.clear();
                                    if (pBuf.length > buffer.remaining()) {
                                        // Frame too large for the input buffer: submit an
                                        // empty buffer to keep timestamps flowing.
                                        mCodec.queueInputBuffer(index, 0, 0, frameInfo.stamp, 0);
                                    } else {
                                        buffer.put(pBuf, frameInfo.offset, frameInfo.length);
                                        mCodec.queueInputBuffer(index, 0, buffer.position(), frameInfo.stamp + differ, 0);
                                    }
                                    frameInfo = null;
                                }
                            }
                            //
                            index = mCodec.dequeueOutputBuffer(info, 10);
                            switch(index) {
                                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                                    Log.i(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                                    break;
                                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                                    MediaFormat mf = mCodec.getOutputFormat();
                                    Log.i(TAG, "INFO_OUTPUT_FORMAT_CHANGED :" + mf);
                                    break;
                                case MediaCodec.INFO_TRY_AGAIN_LATER:
                                    // No output available yet.
                                    break;
                                default:
                                    // The output queue is not empty; index is a valid buffer.
                                    // -1 marks the first frame (no pacing before anything was shown).
                                    long newSleepUs = -1;
                                    boolean firstTime = previewStampUs == 0l;
                                    if (!firstTime) {
                                        long sleepUs = (info.presentationTimeUs - previewStampUs);
                                        if (sleepUs > 50000) {
                                            // Abnormal timestamp gap; the server may have dropped frames.
                                            Log.w(TAG, "sleep time.too long:" + sleepUs);
                                            sleepUs = 50000;
                                        }
                                        {
                                            long cache = mNewestStample - previewStampUs;
                                            newSleepUs = fixSleepTime(sleepUs, cache, 0);
                                            // Log.d(TAG, String.format("sleepUs:%d,newSleepUs:%d,Cache:%d", sleepUs, newSleepUs, cache));
                                            Log.d(TAG, "cache:" + cache);
                                        }
                                    }
                                    previewStampUs = info.presentationTimeUs;
                                    // Deliberately disabled branch: releaseOutputBuffer(index, timestampNs)
                                    // (API 21+) would let the platform schedule the render instead of sleeping.
                                    if (false && Build.VERSION.SDK_INT >= 21) {
                                        Log.d(TAG, String.format("releaseoutputbuffer:%d,stampUs:%d", index, previewStampUs));
                                        mCodec.releaseOutputBuffer(index, previewStampUs);
                                    } else {
                                        if (newSleepUs < 0) {
                                            newSleepUs = 0;
                                        }
                                        // Log.i(TAG,String.format("sleep:%d", newSleepUs/1000));
                                        // Pace manually, then render the buffer to the surface.
                                        Thread.sleep(newSleepUs / 1000);
                                        mCodec.releaseOutputBuffer(index, true);
                                    }
                                    if (firstTime) {
                                        Log.i(TAG, String.format("POST VIDEO_DISPLAYED!!!"));
                                        ResultReceiver rr = mRR;
                                        if (rr != null)
                                            rr.send(RESULT_VIDEO_DISPLAYED, null);
                                    }
                            }
                            // Keep looping while there is still an unsubmitted frame or
                            // the codec keeps producing output.
                        } while (frameInfo != null || index < MediaCodec.INFO_TRY_AGAIN_LATER);
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Release whichever decoder was created.
                if (mCodec != null) {
                    // mCodec.stop();
                    mCodec.release();
                }
                if (mDecoder != null) {
                    mDecoder.close();
                }
            }
        }
    };
    mThread.start();
}
Use of android.media.MediaFormat in project EasyPlayer-RTMP-Android by EasyDSS:
class EasyAACMuxer, method pumpPCMStream.
/**
 * Feeds one buffer of raw PCM into the AAC encoder and writes any finished AAC
 * frames into the muxer. Lazily creates and starts the encoder on first call.
 *
 * Flow per call: (1) drain all pending encoder output into the muxer, then
 * (2) queue the new PCM buffer into the encoder, blocking until an input
 * buffer is available.
 *
 * @param pcm    raw PCM bytes to encode
 * @param length number of valid bytes in {@code pcm}
 * @param timeUs presentation timestamp of this buffer, in microseconds
 * @throws IOException if the encoder cannot be created
 */
public synchronized void pumpPCMStream(byte[] pcm, int length, long timeUs) throws IOException {
    if (mMediaCodec == null) {
        // Lazily start the AAC encoder (MediaCodec) on the first PCM buffer.
        // mAudioFormat is populated elsewhere; nothing to do until it exists.
        if (mAudioFormat == null)
            return;
        mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
        Log.i(TAG, String.valueOf(mAudioFormat));
        mAudioFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
        mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 16000);
        // mAudioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 320);
        mMediaCodec.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();
        mBuffers = mMediaCodec.getOutputBuffers();
    }
    int index = 0;
    // Drain all already-encoded AAC frames into the muxer before feeding new input.
    do {
        index = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 1000);
        if (index >= 0) {
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // Codec-config data (csd) is not written as a media sample.
                continue;
            }
            if (mBufferInfo.presentationTimeUs == 0) {
                // NOTE(review): frames stamped 0 are skipped — presumably to drop the
                // first/priming frame; confirm this is intentional.
                continue;
            }
            if (VERBOSE)
                Log.d(TAG, String.format("dequeueOutputBuffer data length:%d,tmUS:%d", mBufferInfo.size, mBufferInfo.presentationTimeUs));
            ByteBuffer outputBuffer = mBuffers[index];
            // Encoded frame ready: write the AAC data into the muxer.
            pumpStream(outputBuffer, mBufferInfo, false);
            mMediaCodec.releaseOutputBuffer(index, false);
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mBuffers = mMediaCodec.getOutputBuffers();
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            Log.v(TAG, "output format changed...");
            MediaFormat newFormat = mMediaCodec.getOutputFormat();
            Log.v(TAG, "output format changed..." + newFormat);
        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.v(TAG, "No buffer available...");
        } else {
            Log.e(TAG, "Message: " + index);
        }
    } while (index >= 0 && !Thread.currentThread().isInterrupted());
    // Queue the new PCM buffer; loops (with a 1ms dequeue timeout) until an
    // input buffer becomes free or the thread is interrupted.
    // NOTE(review): there is no check that length fits in the input buffer —
    // an oversized PCM chunk would throw BufferOverflowException; confirm callers
    // bound their buffer size.
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    do {
        index = mMediaCodec.dequeueInputBuffer(1000);
        if (index >= 0) {
            inputBuffers[index].clear();
            inputBuffers[index].put(pcm, 0, length);
            if (VERBOSE)
                Log.d(TAG, String.format("queueInputBuffer pcm data length:%d,tmUS:%d", length, timeUs));
            mMediaCodec.queueInputBuffer(index, 0, length, timeUs, 0);
        }
    } while (!Thread.currentThread().isInterrupted() && index < 0);
}
Use of android.media.MediaFormat in project Bebop-Ui-Test by orhuncng:
class H264VideoView, method configureMediaCodec.
/**
 * (Re)configures the H.264 decoder with the current SPS/PPS buffers and binds
 * it to this view's surface. The codec is stopped first so it can accept a new
 * configuration, then restarted. On pre-Lollipop devices the legacy input
 * buffer array is cached, since getInputBuffer(int) is unavailable there.
 */
private void configureMediaCodec() {
    mMediaCodec.stop();
    MediaFormat videoFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
    // SPS and PPS are required as codec-specific data for an H.264 decoder.
    videoFormat.setByteBuffer("csd-0", mSpsBuffer);
    videoFormat.setByteBuffer("csd-1", mPpsBuffer);
    mMediaCodec.configure(videoFormat, getHolder().getSurface(), null, 0);
    mMediaCodec.start();
    boolean legacyBufferApi = android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP;
    if (legacyBufferApi) {
        mBuffers = mMediaCodec.getInputBuffers();
    }
    mIsCodecConfigured = true;
}
Use of android.media.MediaFormat in project grafika by google:
class MoviePlayer, method doExtract.
/**
 * Work loop. We execute here until we run out of video or are told to stop.
 *
 * @param extractor     positioned extractor supplying encoded samples
 * @param trackIndex    video track index within the extractor
 * @param decoder       configured and started video decoder
 * @param frameCallback optional per-frame pacing callback; may be null
 */
private void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder, FrameCallback frameCallback) {
    // We need to strike a balance between providing input and reading output that
    // operates efficiently without delays on the output side.
    //
    // To avoid delays on the output side, we need to keep the codec's input buffers
    // fed. There can be significant latency between submitting frame N to the decoder
    // and receiving frame N on the output, so we need to stay ahead of the game.
    //
    // Many video decoders seem to want several frames of video before they start
    // producing output -- one implementation wanted four before it appeared to
    // configure itself. We need to provide a bunch of input frames up front, and try
    // to keep the queue full as we go.
    //
    // (Note it's possible for the encoded data to be written to the stream out of order,
    // so we can't generally submit a single frame and wait for it to appear.)
    //
    // We can't just fixate on the input side though. If we spend too much time trying
    // to stuff the input, we might miss a presentation deadline. At 60Hz we have 16.7ms
    // between frames, so sleeping for 10ms would eat up a significant fraction of the
    // time allowed. (Most video is at 30Hz or less, so for most content we'll have
    // significantly longer.) Waiting for output is okay, but sleeping on availability
    // of input buffers is unwise if we need to be providing output on a regular schedule.
    //
    //
    // In some situations, startup latency may be a concern. To minimize startup time,
    // we'd want to stuff the input full as quickly as possible. This turns out to be
    // somewhat complicated, as the codec may still be starting up and will refuse to
    // accept input. Removing the timeout from dequeueInputBuffer() results in spinning
    // on the CPU.
    //
    // If you have tight startup latency requirements, it would probably be best to
    // "prime the pump" with a sequence of frames that aren't actually shown (e.g.
    // grab the first 10 NAL units and shove them through, then rewind to the start of
    // the first key frame).
    //
    // The actual latency seems to depend on strongly on the nature of the video (e.g.
    // resolution).
    //
    //
    // One conceptually nice approach is to loop on the input side to ensure that the codec
    // always has all the input it can handle. After submitting a buffer, we immediately
    // check to see if it will accept another. We can use a short timeout so we don't
    // miss a presentation deadline. On the output side we only check once, with a longer
    // timeout, then return to the outer loop to see if the codec is hungry for more input.
    //
    // In practice, every call to check for available buffers involves a lot of message-
    // passing between threads and processes. Setting a very brief timeout doesn't
    // exactly work because the overhead required to determine that no buffer is available
    // is substantial. On one device, the "clever" approach caused significantly greater
    // and more highly variable startup latency.
    //
    // The code below takes a very simple-minded approach that works, but carries a risk
    // of occasionally running out of output. A more sophisticated approach might
    // detect an output timeout and use that as a signal to try to enqueue several input
    // buffers on the next iteration.
    //
    // If you want to experiment, set the VERBOSE flag to true and watch the behavior
    // in logcat. Use "logcat -v threadtime" to see sub-second timing.
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    int inputChunk = 0;
    // -1 = no input submitted yet; 0 = startup lag already logged.
    long firstInputTimeNsec = -1;
    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE)
            Log.d(TAG, "loop");
        if (mIsStopRequested) {
            Log.d(TAG, "Stop requested");
            return;
        }
        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                if (firstInputTimeNsec == -1) {
                    firstInputTimeNsec = System.nanoTime();
                }
                ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                // Read the sample data into the ByteBuffer. This neither respects nor
                // updates inputBuf's position, limit, etc.
                int chunkSize = extractor.readSampleData(inputBuf, 0);
                if (chunkSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE)
                        Log.d(TAG, "sent input EOS");
                } else {
                    if (extractor.getSampleTrackIndex() != trackIndex) {
                        Log.w(TAG, "WEIRD: got sample from track " + extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                    }
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufIndex, 0, chunkSize, presentationTimeUs, 0);
                    if (VERBOSE) {
                        Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" + chunkSize);
                    }
                    inputChunk++;
                    extractor.advance();
                }
            } else {
                if (VERBOSE)
                    Log.d(TAG, "input buffer not available");
            }
        }
        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE)
                    Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not important for us, since we're using Surface
                if (VERBOSE)
                    Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE)
                    Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                throw new RuntimeException("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else {
                // decoderStatus >= 0
                if (firstInputTimeNsec != 0) {
                    // Log the delay from the first buffer of input to the first buffer
                    // of output.
                    long nowNsec = System.nanoTime();
                    Log.d(TAG, "startup lag " + ((nowNsec - firstInputTimeNsec) / 1000000.0) + " ms");
                    firstInputTimeNsec = 0;
                }
                boolean doLoop = false;
                if (VERBOSE)
                    Log.d(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + mBufferInfo.size + ")");
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE)
                        Log.d(TAG, "output EOS");
                    if (mLoop) {
                        doLoop = true;
                    } else {
                        outputDone = true;
                    }
                }
                boolean doRender = (mBufferInfo.size != 0);
                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture, so let the callback pace
                // the release.
                if (doRender && frameCallback != null) {
                    frameCallback.preRender(mBufferInfo.presentationTimeUs);
                }
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender && frameCallback != null) {
                    frameCallback.postRender();
                }
                if (doLoop) {
                    Log.d(TAG, "Reached EOS, looping");
                    extractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
                    inputDone = false;
                    // reset decoder state
                    decoder.flush();
                    // BUGFIX: frameCallback is nullable (every other use checks it);
                    // the original called loopReset() unconditionally and would NPE
                    // when looping with no callback installed.
                    if (frameCallback != null) {
                        frameCallback.loopReset();
                    }
                }
            }
        }
    }
}
Aggregations