Usage of android.media.MediaCodec.BufferInfo in the project libstreaming by fyhertz:
class EncoderDebugger, method searchSPSandPPS.
/**
 * Tries to obtain the SPS and the PPS for the encoder.
 *
 * Feeds dummy frames to the encoder (some encoders will not emit parameter
 * sets until they have received input) and looks for the SPS/PPS either in
 * the {@link MediaFormat} reported via INFO_OUTPUT_FORMAT_CHANGED (the normal
 * path) or, failing that, by scanning the raw byte stream for NAL units of
 * type 7 (SPS) and 8 (PPS) delimited by 00 00 00 01 start codes.
 *
 * Side effects: fills mSPS/mPPS and their Base64 forms mB64SPS/mB64PPS.
 *
 * @return the time in microseconds spent searching
 * @throws IllegalStateException (via check) if the SPS/PPS could not be found
 *         within the 3-second budget
 */
private long searchSPSandPPS() {
    ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    // Scratch buffer for the first bytes of the encoded stream; NAL scan is
    // only attempted on output buffers smaller than 128 bytes.
    byte[] csd = new byte[128];
    int len = 0, p = 4, q = 4;
    long elapsed = 0, now = timestamp();
    // Give up after 3 seconds (timestamps are in microseconds).
    while (elapsed < 3000000 && (mSPS == null || mPPS == null)) {
        // Some encoders won't give us the SPS and PPS unless they receive something to encode first...
        int bufferIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE);
        if (bufferIndex >= 0) {
            check(inputBuffers[bufferIndex].capacity() >= mData.length, "The input buffer is not big enough.");
            inputBuffers[bufferIndex].clear();
            inputBuffers[bufferIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE)
                Log.e(TAG, "No buffer available !");
        }
        // We are looking for the SPS and the PPS here. As always, Android is very inconsistent, I have observed that some
        // encoders will give those parameters through the MediaFormat object (that is the normal behaviour).
        // But some other will not, in that case we try to find a NAL unit of type 7 or 8 in the byte stream outputed by the encoder...
        int index = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The SPS and PPS should be there; strip the 4-byte 00 00 00 01 start code.
            MediaFormat format = mEncoder.getOutputFormat();
            ByteBuffer spsb = format.getByteBuffer("csd-0");
            ByteBuffer ppsb = format.getByteBuffer("csd-1");
            mSPS = new byte[spsb.capacity() - 4];
            spsb.position(4);
            spsb.get(mSPS, 0, mSPS.length);
            mPPS = new byte[ppsb.capacity() - 4];
            ppsb.position(4);
            ppsb.get(mPPS, 0, mPPS.length);
            break;
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mEncoder.getOutputBuffers();
        } else if (index >= 0) {
            len = info.size;
            if (len < 128) {
                outputBuffers[index].get(csd, 0, len);
                if (len > 0 && csd[0] == 0 && csd[1] == 0 && csd[2] == 0 && csd[3] == 1) {
                    // The NAL unit order depends on the phone, so we don't make any assumption about it.
                    // Reset the scan cursors for this buffer (fix: they used to carry over
                    // stale offsets from a previous buffer that yielded only one of the two).
                    p = 4;
                    q = 4;
                    while (p < len) {
                        // Advance p to the next 00 00 00 01 start code or to the end of the data.
                        // Bounds test comes first so we never index past len.
                        while (p + 3 < len && !(csd[p + 0] == 0 && csd[p + 1] == 0 && csd[p + 2] == 0 && csd[p + 3] == 1)) p++;
                        if (p + 3 >= len)
                            p = len;
                        // NAL header: low 5 bits are the unit type (7 = SPS, 8 = PPS).
                        if ((csd[q] & 0x1F) == 7) {
                            mSPS = new byte[p - q];
                            System.arraycopy(csd, q, mSPS, 0, p - q);
                        } else {
                            mPPS = new byte[p - q];
                            System.arraycopy(csd, q, mPPS, 0, p - q);
                        }
                        // Skip the start code and continue from the next NAL unit.
                        p += 4;
                        q = p;
                    }
                }
            }
            mEncoder.releaseOutputBuffer(index, false);
        }
        elapsed = timestamp() - now;
    }
    check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
    mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
    mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);
    return elapsed;
}
Usage of android.media.MediaCodec.BufferInfo in the project chromeview by pwnall:
class WebAudioMediaCodecBridge, method decodeAudioFile.
/**
 * Decodes an encoded audio file for WebAudio and streams the raw PCM chunks
 * to the native side.
 *
 * Takes ownership of {@code inputFD} via ParcelFileDescriptor.adoptFd() and
 * hands it back with detachFd() on every exit path, so the caller keeps
 * responsibility for closing the descriptor.
 *
 * @param ctx                   Android context (unused here; kept for the native signature)
 * @param nativeMediaCodecBridge pointer to the native bridge object
 * @param inputFD               file descriptor of the encoded audio data
 * @param dataSize              length of the encoded data in bytes
 * @return true when decoding ran to the end of stream, false on any setup failure
 */
@CalledByNative
private static boolean decodeAudioFile(Context ctx, int nativeMediaCodecBridge, int inputFD, long dataSize) {
    // Reject sizes that cannot be represented safely downstream.
    if (dataSize < 0 || dataSize > 0x7fffffff)
        return false;
    MediaExtractor extractor = new MediaExtractor();
    ParcelFileDescriptor encodedFD;
    encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
    try {
        extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
    } catch (Exception e) {
        e.printStackTrace();
        encodedFD.detachFd();
        return false;
    }
    if (extractor.getTrackCount() <= 0) {
        encodedFD.detachFd();
        return false;
    }
    // Only the first track is decoded.
    MediaFormat format = extractor.getTrackFormat(0);
    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    String mime = format.getString(MediaFormat.KEY_MIME);
    long durationMicroseconds = 0;
    if (format.containsKey(MediaFormat.KEY_DURATION)) {
        try {
            durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
        } catch (Exception e) {
            // Duration is optional metadata; proceed with 0 when it is unreadable.
            Log.d(LOG_TAG, "Cannot get duration");
        }
    }
    if (DEBUG) {
        Log.d(LOG_TAG, "Tracks: " + extractor.getTrackCount() + " Rate: " + sampleRate + " Channels: " + channelCount + " Mime: " + mime + " Duration: " + durationMicroseconds + " microsec");
    }
    // Tell the native side the output geometry before any chunk arrives.
    nativeInitializeDestination(nativeMediaCodecBridge, channelCount, sampleRate, durationMicroseconds);
    // Create decoder
    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, null, /* surface */
    null, /* crypto */
    0);
    codec.start();
    ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
    ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
    // A track must be selected and will be used to read samples.
    extractor.selectTrack(0);
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    // Reused across iterations (fix: was allocated anew on every loop pass).
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    // Keep processing until the output is done.
    while (!sawOutputEOS) {
        if (!sawInputEOS) {
            // Input side: pull one compressed sample from the extractor into the codec.
            int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0);
                long presentationTimeMicroSec = 0;
                if (sampleSize < 0) {
                    // End of the encoded stream: queue an empty EOS buffer.
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeMicroSec = extractor.getSampleTime();
                }
                codec.queueInputBuffer(inputBufIndex, 0, /* offset */
                sampleSize, presentationTimeMicroSec, sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }
        // Output side: forward each decoded PCM chunk to the native consumer.
        final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
        if (outputBufIndex >= 0) {
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];
            if (info.size > 0) {
                nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size);
            }
            buf.clear();
            codec.releaseOutputBuffer(outputBufIndex, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                sawOutputEOS = true;
            }
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // The codec swapped its output buffers; refresh our references.
            codecOutputBuffers = codec.getOutputBuffers();
        }
    }
    encodedFD.detachFd();
    codec.stop();
    codec.release();
    return true;
}
Aggregations