Use of android.media.MediaFormat in project libstreaming by fyhertz:
the class AACStream, method encodeWithMediaCodec.
@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();
    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API!");
                        } else {
                            // Log.v(TAG,"Pushing raw audio to the decoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();
    mStreaming = true;
}
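libstreaming fills every format key by hand. Since API 16 the same encoder format can be assembled with the MediaFormat.createAudioFormat() factory; below is a minimal sketch of that variant, assuming the same "audio/mp4a-latm" MIME type and mono input. The class and method names (AacEncoderFactory, createAacEncoder) are illustrative, not part of libstreaming.

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import java.io.IOException;

public class AacEncoderFactory {

    // createAudioFormat() sets KEY_MIME, KEY_SAMPLE_RATE and KEY_CHANNEL_COUNT
    // in one call; the remaining keys mirror the AACStream configuration above.
    public static MediaCodec createAacEncoder(int samplingRate, int bitRate, int maxInputSize) throws IOException {
        MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", samplingRate, 1);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
        MediaCodec codec = MediaCodec.createEncoderByType("audio/mp4a-latm");
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        return codec;
    }
}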
Use of android.media.MediaFormat in project speechutils by Kaljurand:
the class MediaFormatFactory, method createMediaFormat.
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public static MediaFormat createMediaFormat(String mime, int sampleRate) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        MediaFormat format = new MediaFormat();
        // TODO: this causes a crash in MediaCodec.configure
        // format.setString(MediaFormat.KEY_FRAME_RATE, null);
        format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sampleRate);
        format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
        format.setString(MediaFormat.KEY_MIME, mime);
        if ("audio/mp4a-latm".equals(mime)) {
            // TODO: or 39?
            format.setInteger(MediaFormat.KEY_AAC_PROFILE, 2);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
        } else if ("audio/flac".equals(mime)) {
            // format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_FLAC); // API=21
            format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
            // TODO: try another bit rate; it does not always seem to have an effect
            // format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
            // TODO: experiment with 0 (fastest/least) to 8 (slowest/most)
            // format.setInteger(MediaFormat.KEY_FLAC_COMPRESSION_LEVEL, 0);
        } else if ("audio/opus".equals(mime)) {
            // format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_OPUS); // API=21
            format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
        } else {
            // TODO: assuming "audio/amr-wb"
            format.setInteger(MediaFormat.KEY_BIT_RATE, 23050);
        }
        return format;
    }
    return null;
}
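A short usage sketch for the factory above, assuming the caller wants a FLAC encoder at a caller-chosen sample rate; the class name FlacEncoderExample and the helper startFlacEncoder are illustrative, and the import of MediaFormatFactory is omitted (its package depends on the speechutils layout).

import android.media.MediaCodec;
import android.media.MediaFormat;

import java.io.IOException;

public class FlacEncoderExample {

    // Returns a started FLAC encoder, or null below JELLY_BEAN where
    // createMediaFormat() returns null. createEncoderByType() only matches
    // the MIME type; it does not validate the other format keys.
    public static MediaCodec startFlacEncoder(int sampleRate) throws IOException {
        MediaFormat format = MediaFormatFactory.createMediaFormat("audio/flac", sampleRate);
        if (format == null) {
            return null;
        }
        MediaCodec codec = MediaCodec.createEncoderByType(format.getString(MediaFormat.KEY_MIME));
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();
        return codec;
    }
}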
Use of android.media.MediaFormat in project speechutils by Kaljurand:
the class EncodedAudioRecorder, method recorderLoop.
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@RequiresPermission(RECORD_AUDIO)
@Override
protected void recorderLoop(AudioRecord speechRecord) {
    mNumBytesSubmitted = 0;
    mNumBytesDequeued = 0;
    MediaFormat format = MediaFormatFactory.createMediaFormat(MIME, getSampleRate());
    MediaCodec codec = getCodec(format);
    if (codec == null) {
        handleError("no codec found");
    } else {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            Log.i("Using codec: " + codec.getCanonicalName());
        }
        int status = recorderEncoderLoop(codec, speechRecord);
        if (Log.DEBUG) {
            AudioUtils.showMetrics(format, mNumBytesSubmitted, mNumBytesDequeued);
        }
        if (status < 0) {
            handleError("encoder error");
        }
    }
}
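getCodec() is not shown in this snippet. A minimal sketch of how an encoder matching the MediaFormat could be looked up with MediaCodecList (API 21+) follows; this is an assumption for illustration, not the actual speechutils implementation.

import android.media.MediaCodec;
import android.media.MediaCodecList;
import android.media.MediaFormat;

import java.io.IOException;

public class CodecLookup {

    // Ask the platform for an encoder that claims to support the given format;
    // returns null when nothing matches or the codec cannot be created.
    public static MediaCodec findEncoder(MediaFormat format) {
        MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        String name = codecList.findEncoderForFormat(format);
        if (name == null) {
            return null;
        }
        try {
            return MediaCodec.createByCodecName(name);
        } catch (IOException e) {
            return null;
        }
    }
}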
Use of android.media.MediaFormat in project ExoPlayer by google:
the class Format, method getFrameworkMediaFormatV16.
/**
 * Returns a {@link MediaFormat} representation of this format.
 */
@SuppressLint("InlinedApi")
@TargetApi(16)
public final MediaFormat getFrameworkMediaFormatV16() {
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, sampleMimeType);
    maybeSetStringV16(format, MediaFormat.KEY_LANGUAGE, language);
    maybeSetIntegerV16(format, MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
    maybeSetIntegerV16(format, MediaFormat.KEY_WIDTH, width);
    maybeSetIntegerV16(format, MediaFormat.KEY_HEIGHT, height);
    maybeSetFloatV16(format, MediaFormat.KEY_FRAME_RATE, frameRate);
    maybeSetIntegerV16(format, "rotation-degrees", rotationDegrees);
    maybeSetIntegerV16(format, MediaFormat.KEY_CHANNEL_COUNT, channelCount);
    maybeSetIntegerV16(format, MediaFormat.KEY_SAMPLE_RATE, sampleRate);
    maybeSetIntegerV16(format, "encoder-delay", encoderDelay);
    maybeSetIntegerV16(format, "encoder-padding", encoderPadding);
    for (int i = 0; i < initializationData.size(); i++) {
        format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
    }
    return format;
}
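A hedged usage sketch: the framework MediaFormat built above (including the csd-N initialization buffers) is what MediaCodec.configure() expects on the decoding side. The class DecoderSetup is illustrative; the Format import assumes the ExoPlayer 2 package name.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

import com.google.android.exoplayer2.Format; // package may differ across ExoPlayer versions

import java.io.IOException;

public class DecoderSetup {

    // Convert an ExoPlayer Format to a framework MediaFormat and hand it to a
    // decoder; "surface" may be null for audio tracks.
    public static MediaCodec configureDecoder(Format format, Surface surface) throws IOException {
        MediaFormat frameworkFormat = format.getFrameworkMediaFormatV16();
        MediaCodec decoder = MediaCodec.createDecoderByType(frameworkFormat.getString(MediaFormat.KEY_MIME));
        decoder.configure(frameworkFormat, surface, null, 0);
        decoder.start();
        return decoder;
    }
}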
Use of android.media.MediaFormat in project ExoPlayer by google:
the class MediaCodecVideoRenderer, method getMediaFormat.
@SuppressLint("InlinedApi")
private static MediaFormat getMediaFormat(Format format, CodecMaxValues codecMaxValues, boolean deviceNeedsAutoFrcWorkaround, int tunnelingAudioSessionId) {
MediaFormat frameworkMediaFormat = format.getFrameworkMediaFormatV16();
// Set the maximum adaptive video dimensions.
frameworkMediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, codecMaxValues.width);
frameworkMediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, codecMaxValues.height);
// Set the maximum input size.
if (codecMaxValues.inputSize != Format.NO_VALUE) {
frameworkMediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxValues.inputSize);
}
// Set FRC workaround.
if (deviceNeedsAutoFrcWorkaround) {
frameworkMediaFormat.setInteger("auto-frc", 0);
}
// Configure tunneling if enabled.
if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
configureTunnelingV21(frameworkMediaFormat, tunnelingAudioSessionId);
}
return frameworkMediaFormat;
}
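configureTunnelingV21() is not shown here. A plausible sketch, offered as an assumption rather than ExoPlayer's exact code, would request the tunneled-playback codec feature and attach the audio session id, both of which are API 21 additions.

import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;

public class TunnelingConfig {

    // Assumed behaviour: enable the TunneledPlayback feature on the format and
    // tag it with the audio session the tunneled video should sync to.
    public static void configureTunnelingV21(MediaFormat mediaFormat, int tunnelingAudioSessionId) {
        mediaFormat.setFeatureEnabled(CodecCapabilities.FEATURE_TunneledPlayback, true);
        mediaFormat.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, tunnelingAudioSessionId);
    }
}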