Use of android.media.MediaPlayer in project android_frameworks_base by ResurrectionRemix.
The class SimplePlayer, method playOrPause().
public void playOrPause() {
    if (mMediaPlayer == null || !mMediaPlayer.isPlaying()) {
        if (mMediaPlayer == null) {
            try {
                mMediaPlayer = new MediaPlayer();
                if (mSession != 0) {
                    mMediaPlayer.setAudioSessionId(mSession);
                    Log.d(TAG, "mMediaPlayer.setAudioSessionId(): " + mSession);
                }
                if (mFileName.equals("")) {
                    Log.d(TAG, "Playing from resource");
                    AssetFileDescriptor afd = mContext.getResources().openRawResourceFd(mFileResId);
                    mMediaPlayer.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
                    afd.close();
                } else {
                    Log.d(TAG, "Playing file: " + mFileName);
                    mMediaPlayer.setDataSource(mFileName);
                }
                mMediaPlayer.setAudioStreamType(mStreamType);
                mMediaPlayer.prepare();
                mMediaPlayer.setLooping(true);
            } catch (IOException ex) {
                Log.e(TAG, "mMediaPlayer create failed:", ex);
                mMediaPlayer = null;
            } catch (IllegalArgumentException ex) {
                Log.e(TAG, "mMediaPlayer create failed:", ex);
                mMediaPlayer = null;
            } catch (SecurityException ex) {
                Log.e(TAG, "mMediaPlayer create failed:", ex);
                mMediaPlayer = null;
            }
            if (mMediaPlayer != null) {
                mMediaPlayer.setAuxEffectSendLevel(mSendLevel);
                mMediaPlayer.attachAuxEffect(mEffectId);
                mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                    public void onCompletion(MediaPlayer mp) {
                        updatePlayPauseButton();
                    }
                });
                mSessionText.setText("Session: " + Integer.toString(mMediaPlayer.getAudioSessionId()));
            }
        }
        if (mMediaPlayer != null) {
            mMediaPlayer.start();
        }
    } else {
        mMediaPlayer.pause();
    }
    updatePlayPauseButton();
}
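This playOrPause() implementation wires the player to an auxiliary effect through setAuxEffectSendLevel() and attachAuxEffect(). As a minimal sketch of the same pattern outside SimplePlayer, assuming a valid Context named context and a hypothetical raw resource R.raw.test_clip, a caller could route a player's auxiliary send into a PresetReverb created on the output mix:

// Sketch only: `context` and R.raw.test_clip are assumptions, not from the original code.
MediaPlayer player = MediaPlayer.create(context, R.raw.test_clip);
// Auxiliary effects must live on session 0 (the output mix) to be attachable.
PresetReverb reverb = new PresetReverb(0 /* priority */, 0 /* output mix session */);
reverb.setPreset(PresetReverb.PRESET_LARGEHALL);
reverb.setEnabled(true);
// Mirror the sample's attachAuxEffect(mEffectId) / setAuxEffectSendLevel(mSendLevel) calls.
player.attachAuxEffect(reverb.getId());
player.setAuxEffectSendLevel(1.0f);
player.start();
// On teardown: player.release(); reverb.release();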
Use of android.media.MediaPlayer in project android_frameworks_base by ResurrectionRemix.
The class VideoView, method openVideo().
private void openVideo() {
    if (mUri == null || mSurfaceHolder == null) {
        // not ready for playback just yet, will try again later
        return;
    }
    // we shouldn't clear the target state, because somebody might have
    // called start() previously
    release(false);
    AudioManager am = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
    am.requestAudioFocus(null, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
    try {
        mMediaPlayer = new MediaPlayer();
        // TODO: create SubtitleController in MediaPlayer, but we need
        // a context for the subtitle renderers
        final Context context = getContext();
        final SubtitleController controller = new SubtitleController(
                context, mMediaPlayer.getMediaTimeProvider(), mMediaPlayer);
        controller.registerRenderer(new WebVttRenderer(context));
        controller.registerRenderer(new TtmlRenderer(context));
        controller.registerRenderer(new Cea708CaptionRenderer(context));
        controller.registerRenderer(new ClosedCaptionRenderer(context));
        mMediaPlayer.setSubtitleAnchor(controller, this);
        if (mAudioSession != 0) {
            mMediaPlayer.setAudioSessionId(mAudioSession);
        } else {
            mAudioSession = mMediaPlayer.getAudioSessionId();
        }
        mMediaPlayer.setOnPreparedListener(mPreparedListener);
        mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener);
        mMediaPlayer.setOnCompletionListener(mCompletionListener);
        mMediaPlayer.setOnErrorListener(mErrorListener);
        mMediaPlayer.setOnInfoListener(mInfoListener);
        mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
        mCurrentBufferPercentage = 0;
        mMediaPlayer.setDataSource(mContext, mUri, mHeaders);
        mMediaPlayer.setDisplay(mSurfaceHolder);
        mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
        mMediaPlayer.setScreenOnWhilePlaying(true);
        mMediaPlayer.prepareAsync();
        for (Pair<InputStream, MediaFormat> pending : mPendingSubtitleTracks) {
            try {
                mMediaPlayer.addSubtitleSource(pending.first, pending.second);
            } catch (IllegalStateException e) {
                mInfoListener.onInfo(mMediaPlayer, MediaPlayer.MEDIA_INFO_UNSUPPORTED_SUBTITLE, 0);
            }
        }
        // we don't set the target state here either, but preserve the
        // target state that was there before.
        mCurrentState = STATE_PREPARING;
        attachMediaController();
    } catch (IOException ex) {
        Log.w(TAG, "Unable to open content: " + mUri, ex);
        mCurrentState = STATE_ERROR;
        mTargetState = STATE_ERROR;
        mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
        return;
    } catch (IllegalArgumentException ex) {
        Log.w(TAG, "Unable to open content: " + mUri, ex);
        mCurrentState = STATE_ERROR;
        mTargetState = STATE_ERROR;
        mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
        return;
    } finally {
        mPendingSubtitleTracks.clear();
    }
}
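openVideo() is private and only runs once both a URI and a surface are available; application code reaches it indirectly through the public VideoView API. A minimal usage sketch, assuming an Activity with a layout id R.id.video_view and a placeholder stream URL:

// Sketch only: R.id.video_view and the URL are placeholders, not from the original code.
final VideoView videoView = (VideoView) findViewById(R.id.video_view);
videoView.setMediaController(new MediaController(this));
videoView.setVideoURI(Uri.parse("https://example.com/sample.mp4"));
videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
    @Override
    public void onPrepared(MediaPlayer mp) {
        // Runs after openVideo() has created the player and prepareAsync() has completed.
        videoView.start();
    }
});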
Use of android.media.MediaPlayer in project android_frameworks_base by ResurrectionRemix.
The class VideoView, method getAudioSessionId().
@Override
public int getAudioSessionId() {
    if (mAudioSession == 0) {
        MediaPlayer foo = new MediaPlayer();
        mAudioSession = foo.getAudioSessionId();
        foo.release();
    }
    return mAudioSession;
}
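Because VideoView lazily allocates a session id here (via a throwaway MediaPlayer) even before playback starts, callers can bind audio effects to the view's output up front. A hedged sketch, with the Equalizer settings purely illustrative:

// Sketch only: `videoView` is an existing VideoView instance; values are illustrative.
int sessionId = videoView.getAudioSessionId();
Equalizer equalizer = new Equalizer(0 /* priority */, sessionId);
short maxLevel = equalizer.getBandLevelRange()[1];
equalizer.setBandLevel((short) 0, maxLevel); // boost the lowest band as an example
equalizer.setEnabled(true);
// Release the effect when the view is done playing: equalizer.release();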
Use of android.media.MediaPlayer in project android_frameworks_base by ResurrectionRemix.
The class MediaVisualizerTest, method test2_1ListenerCapture().
//Test case 2.1: test capture with listener
@LargeTest
public void test2_1ListenerCapture() throws Exception {
    boolean result = false;
    String msg = "test2_1ListenerCapture()";
    AudioEffect vc = null;
    MediaPlayer mp = null;
    AudioManager am = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);
    int ringerMode = am.getRingerMode();
    am.setRingerMode(AudioManager.RINGER_MODE_NORMAL);
    int volume = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
    am.setStreamVolume(AudioManager.STREAM_MUSIC, am.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
    try {
        // creating a volume controller on output mix ensures that ro.audio.silent mutes
        // audio after the effects and not before
        vc = new AudioEffect(AudioEffect.EFFECT_TYPE_NULL, VOLUME_EFFECT_UUID, 0, 0);
        vc.setEnabled(true);
        mp = new MediaPlayer();
        mp.setDataSource(MediaNames.SINE_200_1000);
        mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
        getVisualizer(mp.getAudioSessionId());
        createListenerLooper();
        synchronized (lock) {
            try {
                lock.wait(1000);
            } catch (Exception e) {
                Log.e(TAG, "Looper creation: wait was interrupted.");
            }
        }
        assertTrue(mInitialized);
        mVisualizer.setEnabled(true);
        // check capture on silence
        synchronized (lock) {
            try {
                mCaptureWaveform = true;
                lock.wait(1000);
                mCaptureWaveform = false;
            } catch (Exception e) {
                Log.e(TAG, "Capture waveform: wait was interrupted.");
            }
        }
        assertNotNull(msg + ": waveform capture failed", mWaveform);
        int energy = computeEnergy(mWaveform, true);
        assertEquals(msg + ": getWaveForm reports energy for silence", 0, energy);
        synchronized (lock) {
            try {
                mCaptureFft = true;
                lock.wait(1000);
                mCaptureFft = false;
            } catch (Exception e) {
                Log.e(TAG, "Capture FFT: wait was interrupted.");
            }
        }
        assertNotNull(msg + ": FFT capture failed", mFft);
        energy = computeEnergy(mFft, false);
        assertEquals(msg + ": getFft reports energy for silence", 0, energy);
        mp.prepare();
        mp.start();
        Thread.sleep(500);
        // check capture on sound
        synchronized (lock) {
            try {
                mCaptureWaveform = true;
                lock.wait(1000);
                mCaptureWaveform = false;
            } catch (Exception e) {
                Log.e(TAG, "Capture waveform: wait was interrupted.");
            }
        }
        assertNotNull(msg + ": waveform capture failed", mWaveform);
        energy = computeEnergy(mWaveform, true);
        assertTrue(msg + ": getWaveForm reads insufficient level", energy > 0);
        synchronized (lock) {
            try {
                mCaptureFft = true;
                lock.wait(1000);
                mCaptureFft = false;
            } catch (Exception e) {
                Log.e(TAG, "Capture FFT: wait was interrupted.");
            }
        }
        assertNotNull(msg + ": FFT capture failed", mFft);
        energy = computeEnergy(mFft, false);
        assertTrue(msg + ": getFft reads insufficient level", energy > 0);
        result = true;
    } catch (IllegalArgumentException e) {
        msg = msg.concat(": Bad parameter value");
        loge(msg, "Bad parameter value");
    } catch (UnsupportedOperationException e) {
        msg = msg.concat(": get parameter() rejected");
        loge(msg, "get parameter() rejected");
    } catch (IllegalStateException e) {
        msg = msg.concat("get parameter() called in wrong state");
        loge(msg, "get parameter() called in wrong state");
    } catch (InterruptedException e) {
        loge(msg, "sleep() interrupted");
    } finally {
        terminateListenerLooper();
        releaseVisualizer();
        if (mp != null) {
            mp.release();
        }
        if (vc != null) {
            vc.release();
        }
        am.setStreamVolume(AudioManager.STREAM_MUSIC, volume, 0);
        am.setRingerMode(ringerMode);
    }
    assertTrue(msg, result);
}
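The test leans on helpers not shown in this excerpt (getVisualizer(), createListenerLooper(), computeEnergy()). For orientation only, here is a hedged sketch of the listener registration such helpers presumably wrap, using the public Visualizer API; the capture rate and the empty callbacks are illustrative:

// Sketch only: `mp` is a prepared MediaPlayer; callbacks are left empty for brevity.
Visualizer visualizer = new Visualizer(mp.getAudioSessionId());
visualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]); // largest supported size
visualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
    @Override
    public void onWaveFormDataCapture(Visualizer v, byte[] waveform, int samplingRate) {
        // waveform: unsigned 8-bit samples centered around 128
    }
    @Override
    public void onFftDataCapture(Visualizer v, byte[] fft, int samplingRate) {
        // fft: real/imaginary byte pairs for the first half of the spectrum
    }
}, Visualizer.getMaxCaptureRate() / 2, true /* waveform */, true /* fft */);
visualizer.setEnabled(true);
// On teardown: visualizer.setEnabled(false); visualizer.release();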
Use of android.media.MediaPlayer in project android_frameworks_base by ResurrectionRemix.
The class MediaEnvReverbTest, method test2_1InsertSoundModification().
//Test case 2.1: test actual insert reverb influence on sound
@LargeTest
public void test2_1InsertSoundModification() throws Exception {
    boolean result = false;
    String msg = "test2_1InsertSoundModification()";
    EnergyProbe probe = null;
    AudioEffect vc = null;
    MediaPlayer mp = null;
    AudioEffect rvb = null;
    AudioManager am = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);
    int ringerMode = am.getRingerMode();
    am.setRingerMode(AudioManager.RINGER_MODE_NORMAL);
    int volume = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
    am.setStreamVolume(AudioManager.STREAM_MUSIC, am.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
    try {
        // creating a volume controller on output mix ensures that ro.audio.silent mutes
        // audio after the effects and not before
        vc = new AudioEffect(AudioEffect.EFFECT_TYPE_NULL, VOLUME_EFFECT_UUID, 0, 0);
        vc.setEnabled(true);
        mp = new MediaPlayer();
        mp.setDataSource(MediaNames.SINE_200_1000);
        mp.setAudioStreamType(AudioManager.STREAM_MUSIC);
        // create reverb with UUID instead of EnvironmentalReverb constructor otherwise an
        // auxiliary reverb will be chosen by the effect framework as we are on session 0
        rvb = new AudioEffect(AudioEffect.EFFECT_TYPE_NULL, ENV_REVERB_EFFECT_UUID, 0, 0);
        rvb.setParameter(EnvironmentalReverb.PARAM_ROOM_LEVEL, (short) 0);
        rvb.setParameter(EnvironmentalReverb.PARAM_REVERB_LEVEL, (short) 0);
        rvb.setParameter(EnvironmentalReverb.PARAM_DECAY_TIME, 2000);
        rvb.setEnabled(true);
        // create probe after reverb so that it is chained behind the reverb in the
        // effect chain
        probe = new EnergyProbe(0);
        mp.prepare();
        mp.start();
        Thread.sleep(1000);
        mp.stop();
        Thread.sleep(300);
        // measure energy around 1kHz after media player was stopped for 300 ms
        int energy1000 = probe.capture(1000);
        assertTrue(msg + ": reverb has no effect", energy1000 > 0);
        result = true;
    } catch (IllegalArgumentException e) {
        msg = msg.concat(": Bad parameter value");
        loge(msg, "Bad parameter value");
    } catch (UnsupportedOperationException e) {
        msg = msg.concat(": get parameter() rejected");
        loge(msg, "get parameter() rejected");
    } catch (IllegalStateException e) {
        msg = msg.concat("get parameter() called in wrong state");
        loge(msg, "get parameter() called in wrong state");
    } catch (InterruptedException e) {
        loge(msg, "sleep() interrupted");
    } finally {
        if (mp != null) {
            mp.release();
        }
        if (vc != null) {
            vc.release();
        }
        if (rvb != null) {
            rvb.release();
        }
        if (probe != null) {
            probe.release();
        }
        am.setStreamVolume(AudioManager.STREAM_MUSIC, volume, 0);
        am.setRingerMode(ringerMode);
    }
    assertTrue(msg, result);
}
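The test builds the reverb from its UUID so that it lands on the output mix (session 0) as an insert effect, as the in-code comment explains. In application code, a simpler route is to construct an EnvironmentalReverb on a player's own audio session. A hedged sketch under that assumption; the helper name and parameter values are illustrative, with only the 2000 ms decay taken from the test:

// Sketch only: hypothetical helper attaching an insert EnvironmentalReverb to one player.
void playWithInsertReverb(String path) throws IOException {
    MediaPlayer player = new MediaPlayer();
    player.setDataSource(path); // `path` is a placeholder for a local media file
    player.setAudioStreamType(AudioManager.STREAM_MUSIC);
    // Constructing the reverb on the player's own (non-zero) session makes it an
    // insert effect for that stream, unlike the session-0 UUID trick in the test.
    EnvironmentalReverb reverb =
            new EnvironmentalReverb(0 /* priority */, player.getAudioSessionId());
    reverb.setRoomLevel((short) 0);   // 0 mB: room level not attenuated
    reverb.setReverbLevel((short) 0); // 0 mB: reverb level not attenuated
    reverb.setDecayTime(2000);        // 2 s decay, mirroring the test's PARAM_DECAY_TIME
    reverb.setEnabled(true);
    player.prepare();
    player.start();
    // Caller is responsible for player.release() and reverb.release() on teardown.
}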