Use of android.media.AudioTrack in project actor-platform by actorapp.
The class OpusPlayerActor, method onPlayMessage.
protected void onPlayMessage(String fileName, float seek) {
    if (state != STATE_NONE) {
        destroyPlayer();
    }
    state = STATE_NONE;
    currentFileName = fileName;
    int res = opusLib.openOpusFile(currentFileName);
    if (res == 0) {
        // openOpusFile returns 0 when the file could not be opened.
        callback.onError(currentFileName);
        return;
    }
    duration = opusLib.getTotalPcmDuration();
    offset = 0;
    try {
        // Decoded Opus is played back as 48 kHz mono 16-bit PCM in streaming mode.
        bufferSize = AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
        audioTrack.play();
    } catch (Exception e) {
        e.printStackTrace();
        destroyPlayer();
        callback.onError(currentFileName);
        return;
    }
    state = STATE_STARTED;
    if (seek != 0) {
        opusLib.seekOpusFile(seek);
    }
    callback.onStart(fileName);
    self().send(new Iterate());
}
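The method only opens the file, starts the AudioTrack, and schedules the first Iterate message; the loop that actually pumps decoded PCM into the track is not part of this snippet. Below is a minimal, hedged sketch of what such a handler could look like. The names onIterate, decodeNextPcmChunk, and callback.onStop are illustrative assumptions, not the actor-platform API.

// Hedged sketch only; the real OpusPlayerActor reads PCM through opusLib and tracks
// progress differently. decodeNextPcmChunk() is a hypothetical helper.
protected void onIterate() {
    if (state != STATE_STARTED) {
        return;
    }
    byte[] pcm = decodeNextPcmChunk(bufferSize);
    if (pcm == null || pcm.length == 0) {
        // End of the Opus stream: release the AudioTrack and notify the callback.
        destroyPlayer();
        state = STATE_NONE;
        callback.onStop(currentFileName);
        return;
    }
    // In MODE_STREAM this write blocks until the data has been queued to the mixer.
    audioTrack.write(pcm, 0, pcm.length);
    offset += pcm.length;
    // Schedule the next chunk through the actor mailbox.
    self().send(new Iterate());
}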
Use of android.media.AudioTrack in project android_frameworks_base by ParanoidAndroid.
The class BlockingAudioTrack, method createStreamingAudioTrack.
private AudioTrack createStreamingAudioTrack() {
    final int channelConfig = getChannelConfig(mChannelCount);
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(mSampleRateInHz, channelConfig, mAudioFormat);
    int bufferSizeInBytes = Math.max(MIN_AUDIO_BUFFER_SIZE, minBufferSizeInBytes);
    AudioTrack audioTrack = new AudioTrack(mStreamType, mSampleRateInHz, channelConfig, mAudioFormat, bufferSizeInBytes, AudioTrack.MODE_STREAM);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        Log.w(TAG, "Unable to create audio track.");
        audioTrack.release();
        return null;
    }
    mAudioBufferSize = bufferSizeInBytes;
    setupVolume(audioTrack, mVolume, mPan);
    return audioTrack;
}
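The null return is the caller's signal that the track could not be initialized. A hedged sketch of a caller follows, assuming the same mAudioTrack/mAudioTrackLock fields used elsewhere in BlockingAudioTrack; the method name startPlayback is illustrative and not the AOSP implementation.

// Hedged sketch: acquire the track, publish it under the lock, and start streaming.
// In MODE_STREAM, play() starts consuming data as write() delivers it.
private boolean startPlayback() {
    AudioTrack track = createStreamingAudioTrack();
    if (track == null) {
        // Creation failed (bad parameters or the hardware rejected the configuration).
        return false;
    }
    synchronized (mAudioTrackLock) {
        mAudioTrack = track;
    }
    track.play();
    return true;
}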
Use of android.media.AudioTrack in project android_frameworks_base by ParanoidAndroid.
The class BlockingAudioTrack, method waitAndRelease.
public void waitAndRelease() {
    AudioTrack track = null;
    synchronized (mAudioTrackLock) {
        track = mAudioTrack;
    }
    if (track == null) {
        if (DBG)
            Log.d(TAG, "Audio track null [duplicate call to waitAndRelease ?]");
        return;
    }
    // Short utterances (less data written than one buffer) must be stop()ed so the
    // mixer flushes the partial buffer. If mStopped is already true the track has
    // been stopped, so there is not much point doing that again.
    if (mBytesWritten < mAudioBufferSize && !mStopped) {
        if (DBG) {
            Log.d(TAG, "Stopping audio track to flush audio, state was : " + track.getPlayState() + ",stopped= " + mStopped);
        }
        mIsShortUtterance = true;
        track.stop();
    }
    // Block until the audio track is done only if we haven't stopped yet.
    if (!mStopped) {
        if (DBG)
            Log.d(TAG, "Waiting for audio track to complete : " + mAudioTrack.hashCode());
        blockUntilDone(mAudioTrack);
    }
    // The track has either drained or been stopped, so it's safe to release at this point.
    if (DBG)
        Log.d(TAG, "Releasing audio track [" + track.hashCode() + "]");
    synchronized (mAudioTrackLock) {
        mAudioTrack = null;
    }
    track.release();
}
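blockUntilDone() itself is not part of this snippet. The following is a hedged sketch of one way to implement it, polling the playback head until all written frames have been rendered; mBytesWritten, mBytesPerFrame, and mStopped are fields assumed from the class, and the real AOSP code is more elaborate.

// Hedged sketch: wait until the playback head reaches the number of frames written,
// the track leaves PLAYSTATE_PLAYING, or the head stops advancing. Not the original code.
private void blockUntilDone(AudioTrack track) {
    final int lengthInFrames = mBytesWritten / mBytesPerFrame;
    final long pollIntervalMs = 100;
    int previousPosition = -1;
    int currentPosition;
    while ((currentPosition = track.getPlaybackHeadPosition()) < lengthInFrames
            && track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING
            && !mStopped) {
        if (currentPosition == previousPosition) {
            // Simplification: if the head made no progress over a poll interval, give up
            // rather than spin forever.
            break;
        }
        previousPosition = currentPosition;
        try {
            Thread.sleep(pollIntervalMs);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            break;
        }
    }
}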
Use of android.media.AudioTrack in project android_frameworks_base by ParanoidAndroid.
The class MediaAudioEffectTest, method test1_7AuxiliaryOnAudioTrack.
//Test case 1.7: test auxiliary effect attachment on AudioTrack
@LargeTest
public void test1_7AuxiliaryOnAudioTrack() throws Exception {
    boolean result = false;
    String msg = "test1_7AuxiliaryOnAudioTrack()";
    try {
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT), AudioTrack.MODE_STREAM);
        assertNotNull(msg + ": could not create AudioTrack", track);
        AudioEffect effect = new AudioEffect(AudioEffect.EFFECT_TYPE_ENV_REVERB, AudioEffect.EFFECT_TYPE_NULL, 0, 0);
        track.attachAuxEffect(effect.getId());
        track.setAuxEffectSendLevel(1.0f);
        result = true;
        effect.release();
        track.release();
    } catch (IllegalArgumentException e) {
        msg = msg.concat(": Environmental reverb not found");
        loge(msg, ": Environmental reverb not found");
    } catch (UnsupportedOperationException e) {
        msg = msg.concat(": Effect library not loaded");
        loge(msg, ": Effect library not loaded");
    }
    assertTrue(msg, result);
}
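The test uses the raw AudioEffect constructor with the environmental reverb type on audio session 0, which creates an auxiliary effect that any track can attach to by id. For comparison, here is a hedged sketch of the same attachment using the public android.media.audiofx.EnvironmentalReverb class; the class and method names of the sketch are illustrative, while the AudioTrack and effect calls are standard platform APIs.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.audiofx.EnvironmentalReverb;

public final class AuxReverbExample {
    // Creates a streaming track and routes its auxiliary send through a reverb created
    // on session 0. The caller is responsible for releasing both objects when done.
    public static AudioTrack createTrackWithAuxReverb() {
        int minBuf = AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf, AudioTrack.MODE_STREAM);
        EnvironmentalReverb reverb = new EnvironmentalReverb(0, 0); // priority 0, session 0 = auxiliary effect
        reverb.setEnabled(true);
        track.attachAuxEffect(reverb.getId());
        track.setAuxEffectSendLevel(1.0f); // full send level into the reverb bus
        return track;
    }
}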
Use of android.media.AudioTrack in project android_frameworks_base by ParanoidAndroid.
The class MediaAudioTrackTest, method testPlaybackHeadPositionAfterFlush.
//Test case 3: getPlaybackHeadPosition() is 0 after flush()
@LargeTest
public void testPlaybackHeadPositionAfterFlush() throws Exception {
    // constants for test
    final String TEST_NAME = "testPlaybackHeadPositionAfterFlush";
    final int TEST_SR = 22050;
    final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
    final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    final int TEST_MODE = AudioTrack.MODE_STREAM;
    final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
    //-------- initialization --------------
    int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
    AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT, minBuffSize, TEST_MODE);
    byte[] data = new byte[minBuffSize / 2];
    //-------- test --------------
    assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
    track.write(data, 0, data.length);
    track.write(data, 0, data.length);
    track.play();
    Thread.sleep(100);
    track.stop();
    track.flush();
    log(TEST_NAME, "position =" + track.getPlaybackHeadPosition());
    assertTrue(TEST_NAME, track.getPlaybackHeadPosition() == 0);
    //-------- tear down --------------
    track.release();
}
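A hedged companion sketch (not part of the snippet above, and not necessarily in the AOSP suite as written) checks the complementary property: after roughly 100 ms of playback and before any flush, the playback head should have advanced past zero. It reuses the same configuration and the assumeTrue/assertTrue helpers from the test base class.

@LargeTest
public void testPlaybackHeadPositionAdvances() throws Exception {
    // Hedged sketch: same stream configuration as the flush test above.
    final String TEST_NAME = "testPlaybackHeadPositionAdvances";
    final int TEST_SR = 22050;
    final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
    final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, TEST_SR, TEST_CONF, TEST_FORMAT, minBuffSize, AudioTrack.MODE_STREAM);
    byte[] data = new byte[minBuffSize / 2];
    assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
    track.write(data, 0, data.length);
    track.write(data, 0, data.length);
    track.play();
    Thread.sleep(100);
    // The head counts rendered frames, so it should be non-zero while playing.
    assertTrue(TEST_NAME, track.getPlaybackHeadPosition() > 0);
    track.stop();
    track.release();
}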