Use of android.media.AudioTrack in project android_frameworks_base by DirtyUnicorns.
Class MediaAudioTrackTest, method testSetStereoVolumeMin:
//Test case 2: setStereoVolume() with min volume returns SUCCESS
@LargeTest
public void testSetStereoVolumeMin() throws Exception {
    // constants for test
    final String TEST_NAME = "testSetStereoVolumeMin";
    final int TEST_SR = 22050;
    final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO;
    final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    final int TEST_MODE = AudioTrack.MODE_STREAM;
    final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
    //-------- initialization --------------
    int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
    AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
            minBuffSize, TEST_MODE);
    byte[] data = new byte[minBuffSize / 2];
    //-------- test --------------
    track.write(data, 0, data.length);
    track.write(data, 0, data.length);
    track.play();
    float minVol = AudioTrack.getMinVolume();
    assertTrue(TEST_NAME, track.setStereoVolume(minVol, minVol) == AudioTrack.SUCCESS);
    //-------- tear down --------------
    track.release();
}
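setStereoVolume(float, float) was later deprecated in favor of the single-gain setVolume(float) on API level 21 and above. A minimal sketch of the same check against the newer method, assuming an API 21+ device; everything else is unchanged from the test above:

    // Sketch only: assumes API 21+, where setVolume(float) supersedes setStereoVolume().
    float minVol = AudioTrack.getMinVolume();
    assertTrue(TEST_NAME, track.setVolume(minVol) == AudioTrack.SUCCESS);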
Use of android.media.AudioTrack in project android_frameworks_base by DirtyUnicorns.
Class MediaAudioTrackTest, method testSetLoopPointsEndTooFar:
//Test case 9: setLoopPoints() fails with end beyond what can be written for the track
@LargeTest
public void testSetLoopPointsEndTooFar() throws Exception {
    // constants for test
    final String TEST_NAME = "testSetLoopPointsEndTooFar";
    final int TEST_SR = 22050;
    final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
    final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    final int TEST_MODE = AudioTrack.MODE_STATIC;
    final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
    //-------- initialization --------------
    int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
    AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
            minBuffSize, TEST_MODE);
    byte[] data = new byte[minBuffSize];
    int dataSizeInFrames = minBuffSize / 2; // 16bit data
    //-------- test --------------
    assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_NO_STATIC_DATA);
    track.write(data, 0, data.length);
    assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
    assertTrue(TEST_NAME,
            track.setLoopPoints(dataSizeInFrames - 10, dataSizeInFrames + 50, 2)
                    == AudioTrack.ERROR_BAD_VALUE);
    //-------- tear down --------------
    track.release();
}
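For contrast, a hedged sketch of a loop range the same setup should accept: with 16-bit mono PCM each frame is two bytes, so the written data spans minBuffSize / 2 frames, and an end point inside that range (here half of it) stays within the static buffer. The expected SUCCESS return is an assumption based on the framework's documented contract rather than anything shown in this excerpt:

    // Sketch only: loop over the first half of the written data, repeated twice.
    // Start and end are in frames; the written 16-bit mono data covers
    // dataSizeInFrames frames, so this range stays inside the static buffer.
    assertTrue(TEST_NAME,
            track.setLoopPoints(0, dataSizeInFrames / 2, 2) == AudioTrack.SUCCESS);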
Use of android.media.AudioTrack in project android_frameworks_base by DirtyUnicorns.
Class MediaAudioTrackTest, method testSetPlaybackHeadPositionStopped:
//Test case 2: setPlaybackHeadPosition() on stopped track
@LargeTest
public void testSetPlaybackHeadPositionStopped() throws Exception {
    // constants for test
    final String TEST_NAME = "testSetPlaybackHeadPositionStopped";
    final int TEST_SR = 22050;
    final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
    final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    final int TEST_MODE = AudioTrack.MODE_STREAM;
    final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
    //-------- initialization --------------
    int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
    AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
            2 * minBuffSize, TEST_MODE);
    byte[] data = new byte[minBuffSize];
    //-------- test --------------
    assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
    track.write(data, 0, data.length);
    track.write(data, 0, data.length);
    track.play();
    track.stop();
    assumeTrue(TEST_NAME, track.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
    assertTrue(TEST_NAME, track.setPlaybackHeadPosition(10) == AudioTrack.SUCCESS);
    //-------- tear down --------------
    track.release();
}
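As a companion sketch, repositioning while the track is still playing should be rejected; the ERROR_INVALID_OPERATION return assumed here follows the documented rule that the playback head can only be moved on a track that is not playing, and is not part of the excerpt above:

    // Sketch only: the same call on a playing track is expected to fail.
    track.play();
    assertTrue(TEST_NAME,
            track.setPlaybackHeadPosition(10) == AudioTrack.ERROR_INVALID_OPERATION);
    track.stop();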
Use of android.media.AudioTrack in project android_frameworks_base by AOSPA.
Class MediaAudioTrackTest, method testWriteShort:
//Test case 10: write() succeeds and returns the size that was written for 16bit
@LargeTest
public void testWriteShort() throws Exception {
    // constants for test
    final String TEST_NAME = "testWriteShort";
    final int TEST_SR = 22050;
    final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO;
    final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    final int TEST_MODE = AudioTrack.MODE_STREAM;
    final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC;
    //-------- initialization --------------
    int minBuffSize = AudioTrack.getMinBufferSize(TEST_SR, TEST_CONF, TEST_FORMAT);
    AudioTrack track = new AudioTrack(TEST_STREAM_TYPE, TEST_SR, TEST_CONF, TEST_FORMAT,
            2 * minBuffSize, TEST_MODE);
    short[] data = new short[minBuffSize / 2];
    //-------- test --------------
    assumeTrue(TEST_NAME, track.getState() == AudioTrack.STATE_INITIALIZED);
    assertTrue(TEST_NAME, track.write(data, 0, data.length) == data.length);
    //-------- tear down --------------
    track.release();
}
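The byte[] overload of write() behaves the same way but counts in bytes rather than shorts. A brief sketch on the same track; since the total amount written still fits the 2 * minBuffSize buffer, the call should not block:

    // Sketch only: write raw bytes to the same 16-bit track; the return value
    // is the number of bytes written.
    byte[] rawData = new byte[minBuffSize];
    assertTrue(TEST_NAME, track.write(rawData, 0, rawData.length) == rawData.length);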
Use of android.media.AudioTrack in project android_frameworks_base by AOSPA.
Class BlockingAudioTrack, method waitAndRelease:
public void waitAndRelease() {
    AudioTrack track = null;
    synchronized (mAudioTrackLock) {
        track = mAudioTrack;
    }
    if (track == null) {
        if (DBG) Log.d(TAG, "Audio track null [duplicate call to waitAndRelease ?]");
        return;
    }
    // For short utterances that did not fill the audio track buffer, stop() forces
    // the audio already written to be rendered; if the track was stopped earlier,
    // there is no point in doing that again.
    if (mBytesWritten < mAudioBufferSize && !mStopped) {
        if (DBG) {
            Log.d(TAG, "Stopping audio track to flush audio, state was : "
                    + track.getPlayState() + ",stopped= " + mStopped);
        }
        mIsShortUtterance = true;
        track.stop();
    }
    // Block until the audio track is done only if we haven't stopped yet.
    if (!mStopped) {
        if (DBG) Log.d(TAG, "Waiting for audio track to complete : " + mAudioTrack.hashCode());
        blockUntilDone(mAudioTrack);
    }
    // It's safe to release the track at this point.
    if (DBG) Log.d(TAG, "Releasing audio track [" + track.hashCode() + "]");
    synchronized (mAudioTrackLock) {
        mAudioTrack = null;
    }
    track.release();
}
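blockUntilDone() is not part of this excerpt. A minimal hedged sketch of one way such a wait can work, by polling the playback head until it reaches the number of frames written; the method name, parameters, and poll timing below are illustrative assumptions, not the class's actual implementation:

    // Sketch only: wait until the track has rendered lengthInFrames frames,
    // or until it stops playing. Not the real BlockingAudioTrack.blockUntilDone().
    private static void blockUntilCompletion(AudioTrack track, int lengthInFrames) {
        while (track.getPlayState() == AudioTrack.PLAYSTATE_PLAYING
                && track.getPlaybackHeadPosition() < lengthInFrames) {
            // Sleep roughly long enough to play the remaining frames, clamped so the
            // loop stays responsive to stop() or release() from another thread.
            long remainingFrames = lengthInFrames - track.getPlaybackHeadPosition();
            long sleepMs = Math.max(20, Math.min(2500, 1000 * remainingFrames / track.getSampleRate()));
            try {
                Thread.sleep(sleepMs);
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }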