Use of android.annotation.SuppressLint in project qualitymatters by artem-zinnatullin.
The class DeveloperSettingsSpinnerAdapterTest, method getDropDownView_shouldBindDataAndReturnViewWithoutConvertView.
@SuppressLint("SetTextI18n")
@Test
public void getDropDownView_shouldBindDataAndReturnViewWithoutConvertView() {
List<SelectionOption> selectionOptions = asList(mock(SelectionOption.class), mock(SelectionOption.class), mock(SelectionOption.class));
adapter.setSelectionOptions(selectionOptions);
ViewGroup container = mock(ViewGroup.class);
for (int position = 0; position < selectionOptions.size(); position++) {
when(selectionOptions.get(position).title()).thenReturn("Title " + position);
View view = mock(View.class);
when(layoutInflater.inflate(R.layout.list_developer_settings_spinner_drop_down_item, container, false)).thenReturn(view);
TextView titleTextView = mock(TextView.class);
when(view.findViewById(R.id.list_developer_settings_spinner_item_title_text_view)).thenReturn(titleTextView);
// Notice: there is NO convertView, that what we want to check.
assertThat(adapter.getDropDownView(position, null, container)).isSameAs(view);
verify(titleTextView).setText("Title " + position);
}
}
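The SetTextI18n check flags TextView.setText calls that receive concatenated string literals (here, "Title " + position in the verify step), since hard-coded text bypasses string resources; suppressing it in a test is harmless. For orientation, below is a hedged sketch of the adapter method the test exercises. The real getDropDownView in qualitymatters is not shown on this page, so treat this body as an assumption, not the project's actual code.

// Hedged sketch (assumption) of the production method under test; field names mirror the test above.
@Override
public View getDropDownView(int position, View convertView, ViewGroup parent) {
    View view = convertView != null
            ? convertView
            : layoutInflater.inflate(R.layout.list_developer_settings_spinner_drop_down_item, parent, false);
    TextView titleTextView = (TextView) view.findViewById(R.id.list_developer_settings_spinner_item_title_text_view);
    // The test verifies exactly this call, using a concatenated literal that triggers SetTextI18n.
    titleTextView.setText(selectionOptions.get(position).title());
    return view;
}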
Use of android.annotation.SuppressLint in project ExoPlayer by google.
The class TrackSelectionHelper, method buildView.
@SuppressLint("InflateParams")
private View buildView(Context context) {
LayoutInflater inflater = LayoutInflater.from(context);
View view = inflater.inflate(R.layout.track_selection_dialog, null);
ViewGroup root = (ViewGroup) view.findViewById(R.id.root);
TypedArray attributeArray = context.getTheme().obtainStyledAttributes(new int[] { android.R.attr.selectableItemBackground });
int selectableItemBackgroundResourceId = attributeArray.getResourceId(0, 0);
attributeArray.recycle();
// View for disabling the renderer.
disableView = (CheckedTextView) inflater.inflate(android.R.layout.simple_list_item_single_choice, root, false);
disableView.setBackgroundResource(selectableItemBackgroundResourceId);
disableView.setText(R.string.selection_disabled);
disableView.setFocusable(true);
disableView.setOnClickListener(this);
root.addView(disableView);
// View for clearing the override to allow the selector to use its default selection logic.
defaultView = (CheckedTextView) inflater.inflate(android.R.layout.simple_list_item_single_choice, root, false);
defaultView.setBackgroundResource(selectableItemBackgroundResourceId);
defaultView.setText(R.string.selection_default);
defaultView.setFocusable(true);
defaultView.setOnClickListener(this);
root.addView(inflater.inflate(R.layout.list_divider, root, false));
root.addView(defaultView);
// Per-track views.
boolean haveSupportedTracks = false;
boolean haveAdaptiveTracks = false;
trackViews = new CheckedTextView[trackGroups.length][];
for (int groupIndex = 0; groupIndex < trackGroups.length; groupIndex++) {
TrackGroup group = trackGroups.get(groupIndex);
boolean groupIsAdaptive = trackGroupsAdaptive[groupIndex];
haveAdaptiveTracks |= groupIsAdaptive;
trackViews[groupIndex] = new CheckedTextView[group.length];
for (int trackIndex = 0; trackIndex < group.length; trackIndex++) {
if (trackIndex == 0) {
root.addView(inflater.inflate(R.layout.list_divider, root, false));
}
int trackViewLayoutId = groupIsAdaptive ? android.R.layout.simple_list_item_multiple_choice : android.R.layout.simple_list_item_single_choice;
CheckedTextView trackView = (CheckedTextView) inflater.inflate(trackViewLayoutId, root, false);
trackView.setBackgroundResource(selectableItemBackgroundResourceId);
trackView.setText(buildTrackName(group.getFormat(trackIndex)));
if (trackInfo.getTrackFormatSupport(rendererIndex, groupIndex, trackIndex) == RendererCapabilities.FORMAT_HANDLED) {
trackView.setFocusable(true);
trackView.setTag(Pair.create(groupIndex, trackIndex));
trackView.setOnClickListener(this);
haveSupportedTracks = true;
} else {
trackView.setFocusable(false);
trackView.setEnabled(false);
}
trackViews[groupIndex][trackIndex] = trackView;
root.addView(trackView);
}
}
if (!haveSupportedTracks) {
// Indicate that the default selection will be nothing.
defaultView.setText(R.string.selection_default_none);
} else if (haveAdaptiveTracks) {
// View for using random adaptation.
enableRandomAdaptationView = (CheckedTextView) inflater.inflate(android.R.layout.simple_list_item_multiple_choice, root, false);
enableRandomAdaptationView.setBackgroundResource(selectableItemBackgroundResourceId);
enableRandomAdaptationView.setText(R.string.enable_random_adaptation);
enableRandomAdaptationView.setOnClickListener(this);
root.addView(inflater.inflate(R.layout.list_divider, root, false));
root.addView(enableRandomAdaptationView);
}
updateViews();
return view;
}
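The InflateParams warning fires on inflater.inflate(R.layout.track_selection_dialog, null): with a null parent, any layout_width/layout_height attributes on the inflated layout's root element cannot be resolved and are ignored. For a dialog content view that is intentional, which is why the suppression is appropriate here. A minimal sketch of the trade-off follows; the layout id some_dialog_content is hypothetical, not from ExoPlayer.

// Minimal sketch of the InflateParams trade-off; R.layout.some_dialog_content is a hypothetical layout.
View inflateContent(Context context, ViewGroup optionalParent) {
    LayoutInflater inflater = LayoutInflater.from(context);
    if (optionalParent == null) {
        // Lint warns here: without a parent the root view's layout_* attributes are dropped.
        // Acceptable when the view becomes a dialog's content view, as in buildView above.
        return inflater.inflate(R.layout.some_dialog_content, null);
    }
    // Preferred when a real parent exists: LayoutParams are generated against that parent.
    return inflater.inflate(R.layout.some_dialog_content, optionalParent, false);
}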
Use of android.annotation.SuppressLint in project ExoPlayer by google.
The class FormatTest, method testConversionToFrameworkMediaFormatV16.
@SuppressLint("InlinedApi")
@TargetApi(16)
private static void testConversionToFrameworkMediaFormatV16(Format in) {
MediaFormat out = in.getFrameworkMediaFormatV16();
assertEquals(in.sampleMimeType, out.getString(MediaFormat.KEY_MIME));
assertOptionalV16(out, MediaFormat.KEY_LANGUAGE, in.language);
assertOptionalV16(out, MediaFormat.KEY_MAX_INPUT_SIZE, in.maxInputSize);
assertOptionalV16(out, MediaFormat.KEY_WIDTH, in.width);
assertOptionalV16(out, MediaFormat.KEY_HEIGHT, in.height);
assertOptionalV16(out, MediaFormat.KEY_CHANNEL_COUNT, in.channelCount);
assertOptionalV16(out, MediaFormat.KEY_SAMPLE_RATE, in.sampleRate);
assertOptionalV16(out, MediaFormat.KEY_FRAME_RATE, in.frameRate);
for (int i = 0; i < in.initializationData.size(); i++) {
byte[] originalData = in.initializationData.get(i);
ByteBuffer frameworkBuffer = out.getByteBuffer("csd-" + i);
byte[] frameworkData = Arrays.copyOf(frameworkBuffer.array(), frameworkBuffer.limit());
assertTrue(Arrays.equals(originalData, frameworkData));
}
}
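For reference, the InlinedApi suppression exists because the MediaFormat.KEY_* fields are compile-time String constants: referencing one that was introduced after the target or minimum API level is safe at runtime, since the constant's value is inlined into the calling class, but lint cannot verify that the key is meaningful on older devices. A small illustrative sketch of that situation, not code from ExoPlayer, using KEY_ROTATION (added in API 23):

// Illustrative only: KEY_ROTATION was added in API 23, but its value ("rotation-degrees")
// is inlined at compile time, so this reference does not crash on older API levels.
@SuppressLint("InlinedApi")
static void setRotation(MediaFormat format, int degrees) {
    // Behaves the same as format.setInteger("rotation-degrees", degrees) at runtime.
    format.setInteger(MediaFormat.KEY_ROTATION, degrees);
}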
Use of android.annotation.SuppressLint in project libstreaming by fyhertz.
The class AACStream, method testADTS.
/**
 * Records a short sample of AAC ADTS audio from the microphone to find out what the sampling rate really is.
 * On some phones, no error is reported even when the sampling rate actually used differs from the one
 * selected with setAudioSamplingRate.
 * @throws IOException
 * @throws IllegalStateException
 */
@SuppressLint("InlinedApi")
private void testADTS() throws IllegalStateException, IOException {
setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
try {
Field name = MediaRecorder.OutputFormat.class.getField("AAC_ADTS");
setOutputFormat(name.getInt(null));
} catch (Exception ignore) {
setOutputFormat(6);
}
String key = PREF_PREFIX + "aac-" + mQuality.samplingRate;
if (mSettings != null && mSettings.contains(key)) {
String[] s = mSettings.getString(key, "").split(",");
mQuality.samplingRate = Integer.valueOf(s[0]);
mConfig = Integer.valueOf(s[1]);
mChannel = Integer.valueOf(s[2]);
return;
}
final String TESTFILE = Environment.getExternalStorageDirectory().getPath() + "/spydroid-test.adts";
if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
throw new IllegalStateException("No external storage or external storage not ready !");
}
// The structure of an ADTS packet is described here: http://wiki.multimedia.cx/index.php?title=ADTS
// ADTS header is 7 or 9 bytes long
byte[] buffer = new byte[9];
mMediaRecorder = new MediaRecorder();
mMediaRecorder.setAudioSource(mAudioSource);
mMediaRecorder.setOutputFormat(mOutputFormat);
mMediaRecorder.setAudioEncoder(mAudioEncoder);
mMediaRecorder.setAudioChannels(1);
mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
mMediaRecorder.setOutputFile(TESTFILE);
mMediaRecorder.setMaxDuration(1000);
mMediaRecorder.prepare();
mMediaRecorder.start();
// TODO: use the MediaRecorder.OnInfoListener
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
}
mMediaRecorder.stop();
mMediaRecorder.release();
mMediaRecorder = null;
File file = new File(TESTFILE);
RandomAccessFile raf = new RandomAccessFile(file, "r");
// ADTS packets start with a sync word: 12bits set to 1
while (true) {
if ((raf.readByte() & 0xFF) == 0xFF) {
buffer[0] = raf.readByte();
if ((buffer[0] & 0xF0) == 0xF0)
break;
}
}
raf.read(buffer, 1, 5);
mSamplingRateIndex = (buffer[1] & 0x3C) >> 2;
mProfile = ((buffer[1] & 0xC0) >> 6) + 1;
mChannel = (buffer[1] & 0x01) << 2 | (buffer[2] & 0xC0) >> 6;
mQuality.samplingRate = AUDIO_SAMPLING_RATES[mSamplingRateIndex];
// 5 bits for the object type / 4 bits for the sampling rate / 4 bits for the channel / padding
mConfig = (mProfile & 0x1F) << 11 | (mSamplingRateIndex & 0x0F) << 7 | (mChannel & 0x0F) << 3;
Log.i(TAG, "MPEG VERSION: " + ((buffer[0] & 0x08) >> 3));
Log.i(TAG, "PROTECTION: " + (buffer[0] & 0x01));
Log.i(TAG, "PROFILE: " + AUDIO_OBJECT_TYPES[mProfile]);
Log.i(TAG, "SAMPLING FREQUENCY: " + mQuality.samplingRate);
Log.i(TAG, "CHANNEL: " + mChannel);
raf.close();
if (mSettings != null) {
Editor editor = mSettings.edit();
editor.putString(key, mQuality.samplingRate + "," + mConfig + "," + mChannel);
editor.commit();
}
if (!file.delete())
Log.e(TAG, "Temp file could not be erased");
}
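To make the bit twiddling above easier to follow, here is a hedged, stand-alone sketch of the same ADTS header parsing, written as an isolated helper rather than code from libstreaming. It takes the header starting at the 0xFF sync byte, whereas the loop above reads the first byte separately, so the array indices are shifted by one.

// Illustrative helper (not part of libstreaming) decoding the same ADTS header fields.
// ADTS header bit layout: syncword(12) id(1) layer(2) protection_absent(1) profile(2)
// sampling_freq_index(4) private(1) channel_config(3) ...
static int[] parseAdtsHeader(byte[] h) {
    if ((h[0] & 0xFF) != 0xFF || (h[1] & 0xF0) != 0xF0) {
        throw new IllegalArgumentException("Not an ADTS sync word");
    }
    int profile = ((h[2] & 0xC0) >> 6) + 1;                       // audio object type = profile + 1
    int samplingRateIndex = (h[2] & 0x3C) >> 2;                   // index into the sampling-rate table
    int channelConfig = (h[2] & 0x01) << 2 | (h[3] & 0xC0) >> 6;  // 3-bit channel configuration
    // 2-byte AudioSpecificConfig: 5 bits object type, 4 bits frequency index, 4 bits channels, padding.
    int audioSpecificConfig = (profile & 0x1F) << 11 | (samplingRateIndex & 0x0F) << 7 | (channelConfig & 0x0F) << 3;
    return new int[] { profile, samplingRateIndex, channelConfig, audioSpecificConfig };
}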
Use of android.annotation.SuppressLint in project libstreaming by fyhertz.
The class AACStream, method encodeWithMediaCodec.
@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();
    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API!");
                        } else {
                            //Log.v(TAG,"Pushing raw audio to the decoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network.
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();
    mStreaming = true;
}
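In this method the feeder thread only handles the encoder's input side; the encoded AAC output is consumed through MediaCodecInputStream, a libstreaming class that wraps the output buffers as a java.io.InputStream for the packetizer. As a rough orientation, the sketch below shows what draining the output side looks like with the same pre-API-21 MediaCodec buffer API; it illustrates the standard dequeue/release protocol only and is not libstreaming's actual implementation.

// Rough sketch of draining encoder output with the pre-API-21 MediaCodec API used above.
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
while (!Thread.interrupted()) {
    int index = mMediaCodec.dequeueOutputBuffer(info, 10000);
    if (index >= 0) {
        ByteBuffer encoded = outputBuffers[index];
        encoded.position(info.offset);
        encoded.limit(info.offset + info.size);
        // ... hand the encoded AAC frame to the consumer (e.g. the packetizer) ...
        mMediaCodec.releaseOutputBuffer(index, false);
    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        outputBuffers = mMediaCodec.getOutputBuffers();
    } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat outputFormat = mMediaCodec.getOutputFormat(); // carries csd-0 (AudioSpecificConfig)
    }
    // index == MediaCodec.INFO_TRY_AGAIN_LATER: no output available yet, loop again.
}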