Use of androidx.annotation.RequiresApi in project xabber-android by redsolution: class CustomNotifSettingsFragment, method getSoundTitle.
@RequiresApi(api = Build.VERSION_CODES.O)
private String getSoundTitle(NotificationChannel channel) {
    if (channel == null)
        return null;

    Uri uri = channel.getSound();
    Ringtone ringtone = RingtoneManager.getRingtone(getActivity(), uri);

    if (ringtone != null)
        return ringtone.getTitle(getActivity());
    else
        return "Unknown ringtone";
}
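A minimal usage sketch, assuming the fragment can reach a NotificationManager; the channel id "chat_messages" and the method name showCurrentSoundTitle are purely illustrative:
@RequiresApi(api = Build.VERSION_CODES.O)
private void showCurrentSoundTitle() {
    // "chat_messages" is a hypothetical channel id; any id created elsewhere in the app works.
    NotificationManager manager = getActivity().getSystemService(NotificationManager.class);
    NotificationChannel channel = manager.getNotificationChannel("chat_messages");

    // getSoundTitle() copes with a null channel and an unresolvable ringtone on its own.
    String title = getSoundTitle(channel);
    // ... show the title, e.g. as a preference summary.
}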
Use of androidx.annotation.RequiresApi in project Signal-Android by WhisperSystems: class AudioWaveForm, method generateWaveForm.
/**
 * Based on decode sample from:
 * <p>
 * https://android.googlesource.com/platform/cts/+/jb-mr2-release/tests/tests/media/src/android/media/cts/DecoderTest.java
 */
@WorkerThread
@RequiresApi(api = 23)
@NonNull
private AudioFileInfo generateWaveForm(@NonNull Uri uri) throws IOException {
    try (MediaInput dataSource = DecryptableUriMediaInput.createForUri(context, uri)) {
        long[] wave = new long[BAR_COUNT];
        int[] waveSamples = new int[BAR_COUNT];

        MediaExtractor extractor = dataSource.createExtractor();
        if (extractor.getTrackCount() == 0) {
            throw new IOException("No audio track");
        }

        MediaFormat format = extractor.getTrackFormat(0);
        if (!format.containsKey(MediaFormat.KEY_DURATION)) {
            throw new IOException("Unknown duration");
        }

        long totalDurationUs = format.getLong(MediaFormat.KEY_DURATION);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (!mime.startsWith("audio/")) {
            throw new IOException("Mime not audio");
        }

        MediaCodec codec = MediaCodec.createDecoderByType(mime);
        if (totalDurationUs == 0) {
            throw new IOException("Zero duration");
        }

        codec.configure(format, null, null, 0);
        codec.start();

        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();

        extractor.selectTrack(0);

        long kTimeOutUs = 5000;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int noOutputCounter = 0;

        while (!sawOutputEOS && noOutputCounter < 50) {
            noOutputCounter++;

            if (!sawInputEOS) {
                int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    int sampleSize = extractor.readSampleData(dstBuf, 0);
                    long presentationTimeUs = 0;

                    if (sampleSize < 0) {
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }

                    codec.queueInputBuffer(inputBufIndex, 0, sampleSize, presentationTimeUs, sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                    if (!sawInputEOS) {
                        // Skip ahead so that only roughly SAMPLES_PER_BAR encoded samples are fed to the decoder per bar.
                        int barSampleIndex = (int) (SAMPLES_PER_BAR * (wave.length * extractor.getSampleTime()) / totalDurationUs);
                        sawInputEOS = !extractor.advance();
                        int nextBarSampleIndex = (int) (SAMPLES_PER_BAR * (wave.length * extractor.getSampleTime()) / totalDurationUs);
                        while (!sawInputEOS && nextBarSampleIndex == barSampleIndex) {
                            sawInputEOS = !extractor.advance();
                            if (!sawInputEOS) {
                                nextBarSampleIndex = (int) (SAMPLES_PER_BAR * (wave.length * extractor.getSampleTime()) / totalDurationUs);
                            }
                        }
                    }
                }
            }

            int outputBufferIndex;
            do {
                outputBufferIndex = codec.dequeueOutputBuffer(info, kTimeOutUs);
                if (outputBufferIndex >= 0) {
                    if (info.size > 0) {
                        noOutputCounter = 0;
                    }

                    ByteBuffer buf = codecOutputBuffers[outputBufferIndex];
                    int barIndex = (int) ((wave.length * info.presentationTimeUs) / totalDurationUs);
                    long total = 0;

                    // Sum absolute 16-bit PCM amplitudes, reading every fourth short of the decoded buffer.
                    for (int i = 0; i < info.size; i += 2 * 4) {
                        short aShort = buf.getShort(i);
                        total += Math.abs(aShort);
                    }

                    if (barIndex >= 0 && barIndex < wave.length) {
                        wave[barIndex] += total;
                        waveSamples[barIndex] += info.size / 2;
                    }

                    codec.releaseOutputBuffer(outputBufferIndex, false);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        sawOutputEOS = true;
                    }
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    codecOutputBuffers = codec.getOutputBuffers();
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.d(TAG, "output format has changed to " + codec.getOutputFormat());
                }
            } while (outputBufferIndex >= 0);
        }

        codec.stop();
        codec.release();
        extractor.release();

        // Average each bar, normalize against the loudest bar, and pack the result into unsigned bytes (0-255).
        float[] floats = new float[BAR_COUNT];
        byte[] bytes = new byte[BAR_COUNT];
        float max = 0;

        for (int i = 0; i < BAR_COUNT; i++) {
            if (waveSamples[i] == 0)
                continue;

            floats[i] = wave[i] / (float) waveSamples[i];
            if (floats[i] > max) {
                max = floats[i];
            }
        }

        for (int i = 0; i < BAR_COUNT; i++) {
            float normalized = floats[i] / max;
            bytes[i] = (byte) (255 * normalized);
        }

        return new AudioFileInfo(totalDurationUs, bytes);
    }
}
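The bars come back as unsigned amplitudes (0-255) stored in signed bytes, so a renderer has to mask them before use. A small sketch of that step; unpackWaveForm is an illustrative helper, not part of AudioWaveForm:
// Converts the packed waveform bytes back to floats in [0, 1] for drawing.
// The 0xFF mask undoes the signed-byte truncation applied in generateWaveForm().
private static float[] unpackWaveForm(@NonNull byte[] bars) {
    float[] levels = new float[bars.length];
    for (int i = 0; i < bars.length; i++) {
        levels[i] = (bars[i] & 0xFF) / 255f;
    }
    return levels;
}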
Use of androidx.annotation.RequiresApi in project Signal-Android by WhisperSystems: class JobSchedulerScheduler, method schedule.
@RequiresApi(26)
@Override
public void schedule(long delay, @NonNull List<Constraint> constraints) {
    SignalExecutors.BOUNDED.execute(() -> {
        JobScheduler jobScheduler = application.getSystemService(JobScheduler.class);

        String constraintNames = constraints.isEmpty() ? ""
                                                       : Stream.of(constraints)
                                                               .map(Constraint::getJobSchedulerKeyPart)
                                                               .withoutNulls()
                                                               .sorted()
                                                               .collect(Collectors.joining("-"));

        int jobId = constraintNames.hashCode();

        if (jobScheduler.getPendingJob(jobId) != null) {
            return;
        }

        Log.i(TAG, String.format(Locale.US, "JobScheduler enqueue of %s (%d)", constraintNames, jobId));

        JobInfo.Builder jobInfoBuilder = new JobInfo.Builder(jobId, new ComponentName(application, SystemService.class))
                                                    .setMinimumLatency(delay)
                                                    .setPersisted(true);

        for (Constraint constraint : constraints) {
            constraint.applyToJobInfo(jobInfoBuilder);
        }

        jobScheduler.schedule(jobInfoBuilder.build());
    });
}
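For context, a sketch of what a constraint feeding into this scheduler could look like, assuming Constraint exposes only the two methods used above; NetworkConstraint and its key part are illustrative, not Signal's actual implementation:
// Illustrative constraint: only run the job when some network is available.
final class NetworkConstraint implements Constraint {

    @Override
    public void applyToJobInfo(@NonNull JobInfo.Builder jobInfoBuilder) {
        jobInfoBuilder.setRequiredNetworkType(JobInfo.NETWORK_TYPE_ANY);
    }

    @Override
    public String getJobSchedulerKeyPart() {
        // Combined with other key parts above to derive a stable job id.
        return "NETWORK";
    }
}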
Use of androidx.annotation.RequiresApi in project Signal-Android by WhisperSystems: class CameraXUtil, method shouldCropImage.
@RequiresApi(21)
private static boolean shouldCropImage(@NonNull ImageProxy image) {
    Size sourceSize = new Size(image.getWidth(), image.getHeight());
    Size targetSize = new Size(image.getCropRect().width(), image.getCropRect().height());

    return !targetSize.equals(sourceSize);
}
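A hedged sketch of how the crop rect might then be applied once the frame has been decoded into a Bitmap; the decode step and the cropIfNeeded helper are assumptions, not CameraXUtil's actual API:
@RequiresApi(21)
private static Bitmap cropIfNeeded(@NonNull ImageProxy image, @NonNull Bitmap decoded) {
    if (!shouldCropImage(image)) {
        return decoded;
    }

    Rect crop = image.getCropRect();
    // Keep only the region the camera pipeline marked as valid output.
    return Bitmap.createBitmap(decoded, crop.left, crop.top, crop.width(), crop.height());
}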
Use of androidx.annotation.RequiresApi in project Signal-Android by WhisperSystems: class CameraXUtil, method getLowestSupportedHardwareLevel.
@RequiresApi(21)
public static int getLowestSupportedHardwareLevel(@NonNull Context context) {
    @SuppressLint("RestrictedApi") CameraManager cameraManager = CameraManagerCompat.from(context).unwrap();

    try {
        int supported = maxHardwareLevel();

        for (String cameraId : cameraManager.getCameraIdList()) {
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
            Integer hwLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

            if (hwLevel == null || hwLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
                return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
            }

            supported = smallerHardwareLevel(supported, hwLevel);
        }

        return supported;
    } catch (CameraAccessException e) {
        Log.w(TAG, "Failed to enumerate cameras", e);
        return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
    }
}
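The INFO_SUPPORTED_HARDWARE_LEVEL constants are not numerically ordered by capability (LIMITED is 0, FULL is 1, LEGACY is 2, LEVEL_3 is 3), which is why the method leans on helpers instead of Math.min. A sketch of how such a comparison could work; CameraXUtil's real maxHardwareLevel and smallerHardwareLevel may be implemented differently:
// Capability order, weakest first.
private static final int[] LEVELS_WEAKEST_FIRST = {
        CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
        CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
        CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
        CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3
};

private static int smallerHardwareLevel(int levelA, int levelB) {
    // Return whichever of the two levels appears first in the weakest-first ordering.
    for (int level : LEVELS_WEAKEST_FIRST) {
        if (level == levelA || level == levelB) {
            return level;
        }
    }
    return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
}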