Use of androidx.annotation.AnyThread in project Signal-Android by WhisperSystems.
The confirmPayment method of class ConfirmPaymentRepository.
@AnyThread
void confirmPayment(@NonNull ConfirmPaymentState state, @NonNull Consumer<ConfirmPaymentResult> consumer) {
  Log.i(TAG, "confirmPayment");
  SignalExecutors.BOUNDED.execute(() -> {
    Balance balance = wallet.getCachedBalance();

    if (state.getTotal().requireMobileCoin().greaterThan(balance.getFullAmount().requireMobileCoin())) {
      Log.w(TAG, "The total was greater than the wallet's balance");
      consumer.accept(new ConfirmPaymentResult.Error());
      return;
    }

    Payee                   payee = state.getPayee();
    RecipientId             recipientId;
    MobileCoinPublicAddress mobileCoinPublicAddress;

    if (payee.hasRecipientId()) {
      recipientId = payee.requireRecipientId();
      try {
        mobileCoinPublicAddress = ProfileUtil.getAddressForRecipient(Recipient.resolved(recipientId));
      } catch (IOException e) {
        Log.w(TAG, "Failed to get address for recipient " + recipientId);
        consumer.accept(new ConfirmPaymentResult.Error());
        return;
      } catch (PaymentsAddressException e) {
        Log.w(TAG, "Failed to get address for recipient " + recipientId);
        consumer.accept(new ConfirmPaymentResult.Error(e.getCode()));
        return;
      }
    } else if (payee.hasPublicAddress()) {
      recipientId             = null;
      mobileCoinPublicAddress = payee.requirePublicAddress();
    } else {
      throw new AssertionError();
    }

    UUID paymentUuid = PaymentSendJob.enqueuePayment(recipientId,
                                                     mobileCoinPublicAddress,
                                                     Util.emptyIfNull(state.getNote()),
                                                     state.getAmount().requireMobileCoin(),
                                                     state.getFee().requireMobileCoin());

    Log.i(TAG, "confirmPayment: PaymentSendJob enqueued");
    consumer.accept(new ConfirmPaymentResult.Success(paymentUuid));
  });
}
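Because confirmPayment is annotated @AnyThread and immediately hops onto SignalExecutors.BOUNDED, a caller may invoke it from the main thread or a worker thread and simply react in the consumer. The sketch below is illustrative only: the ConfirmPaymentCaller class and the way the repository is wired in are assumptions, not part of the Signal-Android source, and the consumer runs on the BOUNDED executor rather than the main thread.

import androidx.annotation.NonNull;

// Hypothetical caller, assumed to live in the same package as ConfirmPaymentRepository
// (confirmPayment is package-private). Repository construction/wiring is not shown.
final class ConfirmPaymentCaller {

  private final ConfirmPaymentRepository repository;

  ConfirmPaymentCaller(@NonNull ConfirmPaymentRepository repository) {
    this.repository = repository;
  }

  // May be invoked from any thread; the repository moves the work onto SignalExecutors.BOUNDED itself.
  void onConfirmClicked(@NonNull ConfirmPaymentState state) {
    repository.confirmPayment(state, result -> {
      // Runs on the BOUNDED executor, not the main thread: post to the UI layer before touching views.
      if (result instanceof ConfirmPaymentResult.Success) {
        // e.g. keep the returned payment UUID and show a success state
      } else {
        // show an error state; per the snippet above, one Error constructor carries a PaymentsAddressException code
      }
    });
  }
}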
Use of androidx.annotation.AnyThread in project Signal-Android by WhisperSystems.
The showDelayed method of class SimpleProgressDialog.
/**
 * Shows the dialog after {@param delayMs} ms.
 * <p>
 * To dismiss, call {@link DismissibleDialog#dismiss()} on the result. If dismiss is called before
 * the delay has elapsed, the dialog will not show at all.
 * <p>
 * Dismiss can be called on any thread.
 *
 * @param minimumShowTimeMs If the dialog does display, then it will be visible for at least this duration.
 *                          This is to prevent flicker.
 */
@AnyThread
@NonNull
public static DismissibleDialog showDelayed(@NonNull Context context, int delayMs, int minimumShowTimeMs) {
  AtomicReference<AlertDialog> dialogAtomicReference = new AtomicReference<>();
  AtomicLong                   shownAt               = new AtomicLong();

  Runnable showRunnable = () -> {
    Log.i(TAG, "Taking some time. Showing a progress dialog.");
    shownAt.set(System.currentTimeMillis());
    dialogAtomicReference.set(show(context));
  };

  ThreadUtil.runOnMainDelayed(showRunnable, delayMs);

  return new DismissibleDialog() {
    @Override
    public void dismiss() {
      ThreadUtil.cancelRunnableOnMain(showRunnable);
      ThreadUtil.runOnMain(() -> {
        AlertDialog alertDialog = dialogAtomicReference.getAndSet(null);
        if (alertDialog != null) {
          long beenShowingForMs = System.currentTimeMillis() - shownAt.get();
          long remainingTimeMs  = minimumShowTimeMs - beenShowingForMs;

          if (remainingTimeMs > 0) {
            ThreadUtil.runOnMainDelayed(alertDialog::dismiss, remainingTimeMs);
          } else {
            alertDialog.dismiss();
          }
        }
      });
    }

    @Override
    public void dismissNow() {
      ThreadUtil.cancelRunnableOnMain(showRunnable);
      ThreadUtil.runOnMain(() -> {
        AlertDialog alertDialog = dialogAtomicReference.getAndSet(null);
        if (alertDialog != null) {
          alertDialog.dismiss();
        }
      });
    }
  };
}
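Since showDelayed is @AnyThread and, per its Javadoc, dismiss may be called from any thread, a common pattern is to request the dialog just before starting background work and dismiss it when the work finishes. The sketch below is illustrative only: the ProgressExample class and the single-thread executor are assumptions, and DismissibleDialog is used as the return type shown above (in the real code it may be a nested interface of SimpleProgressDialog).

import android.content.Context;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Illustrative sketch: show the spinner only if the work takes longer than 300 ms,
// then keep it visible for at least 1000 ms so it does not flicker.
final class ProgressExample {

  private final ExecutorService executor = Executors.newSingleThreadExecutor();

  void runWithProgress(Context context, Runnable longRunningWork) {
    DismissibleDialog dialog = SimpleProgressDialog.showDelayed(context, 300, 1000);

    executor.execute(() -> {
      try {
        longRunningWork.run();
      } finally {
        dialog.dismiss(); // @AnyThread: safe to call from this background thread
      }
    });
  }
}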
Use of androidx.annotation.AnyThread in project Signal-Android by WhisperSystems.
The getWaveForm method of class AudioWaveForm.
@AnyThread
public void getWaveForm(@NonNull Consumer<AudioFileInfo> onSuccess, @NonNull Runnable onFailure) {
  Uri        uri        = slide.getUri();
  Attachment attachment = slide.asAttachment();

  if (uri == null) {
    Log.w(TAG, "No uri");
    ThreadUtil.runOnMain(onFailure);
    return;
  }

  String        cacheKey = uri.toString();
  AudioFileInfo cached   = WAVE_FORM_CACHE.get(cacheKey);

  if (cached != null) {
    Log.i(TAG, "Loaded wave form from cache " + cacheKey);
    ThreadUtil.runOnMain(() -> onSuccess.accept(cached));
    return;
  }

  AUDIO_DECODER_EXECUTOR.execute(() -> {
    AudioFileInfo cachedInExecutor = WAVE_FORM_CACHE.get(cacheKey);
    if (cachedInExecutor != null) {
      Log.i(TAG, "Loaded wave form from cache inside executor " + cacheKey);
      ThreadUtil.runOnMain(() -> onSuccess.accept(cachedInExecutor));
      return;
    }

    AudioHash audioHash = attachment.getAudioHash();
    if (audioHash != null) {
      AudioFileInfo audioFileInfo = AudioFileInfo.fromDatabaseProtobuf(audioHash.getAudioWaveForm());
      if (audioFileInfo.waveForm.length == 0) {
        Log.w(TAG, "Recovering from a wave form generation error " + cacheKey);
        ThreadUtil.runOnMain(onFailure);
        return;
      } else if (audioFileInfo.waveForm.length != BAR_COUNT) {
        Log.w(TAG, "Wave form from database does not match bar count, regenerating " + cacheKey);
      } else {
        WAVE_FORM_CACHE.put(cacheKey, audioFileInfo);
        Log.i(TAG, "Loaded wave form from DB " + cacheKey);
        ThreadUtil.runOnMain(() -> onSuccess.accept(audioFileInfo));
        return;
      }
    }

    if (attachment instanceof DatabaseAttachment) {
      try {
        AttachmentDatabase attachmentDatabase = SignalDatabase.attachments();
        DatabaseAttachment dbAttachment       = (DatabaseAttachment) attachment;

        long startTime = System.currentTimeMillis();
        attachmentDatabase.writeAudioHash(dbAttachment.getAttachmentId(), AudioWaveFormData.getDefaultInstance());

        Log.i(TAG, String.format("Starting wave form generation (%s)", cacheKey));
        AudioFileInfo fileInfo = generateWaveForm(uri);
        Log.i(TAG, String.format(Locale.US, "Audio wave form generation time %d ms (%s)", System.currentTimeMillis() - startTime, cacheKey));

        attachmentDatabase.writeAudioHash(dbAttachment.getAttachmentId(), fileInfo.toDatabaseProtobuf());

        WAVE_FORM_CACHE.put(cacheKey, fileInfo);
        ThreadUtil.runOnMain(() -> onSuccess.accept(fileInfo));
      } catch (Throwable e) {
        Log.w(TAG, "Failed to create audio wave form for " + cacheKey, e);
        ThreadUtil.runOnMain(onFailure);
      }
    } else {
      try {
        Log.i(TAG, "Not in database and not cached. Generating wave form on-the-fly.");

        long startTime = System.currentTimeMillis();
        Log.i(TAG, String.format("Starting wave form generation (%s)", cacheKey));
        AudioFileInfo fileInfo = generateWaveForm(uri);
        Log.i(TAG, String.format(Locale.US, "Audio wave form generation time %d ms (%s)", System.currentTimeMillis() - startTime, cacheKey));

        WAVE_FORM_CACHE.put(cacheKey, fileInfo);
        ThreadUtil.runOnMain(() -> onSuccess.accept(fileInfo));
      } catch (IOException e) {
        Log.w(TAG, "Failed to create audio wave form for " + cacheKey, e);
        ThreadUtil.runOnMain(onFailure);
      }
    }
  });
}
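Both callbacks are posted back with ThreadUtil.runOnMain, so a caller can invoke getWaveForm from any thread and touch views directly inside the callbacks. The binder class below is hypothetical and shown only to illustrate the calling pattern; how the AudioWaveForm instance is constructed is not part of the snippet above.

import androidx.annotation.NonNull;

// Hypothetical binder: illustrates the @AnyThread calling pattern only.
final class WaveFormBinder {

  void bind(@NonNull AudioWaveForm waveForm) {
    waveForm.getWaveForm(
        fileInfo -> {
          // Main thread: safe to update views here. On the success path the
          // fileInfo.waveForm array holds BAR_COUNT entries, per the checks above.
        },
        () -> {
          // Main thread: generation failed or produced no data; show a fallback state.
        });
  }
}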
Use of androidx.annotation.AnyThread in project Slide by ccrama.
The getExifOrientation method of class SubsamplingScaleImageView.
/**
* Helper method for load tasks. Examines the EXIF info on the image file to determine the orientation.
* This will only work for external files, not assets, resources or other URIs.
*/
@AnyThread
private int getExifOrientation(Context context, String sourceUri) {
  int exifOrientation = ORIENTATION_0;
  if (sourceUri.startsWith(ContentResolver.SCHEME_CONTENT)) {
    Cursor cursor = null;
    try {
      String[] columns = { MediaStore.Images.Media.ORIENTATION };
      cursor = context.getContentResolver().query(Uri.parse(sourceUri), columns, null, null, null);
      if (cursor != null) {
        if (cursor.moveToFirst()) {
          int orientation = cursor.getInt(0);
          if (VALID_ORIENTATIONS.contains(orientation) && orientation != ORIENTATION_USE_EXIF) {
            exifOrientation = orientation;
          } else {
            Log.w(TAG, "Unsupported orientation: " + orientation);
          }
        }
      }
    } catch (Exception e) {
      Log.w(TAG, "Could not get orientation of image from media store");
    } finally {
      if (cursor != null) {
        cursor.close();
      }
    }
  } else if (sourceUri.startsWith(ImageSource.FILE_SCHEME) && !sourceUri.startsWith(ImageSource.ASSET_SCHEME)) {
    try {
      ExifInterface exifInterface = new ExifInterface(sourceUri.substring(ImageSource.FILE_SCHEME.length() - 1));
      int orientationAttr = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
      if (orientationAttr == ExifInterface.ORIENTATION_NORMAL || orientationAttr == ExifInterface.ORIENTATION_UNDEFINED) {
        exifOrientation = ORIENTATION_0;
      } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_90) {
        exifOrientation = ORIENTATION_90;
      } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_180) {
        exifOrientation = ORIENTATION_180;
      } else if (orientationAttr == ExifInterface.ORIENTATION_ROTATE_270) {
        exifOrientation = ORIENTATION_270;
      } else {
        Log.w(TAG, "Unsupported EXIF orientation: " + orientationAttr);
      }
    } catch (Exception e) {
      Log.w(TAG, "Could not get EXIF orientation of image");
    }
  }
  return exifOrientation;
}
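The same EXIF-to-degrees mapping can be useful outside the view, for example when rotating a decoded bitmap by hand. The helper below is a standalone sketch, not part of SubsamplingScaleImageView; it assumes the caller already has a decoded Bitmap and the raw TAG_ORIENTATION value, and it maps to plain degree values rather than the ORIENTATION_* view constants used above.

import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.ExifInterface;

// Standalone sketch: map an ExifInterface orientation attribute to degrees and
// rotate a bitmap accordingly.
final class ExifRotation {

  static int toDegrees(int exifOrientationAttr) {
    switch (exifOrientationAttr) {
      case ExifInterface.ORIENTATION_ROTATE_90:  return 90;
      case ExifInterface.ORIENTATION_ROTATE_180: return 180;
      case ExifInterface.ORIENTATION_ROTATE_270: return 270;
      default:                                   return 0; // NORMAL, UNDEFINED, or unsupported
    }
  }

  static Bitmap rotate(Bitmap source, int degrees) {
    if (degrees == 0) return source;
    Matrix matrix = new Matrix();
    matrix.postRotate(degrees);
    return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
  }
}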