Use of androidx.annotation.RequiresPermission in the project BigImageViewer by Piasy.
The example below is the saveImageIntoGallery method of the BigImageView class.
/**
 * Saves the currently downloaded image file into the system gallery.
 * <p>
 * On Android Q+ this inserts a row into {@link MediaStore} and streams the file
 * into the returned URI (scoped storage); on older versions it falls back to the
 * legacy {@code MediaStore.Images.Media.insertImage} API. The result (URI string
 * on success, or the failure cause) is reported via {@code fireSaveImageCallback}.
 * Must run on a worker thread and requires WRITE_EXTERNAL_STORAGE.
 */
@WorkerThread
@RequiresPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE)
public void saveImageIntoGallery() {
    // Nothing to save until the image has actually been downloaded.
    if (mCurrentImageFile == null) {
        fireSaveImageCallback(null, new IllegalStateException("image not downloaded yet"));
        return;
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        // Android 10+: scoped storage — insert a MediaStore row, then copy the file into it.
        OutputStream outputStream = null;
        FileInputStream inputStream = null;
        Uri imageUri = null;
        boolean saved = false;
        try {
            ContentResolver resolver = getContext().getContentResolver();
            ContentValues contentValues = new ContentValues();
            contentValues.put(MediaStore.MediaColumns.DISPLAY_NAME, mCurrentImageFile.getName());
            // http://androidxref.com/4.4.4_r1/xref/libcore/luni/src/main/java/libcore/net/MimeUtils.java
            // Please select the appropriate MIME_TYPE in the webpage
            contentValues.put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg");
            contentValues.put(MediaStore.MediaColumns.RELATIVE_PATH, Environment.DIRECTORY_PICTURES);
            imageUri = resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, contentValues);
            if (imageUri != null) {
                outputStream = resolver.openOutputStream(imageUri);
                inputStream = new FileInputStream(mCurrentImageFile);
                // a simple file copy is enough.
                IOUtils.copy(inputStream, outputStream);
                saved = true;
            } else {
                fireSaveImageCallback(null, new RuntimeException("saveImageIntoGallery fail: insert to MediaStore error"));
            }
        } catch (IOException e) {
            // Fix: the MediaStore row was already inserted before the copy failed.
            // Delete it so a failed save does not leave an orphaned, empty/corrupt
            // entry visible in the gallery.
            if (imageUri != null) {
                getContext().getContentResolver().delete(imageUri, null, null);
            }
            fireSaveImageCallback(null, e);
        } finally {
            IOUtils.closeQuietly(inputStream);
            IOUtils.closeQuietly(outputStream);
        }
        // Report success outside the try so the streams are already closed.
        if (saved) {
            fireSaveImageCallback(imageUri.toString(), null);
        }
    } else {
        // Pre-Q: the legacy API copies the file and returns its content URI as a String.
        try {
            String result = MediaStore.Images.Media.insertImage(getContext().getContentResolver(), mCurrentImageFile.getAbsolutePath(), mCurrentImageFile.getName(), "");
            fireSaveImageCallback(result, null);
        } catch (IOException e) {
            fireSaveImageCallback(null, e);
        }
    }
}
Use of androidx.annotation.RequiresPermission in the project speechutils by Kaljurand.
The example below is the onStartListening method of the AbstractRecognitionService class.
/**
 * Starts recording and opens the connection to the server to start sending the recorded packages.
 * <p>
 * The sequence is: read extras from the intent, configure the service, pause other
 * audio, signal readiness to the listener, start recording, and finally connect.
 * Configuration errors are reported as {@code ERROR_CLIENT} (bad caller input);
 * recording errors as {@code ERROR_AUDIO}. Requires RECORD_AUDIO.
 */
@RequiresPermission(RECORD_AUDIO)
@Override
protected void onStartListening(final Intent recognizerIntent, RecognitionService.Callback listener) {
    mListener = listener;
    Log.i("onStartListening");
    // Never leave mExtras null — later code reads it unconditionally.
    mExtras = recognizerIntent.getExtras();
    if (mExtras == null) {
        mExtras = new Bundle();
    }
    // An explicit extra overrides the stored audio-cue preference.
    if (mExtras.containsKey(Extras.EXTRA_AUDIO_CUES)) {
        setAudioCuesEnabled(mExtras.getBoolean(Extras.EXTRA_AUDIO_CUES));
    } else {
        setAudioCuesEnabled(isAudioCues());
    }
    try {
        configure(recognizerIntent);
    } catch (IOException e) {
        // Misconfiguration comes from the caller's intent, hence ERROR_CLIENT.
        onError(SpeechRecognizer.ERROR_CLIENT);
        return;
    }
    // Pause/mute other audio so it does not bleed into the recording.
    mAudioPauser = AudioPauser.createAudioPauser(this, true);
    Log.i("AudioPauser can mute stream: " + mAudioPauser.isMuteStream());
    mAudioPauser.pause();
    // NOTE(review): if startRecord() throws, mAudioPauser is left paused on this
    // error path — confirm that the ERROR_AUDIO handling resumes it elsewhere.
    try {
        onReadyForSpeech(new Bundle());
        startRecord();
    } catch (IOException e) {
        onError(SpeechRecognizer.ERROR_AUDIO);
        return;
    }
    onBeginningOfSpeech();
    connect();
}
Use of androidx.annotation.RequiresPermission in the project speechutils by Kaljurand.
The example below is the queueInputBuffer method of the EncodedAudioRecorder class.
/**
 * Copy audio from the recorder into the encoder.
 *
 * @param codec        encoder whose dequeued input buffer at {@code index} is filled
 * @param inputBuffers input-buffer array obtained from the codec (pre-API-21 buffer API)
 * @param index        index of the dequeued input buffer to fill
 * @param speechRecord recorder to read PCM audio from
 * @return number of bytes queued to the codec, or -1 if not recording,
 *         the read failed, or the API level is too low
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@RequiresPermission(RECORD_AUDIO)
private int queueInputBuffer(MediaCodec codec, ByteBuffer[] inputBuffers, int index, AudioRecord speechRecord) {
    // Bail out if the recorder is gone or has stopped recording.
    if (speechRecord == null || speechRecord.getRecordingState() != SpeechRecord.RECORDSTATE_RECORDING) {
        return -1;
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer inputBuffer = inputBuffers[index];
        inputBuffer.clear();
        // Read as much as the codec's input buffer can hold.
        int size = inputBuffer.limit();
        byte[] buffer = new byte[size];
        int status = read(speechRecord, buffer);
        if (status < 0) {
            handleError("status = " + status);
            return -1;
        }
        // NOTE(review): the full `size` is queued even though read() may have
        // returned fewer bytes (`status`) — confirm read() always fills the buffer
        // completely, otherwise trailing garbage is fed to the encoder.
        inputBuffer.put(buffer);
        codec.queueInputBuffer(index, 0, size, 0, 0);
        return size;
    }
    return -1;
}
Use of androidx.annotation.RequiresPermission in the project speechutils by Kaljurand.
The example below is the recorderLoop method of the EncodedAudioRecorder class.
/**
 * Runs the recording loop, feeding recorder audio through an encoder.
 * Resets the submitted/dequeued byte counters, creates a codec for the
 * configured MIME type and sample rate, and reports errors via handleError.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@RequiresPermission(RECORD_AUDIO)
@Override
protected void recorderLoop(AudioRecord speechRecord) {
    mNumBytesSubmitted = 0;
    mNumBytesDequeued = 0;
    final MediaFormat mediaFormat = MediaFormatFactory.createMediaFormat(MIME, getSampleRate());
    final MediaCodec encoder = getCodec(mediaFormat);
    // Without a suitable codec there is nothing to do.
    if (encoder == null) {
        handleError("no codec found");
        return;
    }
    // getCanonicalName() is only available from Android Q onwards.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        Log.i("Using codec: " + encoder.getCanonicalName());
    }
    final int loopStatus = recorderEncoderLoop(encoder, speechRecord);
    if (Log.DEBUG) {
        AudioUtils.showMetrics(mediaFormat, mNumBytesSubmitted, mNumBytesDequeued);
    }
    if (loopStatus < 0) {
        handleError("encoder error");
    }
}
Use of androidx.annotation.RequiresPermission in the project Signal-Android by WhisperSystems.
The example below is the bindToLifecycleAfterViewMeasured method of the SignalCameraXModule class.
// Binds the pending lifecycle (mNewLifecycle) and rebuilds/binds the CameraX use
// cases (image capture, optional video capture, preview). Must run after layout,
// since the preview resolution is derived from getMeasuredWidth().
@RequiresPermission(permission.CAMERA)
void bindToLifecycleAfterViewMeasured() {
    // No lifecycle is waiting to be bound.
    if (mNewLifecycle == null) {
        return;
    }
    // Unbind whatever was previously bound before switching lifecycles.
    clearCurrentLifecycle();
    if (mNewLifecycle.getLifecycle().getCurrentState() == Lifecycle.State.DESTROYED) {
        // Lifecycle is already in a destroyed state. Since it may have been a valid
        // lifecycle when bound, but became destroyed while waiting for layout, treat this as
        // a no-op now that we have cleared the previous lifecycle.
        mNewLifecycle = null;
        return;
    }
    // Promote the pending lifecycle to be the current one.
    mCurrentLifecycle = mNewLifecycle;
    mNewLifecycle = null;
    if (mCameraProvider == null) {
        // try again once the camera provider is no longer null
        return;
    }
    Set<Integer> available = getAvailableCameraLensFacing();
    if (available.isEmpty()) {
        Logger.w(TAG, "Unable to bindToLifeCycle since no cameras available");
        mCameraLensFacing = null;
    }
    // Ensure the current camera exists, or default to another camera
    if (mCameraLensFacing != null && !available.contains(mCameraLensFacing)) {
        Logger.w(TAG, "Camera does not exist with direction " + mCameraLensFacing);
        // Default to the first available camera direction
        mCameraLensFacing = available.iterator().next();
        Logger.w(TAG, "Defaulting to primary camera with direction " + mCameraLensFacing);
    }
    // mCameraLensFacing can only still be null at this point if there
    // were no available cameras, which should be logged in the logic above.
    if (mCameraLensFacing == null) {
        return;
    }
    // Set the preferred aspect ratio as 4:3 if it is IMAGE only mode. Set the preferred aspect
    // ratio as 16:9 if it is VIDEO or MIXED mode. Then, it will be WYSIWYG when the view finder
    // is in CENTER_INSIDE mode.
    boolean isDisplayPortrait = getDisplayRotationDegrees() == 0 || getDisplayRotationDegrees() == 180;
    // Begin Signal Custom Code Block
    int resolution = CameraXUtil.getIdealResolution(Resources.getSystem().getDisplayMetrics().widthPixels, Resources.getSystem().getDisplayMetrics().heightPixels);
    // End Signal Custom Code Block
    Rational targetAspectRatio;
    // Begin Signal Custom Code Block
    mImageCaptureBuilder.setTargetResolution(CameraXUtil.buildResolutionForRatio(resolution, ASPECT_RATIO_16_9, isDisplayPortrait));
    targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_9_16 : ASPECT_RATIO_16_9;
    // End Signal Custom Code Block
    // Begin Signal Custom Code Block
    mImageCaptureBuilder.setCaptureMode(CameraXUtil.getOptimalCaptureMode());
    // End Signal Custom Code Block
    mImageCaptureBuilder.setTargetRotation(getDisplaySurfaceRotation());
    mImageCapture = mImageCaptureBuilder.build();
    // Begin Signal Custom Code Block
    Size size = VideoUtil.getVideoRecordingSize();
    mVideoCaptureBuilder.setTargetResolution(size);
    mVideoCaptureBuilder.setMaxResolution(size);
    // End Signal Custom Code Block
    mVideoCaptureBuilder.setTargetRotation(getDisplaySurfaceRotation());
    // Begin Signal Custom Code Block
    // Only build video capture when transcoding is available; otherwise
    // mVideoCapture stays at its previous value (presumably null — verify).
    if (MediaConstraints.isVideoTranscodeAvailable()) {
        mVideoCapture = mVideoCaptureBuilder.build();
    }
    // End Signal Custom Code Block
    // Adjusts the preview resolution according to the view size and the target aspect ratio.
    int height = (int) (getMeasuredWidth() / targetAspectRatio.floatValue());
    mPreviewBuilder.setTargetResolution(new Size(getMeasuredWidth(), height));
    mPreview = mPreviewBuilder.build();
    mPreview.setSurfaceProvider(mCameraView.getPreviewView().getSurfaceProvider());
    CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(mCameraLensFacing).build();
    // Bind only the use cases needed for the configured capture mode.
    if (getCaptureMode() == SignalCameraView.CaptureMode.IMAGE) {
        mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector, mImageCapture, mPreview);
    } else if (getCaptureMode() == SignalCameraView.CaptureMode.VIDEO) {
        mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector, mVideoCapture, mPreview);
    } else {
        mCamera = mCameraProvider.bindToLifecycle(mCurrentLifecycle, cameraSelector, mImageCapture, mVideoCapture, mPreview);
    }
    // Reset zoom and start observing the lifecycle so we can clean up on destroy.
    setZoomRatio(UNITY_ZOOM_SCALE);
    mCurrentLifecycle.getLifecycle().addObserver(mCurrentLifecycleObserver);
    // Enable flash setting in ImageCapture after use cases are created and binded.
    setFlash(getFlash());
}
Aggregations