use of android.view.Surface in project GSYVideoPlayer by CarGuo.
the class GSYVideoPlayer method onSurfaceTextureAvailable.
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    mSurface = new Surface(surface);
    GSYVideoManager.instance().setDisplay(mSurface);
    // Show the cover image displayed when toggling pause.
    showPauseCover();
}
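A matching release path is usually needed when the texture goes away. Below is a minimal sketch of the destroy callback, assuming GSYVideoManager accepts a null display; this is not taken from the GSYVideoPlayer source.
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
    // Assumed handling: detach the player from the Surface before its
    // buffer queue is destroyed, then release our wrapper.
    GSYVideoManager.instance().setDisplay(null);
    if (mSurface != null) {
        mSurface.release();
        mSurface = null;
    }
    // Returning true lets the TextureView release the SurfaceTexture itself.
    return true;
}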
use of android.view.Surface in project Telecine by JakeWharton.
the class RecordingSession method startRecording.
private void startRecording() {
    Timber.d("Starting screen recording...");
    if (!outputRoot.exists() && !outputRoot.mkdirs()) {
        Timber.e("Unable to create output directory '%s'.", outputRoot.getAbsolutePath());
        Toast.makeText(context, "Unable to create output directory.\nCannot record screen.",
            LENGTH_SHORT).show();
        return;
    }
    RecordingInfo recordingInfo = getRecordingInfo();
    Timber.d("Recording: %s x %s @ %s", recordingInfo.width, recordingInfo.height,
        recordingInfo.density);

    recorder = new MediaRecorder();
    recorder.setVideoSource(SURFACE);
    recorder.setOutputFormat(MPEG_4);
    recorder.setVideoFrameRate(recordingInfo.frameRate);
    recorder.setVideoEncoder(H264);
    recorder.setVideoSize(recordingInfo.width, recordingInfo.height);
    recorder.setVideoEncodingBitRate(8 * 1000 * 1000);

    String outputName = fileFormat.format(new Date());
    outputFile = new File(outputRoot, outputName).getAbsolutePath();
    Timber.i("Output file '%s'.", outputFile);
    recorder.setOutputFile(outputFile);

    try {
        recorder.prepare();
    } catch (IOException e) {
        throw new RuntimeException("Unable to prepare MediaRecorder.", e);
    }

    projection = projectionManager.getMediaProjection(resultCode, data);
    Surface surface = recorder.getSurface();
    display = projection.createVirtualDisplay(DISPLAY_NAME, recordingInfo.width,
        recordingInfo.height, recordingInfo.density, VIRTUAL_DISPLAY_FLAG_PRESENTATION,
        surface, null, null);

    recorder.start();
    running = true;
    recordingStartNanos = System.nanoTime();
    listener.onStart();

    Timber.d("Screen recording started.");
    analytics.send(new HitBuilders.EventBuilder()
        .setCategory(Analytics.CATEGORY_RECORDING)
        .setAction(Analytics.ACTION_RECORDING_START)
        .build());
}
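A session like this also needs a teardown that releases the projection resources in a safe order. A minimal sketch follows; the method name and exact step ordering are assumptions, not Telecine's code.
private void stopRecording() {
    // Stop producing frames first so everything is flushed to the recorder.
    projection.stop();   // ends the MediaProjection session
    recorder.stop();     // finalizes the MPEG-4 output file
    recorder.release();
    display.release();   // frees the VirtualDisplay feeding the recorder surface
    running = false;
}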
use of android.view.Surface in project AndroidDevelop by 7449.
the class Camera2 method startCaptureSession.
/**
 * <p>Starts a capture session for camera preview.</p>
 * <p>This rewrites {@link #mPreviewRequestBuilder}.</p>
 * <p>The result will be continuously processed in {@link #mSessionCallback}.</p>
 */
void startCaptureSession() {
    if (!isCameraOpened() || !mPreview.isReady() || mImageReader == null) {
        return;
    }
    Size previewSize = chooseOptimalSize();
    mPreview.setBufferSize(previewSize.getWidth(), previewSize.getHeight());
    Surface surface = mPreview.getSurface();
    try {
        mPreviewRequestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);
        mCamera.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                mSessionCallback, null);
    } catch (CameraAccessException e) {
        // Chain the cause so the underlying camera error is not lost.
        throw new RuntimeException("Failed to start camera session", e);
    }
}
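For context, this is the general shape of a session callback like mSessionCallback, sketched under the assumption that it simply starts the repeating preview request; the project's actual callback may do more.
private final CameraCaptureSession.StateCallback mSessionCallback =
        new CameraCaptureSession.StateCallback() {

    @Override
    public void onConfigured(CameraCaptureSession session) {
        try {
            // Start streaming the preview request built in startCaptureSession().
            session.setRepeatingRequest(mPreviewRequestBuilder.build(), null, null);
        } catch (CameraAccessException e) {
            Log.e(TAG, "Failed to start camera preview.", e);
        }
    }

    @Override
    public void onConfigureFailed(CameraCaptureSession session) {
        Log.e(TAG, "Failed to configure capture session.");
    }
};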
use of android.view.Surface in project android_frameworks_base by DirtyUnicorns.
the class RequestThreadManager method configureOutputs.
private void configureOutputs(Collection<Pair<Surface, Size>> outputs) {
    if (DEBUG) {
        String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
        Log.d(TAG, "configureOutputs with " + outputsStr);
    }
    try {
        stopPreview();
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception in configure call: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    /*
     * Try to release the previous preview's surface texture earlier if we end up
     * using a different one; this also reduces the likelihood of getting into a deadlock
     * when disconnecting from the old previous texture at a later time.
     */
    try {
        mCamera.setPreviewTexture(/*surfaceTexture*/ null);
    } catch (IOException e) {
        Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception in configure call: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    if (mGLThreadManager != null) {
        mGLThreadManager.waitUntilStarted();
        mGLThreadManager.ignoreNewFrames();
        mGLThreadManager.waitUntilIdle();
    }
    resetJpegSurfaceFormats(mCallbackOutputs);
    for (Surface s : mCallbackOutputs) {
        try {
            LegacyCameraDevice.disconnectSurface(s);
        } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
            Log.w(TAG, "Surface abandoned, skipping...", e);
        }
    }
    mPreviewOutputs.clear();
    mCallbackOutputs.clear();
    mJpegSurfaceIds.clear();
    mPreviewTexture = null;
    List<Size> previewOutputSizes = new ArrayList<>();
    List<Size> callbackOutputSizes = new ArrayList<>();
    int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
    int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    if (outputs != null) {
        for (Pair<Surface, Size> outPair : outputs) {
            Surface s = outPair.first;
            Size outSize = outPair.second;
            try {
                int format = LegacyCameraDevice.detectSurfaceType(s);
                LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
                switch (format) {
                    case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                        if (USE_BLOB_FORMAT_OVERRIDE) {
                            // Override to RGBA_8888 format.
                            LegacyCameraDevice.setSurfaceFormat(s,
                                    LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
                        }
                        mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
                        mCallbackOutputs.add(s);
                        callbackOutputSizes.add(outSize);
                        // LegacyCameraDevice is the producer of JPEG output surfaces,
                        // so LegacyCameraDevice needs to connect to the surfaces.
                        LegacyCameraDevice.connectSurface(s);
                        break;
                    default:
                        LegacyCameraDevice.setScalingMode(s,
                                LegacyCameraDevice.NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
                        mPreviewOutputs.add(s);
                        previewOutputSizes.add(outSize);
                        break;
                }
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
    }
    try {
        mParams = mCamera.getParameters();
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
    int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
    if (DEBUG) {
        Log.d(TAG, "doPreviewCapture - Selected range ["
                + bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + ","
                + bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
    }
    mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs,
            callbackOutputSizes, mParams);
    if (previewOutputSizes.size() > 0) {
        Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);
        // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
        Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);
        Size chosenJpegDimen = (smallestSupportedJpegSize != null)
                ? smallestSupportedJpegSize : largestJpegDimen;
        List<Size> supportedPreviewSizes =
                ParameterUtils.convertSizeList(mParams.getSupportedPreviewSizes());
        // Use smallest preview dimension with same aspect ratio as sensor that is >= than all
        // of the configured output dimensions. If none exists, fall back to using the largest
        // supported preview size.
        long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
        Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
        for (Size s : supportedPreviewSizes) {
            long currArea = s.getWidth() * s.getHeight();
            long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
            if (checkAspectRatiosMatch(chosenJpegDimen, s)
                    && (currArea < bestArea && currArea >= largestOutputArea)) {
                bestPreviewDimen = s;
            }
        }
        mIntermediateBufferSize = bestPreviewDimen;
        mParams.setPreviewSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        if (DEBUG) {
            Log.d(TAG, "Intermediate buffer selected with dimens: " + bestPreviewDimen.toString());
        }
    } else {
        mIntermediateBufferSize = null;
        if (DEBUG) {
            Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
        }
    }
    if (smallestSupportedJpegSize != null) {
        /*
         * Set takePicture size to the smallest supported JPEG size large enough
         * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
         */
        Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
        mParams.setPictureSize(smallestSupportedJpegSize.getWidth(),
                smallestSupportedJpegSize.getHeight());
    }
    // TODO: Detect and optimize single-output paths here to skip stream teeing.
    if (mGLThreadManager == null) {
        mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
        mGLThreadManager.start();
    }
    mGLThreadManager.waitUntilStarted();
    List<Pair<Surface, Size>> previews = new ArrayList<>();
    Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
    for (Surface p : mPreviewOutputs) {
        previews.add(new Pair<>(p, previewSizeIter.next()));
    }
    mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);
    mGLThreadManager.allowNewFrames();
    mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
    if (mPreviewTexture != null) {
        mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
    }
    try {
        mCamera.setParameters(mParams);
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception while configuring: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
    }
}
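The aspect-ratio helper referenced in the preview-size loop above is not shown in this snippet. A plausible sketch follows; the constant name ASPECT_RATIO_TOLERANCE and its value are assumptions.
private static final float ASPECT_RATIO_TOLERANCE = 0.01f;

private static boolean checkAspectRatiosMatch(Size a, Size b) {
    float aAspect = a.getWidth() / (float) a.getHeight();
    float bAspect = b.getWidth() / (float) b.getHeight();
    // Treat ratios within a small epsilon as equal, to absorb rounding
    // in the driver-reported size lists.
    return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
}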
use of android.view.Surface in project android_frameworks_base by DirtyUnicorns.
the class RequestThreadManager method createDummySurface.
/**
 * Fake preview for JPEG captures when there is no active preview.
 */
private void createDummySurface() {
    if (mDummyTexture == null || mDummySurface == null) {
        mDummyTexture = new SurfaceTexture(/*ignored*/ 0);
        // TODO: use smallest default sizes
        mDummyTexture.setDefaultBufferSize(640, 480);
        mDummySurface = new Surface(mDummyTexture);
    }
}
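The dummy texture and surface hold a buffer queue, so a matching release is expected somewhere in the teardown path. A minimal sketch, with the method name assumed rather than taken from the AOSP source:
private void releaseDummySurface() {
    if (mDummySurface != null) {
        mDummySurface.release();
        mDummySurface = null;
    }
    if (mDummyTexture != null) {
        mDummyTexture.release();
        mDummyTexture = null;
    }
}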