Use of android.view.Surface in project platform_frameworks_base by android: class MediaPlayer, method setDisplay.
/**
* Sets the {@link SurfaceHolder} to use for displaying the video
* portion of the media.
*
* Either a surface holder or surface must be set if a display or video sink
* is needed. Not calling this method or {@link #setSurface(Surface)}
* when playing back a video will result in only the audio track being played.
* A null surface holder or surface will result in only the audio track being
* played.
*
* @param sh the SurfaceHolder to use for video display
* @throws IllegalStateException if the internal player engine has not been
* initialized or has been released.
*/
public void setDisplay(SurfaceHolder sh) {
    mSurfaceHolder = sh;
    Surface surface;
    if (sh != null) {
        surface = sh.getSurface();
    } else {
        surface = null;
    }
    _setVideoSurface(surface);
    updateSurfaceScreenOn();
}
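For context, here is a minimal sketch of how setDisplay is typically driven from a SurfaceHolder.Callback. The activity class, the layout R.layout.player, and the view id R.id.video_view are hypothetical placeholders, not part of the framework source above; passing null in surfaceDestroyed matches the javadoc: with no holder or surface, only the audio track is played.

// Hypothetical sketch: bind a MediaPlayer to a SurfaceView's holder once the surface exists.
public class PlayerActivity extends Activity implements SurfaceHolder.Callback {
    private MediaPlayer mPlayer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.player);                                  // assumed layout with a SurfaceView
        SurfaceView view = (SurfaceView) findViewById(R.id.video_view);   // assumed view id
        view.getHolder().addCallback(this);
        mPlayer = new MediaPlayer();
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mPlayer.setDisplay(holder);     // hand the holder to the player for video output
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mPlayer.setDisplay(null);       // surface is gone; playback continues audio-only
    }
}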
Use of android.view.Surface in project platform_frameworks_base by android: class MediaSource, method setupMediaPlayer.
/** Creates a media player, sets it up, and calls prepare */
private synchronized boolean setupMediaPlayer(boolean useUrl) {
    mPrepared = false;
    mGotSize = false;
    mPlaying = false;
    mPaused = false;
    mCompleted = false;
    mNewFrameAvailable = false;
    if (mLogVerbose) Log.v(TAG, "Setting up playback.");
    if (mMediaPlayer != null) {
        // Clean up existing media players
        if (mLogVerbose) Log.v(TAG, "Resetting existing MediaPlayer.");
        mMediaPlayer.reset();
    } else {
        // Create new media player
        if (mLogVerbose) Log.v(TAG, "Creating new MediaPlayer.");
        mMediaPlayer = new MediaPlayer();
    }
    if (mMediaPlayer == null) {
        throw new RuntimeException("Unable to create a MediaPlayer!");
    }
    // Set up data sources, etc
    try {
        if (useUrl) {
            if (mLogVerbose) Log.v(TAG, "Setting MediaPlayer source to URI " + mSourceUrl);
            if (mContext == null) {
                mMediaPlayer.setDataSource(mSourceUrl);
            } else {
                mMediaPlayer.setDataSource(mContext, Uri.parse(mSourceUrl.toString()));
            }
        } else {
            if (mLogVerbose) Log.v(TAG, "Setting MediaPlayer source to asset " + mSourceAsset);
            mMediaPlayer.setDataSource(mSourceAsset.getFileDescriptor(),
                    mSourceAsset.getStartOffset(), mSourceAsset.getLength());
        }
    } catch (IOException e) {
        mMediaPlayer.release();
        mMediaPlayer = null;
        if (useUrl) {
            throw new RuntimeException(String.format("Unable to set MediaPlayer to URL %s!", mSourceUrl), e);
        } else {
            throw new RuntimeException(String.format("Unable to set MediaPlayer to asset %s!", mSourceAsset), e);
        }
    } catch (IllegalArgumentException e) {
        mMediaPlayer.release();
        mMediaPlayer = null;
        if (useUrl) {
            throw new RuntimeException(String.format("Unable to set MediaPlayer to URL %s!", mSourceUrl), e);
        } else {
            throw new RuntimeException(String.format("Unable to set MediaPlayer to asset %s!", mSourceAsset), e);
        }
    }
    mMediaPlayer.setLooping(mLooping);
    mMediaPlayer.setVolume(mVolume, mVolume);
    // Bind it to our media frame
    Surface surface = new Surface(mSurfaceTexture);
    mMediaPlayer.setSurface(surface);
    surface.release();
    // Connect Media Player to callbacks
    mMediaPlayer.setOnVideoSizeChangedListener(onVideoSizeChangedListener);
    mMediaPlayer.setOnPreparedListener(onPreparedListener);
    mMediaPlayer.setOnCompletionListener(onCompletionListener);
    // Connect SurfaceTexture to callback
    mSurfaceTexture.setOnFrameAvailableListener(onMediaFrameAvailableListener);
    if (mLogVerbose) Log.v(TAG, "Preparing MediaPlayer.");
    mMediaPlayer.prepareAsync();
    return true;
}
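The Surface-handling core of the method above condenses to a short pattern: wrap the SurfaceTexture in a Surface, hand it to the player, and release the local reference, since the player keeps its own. The following is a hypothetical sketch of just that pattern; the method name playIntoTexture and its arguments are illustrative assumptions, not framework code.

// Hypothetical sketch: route MediaPlayer video frames into a SurfaceTexture.
static MediaPlayer playIntoTexture(SurfaceTexture texture, String path) throws IOException {
    Surface surface = new Surface(texture);   // wrap the texture in a producer-side Surface
    MediaPlayer player = new MediaPlayer();
    player.setDataSource(path);               // path is a hypothetical local file or URL
    player.setSurface(surface);
    surface.release();                        // the player now holds its own reference
    player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            mp.start();
        }
    });
    player.prepareAsync();
    return player;
}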
Use of android.view.Surface in project platform_frameworks_base by android: class EGLImpl, method eglCreateWindowSurface.
public EGLSurface eglCreateWindowSurface(EGLDisplay display, EGLConfig config,
        Object native_window, int[] attrib_list) {
    Surface sur = null;
    if (native_window instanceof SurfaceView) {
        SurfaceView surfaceView = (SurfaceView) native_window;
        sur = surfaceView.getHolder().getSurface();
    } else if (native_window instanceof SurfaceHolder) {
        SurfaceHolder holder = (SurfaceHolder) native_window;
        sur = holder.getSurface();
    } else if (native_window instanceof Surface) {
        sur = (Surface) native_window;
    }
    long eglSurfaceId;
    if (sur != null) {
        eglSurfaceId = _eglCreateWindowSurface(display, config, sur, attrib_list);
    } else if (native_window instanceof SurfaceTexture) {
        eglSurfaceId = _eglCreateWindowSurfaceTexture(display, config, native_window, attrib_list);
    } else {
        throw new java.lang.UnsupportedOperationException(
                "eglCreateWindowSurface() can only be called with an instance of "
                + "Surface, SurfaceView, SurfaceHolder or SurfaceTexture at the moment.");
    }
    if (eglSurfaceId == 0) {
        return EGL10.EGL_NO_SURFACE;
    }
    return new EGLSurfaceImpl(eglSurfaceId);
}
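As a rough usage sketch (not taken from the framework source), the implementation above is what runs underneath a call like the following, here with a SurfaceHolder as the native window; the config attributes are assumptions and error checking is kept minimal.

// Hypothetical sketch: create an EGL window surface backed by a SurfaceHolder.
static EGLSurface createWindowSurface(SurfaceHolder surfaceHolder) {
    EGL10 egl = (EGL10) EGLContext.getEGL();
    EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
    egl.eglInitialize(display, new int[2]);

    // Ask for any config that can back an on-screen window surface (assumed attribute list).
    int[] configSpec = { EGL10.EGL_SURFACE_TYPE, EGL10.EGL_WINDOW_BIT, EGL10.EGL_NONE };
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    egl.eglChooseConfig(display, configSpec, configs, 1, numConfigs);

    // A SurfaceHolder argument lands in the "native_window instanceof SurfaceHolder" branch above.
    EGLSurface surface = egl.eglCreateWindowSurface(display, configs[0], surfaceHolder, null);
    if (surface == EGL10.EGL_NO_SURFACE) {
        throw new RuntimeException("eglCreateWindowSurface failed");
    }
    return surface;
}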
Use of android.view.Surface in project platform_frameworks_base by android: class Camera2Source, method onOpen.
@Override
protected void onOpen() {
    mLooperThread = new CameraTestThread();
    Handler mHandler;
    try {
        mHandler = mLooperThread.start();
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    try {
        String backCameraId = "0";
        BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
        mCamera = blkManager.openCamera(backCameraId, /*listener*/ null, mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } catch (BlockingOpenException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
    rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS, ele);
    Type.Builder yuvBuilder = new Type.Builder(mRS, ele);
    yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
    yuvBuilder.setX(mWidth);
    yuvBuilder.setY(mHeight);
    mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(),
            Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
    mSurface = mAllocationIn.getSurface();
    mAllocationIn.setOnBufferAvailableListener(this);
    rgbConverter.setInput(mAllocationIn);
    mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);
    Log.v(TAG, "mcamera: " + mCamera);
    List<Surface> surfaces = new ArrayList<Surface>();
    surfaces.add(mSurface);
    CaptureRequest.Builder mCaptureRequest = null;
    try {
        BlockingSessionCallback blkSession = new BlockingSessionCallback();
        mCamera.createCaptureSession(surfaces, blkSession, mHandler);
        mCaptureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mCaptureRequest.addTarget(mSurface);
        mCameraSession = blkSession.waitAndGetSession(SESSION_TIMEOUT_MS);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    try {
        mCameraSession.setRepeatingRequest(mCaptureRequest.build(), new MyCaptureCallback(), mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    mProperties = null;
    try {
        mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}
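The Surface plumbing in onOpen reduces to one idea: a YUV Allocation created with USAGE_IO_INPUT exposes a Surface via getSurface(), and that Surface becomes the camera's output target. The following is a hypothetical sketch of that idea only; the method name startPreviewIntoAllocation and its parameters are illustrative, and the blocking test helpers used above are replaced with a plain CameraCaptureSession.StateCallback.

// Hypothetical sketch: use a RenderScript Allocation's Surface as a camera preview target.
static void startPreviewIntoAllocation(RenderScript rs, CameraDevice camera,
        int width, int height, final Handler handler) throws CameraAccessException {
    Element yuvElement = Element.createPixel(rs, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
    Type.Builder yuvType = new Type.Builder(rs, yuvElement)
            .setYuvFormat(ImageFormat.YUV_420_888).setX(width).setY(height);
    Allocation input = Allocation.createTyped(rs, yuvType.create(),
            Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
    final Surface target = input.getSurface();     // camera frames will land in this Surface

    final CaptureRequest.Builder request = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    request.addTarget(target);
    camera.createCaptureSession(Arrays.asList(target),
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    try {
                        session.setRepeatingRequest(request.build(), null, handler);
                    } catch (CameraAccessException e) {
                        throw new RuntimeException(e);
                    }
                }

                @Override
                public void onConfigureFailed(CameraCaptureSession session) { }
            }, handler);
}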
Use of android.view.Surface in project platform_frameworks_base by android: class DisplaySinkService, method updateSurfaceFromUi.
private void updateSurfaceFromUi(SurfaceHolder holder) {
    Surface surface = null;
    int width = 0, height = 0;
    if (holder != null && !holder.isCreating()) {
        surface = holder.getSurface();
        if (surface.isValid()) {
            final Rect frame = holder.getSurfaceFrame();
            width = frame.width();
            height = frame.height();
        } else {
            surface = null;
        }
    }
    synchronized (mSurfaceAndCodecLock) {
        if (mSurface == surface && mSurfaceWidth == width && mSurfaceHeight == height) {
            return;
        }
        mSurface = surface;
        mSurfaceWidth = width;
        mSurfaceHeight = height;
        if (mCodec != null) {
            mCodec.stop();
            mCodec = null;
            mCodecInputBuffers = null;
            mCodecBufferInfo = null;
        }
        if (mSurface != null) {
            MediaFormat format = MediaFormat.createVideoFormat("video/avc", mSurfaceWidth, mSurfaceHeight);
            try {
                mCodec = MediaCodec.createDecoderByType("video/avc");
            } catch (IOException e) {
                throw new RuntimeException("failed to create video/avc decoder", e);
            }
            mCodec.configure(format, mSurface, null, 0);
            mCodec.start();
            mCodecBufferInfo = new BufferInfo();
        }
        mTransportHandler.post(new Runnable() {
            @Override
            public void run() {
                sendSinkStatus();
            }
        });
    }
}
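The decoder setup inside the synchronized block follows the standard MediaCodec-to-Surface pattern: when a Surface is passed to configure(), decoded frames are rendered directly into it and no output buffers need to be copied. A minimal, hypothetical helper capturing just that pattern might look like this; the method name createSurfaceDecoder is an assumption.

// Hypothetical sketch: configure an H.264 decoder that renders straight into a Surface.
static MediaCodec createSurfaceDecoder(Surface surface, int width, int height) throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
    MediaCodec codec = MediaCodec.createDecoderByType("video/avc");
    codec.configure(format, surface, null, 0);   // output is rendered to the Surface, not to buffers
    codec.start();
    return codec;                                // caller is responsible for stop()/release()
}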