Use of android.view.Surface in project robolectric by robolectric.
The class ShadowMediaRecorderTest, method testPreviewDisplay.
@Test
public void testPreviewDisplay() throws Exception {
    assertThat(shadowMediaRecorder.getState()).isNotEqualTo(ShadowMediaRecorder.STATE_DATA_SOURCE_CONFIGURED);
    assertThat(shadowMediaRecorder.getPreviewDisplay()).isNull();
    Surface surface = Shadow.newInstanceOf(Surface.class);
    mediaRecorder.setPreviewDisplay(surface);
    assertThat(shadowMediaRecorder.getPreviewDisplay()).isNotNull();
    assertThat(shadowMediaRecorder.getPreviewDisplay()).isSameAs(surface);
    assertThat(shadowMediaRecorder.getState()).isEqualTo(ShadowMediaRecorder.STATE_DATA_SOURCE_CONFIGURED);
}
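The test relies on mediaRecorder and shadowMediaRecorder fields prepared elsewhere in the test class; a minimal setup sketch, assuming the shadow is obtained through Robolectric's Shadows.shadowOf (not copied from the project):

private MediaRecorder mediaRecorder;
private ShadowMediaRecorder shadowMediaRecorder;

@Before
public void setUp() {
    // Assumption: a real MediaRecorder is instantiated and its shadow extracted from it.
    mediaRecorder = new MediaRecorder();
    shadowMediaRecorder = Shadows.shadowOf(mediaRecorder);
}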
Use of android.view.Surface in project VideoPlayerManager by danylovolokh.
The class MediaPlayerWrapper, method setSurfaceTexture.
public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
    if (SHOW_LOGS)
        Logger.v(TAG, ">> setSurfaceTexture " + surfaceTexture);
    if (SHOW_LOGS)
        Logger.v(TAG, "setSurfaceTexture mSurface " + mSurface);
    if (surfaceTexture != null) {
        mSurface = new Surface(surfaceTexture);
        // TODO fix illegal state exception
        mMediaPlayer.setSurface(mSurface);
    } else {
        mMediaPlayer.setSurface(null);
    }
    if (SHOW_LOGS)
        Logger.v(TAG, "<< setSurfaceTexture " + surfaceTexture);
}
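In typical use this setter is driven by a TextureView's surface callbacks; a sketch of that wiring, assuming an mTextureView and an mMediaPlayerWrapper field (both names are illustrative, not taken from the project):

mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        // Hand the freshly created texture to the wrapper so it can build a Surface from it.
        mMediaPlayerWrapper.setSurfaceTexture(surface);
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        // No-op in this sketch.
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        // Detach the surface so the player stops rendering into a dead texture.
        mMediaPlayerWrapper.setSurfaceTexture(null);
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // No-op.
    }
});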
Use of android.view.Surface in project Android-Developers-Samples by johnjohndoe.
The class MainActivity, method startPlayback.
public void startPlayback() {
    // Construct a URI that points to the video resource that we want to play
    Uri videoUri = Uri.parse("android.resource://" + getPackageName() + "/" + R.raw.vid_bigbuckbunny);
    try {
        // BEGIN_INCLUDE(initialize_extractor)
        mExtractor.setDataSource(this, videoUri, null);
        int nTracks = mExtractor.getTrackCount();
        // Begin by unselecting all of the tracks in the extractor, so we won't see
        // any tracks that we haven't explicitly selected.
        for (int i = 0; i < nTracks; ++i) {
            mExtractor.unselectTrack(i);
        }
        // Find the first video track in the stream. A real stream may contain several
        // tracks, but this sample assumes that we just want to play the first one.
        for (int i = 0; i < nTracks; ++i) {
            // Try to create a video codec for this track. This call will return null if the
            // track is not a video track, or not a recognized video format. Once it returns
            // a valid MediaCodecWrapper, we can break out of the loop.
            mCodecWrapper = MediaCodecWrapper.fromVideoFormat(mExtractor.getTrackFormat(i), new Surface(mPlaybackView.getSurfaceTexture()));
            if (mCodecWrapper != null) {
                mExtractor.selectTrack(i);
                break;
            }
        }
        // END_INCLUDE(initialize_extractor)

        // By using a {@link TimeAnimator}, we can sync our media rendering commands with
        // the system display frame rendering. The animator ticks as the {@link Choreographer}
        // receives VSYNC events.
        mTimeAnimator.setTimeListener(new TimeAnimator.TimeListener() {
            @Override
            public void onTimeUpdate(final TimeAnimator animation, final long totalTime, final long deltaTime) {
                boolean isEos = ((mExtractor.getSampleFlags() & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                // BEGIN_INCLUDE(write_sample)
                if (!isEos) {
                    // Try to submit the sample to the codec and if successful advance the
                    // extractor to the next available sample to read.
                    boolean result = mCodecWrapper.writeSample(mExtractor, false, mExtractor.getSampleTime(), mExtractor.getSampleFlags());
                    if (result) {
                        // Advancing the extractor is a blocking operation and it MUST be
                        // executed outside the main thread in real applications.
                        mExtractor.advance();
                    }
                }
                // END_INCLUDE(write_sample)

                // Examine the sample at the head of the queue to see if it's ready to be
                // rendered and is not a zero-sized end-of-stream record.
                MediaCodec.BufferInfo out_bufferInfo = new MediaCodec.BufferInfo();
                mCodecWrapper.peekSample(out_bufferInfo);
                // BEGIN_INCLUDE(render_sample)
                if (out_bufferInfo.size <= 0 && isEos) {
                    mTimeAnimator.end();
                    mCodecWrapper.stopAndRelease();
                    mExtractor.release();
                } else if (out_bufferInfo.presentationTimeUs / 1000 < totalTime) {
                    // Pop the sample off the queue and send it to {@link Surface}
                    mCodecWrapper.popSample(true);
                }
                // END_INCLUDE(render_sample)
            }
        });

        // We're all set. Kick off the animator to process buffers and render video frames as
        // they become available
        mTimeAnimator.start();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
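MediaCodecWrapper is a helper class from the sample; conceptually, creating it from a video track boils down to configuring a decoder that renders onto the TextureView's Surface. A rough sketch of that idea for the track index i from the loop above, under the assumption that fromVideoFormat filters by MIME type (error handling omitted):

MediaFormat format = mExtractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime != null && mime.startsWith("video/")) {
    // Decode directly into the TextureView's SurfaceTexture; no output buffer copies needed.
    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, new Surface(mPlaybackView.getSurfaceTexture()), null, 0);
    codec.start();
}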
Use of android.view.Surface in project material-camera by afollestad.
The class Camera2Fragment, method startPreview.
private void startPreview() {
    if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize)
        return;
    try {
        if (!mInterface.useStillshot()) {
            if (!setUpMediaRecorder()) {
                return;
            }
        }
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

        List<Surface> surfaces = new ArrayList<>();
        Surface previewSurface = new Surface(texture);
        surfaces.add(previewSurface);

        if (mInterface.useStillshot()) {
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewBuilder.addTarget(previewSurface);
            surfaces.add(mImageReader.getSurface());
        } else {
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            mPreviewBuilder.addTarget(previewSurface);
            Surface recorderSurface = mMediaRecorder.getSurface();
            surfaces.add(recorderSurface);
            mPreviewBuilder.addTarget(recorderSurface);
        }

        mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                if (mCameraDevice == null) {
                    return;
                }
                mPreviewSession = cameraCaptureSession;
                updatePreview();
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                throwError(new Exception("Camera configuration failed"));
            }
        }, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
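onConfigured hands the configured session to updatePreview(); in the usual Camera2 pattern that method enables auto control on the builder and starts a repeating request. A hedged sketch of such a method, not the project's exact implementation:

private void updatePreview() {
    if (mCameraDevice == null) return;
    try {
        // Assumption: auto exposure/focus via CONTROL_MODE_AUTO, frames delivered continuously
        // to the preview (and, when recording, the recorder) surfaces added to the builder.
        mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}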
Use of android.view.Surface in project cornerstone by Onskreen.
The class WindowState, method createSurfaceLocked.
Surface createSurfaceLocked() {
    if (mSurface == null) {
        mReportDestroySurface = false;
        mSurfacePendingDestroy = false;
        if (WindowManagerService.DEBUG_ORIENTATION)
            Slog.i(WindowManagerService.TAG, "createSurface " + this + ": DRAW NOW PENDING");
        mDrawPending = true;
        mCommitDrawPending = false;
        mReadyToShow = false;
        if (mAppToken != null) {
            mAppToken.allDrawn = false;
        }
        mService.makeWindowFreezingScreenIfNeededLocked(this);

        int flags = 0;
        if ((mAttrs.flags & WindowManager.LayoutParams.FLAG_SECURE) != 0) {
            flags |= Surface.SECURE;
        }
        if (DEBUG_VISIBILITY)
            Slog.v(WindowManagerService.TAG, "Creating surface in session " + mSession.mSurfaceSession + " window " + this + " w=" + mCompatFrame.width() + " h=" + mCompatFrame.height() + " format=" + mAttrs.format + " flags=" + flags);

        int w = mCompatFrame.width();
        int h = mCompatFrame.height();
        if ((mAttrs.flags & LayoutParams.FLAG_SCALED) != 0) {
            // for a scaled surface, we always want the requested
            // size.
            w = mRequestedWidth;
            h = mRequestedHeight;
        }
        // try to revert to sane values
        if (w <= 0)
            w = 1;
        if (h <= 0)
            h = 1;

        mSurfaceShown = false;
        mSurfaceLayer = 0;
        mSurfaceAlpha = 1;
        mSurfaceX = 0;
        mSurfaceY = 0;
        mSurfaceW = w;
        mSurfaceH = h;
        try {
            final boolean isHwAccelerated = (mAttrs.flags & WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED) != 0;
            final int format = isHwAccelerated ? PixelFormat.TRANSLUCENT : mAttrs.format;
            if (!PixelFormat.formatHasAlpha(mAttrs.format)) {
                flags |= Surface.OPAQUE;
            }
            mSurface = new Surface(mSession.mSurfaceSession, mSession.mPid, mAttrs.getTitle().toString(), 0, w, h, format, flags);
            if (SHOW_TRANSACTIONS || SHOW_SURFACE_ALLOC)
                Slog.i(WindowManagerService.TAG, "  CREATE SURFACE " + mSurface + " IN SESSION " + mSession.mSurfaceSession + ": pid=" + mSession.mPid + " format=" + mAttrs.format + " flags=0x" + Integer.toHexString(flags) + " / " + this);
        } catch (Surface.OutOfResourcesException e) {
            Slog.w(WindowManagerService.TAG, "OutOfResourcesException creating surface");
            mService.reclaimSomeSurfaceMemoryLocked(this, "create", true);
            return null;
        } catch (Exception e) {
            Slog.e(WindowManagerService.TAG, "Exception creating surface", e);
            return null;
        }

        if (WindowManagerService.localLOGV)
            Slog.v(WindowManagerService.TAG, "Got surface: " + mSurface + ", set left=" + mFrame.left + " top=" + mFrame.top + ", animLayer=" + mAnimLayer);
        if (SHOW_LIGHT_TRANSACTIONS) {
            Slog.i(WindowManagerService.TAG, ">>> OPEN TRANSACTION createSurfaceLocked");
            WindowManagerService.logSurface(this, "CREATE pos=(" + mFrame.left + "," + mFrame.top + ") (" + mCompatFrame.width() + "x" + mCompatFrame.height() + "), layer=" + mAnimLayer + " HIDE", null);
        }

        Surface.openTransaction();
        try {
            try {
                mSurfaceX = mFrame.left + mXOffset;
                mSurfaceY = mFrame.top + mYOffset;
                mSurface.setPosition(mSurfaceX, mSurfaceY);
                mSurfaceLayer = mAnimLayer;
                mSurface.setLayer(mAnimLayer);
                mSurfaceShown = false;
                mSurface.hide();
                if ((mAttrs.flags & WindowManager.LayoutParams.FLAG_DITHER) != 0) {
                    if (SHOW_TRANSACTIONS)
                        WindowManagerService.logSurface(this, "DITHER", null);
                    mSurface.setFlags(Surface.SURFACE_DITHER, Surface.SURFACE_DITHER);
                }
            } catch (RuntimeException e) {
                Slog.w(WindowManagerService.TAG, "Error creating surface in " + w, e);
                mService.reclaimSomeSurfaceMemoryLocked(this, "create-init", true);
            }
            mLastHidden = true;
        } finally {
            Surface.closeTransaction();
            if (SHOW_LIGHT_TRANSACTIONS)
                Slog.i(WindowManagerService.TAG, "<<< CLOSE TRANSACTION createSurfaceLocked");
        }

        if (WindowManagerService.localLOGV)
            Slog.v(WindowManagerService.TAG, "Created surface " + this);
    }
    return mSurface;
}