
Example 1 with VideoEncoderCap

Use of android.media.EncoderCapabilities.VideoEncoderCap in the project android_frameworks_base by ParanoidAndroid.

From the class MediaRecorderTest, the method testDeviceSpecificCodec:

// Test cases for the new codec.
@LargeTest
public void testDeviceSpecificCodec() throws Exception {
    int noOfFailure = 0;
    boolean recordSuccess = false;
    String deviceType = MediaProfileReader.getDeviceType();
    Log.v(TAG, "deviceType = " + deviceType);
    List<VideoEncoderCap> videoEncoders = MediaProfileReader.getVideoEncoders();
    List<AudioEncoderCap> audioEncoders = MediaProfileReader.getAudioEncoders();
    for (int k = 0; k < 2; k++) {
        for (VideoEncoderCap videoEncoder : videoEncoders) {
            for (AudioEncoderCap audioEncoder : audioEncoders) {
                if (k == 0) {
                    recordSuccess = recordVideoWithPara(videoEncoder, audioEncoder, true);
                } else {
                    recordSuccess = recordVideoWithPara(videoEncoder, audioEncoder, false);
                }
                if (!recordSuccess) {
                    Log.v(TAG, "testDeviceSpecificCodec failed");
                    Log.v(TAG, "Encoder = " + videoEncoder.mCodec + ", Audio Encoder = " + audioEncoder.mCodec);
                    noOfFailure++;
                }
            }
        }
    }
    assertEquals("Number of testDeviceSpecificCodec failures", 0, noOfFailure);
}
Also used : AudioEncoderCap(android.media.EncoderCapabilities.AudioEncoderCap) VideoEncoderCap(android.media.EncoderCapabilities.VideoEncoderCap) LargeTest(android.test.suitebuilder.annotation.LargeTest)
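
Note that EncoderCapabilities is a hidden framework API (it is not part of the public SDK), so code like the test above can only reach it from platform sources; MediaProfileReader is a test-side helper that wraps it. Below is a minimal sketch of how the capability lists could be enumerated, assuming access to that hidden API and using only the getters and fields that appear in these examples (getAudioEncoders() is assumed to mirror getVideoEncoders(); the class name and log tag are placeholders).

import android.media.EncoderCapabilities;
import android.media.EncoderCapabilities.AudioEncoderCap;
import android.media.EncoderCapabilities.VideoEncoderCap;
import android.util.Log;

import java.util.List;

public class EncoderCapDump {
    private static final String TAG = "EncoderCapDump"; // placeholder tag

    // Logs the frame-size range advertised for each video encoder and the
    // codec id of each audio encoder. Only compiles against platform sources,
    // since EncoderCapabilities is not exposed in the public SDK.
    public static void dump() {
        List<VideoEncoderCap> videoEncoders = EncoderCapabilities.getVideoEncoders();
        List<AudioEncoderCap> audioEncoders = EncoderCapabilities.getAudioEncoders();
        for (VideoEncoderCap v : videoEncoders) {
            Log.v(TAG, "video codec " + v.mCodec
                    + ": frame size " + v.mMinFrameWidth + "x" + v.mMinFrameHeight
                    + " to " + v.mMaxFrameWidth + "x" + v.mMaxFrameHeight);
        }
        for (AudioEncoderCap a : audioEncoders) {
            Log.v(TAG, "audio codec " + a.mCodec);
        }
    }
}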

Example 2 with VideoEncoderCap

Use of android.media.EncoderCapabilities.VideoEncoderCap in the project android_frameworks_base by ResurrectionRemix.

From the class MediaRecorderTest, the method testDeviceSpecificCodec:

The test method body is identical to the one shown in Example 1.
Also used : AudioEncoderCap(android.media.EncoderCapabilities.AudioEncoderCap) Paint(android.graphics.Paint) VideoEncoderCap(android.media.EncoderCapabilities.VideoEncoderCap) LargeTest(android.test.suitebuilder.annotation.LargeTest)

Example 3 with VideoEncoderCap

Use of android.media.EncoderCapabilities.VideoEncoderCap in the project android_frameworks_base by AOSPA.

From the class MediaRecorderTest, the method testDeviceSpecificCodec:

The test method body is identical to the one shown in Example 1.
Also used : AudioEncoderCap(android.media.EncoderCapabilities.AudioEncoderCap) Paint(android.graphics.Paint) VideoEncoderCap(android.media.EncoderCapabilities.VideoEncoderCap) LargeTest(android.test.suitebuilder.annotation.LargeTest)

Example 4 with VideoEncoderCap

Use of android.media.EncoderCapabilities.VideoEncoderCap in the project android_frameworks_base by crdroidandroid.

From the class MediaRecorderTest, the method testDeviceSpecificCodec:

The test method body is identical to the one shown in Example 1.
Also used : AudioEncoderCap(android.media.EncoderCapabilities.AudioEncoderCap) Paint(android.graphics.Paint) VideoEncoderCap(android.media.EncoderCapabilities.VideoEncoderCap) LargeTest(android.test.suitebuilder.annotation.LargeTest)

Example 5 with VideoEncoderCap

Use of android.media.EncoderCapabilities.VideoEncoderCap in the project android_packages_apps_Snap by LineageOS.

From the class VideoModule, the method initializeRecorder:

// Prepares media recorder.
private void initializeRecorder() {
    Log.v(TAG, "initializeRecorder");
    // If the mCameraDevice is null, then this activity is going to finish
    if (mCameraDevice == null)
        return;
    if (!ApiHelper.HAS_SURFACE_TEXTURE_RECORDING) {
        // Set the SurfaceView to visible so the surface gets created.
        // surfaceCreated() is called immediately when the visibility is
        // changed to visible. Thus, mSurfaceViewReady should become true
        // right after calling setVisibility().
        mUI.showSurfaceView();
    }
    Intent intent = mActivity.getIntent();
    Bundle myExtras = intent.getExtras();
    videoWidth = mProfile.videoFrameWidth;
    videoHeight = mProfile.videoFrameHeight;
    mUnsupportedResolution = false;
    // check if codec supports the resolution, otherwise throw toast
    List<VideoEncoderCap> videoEncoders = EncoderCapabilities.getVideoEncoders();
    for (VideoEncoderCap videoEncoder : videoEncoders) {
        if (videoEncoder.mCodec == mVideoEncoder) {
            if (videoWidth > videoEncoder.mMaxFrameWidth
                    || videoWidth < videoEncoder.mMinFrameWidth
                    || videoHeight > videoEncoder.mMaxFrameHeight
                    || videoHeight < videoEncoder.mMinFrameHeight) {
                Log.e(TAG, "Selected codec " + mVideoEncoder + " does not support "
                        + videoWidth + "x" + videoHeight + " resolution");
                Log.e(TAG, "Codec capabilities: "
                        + "mMinFrameWidth = " + videoEncoder.mMinFrameWidth + " , "
                        + "mMinFrameHeight = " + videoEncoder.mMinFrameHeight + " , "
                        + "mMaxFrameWidth = " + videoEncoder.mMaxFrameWidth + " , "
                        + "mMaxFrameHeight = " + videoEncoder.mMaxFrameHeight);
                mUnsupportedResolution = true;
                RotateTextToast.makeText(mActivity, R.string.error_app_unsupported, Toast.LENGTH_LONG).show();
                return;
            }
            break;
        }
    }
    long requestedSizeLimit = 0;
    closeVideoFileDescriptor();
    mCurrentVideoUriFromMediaSaved = false;
    if (mIsVideoCaptureIntent && myExtras != null) {
        Uri saveUri = (Uri) myExtras.getParcelable(MediaStore.EXTRA_OUTPUT);
        if (saveUri != null) {
            try {
                mVideoFileDescriptor = mContentResolver.openFileDescriptor(saveUri, "rw");
                mCurrentVideoUri = saveUri;
            } catch (java.io.FileNotFoundException ex) {
                // invalid uri
                Log.e(TAG, ex.toString());
            }
        }
        requestedSizeLimit = myExtras.getLong(MediaStore.EXTRA_SIZE_LIMIT);
    }
    mMediaRecorder = new MediaRecorder();
    // Unlock the camera object before passing it to media recorder.
    mCameraDevice.unlock();
    mMediaRecorder.setCamera(mCameraDevice.getCamera());
    String hfr = ParametersWrapper.getVideoHighFrameRate(mParameters);
    String hsr = mParameters.get(CameraSettings.KEY_VIDEO_HSR);
    Log.i(TAG, "NOTE: hfr = " + hfr + " : hsr = " + hsr);
    int captureRate = 0;
    boolean isHFR = (hfr != null && !hfr.equals("off"));
    boolean isHSR = (hsr != null && !hsr.equals("off"));
    try {
        captureRate = isHFR ? Integer.parseInt(hfr) : isHSR ? Integer.parseInt(hsr) : 0;
    } catch (NumberFormatException nfe) {
        Log.e(TAG, "Invalid hfr(" + hfr + ") or hsr(" + hsr + ")");
    }
    mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    mProfile.videoCodec = mVideoEncoder;
    mProfile.audioCodec = mAudioEncoder;
    mProfile.duration = mMaxVideoDurationInMs;
    if ((mProfile.audioCodec == MediaRecorder.AudioEncoder.AMR_NB) && !mCaptureTimeLapse && !isHFR) {
        mProfile.fileFormat = MediaRecorder.OutputFormat.THREE_GPP;
    }
    // Set params individually for HFR case, as we do not want to encode audio
    if ((isHFR || isHSR) && captureRate > 0) {
        if (isHSR) {
            Log.i(TAG, "Enabling audio for HSR");
            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        }
        mMediaRecorder.setOutputFormat(mProfile.fileFormat);
        mMediaRecorder.setVideoFrameRate(mProfile.videoFrameRate);
        mMediaRecorder.setVideoEncodingBitRate(mProfile.videoBitRate * ((isHSR ? captureRate : 30) / 30));
        mMediaRecorder.setVideoEncoder(mProfile.videoCodec);
        if (isHSR) {
            Log.i(TAG, "Configuring audio for HSR");
            mMediaRecorder.setAudioEncodingBitRate(mProfile.audioBitRate);
            mMediaRecorder.setAudioChannels(mProfile.audioChannels);
            mMediaRecorder.setAudioSamplingRate(mProfile.audioSampleRate);
            mMediaRecorder.setAudioEncoder(mProfile.audioCodec);
        }
    } else {
        if (!mCaptureTimeLapse) {
            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        }
        mMediaRecorder.setProfile(mProfile);
    }
    mMediaRecorder.setVideoSize(mProfile.videoFrameWidth, mProfile.videoFrameHeight);
    mMediaRecorder.setMaxDuration(mMaxVideoDurationInMs);
    if (mCaptureTimeLapse) {
        double fps = 1000 / (double) mTimeBetweenTimeLapseFrameCaptureMs;
        setCaptureRate(mMediaRecorder, fps);
    } else if (captureRate > 0) {
        Log.i(TAG, "Setting capture-rate = " + captureRate);
        mMediaRecorder.setCaptureRate(captureRate);
        // for HSR, encoder's target-framerate = capture-rate
        // for HFR, encoder's target-framerate = 30fps (from profile)
        int targetFrameRate = isHSR ? captureRate : isHFR ? 30 : mProfile.videoFrameRate;
        Log.i(TAG, "Setting target fps = " + targetFrameRate);
        mMediaRecorder.setVideoFrameRate(targetFrameRate);
        // The profile advertises a bitrate corresponding to its published framerate.
        // If the target framerate differs, scale the bitrate accordingly.
        int scaledBitrate = getHighSpeedVideoEncoderBitRate(mProfile, targetFrameRate);
        Log.i(TAG, "Scaled Video bitrate : " + scaledBitrate);
        if (scaledBitrate > 0) {
            mMediaRecorder.setVideoEncodingBitRate(scaledBitrate);
        } else {
            Log.e(TAG, "Cannot set video bitrate because it is not positive");
        }
    }
    setRecordLocation();
    // Set the output file. Use the file descriptor from the intent if one was provided; otherwise generate our own filename instead.
    if (mVideoFileDescriptor != null) {
        mMediaRecorder.setOutputFile(mVideoFileDescriptor.getFileDescriptor());
    } else {
        generateVideoFilename(mProfile.fileFormat);
        mMediaRecorder.setOutputFile(mVideoFilename);
    }
    // Set maximum file size.
    long maxFileSize = mActivity.getStorageSpaceBytes() - Storage.LOW_STORAGE_THRESHOLD_BYTES;
    if (requestedSizeLimit > 0 && requestedSizeLimit < maxFileSize) {
        maxFileSize = requestedSizeLimit;
    }
    if (Storage.isSaveSDCard() && maxFileSize > SDCARD_SIZE_LIMIT) {
        maxFileSize = SDCARD_SIZE_LIMIT;
    }
    try {
        mMediaRecorder.setMaxFileSize(maxFileSize);
    } catch (RuntimeException exception) {
    // We are going to ignore failure of setMaxFileSize here, as
    // a) The composer selected may simply not support it, or
    // b) The underlying media framework may not handle 64-bit range
    // on the size restriction.
    }
    // See android.hardware.Camera.Parameters.setRotation for
    // documentation.
    // Note that mOrientation here is the device orientation, which is the opposite of
    // what activity.getWindowManager().getDefaultDisplay().getRotation() would return,
    // which is the orientation the graphics need to rotate in order to render correctly.
    int rotation = 0;
    if (mOrientation != OrientationEventListener.ORIENTATION_UNKNOWN) {
        CameraInfo info = CameraHolder.instance().getCameraInfo()[mCameraId];
        if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
            rotation = (info.orientation - mOrientation - mOrientationOffset + 360) % 360;
        } else {
            // back-facing camera
            rotation = (info.orientation + mOrientation + mOrientationOffset) % 360;
        }
    }
    mMediaRecorder.setOrientationHint(rotation);
    setupMediaRecorderPreviewDisplay();
    try {
        mMediaRecorder.prepare();
    } catch (IOException e) {
        Log.e(TAG, "prepare failed for " + mVideoFilename, e);
        releaseMediaRecorder();
        throw new RuntimeException(e);
    }
    mMediaRecorder.setOnErrorListener(this);
    mMediaRecorder.setOnInfoListener(this);
}
Also used : Bundle(android.os.Bundle) Intent(android.content.Intent) IOException(java.io.IOException) Uri(android.net.Uri) CameraInfo(android.hardware.Camera.CameraInfo) VideoEncoderCap(android.media.EncoderCapabilities.VideoEncoderCap) MediaRecorder(android.media.MediaRecorder)
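
The resolution check at the top of initializeRecorder can also be expressed as a standalone predicate. The following is a hedged sketch of such a helper, assuming the same hidden EncoderCapabilities API and using only the fields exercised above; isResolutionSupported is a hypothetical name, not something defined in the Snap sources.

import android.media.EncoderCapabilities;
import android.media.EncoderCapabilities.VideoEncoderCap;

public final class EncoderResolutionCheck {

    private EncoderResolutionCheck() {}

    // Returns true if the given video encoder (e.g. a MediaRecorder.VideoEncoder
    // constant) advertises support for width x height. Unlike initializeRecorder(),
    // which simply skips the check when the codec is not listed, an unlisted
    // codec is treated as unsupported here.
    public static boolean isResolutionSupported(int videoEncoder, int width, int height) {
        for (VideoEncoderCap cap : EncoderCapabilities.getVideoEncoders()) {
            if (cap.mCodec == videoEncoder) {
                return width >= cap.mMinFrameWidth && width <= cap.mMaxFrameWidth
                        && height >= cap.mMinFrameHeight && height <= cap.mMaxFrameHeight;
            }
        }
        return false;
    }
}

A caller such as the module above could then set mUnsupportedResolution = !EncoderResolutionCheck.isResolutionSupported(mVideoEncoder, videoWidth, videoHeight) in place of the inline loop.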

Aggregations

VideoEncoderCap (android.media.EncoderCapabilities.VideoEncoderCap): 11
Paint (android.graphics.Paint): 6
AudioEncoderCap (android.media.EncoderCapabilities.AudioEncoderCap): 6
LargeTest (android.test.suitebuilder.annotation.LargeTest): 6
Intent (android.content.Intent): 2
MediaRecorder (android.media.MediaRecorder): 2
Uri (android.net.Uri): 2
Bundle (android.os.Bundle): 2
IOException (java.io.IOException): 2
ArrayList (java.util.ArrayList): 2
Point (android.graphics.Point): 1
CameraInfo (android.hardware.Camera.CameraInfo): 1
Location (android.location.Location): 1
Range (android.util.Range): 1
FileNotFoundException (java.io.FileNotFoundException): 1