Example 56 with Size

Use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

From the class StreamConfigurationMap, the method appendInputsString:

private void appendInputsString(StringBuilder sb) {
    sb.append("Inputs(");
    int[] formats = getInputFormats();
    for (int format : formats) {
        Size[] sizes = getInputSizes(format);
        for (Size size : sizes) {
            sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(), size.getHeight(), formatToString(format), format));
        }
    }
    // Remove the pending ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}
Also used : Size(android.util.Size)
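
For comparison, an application reaches the same input data through the public StreamConfigurationMap accessors. A minimal sketch, assuming a CameraManager named manager, a valid cameraId, and a TAG constant (all illustrative), with CameraAccessException handling omitted:

CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// getInputFormats()/getInputSizes() expose the data appendInputsString prints.
for (int format : map.getInputFormats()) {
    for (Size size : map.getInputSizes(format)) {
        Log.d(TAG, "input format " + format + ": " + size.getWidth() + "x" + size.getHeight());
    }
}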

Example 57 with Size

Use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

From the class StreamConfigurationMap, the method getInternalFormatSizes:

private Size[] getInternalFormatSizes(int format, int dataspace, boolean output, boolean highRes) {
    // All depth formats are non-high-res.
    if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
        return new Size[0];
    }
    SparseIntArray formatsMap =
            !output ? mInputFormats :
            dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
            highRes ? mHighResOutputFormats :
            mOutputFormats;
    int sizesCount = formatsMap.get(format);
    if (((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0)
            || (output && dataspace != HAL_DATASPACE_DEPTH
                    && mAllOutputFormats.get(format) == 0)) {
        // Only throw if this is really not supported at all
        throw new IllegalArgumentException("format not available");
    }
    Size[] sizes = new Size[sizesCount];
    int sizeIndex = 0;
    StreamConfiguration[] configurations = (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
    StreamConfigurationDuration[] minFrameDurations = (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations : mMinFrameDurations;
    for (StreamConfiguration config : configurations) {
        int fmt = config.getFormat();
        if (fmt == format && config.isOutput() == output) {
            if (output && mListHighResolution) {
                // Filter slow high-res output formats; include for
                // highRes, remove for !highRes
                long duration = 0;
                for (int i = 0; i < minFrameDurations.length; i++) {
                    StreamConfigurationDuration d = minFrameDurations[i];
                    if (d.getFormat() == fmt && d.getWidth() == config.getSize().getWidth() && d.getHeight() == config.getSize().getHeight()) {
                        duration = d.getDuration();
                        break;
                    }
                }
                if (dataspace != HAL_DATASPACE_DEPTH && highRes != (duration > DURATION_20FPS_NS)) {
                    continue;
                }
            }
            sizes[sizeIndex++] = config.getSize();
        }
    }
    if (sizeIndex != sizesCount) {
        throw new AssertionError("Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
    }
    return sizes;
}
Also used : SparseIntArray(android.util.SparseIntArray) Size(android.util.Size)
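
This private method is the workhorse behind several public accessors; the highRes flag separates sizes that can sustain 20fps from the slower high-resolution ones. A caller-side sketch, assuming a StreamConfigurationMap named map is already in hand:

// Output sizes guaranteed to sustain at least 20fps for this format:
Size[] regular = map.getOutputSizes(ImageFormat.JPEG);
// Slower-than-20fps sizes, non-empty only on devices that advertise them (API 23+):
Size[] slowHighRes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);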

Example 58 with Size

Use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

From the class StreamConfigurationMap, the method appendHighSpeedVideoConfigurationsString:

private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
    sb.append("HighSpeedVideoConfigurations(");
    Size[] sizes = getHighSpeedVideoSizes();
    for (Size size : sizes) {
        Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
        for (Range<Integer> range : ranges) {
            sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(), size.getHeight(), range.getLower(), range.getUpper()));
        }
    }
    // Remove the pending ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}
Also used : Size(android.util.Size) Range(android.util.Range)
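
The size/FPS-range pairing printed here is the same data an application consults before configuring constrained high-speed capture. A short sketch, again assuming a StreamConfigurationMap named map and a TAG constant:

Size[] hsSizes = map.getHighSpeedVideoSizes();
if (hsSizes.length > 0) {
    // Pick the advertised FPS range with the highest upper bound for the first size.
    Range<Integer> best = null;
    for (Range<Integer> r : map.getHighSpeedVideoFpsRangesFor(hsSizes[0])) {
        if (best == null || r.getUpper() > best.getUpper()) {
            best = r;
        }
    }
    Log.d(TAG, "high-speed candidate: " + hsSizes[0] + " @ " + best);
}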

Example 59 with Size

Use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

From the class LegacyMetadataMapper, the method mapScalerStreamConfigs:

private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
    ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
    /*
     * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
     * YUV_420_888 cpu callbacks -> use camera1 preview sizes
     * Other preview callbacks (CPU) -> use camera1 preview sizes
     * JPEG still capture -> use camera1 still capture sizes
     *
     * Use platform-internal format constants here, since StreamConfigurationMap does the
     * remapping to public format constants.
     */
    List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
    List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
    /*
     * Work-around for b/17589233:
     * - Some HALs' largest preview size aspect ratio does not match the largest JPEG size AR
     * - This causes a large amount of problems with focus/metering because it's relative to
     *   preview, making the difference between the JPEG and preview viewport inaccessible
     * - This boils down to metering or focusing areas being "arbitrarily" cropped
     *   in the capture result.
     * - Work around the HAL limitations by removing all of the largest preview sizes
     *   until we get one with the same aspect ratio as the JPEG size.
     */
    {
        SizeAreaComparator areaComparator = new SizeAreaComparator();
        // Sort preview to min->max
        Collections.sort(previewSizes, areaComparator);
        Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
        float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
        if (DEBUG) {
            Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f", maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
        }
        // Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
        while (!previewSizes.isEmpty()) {
            // max is always at the end
            int index = previewSizes.size() - 1;
            Camera.Size size = previewSizes.get(index);
            float previewAspectRatio = size.width * 1.0f / size.height;
            if (Math.abs(jpegAspectRatio - previewAspectRatio) >= PREVIEW_ASPECT_RATIO_TOLERANCE) {
                // Assume removing from end is O(1)
                previewSizes.remove(index);
                if (DEBUG) {
                    Log.v(TAG, String.format("mapScalerStreamConfigs - removed preview size %dx%d, AR=%f " + "was not the same", size.width, size.height, previewAspectRatio));
                }
            } else {
                break;
            }
        }
        if (previewSizes.isEmpty()) {
            // Fall-back to the original faulty behavior, but at least work
            Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching " + "JPEG aspect ratio " + jpegAspectRatio);
            previewSizes = p.getSupportedPreviewSizes();
        }
        // Sort again, this time in descending order max->min
        Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
    appendStreamConfig(availableStreamConfigs, ImageFormat.YUV_420_888, previewSizes);
    for (int format : p.getSupportedPreviewFormats()) {
        if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
            appendStreamConfig(availableStreamConfigs, format, previewSizes);
        } else if (DEBUG) {
            /*
             * Do not add any formats unknown to us
             * (since it would fail runtime checks in StreamConfigurationMap)
             */
            Log.v(TAG, String.format("mapStreamConfigs - Skipping format %x", format));
        }
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_BLOB, p.getSupportedPictureSizes());
    /*
     * scaler.availableStreamConfigurations
     */
    m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS, availableStreamConfigs.toArray(new StreamConfiguration[0]));
    /*
     * scaler.availableMinFrameDurations
     */
    // No frame durations available
    m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
    StreamConfigurationDuration[] jpegStalls = new StreamConfigurationDuration[jpegSizes.size()];
    int i = 0;
    long longestStallDuration = -1;
    for (Camera.Size s : jpegSizes) {
        long stallDuration = calculateJpegStallDuration(s);
        jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width, s.height, stallDuration);
        if (longestStallDuration < stallDuration) {
            longestStallDuration = stallDuration;
        }
    }
    /*
     * scaler.availableStallDurations
     */
    // Set stall durations for jpeg, other formats use default stall duration
    m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
    /*
     * sensor.info.maxFrameDuration
     */
    m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
}
Also used : StreamConfigurationDuration(android.hardware.camera2.params.StreamConfigurationDuration) Size(android.util.Size) ArrayList(java.util.ArrayList) StreamConfiguration(android.hardware.camera2.params.StreamConfiguration) Camera(android.hardware.Camera)
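
The b/17589233 work-around is self-contained enough to sketch in isolation. The helper below is hypothetical (its name, its parameters, and the use of android.util.Size rather than Camera.Size are illustrative choices, not the framework's actual code):

// Hypothetical helper: given preview sizes sorted ascending by area, drop the
// largest entries whose aspect ratio differs from the largest JPEG size by at
// least `tolerance`; fall back to the untrimmed list if nothing matches.
static List<Size> trimToJpegAspectRatio(List<Size> previewsAscending, Size maxJpeg, float tolerance) {
    float jpegAr = maxJpeg.getWidth() * 1.0f / maxJpeg.getHeight();
    List<Size> trimmed = new ArrayList<>(previewsAscending);
    while (!trimmed.isEmpty()) {
        Size largest = trimmed.get(trimmed.size() - 1);
        float ar = largest.getWidth() * 1.0f / largest.getHeight();
        if (Math.abs(jpegAr - ar) < tolerance) {
            break; // largest remaining size already matches the JPEG aspect ratio
        }
        trimmed.remove(trimmed.size() - 1);
    }
    return trimmed.isEmpty() ? previewsAscending : trimmed;
}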

Example 60 with Size

Use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

From the class LegacyFaceDetectMapper, the method mapResultFaces:

/**
 * Update the {@code result} camera metadata map with the new value for the
 * {@code statistics.faces} and {@code statistics.faceDetectMode}.
 *
 * <p>Face detect callbacks are processed in the background, and each call to
 * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
 *
 * <p>If the scene mode was set to {@code FACE_PRIORITY} but face detection is disabled,
 * the camera will still run face detection in the background, but no faces will be reported
 * in the capture result.</p>
 *
 * @param result a non-{@code null} result
 * @param legacyRequest a non-{@code null} request (read-only)
 */
public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
    checkNotNull(result, "result must not be null");
    checkNotNull(legacyRequest, "legacyRequest must not be null");
    Camera.Face[] faces, previousFaces;
    int fdMode;
    boolean fdScenePriority;
    synchronized (mLock) {
        fdMode = mFaceDetectReporting ? STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;
        if (mFaceDetectReporting) {
            faces = mFaces;
        } else {
            faces = null;
        }
        fdScenePriority = mFaceDetectScenePriority;
        previousFaces = mFacesPrev;
        mFacesPrev = faces;
    }
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray, request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    List<Face> convertedFaces = new ArrayList<>();
    if (faces != null) {
        for (Camera.Face face : faces) {
            if (face != null) {
                convertedFaces.add(ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
            } else {
                Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
            }
        }
    }
    if (DEBUG && previousFaces != faces) {
        // Log only in verbose and IF the faces changed
        Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
    }
    result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
    result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
    // Override scene mode with FACE_PRIORITY if the request was using FACE_PRIORITY
    if (fdScenePriority) {
        result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_FACE_PRIORITY);
    }
}
Also used : Rect(android.graphics.Rect) Size(android.util.Size) ArrayList(java.util.ArrayList) CameraCharacteristics(android.hardware.camera2.CameraCharacteristics) ZoomData(android.hardware.camera2.legacy.ParameterUtils.ZoomData) CaptureRequest(android.hardware.camera2.CaptureRequest) Camera(android.hardware.Camera) Face(android.hardware.camera2.params.Face)
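
On the application side, the faces written here arrive through the standard capture callback. A minimal sketch, assuming this override lives inside a CameraCaptureSession.CaptureCallback subclass with a TAG constant:

@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
        TotalCaptureResult result) {
    Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
    Integer mode = result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
    if (faces != null && mode != null && mode != CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF) {
        for (Face face : faces) {
            Log.d(TAG, "face bounds=" + face.getBounds() + " score=" + face.getScore());
        }
    }
}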

Aggregations

Size (android.util.Size): 320 uses
ArrayList (java.util.ArrayList): 66 uses
StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap): 41 uses
Rect (android.graphics.Rect): 40 uses
CaptureRequest (android.hardware.camera2.CaptureRequest): 40 uses
Range (android.util.Range): 40 uses
Surface (android.view.Surface): 35 uses
SimpleCaptureCallback (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback): 35 uses
Camera (android.hardware.Camera): 30 uses
CameraCharacteristics (android.hardware.camera2.CameraCharacteristics): 26 uses
Point (android.graphics.Point): 22 uses
Image (android.media.Image): 21 uses
SimpleImageReaderListener (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener): 20 uses
MeteringRectangle (android.hardware.camera2.params.MeteringRectangle): 15 uses
CamcorderProfile (android.media.CamcorderProfile): 15 uses
Pair (android.util.Pair): 15 uses
CameraTestUtils.getDataFromImage (com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage): 15 uses
SurfaceTexture (android.graphics.SurfaceTexture): 10 uses
Parameters (android.hardware.Camera.Parameters): 10 uses
CaptureResult (android.hardware.camera2.CaptureResult): 10 uses