Use of android.test.suitebuilder.annotation.SmallTest in project platform_frameworks_base by android.
From the class IconTest, method testWithFile.
@SmallTest
public void testWithFile() throws Exception {
    final Bitmap bit1 = ((BitmapDrawable) getContext().getDrawable(R.drawable.landscape)).getBitmap();
    final File dir = getContext().getExternalFilesDir(null);
    final File file1 = new File(dir, "file1-original.png");
    bit1.compress(Bitmap.CompressFormat.PNG, 100, new FileOutputStream(file1));
    final Icon im1 = Icon.createWithFilePath(file1.toString());
    final Drawable draw1 = im1.loadDrawable(mContext);
    final Bitmap test1 = Bitmap.createBitmap(draw1.getIntrinsicWidth(), draw1.getIntrinsicHeight(),
            Bitmap.Config.ARGB_8888);
    draw1.setBounds(0, 0, test1.getWidth(), test1.getHeight());
    draw1.draw(new Canvas(test1));
    test1.compress(Bitmap.CompressFormat.PNG, 100, new FileOutputStream(new File(dir, "file1-test.png")));
    if (!equalBitmaps(bit1, test1)) {
        findBitmapDifferences(bit1, test1);
        fail("testWithFile: file1 differs, check " + dir);
    }
}
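The equalBitmaps and findBitmapDifferences helpers are defined elsewhere in IconTest and are not shown in this excerpt. A minimal sketch of a pixel-by-pixel comparison along the same lines (a hypothetical stand-in, not the test's actual helper):

// Hypothetical stand-in for IconTest's equalBitmaps helper: returns true only if
// the two bitmaps have identical dimensions and identical pixel values.
static boolean equalBitmaps(Bitmap a, Bitmap b) {
    if (a.getWidth() != b.getWidth() || a.getHeight() != b.getHeight()) {
        return false;
    }
    final int w = a.getWidth();
    final int h = a.getHeight();
    final int[] pixelsA = new int[w * h];
    final int[] pixelsB = new int[w * h];
    a.getPixels(pixelsA, 0, w, 0, 0, w, h);
    b.getPixels(pixelsB, 0, w, 0, 0, w, h);
    return java.util.Arrays.equals(pixelsA, pixelsB);
}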
Use of android.test.suitebuilder.annotation.SmallTest in project platform_frameworks_base by android.
From the class CameraMetadataTest, method testReadWriteHighSpeedVideoConfiguration.
@SmallTest
public void testReadWriteHighSpeedVideoConfiguration() {
    // int32 x 5 x 1
    checkKeyMarshal("android.control.availableHighSpeedVideoConfigurations",
            new HighSpeedVideoConfiguration(
                    /*width*/1000, /*height*/255, /*fpsMin*/30, /*fpsMax*/200, /*batchSizeMax*/8),
            /* width, height, fpsMin, fpsMax, batchSizeMax */
            toByteArray(1000, 255, 30, 200, 8));
    // int32 x 5 x 3
    checkKeyMarshal("android.control.availableHighSpeedVideoConfigurations",
            new HighSpeedVideoConfiguration[] {
                new HighSpeedVideoConfiguration(
                        /*width*/1280, /*height*/720, /*fpsMin*/60, /*fpsMax*/120, /*batchSizeMax*/8),
                new HighSpeedVideoConfiguration(
                        /*width*/123, /*height*/456, /*fpsMin*/1, /*fpsMax*/200, /*batchSizeMax*/4),
                new HighSpeedVideoConfiguration(
                        /*width*/4096, /*height*/2592, /*fpsMin*/30, /*fpsMax*/60, /*batchSizeMax*/2)
            },
            toByteArray(1280, 720, 60, 120, 8,
                        123, 456, 1, 200, 4,
                        4096, 2592, 30, 60, 2));
}
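The CameraMetadataTest snippets rely on a toByteArray helper that is not shown in this excerpt. A minimal sketch of the varargs form used above, assuming values are packed in native byte order the way camera metadata is marshaled (later tests also pass whole int[]/long[] arrays, so the real helper presumably has overloads for those as well; this sketch does not):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Hypothetical stand-in for the test's toByteArray(...) helper: packs each argument
// as a 32-bit int (or a 64-bit long) in native byte order.
static byte[] toByteArray(Number... values) {
    int size = 0;
    for (Number v : values) {
        size += (v instanceof Long) ? 8 : 4;
    }
    ByteBuffer buffer = ByteBuffer.allocate(size).order(ByteOrder.nativeOrder());
    for (Number v : values) {
        if (v instanceof Long) {
            buffer.putLong(v.longValue());
        } else {
            buffer.putInt(v.intValue());
        }
    }
    return buffer.array();
}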
Use of android.test.suitebuilder.annotation.SmallTest in project platform_frameworks_base by android.
From the class CameraMetadataTest, method testReadWriteReprocessFormatsMap.
@SmallTest
public void testReadWriteReprocessFormatsMap() {
    // final int RAW_OPAQUE = 0x24; // TODO: add RAW_OPAQUE to ImageFormat
    final int RAW16 = ImageFormat.RAW_SENSOR;
    final int YUV_420_888 = ImageFormat.YUV_420_888;
    final int BLOB = 0x21;
    // TODO: also test HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED as an output
    int[] contents = new int[] {
            YUV_420_888, 3, YUV_420_888, ImageFormat.NV21, BLOB,
            RAW16, 2, YUV_420_888, BLOB
    };
    // int32 x n
    Key<ReprocessFormatsMap> key = new Key<ReprocessFormatsMap>(
            "android.scaler.availableInputOutputFormatsMap", ReprocessFormatsMap.class);
    mMetadata.writeValues(key.getTag(), toByteArray(contents));
    ReprocessFormatsMap map = mMetadata.get(key);
    /*
     * Make sure the inputs/outputs were what we expected.
     * - Use public image format constants here.
     */
    int[] expectedInputs = new int[] { YUV_420_888, RAW16 };
    assertArrayEquals(expectedInputs, map.getInputs());
    int[] expectedYuvOutputs = new int[] { YUV_420_888, ImageFormat.NV21, ImageFormat.JPEG };
    assertArrayEquals(expectedYuvOutputs, map.getOutputs(ImageFormat.YUV_420_888));
    int[] expectedRaw16Outputs = new int[] { YUV_420_888, ImageFormat.JPEG };
    assertArrayEquals(expectedRaw16Outputs, map.getOutputs(ImageFormat.RAW_SENSOR));
    // Finally, do a round-trip check as a sanity check
    checkKeyMarshal("android.scaler.availableInputOutputFormatsMap",
            new ReprocessFormatsMap(contents), toByteArray(contents));
}
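The flat contents array follows the availableInputOutputFormatsMap layout: an input format, the number of outputs for that input, then that many output formats, repeated. A short sketch making that layout explicit (printReprocessEntries is a hypothetical helper, not part of the test):

// Hypothetical helper that walks the (inputFormat, outputCount, outputs...) layout
// used by android.scaler.availableInputOutputFormatsMap in the test above.
static void printReprocessEntries(int[] contents) {
    int i = 0;
    while (i < contents.length) {
        final int inputFormat = contents[i++];
        final int numOutputs = contents[i++];
        StringBuilder outputs = new StringBuilder();
        for (int j = 0; j < numOutputs; j++) {
            outputs.append("0x").append(Integer.toHexString(contents[i++])).append(' ');
        }
        System.out.println("input 0x" + Integer.toHexString(inputFormat)
                + " -> outputs: " + outputs);
    }
}

For the contents above this prints two entries: YUV_420_888 with outputs { YUV_420_888, NV21, BLOB } and RAW16 with outputs { YUV_420_888, BLOB }; the BLOB output surfaces as ImageFormat.JPEG through the public API, which is what the expected*Outputs assertions verify.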
Use of android.test.suitebuilder.annotation.SmallTest in project platform_frameworks_base by android.
From the class CameraMetadataTest, method testReadWriteRectangle.
@SmallTest
public void testReadWriteRectangle() {
    // int32 x n
    checkKeyMarshal("android.scaler.cropRegion",
            // x1, y1, x2, y2
            new Rect(10, 11, 1280, 1024),
            // x, y, width, height
            toByteArray(10, 11, 1280 - 10, 1024 - 11));
    // int32 x 2 x n [actually not array, but we pretend it is]
    checkKeyMarshal("android.scaler.cropRegion",
            new Rect[] {
                new Rect(110, 111, 11280, 11024),
                new Rect(210, 111, 21280, 21024),
                new Rect(310, 111, 31280, 31024)
            },
            toByteArray(
                    110, 111, 11280 - 110, 11024 - 111,
                    210, 111, 21280 - 210, 21024 - 111,
                    310, 111, 31280 - 310, 31024 - 111));
}
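A Rect is constructed from (left, top, right, bottom), but the camera metadata marshals it as (x, y, width, height), which is why the expected byte arrays subtract the origin from the right/bottom coordinates. A one-line sketch of that conversion (rectToXywh is a hypothetical helper for illustration):

import android.graphics.Rect;

// Hypothetical illustration: maps a Rect (left, top, right, bottom) to the marshaled
// (x, y, width, height) tuple checked above; width() is right - left, height() is bottom - top.
static int[] rectToXywh(Rect r) {
    return new int[] { r.left, r.top, r.width(), r.height() };
}

For example, new Rect(10, 11, 1280, 1024) maps to { 10, 11, 1270, 1013 }, matching toByteArray(10, 11, 1280 - 10, 1024 - 11).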
Use of android.test.suitebuilder.annotation.SmallTest in project platform_frameworks_base by android.
From the class CameraMetadataTest, method testOverrideStreamConfigurationMap.
/**
* Set the raw native value of the available stream configurations; ensure that
* the read-out managed value is consistent with what we write in.
*/
@SmallTest
public void testOverrideStreamConfigurationMap() {
    /*
     * First, write all the raw values:
     * - availableStreamConfigurations
     * - availableMinFrameDurations
     * - availableStallDurations
     *
     * Then, read this out as a synthetic multi-key 'streamConfigurationMap'
     *
     * Finally, validate that the map was unmarshaled correctly
     * and is converting the internal formats to public formats properly.
     */
    //
    // android.scaler.availableStreamConfigurations (int x n x 4 array)
    //
    final int OUTPUT = 0;
    final int INPUT = 1;
    int[] rawAvailableStreamConfigs = new int[] {
            0x20, 3280, 2464, OUTPUT,  // RAW16
            0x23, 3264, 2448, OUTPUT,  // YCbCr_420_888
            0x23, 3200, 2400, OUTPUT,  // YCbCr_420_888
            0x21, 3264, 2448, OUTPUT,  // BLOB
            0x21, 3200, 2400, OUTPUT,  // BLOB
            0x21, 2592, 1944, OUTPUT,  // BLOB
            0x21, 2048, 1536, OUTPUT,  // BLOB
            0x21, 1920, 1080, OUTPUT,  // BLOB
            0x22, 640, 480, OUTPUT,    // IMPLEMENTATION_DEFINED
            0x20, 320, 240, INPUT,     // RAW16
    };
    Key<StreamConfiguration[]> configKey =
            CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS.getNativeKey();
    mMetadata.writeValues(configKey.getTag(), toByteArray(rawAvailableStreamConfigs));
    //
    // android.scaler.availableMinFrameDurations (int x n x 4 array)
    //
    long[] expectedAvailableMinDurations = new long[] {
            0x20, 3280, 2464, 33333331,   // RAW16
            0x23, 3264, 2448, 33333332,   // YCbCr_420_888
            0x23, 3200, 2400, 33333333,   // YCbCr_420_888
            0x100, 3264, 2448, 33333334,  // ImageFormat.JPEG
            0x100, 3200, 2400, 33333335,  // ImageFormat.JPEG
            0x100, 2592, 1944, 33333336,  // ImageFormat.JPEG
            0x100, 2048, 1536, 33333337,  // ImageFormat.JPEG
            0x100, 1920, 1080, 33333338,  // ImageFormat.JPEG
    };
    long[] rawAvailableMinDurations = new long[] {
            0x20, 3280, 2464, 33333331,  // RAW16
            0x23, 3264, 2448, 33333332,  // YCbCr_420_888
            0x23, 3200, 2400, 33333333,  // YCbCr_420_888
            0x21, 3264, 2448, 33333334,  // BLOB
            0x21, 3200, 2400, 33333335,  // BLOB
            0x21, 2592, 1944, 33333336,  // BLOB
            0x21, 2048, 1536, 33333337,  // BLOB
            0x21, 1920, 1080, 33333338,  // BLOB
    };
    Key<StreamConfigurationDuration[]> durationKey =
            CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS.getNativeKey();
    mMetadata.writeValues(durationKey.getTag(), toByteArray(rawAvailableMinDurations));
    //
    // android.scaler.availableStallDurations (int x n x 4 array)
    //
    long[] expectedAvailableStallDurations = new long[] {
            0x20, 3280, 2464, 0,          // RAW16
            0x23, 3264, 2448, 0,          // YCbCr_420_888
            0x23, 3200, 2400, 0,          // YCbCr_420_888
            0x100, 3264, 2448, 33333334,  // ImageFormat.JPEG
            0x100, 3200, 2400, 33333335,  // ImageFormat.JPEG
            0x100, 2592, 1944, 33333336,  // ImageFormat.JPEG
            0x100, 2048, 1536, 33333337,  // ImageFormat.JPEG
            0x100, 1920, 1080, 33333338,  // ImageFormat.JPEG
    };
    // Note: RAW16 and YUV_420_888 omitted intentionally; omitted values should default to 0
    long[] rawAvailableStallDurations = new long[] {
            0x21, 3264, 2448, 33333334,  // BLOB
            0x21, 3200, 2400, 33333335,  // BLOB
            0x21, 2592, 1944, 33333336,  // BLOB
            0x21, 2048, 1536, 33333337,  // BLOB
            0x21, 1920, 1080, 33333338,  // BLOB
    };
    Key<StreamConfigurationDuration[]> stallDurationKey =
            CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS.getNativeKey();
    mMetadata.writeValues(stallDurationKey.getTag(), toByteArray(rawAvailableStallDurations));
    //
    // android.scaler.streamConfigurationMap (synthetic as StreamConfigurationMap)
    //
    StreamConfigurationMap streamConfigMap = mMetadata.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    // Inputs
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR,
            320, 240, /*output*/false);
    // Outputs
    checkStreamConfigurationMapByFormatSize(streamConfigMap, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            640, 480, /*output*/true);
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG,
            1920, 1080, /*output*/true);
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG,
            2048, 1536, /*output*/true);
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG,
            2592, 1944, /*output*/true);
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG,
            3200, 2400, /*output*/true);
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888,
            3200, 2400, /*output*/true);
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888,
            3264, 2448, /*output*/true);
    checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR,
            3280, 2464, /*output*/true);
    // Min Frame Durations
    final int DURATION_TUPLE_SIZE = 4;
    for (int i = 0; i < expectedAvailableMinDurations.length; i += DURATION_TUPLE_SIZE) {
        checkStreamConfigurationMapDurationByFormatSize(streamConfigMap,
                (int) expectedAvailableMinDurations[i],
                (int) expectedAvailableMinDurations[i + 1],
                (int) expectedAvailableMinDurations[i + 2],
                Duration.MinFrame,
                expectedAvailableMinDurations[i + 3]);
    }
    for (int i = 0; i < expectedAvailableStallDurations.length; i += DURATION_TUPLE_SIZE) {
        checkStreamConfigurationMapDurationByFormatSize(streamConfigMap,
                (int) expectedAvailableStallDurations[i],
                (int) expectedAvailableStallDurations[i + 1],
                (int) expectedAvailableStallDurations[i + 2],
                Duration.Stall,
                expectedAvailableStallDurations[i + 3]);
    }
}
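Each raw stream configuration above is a four-value tuple of (internal format, width, height, direction), and the expected duration arrays replace the internal BLOB code (0x21) with the public ImageFormat.JPEG code (0x100). A minimal sketch that walks those tuples (decodeStreamConfigTuples is a hypothetical helper, using the format codes as they appear in the raw arrays above):

// Hypothetical helper: walks the (format, width, height, isInput) tuples written to
// android.scaler.availableStreamConfigurations. Codes seen in the raw arrays above:
// 0x20 RAW16, 0x21 BLOB, 0x22 IMPLEMENTATION_DEFINED, 0x23 YCbCr_420_888.
static void decodeStreamConfigTuples(int[] raw) {
    final int OUTPUT = 0;
    for (int i = 0; i < raw.length; i += 4) {
        final int format = raw[i];
        final int width = raw[i + 1];
        final int height = raw[i + 2];
        final boolean isOutput = (raw[i + 3] == OUTPUT);
        System.out.println(String.format("format 0x%x, %dx%d, %s",
                format, width, height, isOutput ? "output" : "input"));
    }
}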