Use of java.nio.ByteBuffer in the project platform_frameworks_base by android.
From the class CameraMetadataTest, method testReadWriteEnumWithCustomValues.
/**
 * Verifies that enum-typed keys round-trip through the native metadata when the enum's
 * native values are registered explicitly (i.e. are not the default ordinals), both for
 * single-value and array-valued byte enums, and for int-backed enums.
 */
@SmallTest
public void testReadWriteEnumWithCustomValues() {
// Map AeAntibandingMode's constants onto the non-contiguous native values 0, 10, 20, 30
// (presumably in declaration order — OFF, _50HZ, _60HZ, AUTO) instead of their ordinals.
MarshalQueryableEnum.registerEnumValues(AeAntibandingMode.class, new int[] { 0, 10, 20, 30 });
// byte (single)
checkKeyGetAndSet("android.control.aeAntibandingMode", AeAntibandingMode.class, AeAntibandingMode.AUTO);
// byte (n)
checkKeyGetAndSet("android.control.aeAvailableAntibandingModes", AeAntibandingMode[].class, new AeAntibandingMode[] { AeAntibandingMode.OFF, AeAntibandingMode._50HZ, AeAntibandingMode._60HZ, AeAntibandingMode.AUTO });
// Read back the raw marshaled bytes: each enum element should have been written as its
// registered custom value (one byte per element), not as its ordinal.
byte[] aeAntibandingModeValues = mMetadata.readValues(CameraMetadataNative.getTag("android.control.aeAvailableAntibandingModes"));
byte[] expectedValues = new byte[] { 0, 10, 20, 30 };
assertArrayEquals(expectedValues, aeAntibandingModeValues);
/**
 * Stranger cases that don't use byte enums
 */
// int (n)
// Register 6 native codes for AvailableFormat (e.g. 0x32315659 is the FourCC 'YV12').
MarshalQueryableEnum.registerEnumValues(AvailableFormat.class, new int[] { 0x20, 0x32315659, 0x11, 0x22, 0x23, 0x21 });
checkKeyGetAndSet("android.scaler.availableFormats", AvailableFormat[].class, new AvailableFormat[] { AvailableFormat.RAW_SENSOR, AvailableFormat.YV12, AvailableFormat.IMPLEMENTATION_DEFINED, AvailableFormat.YCbCr_420_888, AvailableFormat.BLOB });
Key<AvailableFormat[]> availableFormatsKey = new Key<AvailableFormat[]>("android.scaler.availableFormats", AvailableFormat[].class);
byte[] availableFormatValues = mMetadata.readValues(CameraMetadataNative.getTag(availableFormatsKey.getName()));
// Only 5 of the 6 registered codes are expected here: 0x11 maps to an enum constant
// that was not written above. Each element is a 32-bit int in native byte order.
int[] expectedIntValues = new int[] { 0x20, 0x32315659, 0x22, 0x23, 0x21 };
ByteBuffer bf = ByteBuffer.wrap(availableFormatValues).order(ByteOrder.nativeOrder());
// 4 bytes per int element.
assertEquals(expectedIntValues.length * 4, availableFormatValues.length);
for (int i = 0; i < expectedIntValues.length; ++i) {
assertEquals(expectedIntValues[i], bf.getInt());
}
}
Use of java.nio.ByteBuffer in the project platform_frameworks_base by android.
From the class ETC1Util, method loadTexture.
/**
 * Loads an ETC1 texture, decompressing it on the CPU first when the active
 * OpenGL context lacks native ETC1 texture-compression support.
 *
 * @param target the texture target.
 * @param level the texture level
 * @param border the border size. Typically 0.
 * @param fallbackFormat the uncompressed format used when ETC1 is not supported.
 *        Must be GL_RGB.
 * @param fallbackType the pixel type used when ETC1 is not supported. Either
 *        GL_UNSIGNED_SHORT_5_6_5 (16 bits per pixel) or GL_UNSIGNED_BYTE
 *        (24 bits per pixel).
 * @param texture the ETC1 texture to load.
 */
public static void loadTexture(int target, int level, int border, int fallbackFormat, int fallbackType, ETC1Texture texture) {
    if (fallbackFormat != GLES10.GL_RGB) {
        throw new IllegalArgumentException("fallbackFormat must be GL_RGB");
    }
    boolean shortPixels = fallbackType == GLES10.GL_UNSIGNED_SHORT_5_6_5;
    if (!shortPixels && fallbackType != GLES10.GL_UNSIGNED_BYTE) {
        throw new IllegalArgumentException("Unsupported fallbackType");
    }
    int width = texture.getWidth();
    int height = texture.getHeight();
    Buffer compressed = texture.getData();
    if (isETC1Supported()) {
        // Hardware path: hand the compressed payload to GL directly.
        GLES10.glCompressedTexImage2D(target, level, ETC1.ETC1_RGB8_OES, width, height, border, compressed.remaining(), compressed);
        return;
    }
    // Software path: decode into an uncompressed RGB image and upload that instead.
    int bytesPerPixel = shortPixels ? 2 : 3;
    int stride = bytesPerPixel * width;
    ByteBuffer decoded = ByteBuffer.allocateDirect(stride * height).order(ByteOrder.nativeOrder());
    ETC1.decodeImage(compressed, decoded, width, height, bytesPerPixel, stride);
    GLES10.glTexImage2D(target, level, fallbackFormat, width, height, border, fallbackFormat, fallbackType, decoded);
}
Use of java.nio.ByteBuffer in the project platform_frameworks_base by android.
From the class ColorFade, method createNativeFloatBuffer.
/**
 * Allocates a direct buffer holding {@code size} floats in native byte order,
 * viewed as a FloatBuffer.
 *
 * @param size number of float elements the buffer must hold
 * @return a freshly allocated direct FloatBuffer of the requested capacity
 */
private static FloatBuffer createNativeFloatBuffer(int size) {
    // 4 bytes per float; native order so the consumer reads the raw bytes correctly.
    return ByteBuffer.allocateDirect(size * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer();
}
Use of java.nio.ByteBuffer in the project platform_frameworks_base by android.
From the class InputStreamSource, method process.
/**
 * Reads mInputStream to exhaustion, wraps the bytes in a single frame, pushes it on
 * the "data" output port, and then closes the port (this source emits exactly one frame).
 *
 * @param context the filter context used to allocate the output frame
 * @throws RuntimeException if reading the underlying stream fails
 */
@Override
public void process(FilterContext context) {
    int fileSize = 0;
    ByteBuffer byteBuffer = null;
    // Read the entire stream into memory first: the total size is needed to
    // dimension the output format before a frame can be allocated.
    try {
        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int bytesRead;
        while ((bytesRead = mInputStream.read(buffer)) > 0) {
            byteStream.write(buffer, 0, bytesRead);
            fileSize += bytesRead;
        }
        byteBuffer = ByteBuffer.wrap(byteStream.toByteArray());
    } catch (IOException exception) {
        // Chain the original IOException as the cause instead of flattening it to
        // its message, so the full stack trace survives for debugging.
        throw new RuntimeException("InputStreamSource: Could not read stream: " + exception.getMessage() + "!", exception);
    }
    // NOTE(review): mInputStream is deliberately left open here — presumably it is
    // owned and closed by whoever supplied it; confirm against the callers.
    // Put it into a frame
    mOutputFormat.setDimensions(fileSize);
    Frame output = context.getFrameManager().newFrame(mOutputFormat);
    output.setData(byteBuffer);
    // Push output
    pushOutput("data", output);
    // Release pushed frame
    output.release();
    // Close output port as we are done here
    closeOutputPort("data");
}
Use of java.nio.ByteBuffer in the project platform_frameworks_base by android.
From the class OutputStreamTarget, method process.
/**
 * Pulls one frame from the "data" input port and writes bytes [0, limit) of its
 * buffer to mOutputStream, flushing afterwards. String-valued frames are converted
 * to bytes first.
 *
 * @param context the filter context (not used by this target)
 * @throws RuntimeException if writing to the underlying stream fails
 */
@Override
public void process(FilterContext context) {
    Frame input = pullInput("data");
    ByteBuffer data;
    if (input.getFormat().getObjectClass() == String.class) {
        String stringVal = (String) input.getObjectValue();
        // NOTE(review): getBytes() uses the platform default charset; if a fixed
        // encoding (e.g. UTF-8) is required, it should be passed explicitly.
        data = ByteBuffer.wrap(stringVal.getBytes());
    } else {
        data = input.getData();
    }
    try {
        if (data.hasArray()) {
            // Write bytes [0, limit). Honor arrayOffset() so buffers that are a
            // view into a larger backing array are written correctly.
            mOutputStream.write(data.array(), data.arrayOffset(), data.limit());
        } else {
            // array() would throw UnsupportedOperationException for direct or
            // read-only buffers; copy the same [0, limit) range out first.
            ByteBuffer view = data.duplicate();
            view.rewind();
            byte[] bytes = new byte[view.remaining()];
            view.get(bytes);
            mOutputStream.write(bytes);
        }
        mOutputStream.flush();
    } catch (IOException exception) {
        // Chain the original IOException as the cause so the stack trace is not lost.
        throw new RuntimeException("OutputStreamTarget: Could not write to stream: " + exception.getMessage() + "!", exception);
    }
}
Aggregations