Use of com.facebook.react.bridge.ReadableMap in the project gl-react-native by ProjectSeptemberInc: class GLCanvas, method recSyncData.
/**
 * Recursively converts a {@link GLData} tree (as sent over the React Native bridge)
 * into a {@link GLRenderData} tree ready for drawing.
 * <p>
 * For each node this resolves the shader, validates and collects every uniform into
 * type-specific maps (int/float scalars, int/float buffers, textures), binds texture
 * uniforms to content textures, FBO color attachments, or lazily-loaded images, and
 * recurses into {@code contextChildren} and {@code children}.
 *
 * @param data   bridge-side description of a render node
 * @param images accumulator of images used by this frame; entries are reused from
 *               {@code this.images} (the previous frame) when possible so loads are cached
 * @return the resolved render node, or {@code null} if the shader failed to compile
 *         or a child failed to resolve
 */
private GLRenderData recSyncData(GLData data, HashMap<Uri, GLImage> images) {
    // Previous frame's images, used as a cache so unchanged URIs are not re-decoded.
    Map<Uri, GLImage> prevImages = this.images;

    GLShader shader = getShader(data.shader);
    if (shader == null || !shader.ensureCompile())
        return null;

    Map<String, Integer> uniformsInteger = new HashMap<>();
    Map<String, Float> uniformsFloat = new HashMap<>();
    Map<String, IntBuffer> uniformsIntBuffer = new HashMap<>();
    Map<String, FloatBuffer> uniformsFloatBuffer = new HashMap<>();
    Map<String, GLTexture> textures = new HashMap<>();

    // Resolve children first; a null from any child aborts the whole node.
    List<GLRenderData> contextChildren = new ArrayList<>();
    List<GLRenderData> children = new ArrayList<>();
    for (GLData child : data.contextChildren) {
        GLRenderData node = recSyncData(child, images);
        if (node == null)
            return null;
        contextChildren.add(node);
    }
    for (GLData child : data.children) {
        GLRenderData node = recSyncData(child, images);
        if (node == null)
            return null;
        children.add(node);
    }

    Map<String, Integer> uniformTypes = shader.getUniformTypes();
    List<String> uniformNames = shader.getUniformNames();
    Map<String, Integer> uniformSizes = shader.getUniformSizes();
    // Next texture unit to allocate; also the running count of sampler uniforms.
    int units = 0;
    ReadableMapKeySetIterator iterator = data.uniforms.keySetIterator();
    while (iterator.hasNextKey()) {
        String uniformName = iterator.nextKey();
        int type = uniformTypes.get(uniformName);
        int size = uniformSizes.get(uniformName);
        ReadableMap dataUniforms = data.uniforms;
        if (type == GL_SAMPLER_2D || type == GL_SAMPLER_CUBE) {
            // Sampler uniforms get a texture unit; the texture itself comes from one of
            // three sources ("content", "fbo", "uri") or is an empty placeholder.
            uniformsInteger.put(uniformName, units++);
            if (dataUniforms.isNull(uniformName)) {
                GLTexture emptyTexture = new GLTexture(this);
                emptyTexture.setPixelsEmpty();
                textures.put(uniformName, emptyTexture);
            } else {
                ReadableMap value = null;
                try {
                    value = dataUniforms.getMap(uniformName);
                } catch (Exception e) {
                    shader.runtimeException("texture uniform '" + uniformName + "': you cannot directly give require('./img.png') " + "to gl-react, use resolveAssetSource(require('./img.png')) instead.");
                    return null;
                }
                String t = value.getString("type");
                if (t.equals("content")) {
                    int id = value.getInt("id");
                    if (id >= contentTextures.size()) {
                        resizeUniformContentTextures(id + 1);
                    }
                    textures.put(uniformName, contentTextures.get(id));
                } else if (t.equals("fbo")) {
                    int id = value.getInt("id");
                    GLFBO fbo = getFBO(id);
                    textures.put(uniformName, fbo.color.get(0));
                } else if (t.equals("uri")) {
                    final Uri src = srcResource(value);
                    if (src == null) {
                        // NOTE(review): execution continues after this call; presumably
                        // runtimeException throws — confirm, otherwise src==null leaks below.
                        shader.runtimeException("texture uniform '" + uniformName + "': Invalid uri format '" + value + "'");
                    }
                    // Reuse the image from this frame, then from the previous frame,
                    // before creating (and asynchronously loading) a new one.
                    GLImage image = images.get(src);
                    if (image == null) {
                        image = prevImages.get(src);
                        if (image != null)
                            images.put(src, image);
                    }
                    if (image == null) {
                        image = new GLImage(this, executorSupplier.forDecode(), new Runnable() {
                            public void run() {
                                onImageLoad(src);
                            }
                        });
                        image.setSrc(src);
                        images.put(src, image);
                    }
                    textures.put(uniformName, image.getTexture());
                } else {
                    // Fix: report the unexpected texture-type STRING t, not the int GL uniform type.
                    shader.runtimeException("texture uniform '" + uniformName + "': Unexpected type '" + t + "'");
                }
            }
        } else {
            if (size == 1) {
                // Scalar (or single vec/mat) uniform.
                switch (type) {
                    case GL_INT:
                        uniformsInteger.put(uniformName, dataUniforms.getInt(uniformName));
                        break;
                    case GL_BOOL:
                        uniformsInteger.put(uniformName, dataUniforms.getBoolean(uniformName) ? 1 : 0);
                        break;
                    case GL_FLOAT:
                        uniformsFloat.put(uniformName, (float) dataUniforms.getDouble(uniformName));
                        break;
                    case GL_FLOAT_VEC2:
                    case GL_FLOAT_VEC3:
                    case GL_FLOAT_VEC4:
                    case GL_FLOAT_MAT2:
                    case GL_FLOAT_MAT3:
                    case GL_FLOAT_MAT4:
                        ReadableArray arr = dataUniforms.getArray(uniformName);
                        if (arraySizeForType(type) != arr.size()) {
                            shader.runtimeException("uniform '" + uniformName + "': Invalid array size: " + arr.size() + ". Expected: " + arraySizeForType(type));
                        }
                        uniformsFloatBuffer.put(uniformName, parseAsFloatArray(arr));
                        break;
                    case GL_INT_VEC2:
                    case GL_INT_VEC3:
                    case GL_INT_VEC4:
                    case GL_BOOL_VEC2:
                    case GL_BOOL_VEC3:
                    case GL_BOOL_VEC4:
                        ReadableArray arr2 = dataUniforms.getArray(uniformName);
                        if (arraySizeForType(type) != arr2.size()) {
                            shader.runtimeException("uniform '" + uniformName + "': Invalid array size: " + arr2.size() + ". Expected: " + arraySizeForType(type));
                        }
                        uniformsIntBuffer.put(uniformName, parseAsIntArray(arr2));
                        break;
                    default:
                        shader.runtimeException("uniform '" + uniformName + "': type not supported: " + type);
                }
            } else {
                // Array uniform: each element is stored under "name[i]".
                ReadableArray array = dataUniforms.getArray(uniformName);
                if (size != array.size()) {
                    shader.runtimeException("uniform '" + uniformName + "': Invalid array size: " + array.size() + ". Expected: " + size);
                }
                for (int i = 0; i < size; i++) {
                    String name = uniformName + "[" + i + "]";
                    switch (type) {
                        case GL_INT:
                            uniformsInteger.put(name, array.getInt(i));
                            break;
                        case GL_BOOL:
                            uniformsInteger.put(name, array.getBoolean(i) ? 1 : 0);
                            break;
                        case GL_FLOAT:
                            uniformsFloat.put(name, (float) array.getDouble(i));
                            break;
                        case GL_FLOAT_VEC2:
                        case GL_FLOAT_VEC3:
                        case GL_FLOAT_VEC4:
                        case GL_FLOAT_MAT2:
                        case GL_FLOAT_MAT3:
                        case GL_FLOAT_MAT4:
                            ReadableArray arr = array.getArray(i);
                            if (arraySizeForType(type) != arr.size()) {
                                shader.runtimeException("uniform '" + name + "': Invalid array size: " + arr.size() + ". Expected: " + arraySizeForType(type));
                            }
                            uniformsFloatBuffer.put(name, parseAsFloatArray(arr));
                            break;
                        case GL_INT_VEC2:
                        case GL_INT_VEC3:
                        case GL_INT_VEC4:
                        case GL_BOOL_VEC2:
                        case GL_BOOL_VEC3:
                        case GL_BOOL_VEC4:
                            ReadableArray arr2 = array.getArray(i);
                            if (arraySizeForType(type) != arr2.size()) {
                                shader.runtimeException("uniform '" + name + "': Invalid array size: " + arr2.size() + ". Expected: " + arraySizeForType(type));
                            }
                            uniformsIntBuffer.put(name, parseAsIntArray(arr2));
                            break;
                        default:
                            shader.runtimeException("uniform '" + name + "': type not supported: " + type);
                    }
                }
            }
        }
    }

    // Check the sampler count against the hardware texture-unit limit.
    int[] maxTextureUnits = new int[1];
    glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureUnits, 0);
    if (units > maxTextureUnits[0]) {
        // Fix: print the unboxed limit (the array reference rendered as "[I@...")
        // and make the message match the '>' comparison above.
        shader.runtimeException("Maximum number of textures reached. got " + units + " > max " + maxTextureUnits[0]);
    }

    // Every uniform declared by the shader must have been provided.
    for (String uniformName : uniformNames) {
        int size = uniformSizes.get(uniformName);
        if (size == 1) {
            if (!uniformsFloat.containsKey(uniformName) && !uniformsInteger.containsKey(uniformName) && !uniformsFloatBuffer.containsKey(uniformName) && !uniformsIntBuffer.containsKey(uniformName)) {
                shader.runtimeException("All defined uniforms must be provided. Missing '" + uniformName + "'");
            }
        } else {
            for (int i = 0; i < size; i++) {
                String name = uniformName + "[" + i + "]";
                if (!uniformsFloat.containsKey(name) && !uniformsInteger.containsKey(name) && !uniformsFloatBuffer.containsKey(name) && !uniformsIntBuffer.containsKey(name)) {
                    shader.runtimeException("All defined uniforms must be provided. Missing '" + name + "'");
                }
            }
        }
    }

    return new GLRenderData(shader, uniformsInteger, uniformsFloat, uniformsIntBuffer, uniformsFloatBuffer, textures, (int) (data.width * data.pixelRatio), (int) (data.height * data.pixelRatio), data.fboId, contextChildren, children);
}
Use of com.facebook.react.bridge.ReadableMap in the project react-native-camera by lwansbrough: class ResolveTakenPictureAsyncTask, method doInBackground.
/**
 * Post-processes a captured JPEG on a background thread and builds the bridge response.
 * <p>
 * Processing is lazy: the bitmap is only decoded into memory when an option that needs
 * pixel access (software rotation, fixOrientation, width resize, mirrorImage) is present,
 * so the no-options path behaves like the old skipProcessing flag and just writes the
 * original bytes. EXIF data is read/merged/written according to the "exif" and
 * "writeExif" options, and the result map carries dimensions, orientation, the saved
 * file URI (unless "doNotSave") and optionally a base64 payload.
 *
 * @return the response map, or {@code null} on failure (the promise is rejected first)
 */
@Override
protected WritableMap doInBackground(Void... voids) {
    WritableMap response = Arguments.createMap();
    ByteArrayInputStream inputStream = null;
    ExifInterface exifInterface = null;
    WritableMap exifData = null;
    ReadableMap exifExtraData = null;
    boolean exifOrientationFixed = false;
    response.putInt("deviceOrientation", mDeviceOrientation);
    response.putInt("pictureOrientation", mOptions.hasKey("orientation") ? mOptions.getInt("orientation") : mDeviceOrientation);
    try {
        // We will load all data lazily only when needed; this should not incur
        // any overhead if the bitmap is never read/used.
        inputStream = new ByteArrayInputStream(mImageData);
        if (mSoftwareRotation != 0) {
            loadBitmap();
            mBitmap = rotateBitmap(mBitmap, mSoftwareRotation);
        }
        // Rotate the bitmap to the proper orientation if requested.
        if (mOptions.hasKey("fixOrientation") && mOptions.getBoolean("fixOrientation")) {
            exifInterface = new ExifInterface(inputStream);
            // Get orientation of the image from mImageData via inputStream.
            int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
            if (orientation != ExifInterface.ORIENTATION_UNDEFINED && getImageRotation(orientation) != 0) {
                loadBitmap();
                int angle = getImageRotation(orientation);
                mBitmap = rotateBitmap(mBitmap, angle);
                exifOrientationFixed = true;
            }
        }
        if (mOptions.hasKey("width")) {
            loadBitmap();
            mBitmap = resizeBitmap(mBitmap, mOptions.getInt("width"));
        }
        if (mOptions.hasKey("mirrorImage") && mOptions.getBoolean("mirrorImage")) {
            loadBitmap();
            mBitmap = flipHorizontally(mBitmap);
        }
        // EXIF code - we will adjust exif info later if we manipulated the bitmap.
        boolean writeExifToResponse = mOptions.hasKey("exif") && mOptions.getBoolean("exif");
        // Default to true if not provided so it is consistent with iOS and with what
        // happens if no processing is done and the image is saved as is.
        boolean writeExifToFile = true;
        if (mOptions.hasKey("writeExif")) {
            switch (mOptions.getType("writeExif")) {
                case Boolean:
                    writeExifToFile = mOptions.getBoolean("writeExif");
                    break;
                case Map:
                    // A map means "write, and merge these extra tags in".
                    exifExtraData = mOptions.getMap("writeExif");
                    writeExifToFile = true;
                    break;
            }
        }
        // Read EXIF data only when something will consume it.
        if (writeExifToResponse || writeExifToFile) {
            if (mBitmap != null || exifExtraData != null || writeExifToResponse) {
                if (exifInterface == null) {
                    exifInterface = new ExifInterface(inputStream);
                }
                exifData = RNCameraViewHelper.getExifData(exifInterface);
                if (exifExtraData != null) {
                    exifData.merge(exifExtraData);
                }
            }
            // If we did anything to the bitmap, adjust exif accordingly.
            if (mBitmap != null) {
                exifData.putInt("width", mBitmap.getWidth());
                exifData.putInt("height", mBitmap.getHeight());
                if (exifOrientationFixed) {
                    exifData.putInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
                }
            }
            // Write EXIF data to the response if requested (copy, since maps are consumed).
            if (writeExifToResponse) {
                final WritableMap exifDataCopy = Arguments.createMap();
                exifDataCopy.merge(exifData);
                response.putMap("exif", exifDataCopy);
            }
        }
        // Based on whether or not we loaded the full bitmap into memory, final processing differs.
        if (mBitmap == null) {
            // Get dimensions efficiently without decoding the actual pixels.
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inJustDecodeBounds = true;
            BitmapFactory.decodeByteArray(mImageData, 0, mImageData.length, options);
            response.putInt("width", options.outWidth);
            response.putInt("height", options.outHeight);
            // Save to file if requested.
            if (!mOptions.hasKey("doNotSave") || !mOptions.getBoolean("doNotSave")) {
                File imageFile = new File(getImagePath());
                imageFile.createNewFile();
                // Fix: try-with-resources so the stream is closed even if write/flush throws.
                try (FileOutputStream fOut = new FileOutputStream(imageFile)) {
                    // Save byte array (it is already a JPEG).
                    fOut.write(mImageData);
                    fOut.flush();
                }
                // Since we didn't modify the image, we only update if we have extra exif info.
                if (writeExifToFile && exifExtraData != null) {
                    ExifInterface fileExifInterface = new ExifInterface(imageFile.getAbsolutePath());
                    RNCameraViewHelper.setExifData(fileExifInterface, exifExtraData);
                    fileExifInterface.saveAttributes();
                } else if (!writeExifToFile) {
                    // If we were requested to NOT store exif, we actually need to clear the tags.
                    ExifInterface fileExifInterface = new ExifInterface(imageFile.getAbsolutePath());
                    RNCameraViewHelper.clearExifData(fileExifInterface);
                    fileExifInterface.saveAttributes();
                }
                // else: exif is unmodified, no need to update anything.
                response.putString("uri", Uri.fromFile(imageFile).toString());
            }
            if (mOptions.hasKey("base64") && mOptions.getBoolean("base64")) {
                response.putString("base64", Base64.encodeToString(mImageData, Base64.NO_WRAP));
            }
        } else {
            // Get response dimensions right from the bitmap since we have it.
            response.putInt("width", mBitmap.getWidth());
            response.putInt("height", mBitmap.getHeight());
            // Cache compressed image in imageStream.
            ByteArrayOutputStream imageStream = new ByteArrayOutputStream();
            if (!mBitmap.compress(Bitmap.CompressFormat.JPEG, getQuality(), imageStream)) {
                mPromise.reject(ERROR_TAG, "Could not compress image to JPEG");
                return null;
            }
            // Write compressed image to file in cache directory unless otherwise specified.
            if (!mOptions.hasKey("doNotSave") || !mOptions.getBoolean("doNotSave")) {
                String filePath = writeStreamToFile(imageStream);
                // Re-compression dropped the EXIF block; add it back if requested.
                if (writeExifToFile && exifData != null) {
                    ExifInterface fileExifInterface = new ExifInterface(filePath);
                    RNCameraViewHelper.setExifData(fileExifInterface, exifData);
                    fileExifInterface.saveAttributes();
                }
                File imageFile = new File(filePath);
                response.putString("uri", Uri.fromFile(imageFile).toString());
            }
            // Write base64-encoded image to the response if requested.
            if (mOptions.hasKey("base64") && mOptions.getBoolean("base64")) {
                response.putString("base64", Base64.encodeToString(imageStream.toByteArray(), Base64.NO_WRAP));
            }
        }
        return response;
    } catch (Resources.NotFoundException e) {
        mPromise.reject(ERROR_TAG, "Documents directory of the app could not be found.", e);
        e.printStackTrace();
    } catch (IOException e) {
        mPromise.reject(ERROR_TAG, "An unknown I/O exception has occurred.", e);
        e.printStackTrace();
    } finally {
        try {
            if (inputStream != null) {
                inputStream.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return null;
}
Use of com.facebook.react.bridge.ReadableMap in the project react-native-camera by lwansbrough: class TextRecognizerAsyncTask, method rotateTextX.
/**
 * Mirrors a recognized text block horizontally within the image and shifts the
 * bounding box back by its own width so it still covers the glyphs, then applies
 * the same transform recursively to every sub-component. Mutates and returns
 * the given map.
 */
private WritableMap rotateTextX(WritableMap text) {
    ReadableMap bounds = text.getMap("bounds");
    ReadableMap origin = bounds.getMap("origin");
    WritableMap mirrored = positionMirroredHorizontally(origin, mImageDimensions.getWidth(), mScaleX);
    double shiftX = -bounds.getMap("size").getDouble("width");
    WritableMap shiftedOrigin = positionTranslatedHorizontally(mirrored, shiftX);

    // Rebuild bounds with the transformed origin (size is carried over by merge).
    WritableMap updatedBounds = Arguments.createMap();
    updatedBounds.merge(bounds);
    updatedBounds.putMap("origin", shiftedOrigin);
    text.putMap("bounds", updatedBounds);

    // Copy each component before recursing so the originals are left untouched.
    ReadableArray components = text.getArray("components");
    WritableArray rotatedComponents = Arguments.createArray();
    int count = components.size();
    for (int index = 0; index < count; ++index) {
        WritableMap componentCopy = Arguments.createMap();
        componentCopy.merge(components.getMap(index));
        rotateTextX(componentCopy);
        rotatedComponents.pushMap(componentCopy);
    }
    text.putArray("components", rotatedComponents);
    return text;
}
Use of com.facebook.react.bridge.ReadableMap in the project react-native-camera by lwansbrough: class MutableImage, method writeLocationExifData.
/**
 * Copies GPS coordinates from {@code options.metadata.location.coords} into the
 * image's EXIF data. Silently does nothing when any level of the nested map is
 * absent; an I/O failure while writing is logged, not rethrown.
 */
private void writeLocationExifData(ReadableMap options, ExifInterface exif) {
    // Walk the nested structure, bailing out quietly at the first missing key.
    if (!options.hasKey("metadata"))
        return;
    ReadableMap metadata = options.getMap("metadata");
    if (!metadata.hasKey("location"))
        return;
    ReadableMap location = metadata.getMap("location");
    if (!location.hasKey("coords"))
        return;
    try {
        ReadableMap coords = location.getMap("coords");
        GPS.writeExifData(coords.getDouble("latitude"), coords.getDouble("longitude"), exif);
    } catch (IOException e) {
        Log.e(TAG, "Couldn't write location data", e);
    }
}
Use of com.facebook.react.bridge.ReadableMap in the project native-navigation by airbnb: class NativeFragment, method onCreateView.
@Nullable
@Override
/**
 * Inflates the demo fragment layout, installs its toolbar (titled with this
 * fragment's position in the stack), and wires up the push / present / pop /
 * dismiss buttons to the screen coordinator. The "present" flow attaches a
 * promise whose resolution payload is shown in a toast.
 */
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
    View root = inflater.inflate(R.layout.fragment_native, container, false);
    final int stackIndex = getArguments().getInt(ARG_COUNT);

    // Toolbar with back navigation delegating to the activity.
    Toolbar toolbar = (Toolbar) root.findViewById(R.id.toolbar);
    ((AppCompatActivity) getActivity()).setSupportActionBar(toolbar);
    toolbar.setTitle("Fragment " + stackIndex);
    toolbar.setNavigationIcon(R.drawable.n2_ic_arrow_back_white);
    toolbar.setNavigationOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            getActivity().onBackPressed();
        }
    });

    // Push another native fragment, numbered one deeper.
    root.findViewById(R.id.push).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            getScreenCoordinator().pushScreen(newInstance(stackIndex + 1));
        }
    });

    // Present a fresh stack modally; the promise resolves with the dismiss payload.
    root.findViewById(R.id.present).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            Promise promise = new PromiseImpl(new Callback() {
                @Override
                public void invoke(Object... args) {
                    WritableMap map = (WritableMap) args[0];
                    ReadableMap payload = map.getMap("payload");
                    if (payload != null) {
                        String text = "Result: " + payload.getString(RESULT_TEXT);
                        Toast.makeText(getContext(), text, Toast.LENGTH_LONG).show();
                    }
                }
            }, new Callback() {
                @Override
                public void invoke(Object... args) {
                    Toast.makeText(getContext(), "Promise was rejected.", Toast.LENGTH_LONG).show();
                }
            });
            getScreenCoordinator().presentScreen(newInstance(0), promise);
        }
    });

    // Push / present a React Native screen by name.
    root.findViewById(R.id.push_rn).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            getScreenCoordinator().pushScreen("ScreenOne");
        }
    });
    root.findViewById(R.id.present_rn).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            getScreenCoordinator().presentScreen("ScreenOne");
        }
    });

    // Pop this screen off the stack.
    root.findViewById(R.id.pop).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            getScreenCoordinator().pop();
        }
    });

    // Dismiss the modal stack, sending the edit text's contents back as the payload.
    final EditText payloadInput = (EditText) root.findViewById(R.id.payload);
    root.findViewById(R.id.dismiss).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            Map<String, Object> payload = new HashMap<>();
            payload.put(RESULT_TEXT, payloadInput.getText().toString());
            getScreenCoordinator().dismiss(Activity.RESULT_OK, payload);
        }
    });

    return root;
}
Aggregations