Use of org.chromium.base.CalledByNative in project chromeview by pwnall.
Class SelectFileDialog, method selectFile.
/**
* Creates and starts an intent based on the passed fileTypes and capture value.
* @param fileTypes MIME types requested (e.g. "image/*")
* @param capture The capture value as described in http://www.w3.org/TR/html-media-capture/
* @param window The WindowAndroid that can show intents
*/
@CalledByNative
private void selectFile(String[] fileTypes, String capture, WindowAndroid window) {
mFileTypes = new ArrayList<String>(Arrays.asList(fileTypes));
mCapture = capture;
Intent chooser = new Intent(Intent.ACTION_CHOOSER);
Intent camera = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
mCameraOutputUri = Uri.fromFile(getFileForImageCapture());
camera.putExtra(MediaStore.EXTRA_OUTPUT, mCameraOutputUri);
Intent camcorder = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
Intent soundRecorder = new Intent(MediaStore.Audio.Media.RECORD_SOUND_ACTION);
String lowMemoryError = window.getContext().getString(R.string.low_memory_error);
// If the capture attribute requests a specific media source, launch the
// matching capture intent directly rather than showing a chooser.
if (captureCamera()) {
if (window.showIntent(camera, this, lowMemoryError))
return;
} else if (captureCamcorder()) {
if (window.showIntent(camcorder, this, lowMemoryError))
return;
} else if (captureMicrophone()) {
if (window.showIntent(soundRecorder, this, lowMemoryError))
return;
}
Intent getContentIntent = new Intent(Intent.ACTION_GET_CONTENT);
getContentIntent.addCategory(Intent.CATEGORY_OPENABLE);
ArrayList<Intent> extraIntents = new ArrayList<Intent>();
if (!noSpecificType()) {
// Restrict the chooser created above to the accept type the page asked
// for, and offer the matching capture intent alongside the file picker.
if (shouldShowImageTypes()) {
extraIntents.add(camera);
getContentIntent.setType("image/*");
} else if (shouldShowVideoTypes()) {
extraIntents.add(camcorder);
getContentIntent.setType("video/*");
} else if (shouldShowAudioTypes()) {
extraIntents.add(soundRecorder);
getContentIntent.setType("audio/*");
}
}
if (extraIntents.isEmpty()) {
// We couldn't resolve an accept type, so fall back to a generic chooser.
getContentIntent.setType("*/*");
extraIntents.add(camera);
extraIntents.add(camcorder);
extraIntents.add(soundRecorder);
}
chooser.putExtra(Intent.EXTRA_INITIAL_INTENTS, extraIntents.toArray(new Intent[] {}));
chooser.putExtra(Intent.EXTRA_INTENT, getContentIntent);
if (!window.showIntent(chooser, this, lowMemoryError))
onFileNotSelected();
}
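The capture* predicates above are not part of this excerpt. As a minimal sketch only, assuming the source values ("camera", "camcorder", "microphone") from the pre-standard html-media-capture draft linked in the Javadoc, one of them might look like:
private boolean captureCamera() {
    // Hypothetical: honor the capture hint only when the page accepts images.
    return shouldShowImageTypes() && "camera".equals(mCapture);
}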
Use of org.chromium.base.CalledByNative in project chromeview by pwnall.
Class DeviceDisplayInfo, method getBitsPerPixel.
/**
* @return Bits per pixel.
*/
@CalledByNative
public int getBitsPerPixel() {
int format = getPixelFormat();
PixelFormat info = new PixelFormat();
PixelFormat.getPixelFormatInfo(format, info);
return info.bitsPerPixel;
}
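A hedged illustration of what PixelFormat.getPixelFormatInfo() reports; the RGBA_8888 check below is an example of mine, not code from the project:
import android.graphics.PixelFormat;

PixelFormat info = new PixelFormat();
PixelFormat.getPixelFormatInfo(PixelFormat.RGBA_8888, info);
// RGBA_8888 stores four 8-bit channels, so info.bitsPerPixel is now 32.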
Use of org.chromium.base.CalledByNative in project chromeview by pwnall.
Class AndroidNetworkLibrary, method storeKeyPair.
/**
* Stores the key pair through the CertInstaller activity.
* @param context The current application context.
* @param public_key The public key bytes as DER-encoded SubjectPublicKeyInfo (X.509).
* @param private_key The private key as DER-encoded PrivateKeyInfo (PKCS#8).
* @return true on success, false on failure.
*
* Note that failure means that the function could not launch the CertInstaller
* activity. Whether the keys are valid or properly installed will be indicated
* by the CertInstaller UI itself.
*/
@CalledByNative
public static boolean storeKeyPair(Context context, byte[] public_key, byte[] private_key) {
// The "PKEY" and "KEY" extras are undocumented constants understood by
// the CertInstaller activity; KeyChain exposes no public API for passing
// raw key bytes. b/5859651
try {
Intent intent = KeyChain.createInstallIntent();
intent.putExtra("PKEY", private_key);
intent.putExtra("KEY", public_key);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
return true;
} catch (ActivityNotFoundException e) {
Log.w(TAG, "could not store key pair: " + e);
}
return false;
}
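A sketch of a plausible caller, showing that java.security key pairs already use the encodings the Javadoc requires: getEncoded() on a public key returns DER X.509 SubjectPublicKeyInfo, and on a private key DER PKCS#8 PrivateKeyInfo. The helper name and 2048-bit RSA choice are assumptions:
import android.content.Context;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;

static boolean installGeneratedKeyPair(Context context) throws NoSuchAlgorithmException {
    KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
    generator.initialize(2048); // assumed key size for illustration
    KeyPair pair = generator.generateKeyPair();
    // getEncoded() emits exactly the DER forms storeKeyPair() documents.
    return AndroidNetworkLibrary.storeKeyPair(
            context, pair.getPublic().getEncoded(), pair.getPrivate().getEncoded());
}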
Use of org.chromium.base.CalledByNative in project chromeview by pwnall.
Class AndroidNetworkLibrary, method getNetworkList.
/**
* @return A string listing the device's network interfaces, or the empty
* string if there are none. Items in the list are delimited by a
* semicolon ";"; each item is a "name,address" pair for one interface
* address, e.g.
* eth0,10.0.0.2;eth0,fe80::5054:ff:fe12:3456
* is a list string that contains two items.
*/
@CalledByNative
public static String getNetworkList() {
Enumeration<NetworkInterface> list = null;
try {
list = NetworkInterface.getNetworkInterfaces();
if (list == null)
return "";
} catch (SocketException e) {
Log.w(TAG, "Unable to get network interfaces: " + e);
return "";
}
StringBuilder result = new StringBuilder();
while (list.hasMoreElements()) {
NetworkInterface netIf = list.nextElement();
try {
// Skip loopback interfaces, and ones which are down.
if (!netIf.isUp() || netIf.isLoopback())
continue;
Enumeration<InetAddress> addressList = netIf.getInetAddresses();
while (addressList.hasMoreElements()) {
InetAddress address = addressList.nextElement();
// Skip loopback addresses configured on non-loopback interfaces.
if (address.isLoopbackAddress())
continue;
StringBuilder addressString = new StringBuilder();
addressString.append(netIf.getName());
addressString.append(",");
String ipAddress = address.getHostAddress();
if (address instanceof Inet6Address && ipAddress.contains("%")) {
ipAddress = ipAddress.substring(0, ipAddress.lastIndexOf("%"));
}
addressString.append(ipAddress);
if (result.length() != 0)
result.append(";");
result.append(addressString.toString());
}
} catch (SocketException e) {
continue;
}
}
return result.toString();
}
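A hypothetical consumer of the documented "name,address;name,address" format; splitting on ';' and then on the first ',' is safe because interface names contain no commas:
String list = AndroidNetworkLibrary.getNetworkList();
for (String item : list.split(";")) {
    if (item.isEmpty()) continue; // getNetworkList() returns "" when empty
    int comma = item.indexOf(',');
    String name = item.substring(0, comma);
    String address = item.substring(comma + 1);
    System.out.println(name + " -> " + address);
}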
Use of org.chromium.base.CalledByNative in project chromeview by pwnall.
Class VideoCapture, method allocate.
// Returns true on success, false otherwise.
@CalledByNative
public boolean allocate(int width, int height, int frameRate) {
Log.d(TAG, "allocate: requested width=" + width + ", height=" + height + ", frameRate=" + frameRate);
try {
mCamera = Camera.open(mId);
Camera.CameraInfo camera_info = new Camera.CameraInfo();
Camera.getCameraInfo(mId, camera_info);
mCameraOrientation = camera_info.orientation;
mCameraFacing = camera_info.facing;
mDeviceOrientation = getDeviceOrientation();
Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation + ", camera orientation=" + mCameraOrientation + ", facing=" + mCameraFacing);
Camera.Parameters parameters = mCamera.getParameters();
// Calculate fps.
List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
int frameRateInMs = frameRate * 1000;
boolean fpsIsSupported = false;
int fpsMin = 0;
int fpsMax = 0;
for (int[] fpsRange : listFpsRange) {
if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
fpsIsSupported = true;
fpsMin = fpsRange[0];
fpsMax = fpsRange[1];
break;
}
}
if (!fpsIsSupported) {
Log.e(TAG, "allocate: fps " + frameRate + " is not supported");
return false;
}
mCurrentCapability = new CaptureCapability();
mCurrentCapability.mDesiredFps = frameRate;
// Calculate size.
List<Camera.Size> listCameraSize = parameters.getSupportedPreviewSizes();
int minDiff = Integer.MAX_VALUE;
int matchedWidth = width;
int matchedHeight = height;
for (Camera.Size size : listCameraSize) {
int diff = Math.abs(size.width - width) + Math.abs(size.height - height);
Log.d(TAG, "allocate: support resolution (" + size.width + ", " + size.height + "), diff=" + diff);
// Prefer the closest size whose width is a multiple of 32, so the
// capture buffer is compact (i.e., with no padding).
if (diff < minDiff && (size.width % 32 == 0)) {
minDiff = diff;
matchedWidth = size.width;
matchedHeight = size.height;
}
}
if (minDiff == Integer.MAX_VALUE) {
Log.e(TAG, "allocate: can not find a resolution whose width " + "is multiple of 32");
return false;
}
mCurrentCapability.mWidth = matchedWidth;
mCurrentCapability.mHeight = matchedHeight;
Log.d(TAG, "allocate: matched width=" + matchedWidth + ", height=" + matchedHeight);
parameters.setPreviewSize(matchedWidth, matchedHeight);
parameters.setPreviewFormat(mPixelFormat);
parameters.setPreviewFpsRange(fpsMin, fpsMax);
mCamera.setParameters(parameters);
// Set SurfaceTexture.
mGlTextures = new int[1];
// Generate one texture pointer and bind it as an external texture.
GLES20.glGenTextures(1, mGlTextures, 0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
// No mip-mapping with camera source.
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
// Clamp to edge is only option.
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
mSurfaceTexture.setOnFrameAvailableListener(null);
mCamera.setPreviewTexture(mSurfaceTexture);
int bufSize = matchedWidth * matchedHeight * ImageFormat.getBitsPerPixel(mPixelFormat) / 8;
for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
byte[] buffer = new byte[bufSize];
mCamera.addCallbackBuffer(buffer);
}
mExpectedFrameSize = bufSize;
} catch (IOException ex) {
Log.e(TAG, "allocate: " + ex);
return false;
}
return true;
}
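For a sense of scale, a worked instance of the buffer-size math above, assuming the YV12 preview format (12 bits per pixel; the actual mPixelFormat is not shown in this excerpt):
import android.graphics.ImageFormat;

// 640 * 480 * 12 / 8 = 460800 bytes per callback buffer.
int bufSize = 640 * 480 * ImageFormat.getBitsPerPixel(ImageFormat.YV12) / 8;
// If NUM_CAPTURE_BUFFERS were 3 (value assumed), allocate() would queue
// roughly 1.3 MB of callback buffers for the camera.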