Use of android.renderscript.Allocation in project OpenCamera by ageback.
Class HDRProcessor, method computeHistogram.
/**
* @param avg If true, compute the color value as the average of the rgb values. If false,
* compute the color value as the maximum of the rgb values.
*/
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public int[] computeHistogram(Bitmap bitmap, boolean avg) {
if (MyDebug.LOG)
Log.d(TAG, "computeHistogram");
long time_s = System.currentTimeMillis();
Allocation allocation_in = Allocation.createFromBitmap(rs, bitmap);
if (MyDebug.LOG)
Log.d(TAG, "time after createFromBitmap: " + (System.currentTimeMillis() - time_s));
return computeHistogram(allocation_in, avg, false);
}
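For context, a minimal usage sketch (hypothetical caller code; the hdrProcessor and bitmap names are assumptions, not from the project): compute a histogram and locate its peak bin.
// Sketch: find the most common brightness bin in a bitmap.
// 'hdrProcessor' and 'bitmap' are assumed to exist in the caller.
int[] histogram = hdrProcessor.computeHistogram(bitmap, false); // false: use max of RGB per pixel
int peak_bin = 0;
for (int i = 1; i < histogram.length; i++) {
    if (histogram[i] > histogram[peak_bin])
        peak_bin = i;
}
Log.d(TAG, "peak brightness bin: " + peak_bin);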
Use of android.renderscript.Allocation in project OpenCamera by ageback.
Class HDRProcessor, method processHDRCore.
/**
* Core implementation of HDR algorithm.
* Requires Android 4.4 (API level 19, KitKat), due to using RenderScript without the support libraries.
* And we now need Android 5.0 (API level 21, Lollipop) for forEach_Dot with LaunchOptions.
* Using the support libraries (set via project.properties renderscript.support.mode) would bloat the APK
* by around 1799KB! We don't care about pre-Android 4.4 (HDR requires CameraController2 which requires
* Android 5.0 anyway; even if we later added support for CameraController1, we can simply say HDR requires
* Android 5.0).
*/
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void processHDRCore(List<Bitmap> bitmaps, boolean release_bitmaps, Bitmap output_bitmap, boolean assume_sorted, SortCallback sort_cb, float hdr_alpha, int n_tiles, TonemappingAlgorithm tonemapping_algorithm) {
if (MyDebug.LOG)
Log.d(TAG, "processHDRCore");
long time_s = System.currentTimeMillis();
int n_bitmaps = bitmaps.size();
int width = bitmaps.get(0).getWidth();
int height = bitmaps.get(0).getHeight();
// ResponseFunction for each image (the ResponseFunction entry can be left null to indicate the Identity)
ResponseFunction[] response_functions = new ResponseFunction[n_bitmaps];
offsets_x = new int[n_bitmaps];
offsets_y = new int[n_bitmaps];
/*int [][] buffers = new int[n_bitmaps][];
for(int i=0;i<n_bitmaps;i++) {
buffers[i] = new int[bm.getWidth()];
}*/
// float [] hdr = new float[3];
// int [] rgb = new int[3];
initRenderscript();
if (MyDebug.LOG)
Log.d(TAG, "### time after creating renderscript: " + (System.currentTimeMillis() - time_s));
// create allocations
Allocation[] allocations = new Allocation[n_bitmaps];
for (int i = 0; i < n_bitmaps; i++) {
allocations[i] = Allocation.createFromBitmap(rs, bitmaps.get(i));
}
if (MyDebug.LOG)
Log.d(TAG, "### time after creating allocations from bitmaps: " + (System.currentTimeMillis() - time_s));
// index of the bitmap with the base exposure and offsets
final int base_bitmap = (n_bitmaps - 1) / 2;
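// (e.g. n_bitmaps == 3 gives base_bitmap == 1, the middle exposure; n_bitmaps == 5 gives base_bitmap == 2)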
// perform auto-alignment
// if assume_sorted is false, this function will also sort the allocations and bitmaps from darkest to brightest.
BrightnessDetails brightnessDetails = autoAlignment(offsets_x, offsets_y, allocations, width, height, bitmaps, base_bitmap, assume_sorted, sort_cb, true, false, time_s);
int median_brightness = brightnessDetails.median_brightness;
if (MyDebug.LOG) {
Log.d(TAG, "### time after autoAlignment: " + (System.currentTimeMillis() - time_s));
Log.d(TAG, "median_brightness: " + median_brightness);
}
// final boolean use_hdr_n = true; // test always using hdr_n
final boolean use_hdr_n = n_bitmaps > 3;
// compute response_functions
for (int i = 0; i < n_bitmaps; i++) {
ResponseFunction function = null;
if (i != base_bitmap) {
function = createFunctionFromBitmaps(i, bitmaps.get(i), bitmaps.get(base_bitmap), offsets_x[i], offsets_y[i]);
} else if (use_hdr_n) {
// for hdr_n, need to still create the identity response function
function = ResponseFunction.createIdentity();
}
response_functions[i] = function;
}
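// (Sketch of the intended mapping, judging by how parameter_A/parameter_B are fed to the
// script below and used in max_possible_value: each ResponseFunction acts as the linear map
//     scaled_value = parameter_A * pixel_value + parameter_B
// which brings image i's exposure into line with the base image.)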
if (MyDebug.LOG)
Log.d(TAG, "### time after creating response functions: " + (System.currentTimeMillis() - time_s));
/*
// calculate average luminance by sampling
final int n_samples_c = 100;
final int n_w_samples = (int)Math.sqrt(n_samples_c);
final int n_h_samples = n_samples_c/n_w_samples;
double sum_log_luminance = 0.0;
int count = 0;
for(int y=0;y<n_h_samples;y++) {
double alpha = ((double)y+1.0) / ((double)n_h_samples+1.0);
int y_coord = (int)(alpha * bm.getHeight());
for(int i=0;i<n_bitmaps;i++) {
bitmaps.get(i).getPixels(buffers[i], 0, bm.getWidth(), 0, y_coord, bm.getWidth(), 1);
}
for(int x=0;x<n_w_samples;x++) {
double beta = ((double)x+1.0) / ((double)n_w_samples+1.0);
int x_coord = (int)(beta * bm.getWidth());
if( MyDebug.LOG )
Log.d(TAG, "sample luminance from " + x_coord + " , " + y_coord);
calculateHDR(hdr, n_bitmaps, buffers, x_coord, response_functions);
double luminance = calculateLuminance(hdr[0], hdr[1], hdr[2]) + 1.0; // add 1 so we don't take log of 0
sum_log_luminance += Math.log(luminance);
count++;
}
}
float avg_luminance = (float)(Math.exp( sum_log_luminance / count ));
if( MyDebug.LOG )
Log.d(TAG, "avg_luminance: " + avg_luminance);
if( MyDebug.LOG )
Log.d(TAG, "time after calculating average luminance: " + (System.currentTimeMillis() - time_s));
*/
// write new hdr image
// create RenderScript
ScriptC_process_hdr processHDRScript = new ScriptC_process_hdr(rs);
// set allocations
processHDRScript.set_bitmap0(allocations[0]);
processHDRScript.set_bitmap2(allocations[2]);
// set offsets
processHDRScript.set_offset_x0(offsets_x[0]);
processHDRScript.set_offset_y0(offsets_y[0]);
// no offset for middle image
processHDRScript.set_offset_x2(offsets_x[2]);
processHDRScript.set_offset_y2(offsets_y[2]);
// set response functions
processHDRScript.set_parameter_A0(response_functions[0].parameter_A);
processHDRScript.set_parameter_B0(response_functions[0].parameter_B);
// no response function for middle image
processHDRScript.set_parameter_A2(response_functions[2].parameter_A);
processHDRScript.set_parameter_B2(response_functions[2].parameter_B);
if (use_hdr_n) {
// now need to set values for image 1
processHDRScript.set_bitmap1(allocations[1]);
processHDRScript.set_offset_x1(offsets_x[1]);
processHDRScript.set_offset_y1(offsets_y[1]);
processHDRScript.set_parameter_A1(response_functions[1].parameter_A);
processHDRScript.set_parameter_B1(response_functions[1].parameter_B);
}
if (n_bitmaps > 3) {
processHDRScript.set_bitmap3(allocations[3]);
processHDRScript.set_offset_x3(offsets_x[3]);
processHDRScript.set_offset_y3(offsets_y[3]);
processHDRScript.set_parameter_A3(response_functions[3].parameter_A);
processHDRScript.set_parameter_B3(response_functions[3].parameter_B);
processHDRScript.set_bitmap4(allocations[4]);
processHDRScript.set_offset_x4(offsets_x[4]);
processHDRScript.set_offset_y4(offsets_y[4]);
processHDRScript.set_parameter_A4(response_functions[4].parameter_A);
processHDRScript.set_parameter_B4(response_functions[4].parameter_B);
}
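// (Note: the script exposes slots bitmap0..bitmap4 only, so this code path handles at most 5 images.)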
// set tonemapping algorithm
switch(tonemapping_algorithm) {
case TONEMAPALGORITHM_CLAMP:
if (MyDebug.LOG)
Log.d(TAG, "tonemapping algorithm: clamp");
processHDRScript.set_tonemap_algorithm(processHDRScript.get_tonemap_algorithm_clamp_c());
break;
case TONEMAPALGORITHM_EXPONENTIAL:
if (MyDebug.LOG)
Log.d(TAG, "tonemapping algorithm: exponential");
processHDRScript.set_tonemap_algorithm(processHDRScript.get_tonemap_algorithm_exponential_c());
break;
case TONEMAPALGORITHM_REINHARD:
if (MyDebug.LOG)
Log.d(TAG, "tonemapping algorithm: reinhard");
processHDRScript.set_tonemap_algorithm(processHDRScript.get_tonemap_algorithm_reinhard_c());
break;
case TONEMAPALGORITHM_FILMIC:
if (MyDebug.LOG)
Log.d(TAG, "tonemapping algorithm: filmic");
processHDRScript.set_tonemap_algorithm(processHDRScript.get_tonemap_algorithm_filmic_c());
break;
case TONEMAPALGORITHM_ACES:
if (MyDebug.LOG)
Log.d(TAG, "tonemapping algorithm: aces");
processHDRScript.set_tonemap_algorithm(processHDRScript.get_tonemap_algorithm_aces_c());
break;
}
float max_possible_value = response_functions[0].parameter_A * 255 + response_functions[0].parameter_B;
// float max_possible_value = response_functions[base_bitmap - 1].parameter_A * 255 + response_functions[base_bitmap - 1].parameter_B;
if (MyDebug.LOG)
Log.d(TAG, "max_possible_value: " + max_possible_value);
if (max_possible_value < 255.0f) {
// don't make dark images too bright, see below about linear_scale for more details
max_possible_value = 255.0f;
if (MyDebug.LOG)
Log.d(TAG, "clamp max_possible_value to: " + max_possible_value);
}
// hdr_alpha = 0.0f; // test
// final float tonemap_scale_c = avg_luminance / 0.8f; // lower values tend to result in too dark pictures; higher values risk over exposed bright areas
// final float tonemap_scale_c = 255.0f;
// final float tonemap_scale_c = 255.0f - median_brightness;
float tonemap_scale_c = 255.0f;
if (median_brightness <= 0)
median_brightness = 1;
if (MyDebug.LOG)
Log.d(TAG, "median_brightness: " + median_brightness);
int median_target = Math.min(119, 2 * median_brightness);
// don't make median darker
median_target = Math.max(median_brightness, median_target);
if (MyDebug.LOG) {
Log.d(TAG, "median_target: " + median_target);
Log.d(TAG, "compare: " + 255.0f / max_possible_value);
Log.d(TAG, "to: " + (((float) median_target) / (float) median_brightness + median_target / 255.0f - 1.0f));
}
if (255.0f / max_possible_value < ((float) median_target) / (float) median_brightness + median_target / 255.0f - 1.0f) {
// For Reinhard tonemapping:
// As noted below, we have f(V) = V.S / (V+C), where V is the HDR value, C is tonemap_scale_c
// and S = (Vmax + C)/Vmax (see below)
// Ideally we try to choose C such that we map median value M to target T:
// f(M) = T
// => T = M . (Vmax + C) / (Vmax . (M + C))
// => (T/M).(M + C) = (Vmax + C) / Vmax = 1 + C/Vmax
// => C . ( T/M - 1/Vmax ) = 1 - T
// => C = (1-T) / (T/M - 1/Vmax)
// Since we want C <= 1, we must have:
// 1-T <= T/M - 1/Vmax
// => 1/Vmax <= T/M + T - 1
// If this isn't the case, we set C to 1 (to preserve the median as close as possible).
// Note that if we weren't doing the linear scaling below, this would reduce to choosing
// C = M(1-T)/T. We also tend to that as max_possible_value tends to infinity. So even though
// we only sometimes enter this case, it's important for cases where max_possible_value
// might be estimated too large (also consider that if we ever support more than 3 images,
// we'd risk having too large values).
// If T=M, then this simplifies to C = 1-M.
// I've tested that using "C = 1-M" always (and no linear scaling) also gives good results:
// much better compared to Open Camera 1.39, though not quite as good as doing both this
// and linear scaling (testHDR18, testHDR26, testHDR32 look too grey and/or bright).
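// Worked example (illustrative numbers, not from a real capture): with median_brightness
// M = 60 we get median_target T = max(60, min(119, 120)) = 119; if the darkest image's
// response function were parameter_A = 2, parameter_B = 0 then Vmax = 2*255 + 0 = 510.
// The condition above holds (255/510 = 0.5 < 119/60 + 119/255 - 1 ~= 1.45), so:
//     tonemap_denom   = 119/60 - 255/510    ~= 1.483
//     tonemap_scale_c = (255 - 119) / 1.483 ~= 91.7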
final float tonemap_denom = ((float) median_target) / (float) median_brightness - (255.0f / max_possible_value);
if (MyDebug.LOG)
Log.d(TAG, "tonemap_denom: " + tonemap_denom);
if (tonemap_denom != 0.0f) // just in case
tonemap_scale_c = (255.0f - median_target) / tonemap_denom;
// throw new RuntimeException(); // test
}
// (tonemap_scale_c == 255 therefore means that colours will only be made darker).
if (MyDebug.LOG)
Log.d(TAG, "tonemap_scale_c: " + tonemap_scale_c);
processHDRScript.set_tonemap_scale(tonemap_scale_c);
// algorithm specific parameters
switch(tonemapping_algorithm) {
case TONEMAPALGORITHM_EXPONENTIAL:
{
// The basic algorithm is f(V) = 1 - exp( - E * V ), where V is the HDR value, E is a
// constant. This maps [0, infinity] to [0, 1]. However we have an estimate of the maximum
// possible value, Vmax, so we can set a linear scaling S so that [0, Vmax] maps to [0, 1]
// f(V) = S . (1 - exp( - E * V ))
// so 1 = S . (1 - exp( - E * Vmax ))
// => S = 1 / (1 - exp( - E * Vmax ))
// Note that Vmax should be set to a minimum of 255, else we'll make darker images brighter.
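// (Assumption: the division by 255.0 below suggests the script evaluates this curve on
// values normalized so that 255 maps to 1.0, i.e. Vmax in the formula above is
// max_possible_value / 255.)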
float E = processHDRScript.get_exposure();
float linear_scale = (float) (1.0 / (1.0 - Math.exp(-E * max_possible_value / 255.0)));
if (MyDebug.LOG)
Log.d(TAG, "linear_scale: " + linear_scale);
processHDRScript.set_linear_scale(linear_scale);
break;
}
case TONEMAPALGORITHM_REINHARD:
{
// The basic algorithm is f(V) = V / (V+C), where V is the HDR value, C is tonemap_scale_c
// This was used until Open Camera 1.39, but has the problem of making images too dark: it
// maps [0, infinity] to [0, 1], but since in practice we never have very large V values, we
// won't use the full [0, 1] range. So we apply a linear scale S:
// f(V) = V.S / (V+C)
// S is chosen such that the maximum possible value, Vmax, maps to 1. So:
// 1 = Vmax . S / (Vmax + C)
// => S = (Vmax + C)/Vmax
// Note that we don't actually know the maximum HDR value, but instead we estimate it with
// max_possible_value, which gives the maximum value we'd have if even the darkest image was
// 255.0.
// Note that if max_possible_value was less than 255, we'd end up scaling a max value less than
// 1, to [0, 1], i.e., making dark images brighter, which we don't want, which is why above we
// set max_possible_value to a minimum of 255. In practice, this is unlikely to ever happen
// since max_possible_value is calculated as a maximum possible based on the response functions
// (as opposed to the real brightest HDR value), so even for dark photos we'd expect to have
// max_possible_value >= 255.
// Note that the original Reinhard tonemapping paper describes a non-linear scaling by (1 + CV/Vmax^2),
// though this gives poorer performance (in terms of calculation time).
float linear_scale = (max_possible_value + tonemap_scale_c) / max_possible_value;
if (MyDebug.LOG)
Log.d(TAG, "linear_scale: " + linear_scale);
processHDRScript.set_linear_scale(linear_scale);
break;
}
case TONEMAPALGORITHM_FILMIC:
{
// For filmic, we have f(V) = U(EV) / U(W), where V is the HDR value, U is a function.
// We want f(Vmax) = 1, so E * Vmax = W
float E = processHDRScript.get_filmic_exposure_bias();
float W = E * max_possible_value;
if (MyDebug.LOG)
Log.d(TAG, "filmic W: " + W);
processHDRScript.set_W(W);
break;
}
}
if (MyDebug.LOG)
Log.d(TAG, "call processHDRScript");
Allocation output_allocation;
if (release_bitmaps) {
// must use allocations[base_bitmap] as the output, as that's the image guaranteed to have no offset (otherwise we'll have
// problems due to the output being equal to one of the inputs)
output_allocation = allocations[base_bitmap];
} else {
output_allocation = Allocation.createFromBitmap(rs, output_bitmap);
}
if (MyDebug.LOG)
Log.d(TAG, "### time before processHDRScript: " + (System.currentTimeMillis() - time_s));
if (use_hdr_n) {
processHDRScript.set_n_bitmaps_g(n_bitmaps);
processHDRScript.forEach_hdr_n(allocations[base_bitmap], output_allocation);
} else {
processHDRScript.forEach_hdr(allocations[base_bitmap], output_allocation);
}
/*processHDRScript.set_n_bitmaps_g(n_bitmaps);
processHDRScript.forEach_hdr_n(allocations[base_bitmap], output_allocation);*/
if (MyDebug.LOG)
Log.d(TAG, "### time after processHDRScript: " + (System.currentTimeMillis() - time_s));
if (release_bitmaps) {
if (MyDebug.LOG)
Log.d(TAG, "release bitmaps");
// bitmaps.get(base_bitmap) will store HDR image, so free up the rest of the memory asap - we no longer need the remaining bitmaps
for (int i = 0; i < bitmaps.size(); i++) {
if (i != base_bitmap) {
Bitmap bitmap = bitmaps.get(i);
bitmap.recycle();
}
}
}
if (hdr_alpha != 0.0f) {
adjustHistogram(output_allocation, output_allocation, width, height, hdr_alpha, n_tiles, time_s);
if (MyDebug.LOG)
Log.d(TAG, "### time after adjustHistogram: " + (System.currentTimeMillis() - time_s));
}
if (release_bitmaps) {
// must be the base_bitmap we copy to - see note above about using allocations[base_bitmap] as the output
allocations[base_bitmap].copyTo(bitmaps.get(base_bitmap));
if (MyDebug.LOG)
Log.d(TAG, "### time after copying to bitmap: " + (System.currentTimeMillis() - time_s));
// make the output bitmap the first entry in the list
bitmaps.set(0, bitmaps.get(base_bitmap));
for (int i = 1; i < bitmaps.size(); i++) {
bitmaps.set(i, null);
}
} else {
output_allocation.copyTo(output_bitmap);
if (MyDebug.LOG)
Log.d(TAG, "### time after copying to bitmap: " + (System.currentTimeMillis() - time_s));
}
if (MyDebug.LOG)
Log.d(TAG, "### time for processHDRCore: " + (System.currentTimeMillis() - time_s));
}
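For reference, a minimal Java sketch of the scaled Reinhard curve that processHDRCore configures above, with tonemap_scale_c as C and the derived linear_scale as S. It illustrates the formula from the comments; it is not the project's actual RenderScript kernel.
// Scaled Reinhard: f(V) = S * V / (V + C), with S = (Vmax + C) / Vmax so that f(Vmax) = 1.
// Inputs are in the same 0..255-based units as max_possible_value above.
static float reinhard(float v, float tonemap_scale_c, float max_possible_value) {
    float linear_scale = (max_possible_value + tonemap_scale_c) / max_possible_value;
    return linear_scale * v / (v + tonemap_scale_c);
}
// e.g. reinhard(510.0f, 91.7f, 510.0f) == 1.0f: the maximum HDR value maps to the full output range.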
Use of android.renderscript.Allocation in project OpenCamera by ageback.
Class HDRProcessor, method computeHistogram.
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private int[] computeHistogram(Allocation allocation, boolean avg, boolean floating_point) {
if (MyDebug.LOG)
Log.d(TAG, "computeHistogram");
long time_s = System.currentTimeMillis();
int[] histogram = new int[256];
Allocation histogramAllocation = computeHistogramAllocation(allocation, avg, floating_point, time_s);
histogramAllocation.copyTo(histogram);
return histogram;
}
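A median brightness like the one processHDRCore uses can be read off such a histogram; a minimal sketch (an illustration, not the project's actual code):
// Find the median brightness: the first bin at which the cumulative pixel count
// reaches half of the total.
static int medianFromHistogram(int[] histogram, int total_pixels) {
    int count = 0;
    for (int i = 0; i < histogram.length; i++) {
        count += histogram[i];
        if (2 * count >= total_pixels)
            return i;
    }
    return histogram.length - 1;
}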
Use of android.renderscript.Allocation in project ForPDA by RadiationX.
Class BitmapUtils, method rsBlur.
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public static Bitmap rsBlur(Context context, Bitmap sentBitmap, int radius) {
Bitmap overlay = Bitmap.createBitmap(sentBitmap.getWidth(), sentBitmap.getHeight(), Bitmap.Config.ARGB_8888);
RenderScript rs = RenderScript.create(context);
Allocation overlayAlloc = Allocation.createFromBitmap(rs, sentBitmap);
ScriptIntrinsicBlur blur = ScriptIntrinsicBlur.create(rs, overlayAlloc.getElement());
blur.setInput(overlayAlloc);
blur.setRadius(radius);
blur.forEach(overlayAlloc);
overlayAlloc.copyTo(overlay);
rs.destroy();
return overlay;
}
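A usage sketch (hypothetical caller code): ScriptIntrinsicBlur only accepts radii in the range (0, 25], and rsBlur passes the radius straight through to setRadius, so it is worth clamping first.
// Clamp the radius to ScriptIntrinsicBlur's supported range (0, 25] before blurring.
// 'desiredRadius' and 'sourceBitmap' are assumed caller values.
int radius = Math.max(1, Math.min(25, desiredRadius));
Bitmap blurred = BitmapUtils.rsBlur(context, sourceBitmap, radius);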
Use of android.renderscript.Allocation in project Phonograph by kabouzeid.
Class BlurTransformation, method transform.
@Override
protected Bitmap transform(BitmapPool pool, Bitmap toTransform, int outWidth, int outHeight) {
int sampling;
if (this.sampling == 0) {
sampling = ImageUtil.calculateInSampleSize(toTransform.getWidth(), toTransform.getHeight(), 100);
} else {
sampling = this.sampling;
}
int width = toTransform.getWidth();
int height = toTransform.getHeight();
int scaledWidth = width / sampling;
int scaledHeight = height / sampling;
Bitmap out = pool.get(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
if (out == null) {
out = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
}
Canvas canvas = new Canvas(out);
canvas.scale(1 / (float) sampling, 1 / (float) sampling);
Paint paint = new Paint();
paint.setFlags(Paint.FILTER_BITMAP_FLAG);
canvas.drawBitmap(toTransform, 0, 0, paint);
if (Build.VERSION.SDK_INT >= 17) {
try {
final RenderScript rs = RenderScript.create(context.getApplicationContext());
final Allocation input = Allocation.createFromBitmap(rs, out, Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);
final Allocation output = Allocation.createTyped(rs, input.getType());
final ScriptIntrinsicBlur script = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
script.setRadius(blurRadius);
script.setInput(input);
script.forEach(output);
output.copyTo(out);
rs.destroy();
return out;
} catch (RSRuntimeException e) {
// on some devices RenderScript.create() throws: android.support.v8.renderscript.RSRuntimeException: Error loading libRSSupport library
if (BuildConfig.DEBUG)
e.printStackTrace();
}
}
return StackBlur.blur(out, blurRadius);
}
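Downscaling by sampling before the blur both reduces the work and stretches the intrinsic's 25-pixel radius cap: a blur of blurRadius at 1/sampling scale reads roughly like a blur of blurRadius * sampling pixels at full size. A plausible sketch of the kind of calculation ImageUtil.calculateInSampleSize performs (hypothetical; the project's actual implementation may differ):
// Hypothetical sketch: pick a power-of-two sample size so that the larger
// dimension ends up no bigger than maxSize (100 pixels in the call above).
static int calculateInSampleSize(int width, int height, int maxSize) {
    int inSampleSize = 1;
    while (Math.max(width, height) / inSampleSize > maxSize) {
        inSampleSize *= 2;
    }
    return inSampleSize;
}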