Example 46 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

the class MainActivityTest method subTestAvg.

/**
 * The following testAvgX tests exercise the Avg noise reduction algorithm on a given set of input images.
 *  By testing on a fixed sample, it is easier to fine-tune the algorithm for quality and performance.
 *  To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 *  folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes, which take
 *  time to transfer to the device every time we run the tests.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private HistogramDetails subTestAvg(List<String> inputs, String output_name, int iso, TestAvgCallback cb) throws IOException, InterruptedException {
    Log.d(TAG, "subTestAvg");
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        Log.d(TAG, "renderscript requires Android Lollipop or better");
        return null;
    }
    // wait for camera to open
    Thread.sleep(1000);
    /*Bitmap nr_bitmap = getBitmapFromFile(inputs.get(0));
    	long time_s = System.currentTimeMillis();
		try {
			for(int i=1;i<inputs.size();i++) {
				Log.d(TAG, "processAvg for image: " + i);
				Bitmap new_bitmap = getBitmapFromFile(inputs.get(i));
				float avg_factor = (float)i;
				mActivity.getApplicationInterface().getHDRProcessor().processAvg(nr_bitmap, new_bitmap, avg_factor, true);
				// processAvg recycles new_bitmap
				if( cb != null ) {
					cb.doneProcessAvg(i);
				}
				//break; // test
			}
			//mActivity.getApplicationInterface().getHDRProcessor().processAvgMulti(inputs, hdr_strength, 4);
		}
		catch(HDRProcessorException e) {
			e.printStackTrace();
			throw new RuntimeException();
		}
		Log.d(TAG, "Avg time: " + (System.currentTimeMillis() - time_s));

        {
            mActivity.getApplicationInterface().getHDRProcessor().avgBrighten(nr_bitmap);
            Log.d(TAG, "time after brighten: " + (System.currentTimeMillis() - time_s));
        }*/
    Bitmap nr_bitmap;
    try {
        // initialise allocation from first two bitmaps
        Bitmap bitmap0 = getBitmapFromFile(inputs.get(0));
        Bitmap bitmap1 = getBitmapFromFile(inputs.get(1));
        int width = bitmap0.getWidth();
        int height = bitmap0.getHeight();
        float avg_factor = 1.0f;
        Allocation allocation = mActivity.getApplicationInterface().getHDRProcessor().processAvg(bitmap0, bitmap1, avg_factor, iso, true);
        // processAvg recycles both bitmaps
        if (cb != null) {
            cb.doneProcessAvg(1);
        }
        for (int i = 2; i < inputs.size(); i++) {
            Log.d(TAG, "processAvg for image: " + i);
            Bitmap new_bitmap = getBitmapFromFile(inputs.get(i));
            avg_factor = (float) i;
            mActivity.getApplicationInterface().getHDRProcessor().updateAvg(allocation, width, height, new_bitmap, avg_factor, iso, true);
            // updateAvg recycles new_bitmap
            if (cb != null) {
                cb.doneProcessAvg(i);
            }
        }
        nr_bitmap = mActivity.getApplicationInterface().getHDRProcessor().avgBrighten(allocation, width, height, iso);
    } catch (HDRProcessorException e) {
        e.printStackTrace();
        throw new RuntimeException();
    }
    File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/" + output_name);
    OutputStream outputStream = new FileOutputStream(file);
    nr_bitmap.compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
    outputStream.close();
    mActivity.getStorageUtils().broadcastFile(file, true, false, true);
    HistogramDetails hdrHistogramDetails = checkHistogram(nr_bitmap);
    nr_bitmap.recycle();
    System.gc();
    inputs.clear();
    Thread.sleep(500);
    return hdrHistogramDetails;
}
Also used : HDRProcessorException(net.sourceforge.opencamera.HDRProcessorException) Bitmap(android.graphics.Bitmap) Allocation(android.renderscript.Allocation) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) FileOutputStream(java.io.FileOutputStream) File(java.io.File) SuppressLint(android.annotation.SuppressLint) Point(android.graphics.Point) TargetApi(android.annotation.TargetApi)
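
For reference, a testAvgX caller built around subTestAvg could look like the sketch below. This is not one of the actual OpenCamera tests: the file names, ISO value and callback body are placeholders, and the TestAvgCallback shape is only inferred from the doneProcessAvg(i) calls above.

public void testAvgExample() throws IOException, InterruptedException {
    Log.d(TAG, "testAvgExample");
    // hypothetical input images, assumed to have been copied to DCIM/testOpenCamera/testdata/
    List<String> inputs = new ArrayList<>();
    inputs.add("testdata/sample/input0.jpg");
    inputs.add("testdata/sample/input1.jpg");
    inputs.add("testdata/sample/input2.jpg");
    // run the Avg noise reduction; subTestAvg saves the result as DCIM/testAvgExample.jpg
    HistogramDetails hdrHistogramDetails = subTestAvg(inputs, "testAvgExample.jpg", 800, new TestAvgCallback() {
        @Override
        public void doneProcessAvg(int index) {
            Log.d(TAG, "done averaging image: " + index);
        }
    });
}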

Example 47 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

the class HDRProcessor method autoAlignment.

/**
 * @param bitmaps       Only required if use_mtb is true, otherwise may be null.
 * @param base_bitmap   Index of bitmap in bitmaps that should be kept fixed; the other bitmaps
 *                      will be aligned relative to this.
 * @param assume_sorted If assume_sorted is false, and use_mtb is true, this function will also
 *                      sort the allocations and bitmaps from darkest to brightest.
 * @param use_mtb       Whether to align based on the median threshold bitmaps or not.
 * @param floating_point If true, the first allocation is in floating point (F32_3) format.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private BrightnessDetails autoAlignment(int[] offsets_x, int[] offsets_y, Allocation[] allocations, int width, int height, List<Bitmap> bitmaps, int base_bitmap, boolean assume_sorted, SortCallback sort_cb, boolean use_mtb, boolean floating_point, long time_s) {
    if (MyDebug.LOG)
        Log.d(TAG, "autoAlignment");
    // initialise
    for (int i = 0; i < offsets_x.length; i++) {
        offsets_x[i] = 0;
        offsets_y[i] = 0;
    }
    Allocation[] mtb_allocations = new Allocation[allocations.length];
    if (MyDebug.LOG)
        Log.d(TAG, "### time after creating mtb_allocations: " + (System.currentTimeMillis() - time_s));
    // Testing shows that in practice we get good results by only aligning the centre quarter of the images. This gives better
    // performance, and uses less memory.
    int mtb_width = width / 2;
    int mtb_height = height / 2;
    int mtb_x = mtb_width / 2;
    int mtb_y = mtb_height / 2;
    /*int mtb_width = width;
		int mtb_height = height;
		int mtb_x = 0;
		int mtb_y = 0;*/
    // create RenderScript
    ScriptC_create_mtb createMTBScript = new ScriptC_create_mtb(rs);
    LuminanceInfo[] luminanceInfos = null;
    if (use_mtb) {
        luminanceInfos = new LuminanceInfo[allocations.length];
        for (int i = 0; i < allocations.length; i++) {
            luminanceInfos[i] = computeMedianLuminance(bitmaps.get(i), mtb_x, mtb_y, mtb_width, mtb_height);
            if (MyDebug.LOG)
                Log.d(TAG, i + ": median_value: " + luminanceInfos[i].median_value);
        }
        if (MyDebug.LOG)
            Log.d(TAG, "time after computeMedianLuminance: " + (System.currentTimeMillis() - time_s));
    }
    if (!assume_sorted && use_mtb) {
        if (MyDebug.LOG)
            Log.d(TAG, "sort bitmaps");
        class BitmapInfo {

            final LuminanceInfo luminanceInfo;
            final Bitmap bitmap;
            final Allocation allocation;
            final int index;

            BitmapInfo(LuminanceInfo luminanceInfo, Bitmap bitmap, Allocation allocation, int index) {
                this.luminanceInfo = luminanceInfo;
                this.bitmap = bitmap;
                this.allocation = allocation;
                this.index = index;
            }
        }
        List<BitmapInfo> bitmapInfos = new ArrayList<>(bitmaps.size());
        for (int i = 0; i < bitmaps.size(); i++) {
            BitmapInfo bitmapInfo = new BitmapInfo(luminanceInfos[i], bitmaps.get(i), allocations[i], i);
            bitmapInfos.add(bitmapInfo);
        }
        Collections.sort(bitmapInfos, new Comparator<BitmapInfo>() {

            @Override
            public int compare(BitmapInfo o1, BitmapInfo o2) {
                return o1.luminanceInfo.median_value - o2.luminanceInfo.median_value;
            }
        });
        bitmaps.clear();
        for (int i = 0; i < bitmapInfos.size(); i++) {
            bitmaps.add(bitmapInfos.get(i).bitmap);
            luminanceInfos[i] = bitmapInfos.get(i).luminanceInfo;
            allocations[i] = bitmapInfos.get(i).allocation;
        }
        if (MyDebug.LOG) {
            for (int i = 0; i < allocations.length; i++) {
                Log.d(TAG, i + ": median_value: " + luminanceInfos[i].median_value);
            }
        }
        if (sort_cb != null) {
            List<Integer> sort_order = new ArrayList<>();
            for (int i = 0; i < bitmapInfos.size(); i++) {
                sort_order.add(bitmapInfos.get(i).index);
            }
            if (MyDebug.LOG)
                Log.d(TAG, "sort_order: " + sort_order);
            sort_cb.sortOrder(sort_order);
        }
    }
    int median_brightness = -1;
    if (use_mtb) {
        median_brightness = luminanceInfos[base_bitmap].median_value;
        if (MyDebug.LOG)
            Log.d(TAG, "median_brightness: " + median_brightness);
    }
    for (int i = 0; i < allocations.length; i++) {
        int median_value = -1;
        if (use_mtb) {
            median_value = luminanceInfos[i].median_value;
            if (MyDebug.LOG)
                Log.d(TAG, i + ": median_value: " + median_value);
        /*if( median_value < 16 ) {
					// needed for testHDR2, testHDR28
					if( MyDebug.LOG )
						Log.d(TAG, "image too dark to do alignment");
					mtb_allocations[i] = null;
					continue;
				}*/
        }
        if (use_mtb && luminanceInfos[i].noisy) {
            if (MyDebug.LOG)
                Log.d(TAG, "unable to compute median luminance safely");
            mtb_allocations[i] = null;
            continue;
        }
        mtb_allocations[i] = Allocation.createTyped(rs, Type.createXY(rs, Element.U8(rs), mtb_width, mtb_height));
        // set parameters
        if (use_mtb)
            createMTBScript.set_median_value(median_value);
        createMTBScript.set_start_x(mtb_x);
        createMTBScript.set_start_y(mtb_y);
        createMTBScript.set_out_bitmap(mtb_allocations[i]);
        if (MyDebug.LOG)
            Log.d(TAG, "call createMTBScript");
        Script.LaunchOptions launch_options = new Script.LaunchOptions();
        // launch_options.setX((int)(width*0.25), (int)(width*0.75));
        // launch_options.setY((int)(height*0.25), (int)(height*0.75));
        // createMTBScript.forEach_create_mtb(allocations[i], mtb_allocations[i], launch_options);
        launch_options.setX(mtb_x, mtb_x + mtb_width);
        launch_options.setY(mtb_y, mtb_y + mtb_height);
        if (use_mtb)
            createMTBScript.forEach_create_mtb(allocations[i], launch_options);
        else {
            if (floating_point && i == 0)
                createMTBScript.forEach_create_greyscale_f(allocations[i], launch_options);
            else
                createMTBScript.forEach_create_greyscale(allocations[i], launch_options);
        }
        if (MyDebug.LOG)
            Log.d(TAG, "time after createMTBScript: " + (System.currentTimeMillis() - time_s));
    /*if( MyDebug.LOG ) {
				// debugging
				byte [] mtb_bytes = new byte[mtb_width*mtb_height];
				mtb_allocations[i].copyTo(mtb_bytes);
				int [] pixels = new int[mtb_width*mtb_height];
				for(int j=0;j<mtb_width*mtb_height;j++) {
					byte b = mtb_bytes[j];
					pixels[j] = Color.argb(255, b, b, b);
				}
				Bitmap mtb_bitmap = Bitmap.createBitmap(pixels, mtb_width, mtb_height, Bitmap.Config.ARGB_8888);
				File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/mtb_bitmap" + i + ".jpg");
				try {
					OutputStream outputStream = new FileOutputStream(file);
					mtb_bitmap.compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
					outputStream.close();
					MainActivity mActivity = (MainActivity) context;
					mActivity.getStorageUtils().broadcastFile(file, true, false, true);
				}
				catch(IOException e) {
					e.printStackTrace();
				}
				mtb_bitmap.recycle();
			}*/
    }
    if (MyDebug.LOG)
        Log.d(TAG, "### time after all createMTBScript: " + (System.currentTimeMillis() - time_s));
    // The initial step_size N should be a power of 2; the maximum offset we can achieve by the algorithm is N-1.
    // For pictures resolution 4160x3120, this gives max_ideal_size 27, and initial_step_size 32.
    // On tests testHDR1 to testHDR35, the max required offset was 24 pixels (for testHDR33) even when using
    // an initial_step_size of 64.
    // Note, there isn't really a performance cost in allowing higher initial step sizes (as larger sizes have less
    // sampling - since we sample every step_size pixels - though there might be some overhead for every extra call
    // to renderscript that we do). But high step sizes have a risk of producing really bad results if we were
    // to misidentify cases as needing a large offset.
    // n.b., use the full width and height here, not mtb_width/mtb_height
    int max_dim = Math.max(width, height);
    int max_ideal_size = max_dim / 150;
    int initial_step_size = 1;
    while (initial_step_size < max_ideal_size) {
        initial_step_size *= 2;
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "max_dim: " + max_dim);
        Log.d(TAG, "max_ideal_size: " + max_ideal_size);
        Log.d(TAG, "initial_step_size: " + initial_step_size);
    }
    if (mtb_allocations[base_bitmap] == null) {
        if (MyDebug.LOG)
            Log.d(TAG, "base image not suitable for image alignment");
        return new BrightnessDetails(median_brightness);
    }
    // create RenderScript
    ScriptC_align_mtb alignMTBScript = new ScriptC_align_mtb(rs);
    // set parameters
    alignMTBScript.set_bitmap0(mtb_allocations[base_bitmap]);
    for (int i = 0; i < allocations.length; i++) {
        if (i == base_bitmap) {
            // don't need to align the "base" reference image
            continue;
        }
        if (mtb_allocations[i] == null) {
            if (MyDebug.LOG)
                Log.d(TAG, "image " + i + " not suitable for image alignment");
            continue;
        }
        alignMTBScript.set_bitmap1(mtb_allocations[i]);
        int step_size = initial_step_size;
        while (step_size > 1) {
            step_size /= 2;
            alignMTBScript.set_off_x(offsets_x[i]);
            alignMTBScript.set_off_y(offsets_y[i]);
            alignMTBScript.set_step_size(step_size);
            if (MyDebug.LOG) {
                Log.d(TAG, "call alignMTBScript for image: " + i);
                Log.d(TAG, "    versus base image: " + base_bitmap);
                Log.d(TAG, "step_size: " + step_size);
            }
            Allocation errorsAllocation = Allocation.createSized(rs, Element.I32(rs), 9);
            alignMTBScript.bind_errors(errorsAllocation);
            alignMTBScript.invoke_init_errors();
            // see note inside align_mtb.rs/align_mtb() for why we sample over a subset of the image
            Script.LaunchOptions launch_options = new Script.LaunchOptions();
            int stop_x = mtb_width / step_size;
            int stop_y = mtb_height / step_size;
            if (MyDebug.LOG) {
                Log.d(TAG, "stop_x: " + stop_x);
                Log.d(TAG, "stop_y: " + stop_y);
            }
            // launch_options.setX((int)(stop_x*0.25), (int)(stop_x*0.75));
            // launch_options.setY((int)(stop_y*0.25), (int)(stop_y*0.75));
            launch_options.setX(0, stop_x);
            launch_options.setY(0, stop_y);
            if (use_mtb)
                alignMTBScript.forEach_align_mtb(mtb_allocations[base_bitmap], launch_options);
            else
                alignMTBScript.forEach_align(mtb_allocations[base_bitmap], launch_options);
            if (MyDebug.LOG)
                Log.d(TAG, "time after alignMTBScript: " + (System.currentTimeMillis() - time_s));
            int best_error = -1;
            int best_id = -1;
            int[] errors = new int[9];
            errorsAllocation.copyTo(errors);
            for (int j = 0; j < 9; j++) {
                int this_error = errors[j];
                if (MyDebug.LOG)
                    Log.d(TAG, "    errors[" + j + "]: " + this_error);
                if (best_id == -1 || this_error < best_error) {
                    best_error = this_error;
                    best_id = j;
                }
            }
            if (MyDebug.LOG)
                Log.d(TAG, "    best_id " + best_id + " error: " + best_error);
            if (best_id != -1) {
                int this_off_x = best_id % 3;
                int this_off_y = best_id / 3;
                this_off_x--;
                this_off_y--;
                if (MyDebug.LOG) {
                    Log.d(TAG, "this_off_x: " + this_off_x);
                    Log.d(TAG, "this_off_y: " + this_off_y);
                }
                offsets_x[i] += this_off_x * step_size;
                offsets_y[i] += this_off_y * step_size;
                if (MyDebug.LOG) {
                    Log.d(TAG, "offsets_x is now: " + offsets_x[i]);
                    Log.d(TAG, "offsets_y is now: " + offsets_y[i]);
                }
            }
        }
    }
    /*for(int i=0;i<allocations.length;i++) {
			offsets_x[i] = 0;
			offsets_y[i] = 0;
		}*/
    return new BrightnessDetails(median_brightness);
}
Also used : Script(android.renderscript.Script) RenderScript(android.renderscript.RenderScript) ArrayList(java.util.ArrayList) Bitmap(android.graphics.Bitmap) Allocation(android.renderscript.Allocation) RequiresApi(android.support.annotation.RequiresApi)
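
The comment block about initial_step_size is easy to verify in isolation. A minimal standalone sketch of the same heuristic (factored out here purely for illustration; HDRProcessor computes it inline as shown above):

// Smallest power of two >= max_dim/150, as in autoAlignment above.
// For a 4160x3120 image: max_dim = 4160, max_ideal_size = 27 (integer division),
// and the loop yields 32, matching the figures quoted in the comment.
static int computeInitialStepSize(int width, int height) {
    int max_dim = Math.max(width, height); // full image size, not the centre-quarter MTB crop
    int max_ideal_size = max_dim / 150;
    int initial_step_size = 1;
    while (initial_step_size < max_ideal_size) {
        initial_step_size *= 2;
    }
    return initial_step_size;
}

The alignment loop then halves step_size on each iteration, so with an initial value of 32 the reachable per-axis offset is at most 16+8+4+2+1 = 31 = N-1 pixels, which is where the "maximum offset we can achieve is N-1" remark comes from.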

Example 48 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

the class HDRProcessor method computeHistogramAllocation.

/**
 * @param avg If true, compute the color value as the average of the rgb values. If false,
 *            compute the color value as the maximum of the rgb values.
 * @param floating_point Whether the allocation_in is in floating point (F32_3) format, or
 *                       RGBA_8888 format.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private Allocation computeHistogramAllocation(Allocation allocation_in, boolean avg, boolean floating_point, long time_s) {
    if (MyDebug.LOG)
        Log.d(TAG, "computeHistogramAllocation");
    Allocation histogramAllocation = Allocation.createSized(rs, Element.I32(rs), 256);
    // final boolean use_custom_histogram = false;
    final boolean use_custom_histogram = true;
    if (use_custom_histogram) {
        if (MyDebug.LOG)
            Log.d(TAG, "create histogramScript");
        ScriptC_histogram_compute histogramScript = new ScriptC_histogram_compute(rs);
        if (MyDebug.LOG)
            Log.d(TAG, "bind histogram allocation");
        histogramScript.bind_histogram(histogramAllocation);
        histogramScript.invoke_init_histogram();
        if (MyDebug.LOG)
            Log.d(TAG, "call histogramScript");
        if (MyDebug.LOG)
            Log.d(TAG, "time before histogramScript: " + (System.currentTimeMillis() - time_s));
        if (avg) {
            if (floating_point)
                histogramScript.forEach_histogram_compute_avg_f(allocation_in);
            else
                histogramScript.forEach_histogram_compute_avg(allocation_in);
        } else {
            if (floating_point)
                histogramScript.forEach_histogram_compute_f(allocation_in);
            else
                histogramScript.forEach_histogram_compute(allocation_in);
        }
        if (MyDebug.LOG)
            Log.d(TAG, "time after histogramScript: " + (System.currentTimeMillis() - time_s));
    } else {
        ScriptIntrinsicHistogram histogramScript = ScriptIntrinsicHistogram.create(rs, Element.U8_4(rs));
        histogramScript.setOutput(histogramAllocation);
        if (MyDebug.LOG)
            Log.d(TAG, "call histogramScript");
        // use forEach_Dot(); using forEach would simply compute a histogram for red values!
        histogramScript.forEach_Dot(allocation_in);
    }
    // histogramAllocation.setAutoPadding(true);
    return histogramAllocation;
}
Also used : Allocation(android.renderscript.Allocation) ScriptIntrinsicHistogram(android.renderscript.ScriptIntrinsicHistogram) RequiresApi(android.support.annotation.RequiresApi)
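
On the Java side the result is read back with Allocation.copyTo(). A hedged sketch of a caller (the wrapper name readHistogram is illustrative; this mirrors what the computeHistogram helper used elsewhere on this page presumably does with the returned allocation):

private int[] readHistogram(Allocation allocation_in, boolean avg, boolean floating_point, long time_s) {
    Allocation histogramAllocation = computeHistogramAllocation(allocation_in, avg, floating_point, time_s);
    int[] histogram = new int[256];
    // copy the 256 int bins out of the RenderScript allocation
    histogramAllocation.copyTo(histogram);
    // release the script-side buffer once the bins are on the Java heap
    histogramAllocation.destroy();
    return histogram;
}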

Example 49 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

the class HDRProcessor method avgBrighten.

/**
 * @param input         The allocation in floating point format.
 * @param width         Width of the input.
 * @param height        Height of the input.
 * @param iso           ISO used for the original images.
 * @return              Resultant bitmap.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public Bitmap avgBrighten(Allocation input, int width, int height, int iso) {
    if (MyDebug.LOG) {
        Log.d(TAG, "avgBrighten");
        Log.d(TAG, "iso: " + iso);
    }
    initRenderscript();
    long time_s = System.currentTimeMillis();
    int[] histo = computeHistogram(input, false, true);
    HistogramInfo histogramInfo = getHistogramInfo(histo);
    int median_brightness = histogramInfo.median_brightness;
    int max_brightness = histogramInfo.max_brightness;
    if (MyDebug.LOG)
        Log.d(TAG, "### time after computeHistogram: " + (System.currentTimeMillis() - time_s));
    int max_gain_factor = 4;
    /*if( iso <= 150 ) {
			max_gain_factor = 4;
		}*/
    int median_target = getMedianTarget(median_brightness, max_gain_factor);
    // int max_target = Math.min(255, (int)((max_brightness*median_target)/(float)median_brightness + 0.5f) );
    if (MyDebug.LOG) {
        Log.d(TAG, "median brightness: " + median_brightness);
        Log.d(TAG, "max brightness: " + max_brightness);
        Log.d(TAG, "median target: " + median_target);
    // Log.d(TAG, "max target: " + max_target);
    }
    /* We use a combination of gain and gamma to brighten images if required. Gain works best for
		 * dark images (e.g., see testAvg8), gamma works better for bright images (e.g., testAvg12).
		 */
    float gain = median_target / (float) median_brightness;
    if (MyDebug.LOG)
        Log.d(TAG, "gain " + gain);
    if (gain < 1.0f) {
        gain = 1.0f;
        if (MyDebug.LOG) {
            Log.d(TAG, "clamped gain to: " + gain);
        }
    }
    float gamma = 1.0f;
    float max_possible_value = gain * max_brightness;
    if (MyDebug.LOG)
        Log.d(TAG, "max_possible_value: " + max_possible_value);
    if (max_possible_value > 255.0f) {
        gain = 255.0f / max_brightness;
        if (MyDebug.LOG)
            Log.d(TAG, "limit gain to: " + gain);
        // use gamma correction for the remainder
        if (median_target > gain * median_brightness) {
            gamma = (float) (Math.log(median_target / 255.0f) / Math.log(gain * median_brightness / 255.0f));
        }
    }
    // float gamma = (float)(Math.log(median_target/255.0f) / Math.log(median_brightness/255.0f));
    if (MyDebug.LOG)
        Log.d(TAG, "gamma " + gamma);
    final float min_gamma_non_bright_c = 0.75f;
    if (gamma > 1.0f) {
        gamma = 1.0f;
        if (MyDebug.LOG) {
            Log.d(TAG, "clamped gamma to : " + gamma);
        }
    } else if (iso > 150 && gamma < min_gamma_non_bright_c) {
        // too small gamma on non-bright reduces contrast too much (e.g., see testAvg9)
        gamma = min_gamma_non_bright_c;
        if (MyDebug.LOG) {
            Log.d(TAG, "clamped gamma to : " + gamma);
        }
    }
    // float gain = median_target / (float)median_brightness;
    /*float gamma = (float)(Math.log(max_target/(float)median_target) / Math.log(max_brightness/(float)median_brightness));
		float gain = median_target / ((float)Math.pow(median_brightness/255.0f, gamma) * 255.0f);
		if( MyDebug.LOG ) {
			Log.d(TAG, "gamma " + gamma);
			Log.d(TAG, "gain " + gain);
			Log.d(TAG, "gain2 " + max_target / ((float)Math.pow(max_brightness/255.0f, gamma) * 255.0f));
		}*/
    /*float gain = median_target / (float)median_brightness;
		if( MyDebug.LOG ) {
			Log.d(TAG, "gain: " + gain);
		}
		if( gain < 1.0f ) {
			gain = 1.0f;
			if( MyDebug.LOG ) {
				Log.d(TAG, "clamped gain to : " + gain);
			}
		}*/
    ScriptC_avg_brighten script = new ScriptC_avg_brighten(rs);
    script.set_bitmap(input);
    float black_level = 0.0f;
    if (iso >= 700) {
        black_level = 4.0f;
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "black_level: " + black_level);
    }
    script.invoke_setBlackLevel(black_level);
    script.set_gamma(gamma);
    script.set_gain(gain);
    /*float tonemap_scale_c = 255.0f;
		if( MyDebug.LOG )
			Log.d(TAG, "tonemap_scale_c: " + tonemap_scale_c);
		script.set_tonemap_scale(tonemap_scale_c);

		float max_possible_value = gain*max_brightness;
		if( MyDebug.LOG )
			Log.d(TAG, "max_possible_value: " + max_possible_value);
		if( max_possible_value < 255.0f ) {
			max_possible_value = 255.0f; // don't make dark images too bright
			if( MyDebug.LOG )
				Log.d(TAG, "clamp max_possible_value to: " + max_possible_value);
		}
		float linear_scale = (max_possible_value + tonemap_scale_c) / max_possible_value;
		if( MyDebug.LOG )
			Log.d(TAG, "linear_scale: " + linear_scale);
		script.set_linear_scale(linear_scale);*/
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    Allocation allocation_out = Allocation.createFromBitmap(rs, bitmap);
    if (MyDebug.LOG)
        Log.d(TAG, "### time after creating allocation_out: " + (System.currentTimeMillis() - time_s));
    script.forEach_avg_brighten_f(input, allocation_out);
    if (MyDebug.LOG)
        Log.d(TAG, "### time after avg_brighten: " + (System.currentTimeMillis() - time_s));
    if (iso <= 150) {
        // for bright scenes, local contrast enhancement helps improve the quality of images (especially where we may have both
        // dark and bright regions, e.g., testAvg12); but for dark scenes, it just blows up the noise too much
        adjustHistogram(allocation_out, allocation_out, width, height, 0.5f, 4, time_s);
        if (MyDebug.LOG)
            Log.d(TAG, "### time after adjustHistogram: " + (System.currentTimeMillis() - time_s));
    }
    allocation_out.copyTo(bitmap);
    if (MyDebug.LOG)
        Log.d(TAG, "### total time for avgBrighten: " + (System.currentTimeMillis() - time_s));
    return bitmap;
}
Also used : Bitmap(android.graphics.Bitmap) Allocation(android.renderscript.Allocation) RequiresApi(android.support.annotation.RequiresApi)
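
The gain/gamma interaction above is easier to follow with concrete numbers. Below is a standalone sketch of the same arithmetic; the worked values in the comment are made up, not taken from a real test image.

// Re-derivation of avgBrighten's gain/gamma choice, outside RenderScript.
// Worked example: median_brightness = 50, max_brightness = 200, median_target = 100, iso = 800.
//   gain = 100/50 = 2.0, but 2.0*200 = 400 > 255, so gain is limited to 255/200 = 1.275;
//   since 100 > 1.275*50 = 63.75, gamma = log(100/255)/log(63.75/255) ~= 0.68,
//   which is then raised to the 0.75 floor because iso > 150.
static float[] computeGainAndGamma(int median_brightness, int max_brightness, int median_target, int iso) {
    float gain = median_target / (float) median_brightness;
    if (gain < 1.0f)
        gain = 1.0f; // never darken the image
    float gamma = 1.0f;
    if (gain * max_brightness > 255.0f) {
        gain = 255.0f / max_brightness; // avoid pushing the brightest pixels past 255
        if (median_target > gain * median_brightness)
            gamma = (float) (Math.log(median_target / 255.0f) / Math.log(gain * median_brightness / 255.0f));
    }
    if (gamma > 1.0f)
        gamma = 1.0f;
    else if (iso > 150 && gamma < 0.75f)
        gamma = 0.75f; // a lower gamma would reduce contrast too much on non-bright scenes
    return new float[] { gain, gamma };
}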

Example 50 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

the class HDRProcessor method processAvg.

/**
 * Combines two images by averaging them. Each pixel of the returned allocation contains:
 *      (avg_factor * bitmap_avg + bitmap_new)/(avg_factor+1)
 *  A simple average is therefore obtained by calling this function with avg_factor = 1.0f.
 *  For averaging multiple images, call this function for the first pair, then updateAvg() for
 *  each subsequent image, with avg_factor increasing by 1.0 each time.
 *  The reason we do it this way (rather than just receiving a list of bitmaps) is so that we
 *  can average multiple images without having to keep them all in memory simultaneously.
 * @param bitmap_avg     One of the input images.
 * @param bitmap_new     The other input image.
 * @param avg_factor     The weighting factor for bitmap_avg.
 * @param iso            The ISO used for the original images.
 * @param release_bitmap If true, both input bitmaps will be recycled.
 * @return               An allocation in floating point (F32_3) format containing the averaged result.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public Allocation processAvg(Bitmap bitmap_avg, Bitmap bitmap_new, float avg_factor, int iso, boolean release_bitmap) throws HDRProcessorException {
    if (MyDebug.LOG) {
        Log.d(TAG, "processAvg");
        Log.d(TAG, "avg_factor: " + avg_factor);
    }
    if (bitmap_avg.getWidth() != bitmap_new.getWidth() || bitmap_avg.getHeight() != bitmap_new.getHeight()) {
        if (MyDebug.LOG) {
            Log.e(TAG, "bitmaps not of same resolution");
        }
        throw new HDRProcessorException(HDRProcessorException.UNEQUAL_SIZES);
    }
    long time_s = System.currentTimeMillis();
    int width = bitmap_avg.getWidth();
    int height = bitmap_avg.getHeight();
    initRenderscript();
    if (MyDebug.LOG)
        Log.d(TAG, "### time after creating renderscript: " + (System.currentTimeMillis() - time_s));
    // create allocations
    Allocation allocation_avg = Allocation.createFromBitmap(rs, bitmap_avg);
    Allocation allocation_new = Allocation.createFromBitmap(rs, bitmap_new);
    Allocation allocation_out = Allocation.createTyped(rs, Type.createXY(rs, Element.F32_3(rs), width, height));
    if (MyDebug.LOG)
        Log.d(TAG, "### time after creating allocations from bitmaps: " + (System.currentTimeMillis() - time_s));
    float sharpness_avg = computeSharpness(allocation_avg, width, time_s);
    float sharpness_new = computeSharpness(allocation_new, width, time_s);
    if (sharpness_new > sharpness_avg) {
        if (MyDebug.LOG)
            Log.d(TAG, "use new image as reference");
        Allocation dummy_allocation = allocation_avg;
        allocation_avg = allocation_new;
        allocation_new = dummy_allocation;
        Bitmap dummy_bitmap = bitmap_avg;
        bitmap_avg = bitmap_new;
        bitmap_new = dummy_bitmap;
        sharp_index = 1;
    } else {
        sharp_index = 0;
    }
    if (MyDebug.LOG)
        Log.d(TAG, "sharp_index: " + sharp_index);
    /*LuminanceInfo luminanceInfo = computeMedianLuminance(bitmap_avg, 0, 0, width, height);
		if( MyDebug.LOG )
			Log.d(TAG, "median: " + luminanceInfo.median_value);*/
    processAvgCore(allocation_out, allocation_avg, allocation_new, width, height, avg_factor, iso, true);
    if (release_bitmap) {
        if (MyDebug.LOG)
            Log.d(TAG, "release bitmaps");
        bitmap_avg.recycle();
        bitmap_new.recycle();
    }
    if (MyDebug.LOG)
        Log.d(TAG, "### time for processAvg: " + (System.currentTimeMillis() - time_s));
    return allocation_out;
}
Also used : Bitmap(android.graphics.Bitmap) Allocation(android.renderscript.Allocation) RequiresApi(android.support.annotation.RequiresApi)
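
The running-average convention in the Javadoc (avg_factor = 1, 2, 3, ... on successive calls) keeps the accumulator equal to the plain arithmetic mean without holding every image in memory. A scalar sketch, with one float standing in for one pixel channel:

// avg' = (avg_factor * avg + x) / (avg_factor + 1), with avg_factor = i for the i-th update,
// is just the incremental form of the arithmetic mean.
static float runningAverage(float[] values) {
    float avg = values[0];
    for (int i = 1; i < values.length; i++) {
        float avg_factor = (float) i;
        avg = (avg_factor * avg + values[i]) / (avg_factor + 1.0f);
    }
    return avg; // e.g. runningAverage(new float[]{ 10, 20, 30 }) == 20, the mean of the inputs
}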

Aggregations

Allocation (android.renderscript.Allocation) 71
Bitmap (android.graphics.Bitmap) 42
RenderScript (android.renderscript.RenderScript) 42
ScriptIntrinsicBlur (android.renderscript.ScriptIntrinsicBlur) 41
SuppressLint (android.annotation.SuppressLint) 14
RequiresApi (android.support.annotation.RequiresApi) 14
TargetApi (android.annotation.TargetApi) 10
Type (android.renderscript.Type) 6
SdkConstantType (android.annotation.SdkConstant.SdkConstantType) 5
Paint (android.graphics.Paint) 4
BitmapDrawable (android.graphics.drawable.BitmapDrawable) 4
BitmapFactory (android.graphics.BitmapFactory) 3
ByteArrayInputStream (java.io.ByteArrayInputStream) 3
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 3
Canvas (android.graphics.Canvas) 2
Point (android.graphics.Point) 2
Element (android.renderscript.Element) 2
Script (android.renderscript.Script) 2
RequiresApi (androidx.annotation.RequiresApi) 2
ArrayList (java.util.ArrayList) 2