
Example 16 with Allocation

use of android.renderscript.Allocation in project RxTools by vondear.

In the class RxImageTool, the method renderScriptBlur.

/**
 * Blurs a bitmap using RenderScript.
 * <p>Requires API 17 or above.</p>
 *
 * @param src    the source bitmap
 * @param radius the blur radius (0...25)
 * @return the blurred bitmap
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public static Bitmap renderScriptBlur(Bitmap src, float radius) {
    if (isEmptyBitmap(src))
        return null;
    RenderScript rs = null;
    try {
        rs = RenderScript.create(RxTool.getContext());
        rs.setMessageHandler(new RenderScript.RSMessageHandler());
        Allocation input = Allocation.createFromBitmap(rs, src, Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);
        Allocation output = Allocation.createTyped(rs, input.getType());
        ScriptIntrinsicBlur blurScript = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
        if (radius > 25) {
            radius = 25.0f;
        } else if (radius <= 0) {
            radius = 1.0f;
        }
        blurScript.setInput(input);
        blurScript.setRadius(radius);
        blurScript.forEach(output);
        output.copyTo(src);
    } finally {
        if (rs != null) {
            rs.destroy();
        }
    }
    return src;
}
Also used : RenderScript(android.renderscript.RenderScript) Allocation(android.renderscript.Allocation) ScriptIntrinsicBlur(android.renderscript.ScriptIntrinsicBlur) TargetApi(android.annotation.TargetApi)
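
A minimal usage sketch for renderScriptBlur above, assuming RxTool has already been initialised with an application context and that the call runs off the UI thread; the resource id and the ImageView are hypothetical. Note that the method writes the blurred pixels back into the bitmap it is given, so pass a copy if the original must stay untouched.

// Hypothetical caller: blur a copy of a decoded bitmap with a medium radius.
Bitmap source = BitmapFactory.decodeResource(getResources(), R.drawable.photo); // hypothetical resource
Bitmap blurred = RxImageTool.renderScriptBlur(source.copy(Bitmap.Config.ARGB_8888, true), 15f);
if (blurred != null) {
    imageView.setImageBitmap(blurred); // hypothetical ImageView
}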

Example 17 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

In the class ImageSaver, the method saveImageNow.

/**
 * May be run in saver thread or picture callback thread (depending on whether running in background).
 */
private boolean saveImageNow(final Request request) {
    if (MyDebug.LOG)
        Log.d(TAG, "saveImageNow");
    if (request.type != Request.Type.JPEG) {
        if (MyDebug.LOG)
            Log.d(TAG, "saveImageNow called with non-jpeg request");
        // throw runtime exception, as this is a programming error
        throw new RuntimeException();
    } else if (request.jpeg_images.size() == 0) {
        if (MyDebug.LOG)
            Log.d(TAG, "saveImageNow called with zero images");
        // throw runtime exception, as this is a programming error
        throw new RuntimeException();
    }
    boolean success;
    if (request.process_type == Request.ProcessType.AVERAGE) {
        if (MyDebug.LOG)
            Log.d(TAG, "average");
        saveBaseImages(request, "_");
        main_activity.savingImage(true);
        /*List<Bitmap> bitmaps = loadBitmaps(request.jpeg_images, 0);
			if (bitmaps == null) {
				if (MyDebug.LOG)
					Log.e(TAG, "failed to load bitmaps");
				main_activity.savingImage(false);
				return false;
			}*/
        /*Bitmap nr_bitmap = loadBitmap(request.jpeg_images.get(0), true);

			if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP ) {
				try {
					for(int i = 1; i < request.jpeg_images.size(); i++) {
						Log.d(TAG, "processAvg for image: " + i);
						Bitmap new_bitmap = loadBitmap(request.jpeg_images.get(i), false);
						float avg_factor = (float) i;
						hdrProcessor.processAvg(nr_bitmap, new_bitmap, avg_factor, true);
						// processAvg recycles new_bitmap
					}
					//hdrProcessor.processAvgMulti(bitmaps, hdr_strength, 4);
					//hdrProcessor.avgBrighten(nr_bitmap);
				}
				catch(HDRProcessorException e) {
					e.printStackTrace();
					throw new RuntimeException();
				}
			}
			else {
				Log.e(TAG, "shouldn't have offered NoiseReduction as an option if not on Android 5");
				throw new RuntimeException();
			}*/
        Bitmap nr_bitmap;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            try {
                // initialise allocation from first two bitmaps
                Bitmap bitmap0 = loadBitmap(request.jpeg_images.get(0), false);
                Bitmap bitmap1 = loadBitmap(request.jpeg_images.get(1), false);
                int width = bitmap0.getWidth();
                int height = bitmap0.getHeight();
                float avg_factor = 1.0f;
                int iso = 800;
                if (main_activity.getPreview().getCameraController() != null) {
                    if (main_activity.getPreview().getCameraController().captureResultHasIso()) {
                        iso = main_activity.getPreview().getCameraController().captureResultIso();
                        if (MyDebug.LOG)
                            Log.d(TAG, "iso: " + iso);
                    }
                }
                Allocation allocation = hdrProcessor.processAvg(bitmap0, bitmap1, avg_factor, iso, true);
                for (int i = 2; i < request.jpeg_images.size(); i++) {
                    if (MyDebug.LOG)
                        Log.d(TAG, "processAvg for image: " + i);
                    Bitmap new_bitmap = loadBitmap(request.jpeg_images.get(i), false);
                    avg_factor = (float) i;
                    hdrProcessor.updateAvg(allocation, width, height, new_bitmap, avg_factor, iso, true);
                // updateAvg recycles new_bitmap
                }
                nr_bitmap = hdrProcessor.avgBrighten(allocation, width, height, iso);
            } catch (HDRProcessorException e) {
                e.printStackTrace();
                throw new RuntimeException();
            }
        } else {
            Log.e(TAG, "shouldn't have offered NoiseReduction as an option if not on Android 5");
            throw new RuntimeException();
        }
        if (MyDebug.LOG)
            Log.d(TAG, "nr_bitmap: " + nr_bitmap + " is mutable? " + nr_bitmap.isMutable());
        System.gc();
        main_activity.savingImage(false);
        if (MyDebug.LOG)
            Log.d(TAG, "save NR image");
        String suffix = "_NR";
        success = saveSingleImageNow(request, request.jpeg_images.get(0), nr_bitmap, suffix, true, true);
        if (MyDebug.LOG && !success)
            Log.e(TAG, "saveSingleImageNow failed for nr image");
        nr_bitmap.recycle();
        System.gc();
    } else if (request.process_type == Request.ProcessType.HDR) {
        if (MyDebug.LOG)
            Log.d(TAG, "hdr");
        if (request.jpeg_images.size() != 1 && request.jpeg_images.size() != 3) {
            if (MyDebug.LOG)
                Log.d(TAG, "saveImageNow expected either 1 or 3 images for hdr, not " + request.jpeg_images.size());
            // throw runtime exception, as this is a programming error
            throw new RuntimeException();
        }
        long time_s = System.currentTimeMillis();
        if (request.jpeg_images.size() > 1) {
            // if there's only 1 image, we're in DRO mode, and shouldn't save the base image
            // note that in earlier Open Camera versions, we used "_EXP" as the suffix. We now use just "_" from 1.42 onwards, so Google
            // Photos will group them together. (Unfortunately using "_EXP_" doesn't work, the images aren't grouped!)
            saveBaseImages(request, "_");
            if (MyDebug.LOG) {
                Log.d(TAG, "HDR performance: time after saving base exposures: " + (System.currentTimeMillis() - time_s));
            }
        }
        // note, even if we failed saving some of the expo images, still try to save the HDR image
        if (MyDebug.LOG)
            Log.d(TAG, "create HDR image");
        main_activity.savingImage(true);
        // see documentation for HDRProcessor.processHDR() - because we're using release_bitmaps==true, we need to make sure that
        // the bitmap that will hold the output HDR image is mutable (in case of options like photo stamp)
        // see test testTakePhotoHDRPhotoStamp.
        int base_bitmap = (request.jpeg_images.size() - 1) / 2;
        if (MyDebug.LOG)
            Log.d(TAG, "base_bitmap: " + base_bitmap);
        List<Bitmap> bitmaps = loadBitmaps(request.jpeg_images, base_bitmap);
        if (bitmaps == null) {
            if (MyDebug.LOG)
                Log.e(TAG, "failed to load bitmaps");
            main_activity.savingImage(false);
            return false;
        }
        if (MyDebug.LOG) {
            Log.d(TAG, "HDR performance: time after decompressing base exposures: " + (System.currentTimeMillis() - time_s));
        }
        if (MyDebug.LOG)
            Log.d(TAG, "before HDR first bitmap: " + bitmaps.get(0) + " is mutable? " + bitmaps.get(0).isMutable());
        try {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                // this will recycle all the bitmaps except bitmaps.get(0), which will contain the hdr image
                hdrProcessor.processHDR(bitmaps, true, null, true, null, 0.5f, 4, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD);
            } else {
                Log.e(TAG, "shouldn't have offered HDR as an option if not on Android 5");
                throw new RuntimeException();
            }
        } catch (HDRProcessorException e) {
            Log.e(TAG, "HDRProcessorException from processHDR: " + e.getCode());
            e.printStackTrace();
            if (e.getCode() == HDRProcessorException.UNEQUAL_SIZES) {
                // this can happen on OnePlus 3T with old camera API with front camera, seems to be a bug that resolution changes when exposure compensation is set!
                main_activity.getPreview().showToast(null, R.string.failed_to_process_hdr);
                Log.e(TAG, "UNEQUAL_SIZES");
                bitmaps.clear();
                System.gc();
                main_activity.savingImage(false);
                return false;
            } else {
                // throw RuntimeException, as we shouldn't ever get the error INVALID_N_IMAGES, if we do it's a programming error
                throw new RuntimeException();
            }
        }
        if (MyDebug.LOG) {
            Log.d(TAG, "HDR performance: time after creating HDR image: " + (System.currentTimeMillis() - time_s));
        }
        if (MyDebug.LOG)
            Log.d(TAG, "after HDR first bitmap: " + bitmaps.get(0) + " is mutable? " + bitmaps.get(0).isMutable());
        Bitmap hdr_bitmap = bitmaps.get(0);
        if (MyDebug.LOG)
            Log.d(TAG, "hdr_bitmap: " + hdr_bitmap + " is mutable? " + hdr_bitmap.isMutable());
        bitmaps.clear();
        System.gc();
        main_activity.savingImage(false);
        if (MyDebug.LOG)
            Log.d(TAG, "save HDR image");
        int base_image_id = ((request.jpeg_images.size() - 1) / 2);
        if (MyDebug.LOG)
            Log.d(TAG, "base_image_id: " + base_image_id);
        String suffix = request.jpeg_images.size() == 1 ? "_DRO" : "_HDR";
        success = saveSingleImageNow(request, request.jpeg_images.get(base_image_id), hdr_bitmap, suffix, true, true);
        if (MyDebug.LOG && !success)
            Log.e(TAG, "saveSingleImageNow failed for hdr image");
        if (MyDebug.LOG) {
            Log.d(TAG, "HDR performance: time after saving HDR image: " + (System.currentTimeMillis() - time_s));
        }
        hdr_bitmap.recycle();
        System.gc();
    } else {
        // see note above how we used to use "_EXP" for the suffix for multiple images
        // String suffix = "_EXP";
        String suffix = "_";
        success = saveImages(request, suffix, false, true, true);
    }
    return success;
}
Also used : Bitmap(android.graphics.Bitmap) Allocation(android.renderscript.Allocation) ArrayList(java.util.ArrayList) List(java.util.List) SuppressLint(android.annotation.SuppressLint) Paint(android.graphics.Paint)
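
The averaging branch above reduces to three HDRProcessor calls: processAvg seeds a running average from the first two frames, updateAvg folds in each further frame (recycling it), and avgBrighten converts the accumulated Allocation back to a Bitmap. Below is a condensed sketch of that flow using the signatures shown above; the helper name and the pre-loaded image list are assumptions, and logging, the ISO lookup and error handling are omitted.

// Condensed sketch of the noise-reduction averaging flow (API 21+ only).
Bitmap averageImages(HDRProcessor hdrProcessor, List<Bitmap> images, int iso) throws HDRProcessorException {
    Bitmap bitmap0 = images.get(0);
    Bitmap bitmap1 = images.get(1);
    int width = bitmap0.getWidth();
    int height = bitmap0.getHeight();
    // seed the running average from the first two frames
    Allocation avg = hdrProcessor.processAvg(bitmap0, bitmap1, 1.0f, iso, true);
    for (int i = 2; i < images.size(); i++) {
        // fold one more frame into the running average; updateAvg recycles it
        hdrProcessor.updateAvg(avg, width, height, images.get(i), (float) i, iso, true);
    }
    // convert the accumulated allocation back into a displayable bitmap
    return hdrProcessor.avgBrighten(avg, width, height, iso);
}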

Example 18 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

In the class HDRProcessor, the method adjustHistogram.

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void adjustHistogram(Allocation allocation_in, Allocation allocation_out, int width, int height, float hdr_alpha, int n_tiles, long time_s) {
    if (MyDebug.LOG)
        Log.d(TAG, "adjustHistogram");
    final boolean adjust_histogram = false;
    if (adjust_histogram) {
        // create histogram
        int[] histogram = new int[256];
        if (MyDebug.LOG)
            Log.d(TAG, "time before creating histogram: " + (System.currentTimeMillis() - time_s));
        Allocation histogramAllocation = computeHistogramAllocation(allocation_in, false, false, time_s);
        if (MyDebug.LOG)
            Log.d(TAG, "time after creating histogram: " + (System.currentTimeMillis() - time_s));
        histogramAllocation.copyTo(histogram);
        /*if( MyDebug.LOG ) {
					// compare/adjust
					allocations[0].copyTo(bm);
					int [] debug_histogram = new int[256];
					for(int i=0;i<256;i++) {
						debug_histogram[i] = 0;
					}
					int [] debug_buffer = new int[width];
					for(int y=0;y<height;y++) {
						bm.getPixels(debug_buffer, 0, width, 0, y, width, 1);
						for(int x=0;x<width;x++) {
							int color = debug_buffer[x];
							float r = (float)((color & 0xFF0000) >> 16);
							float g = (float)((color & 0xFF00) >> 8);
							float b = (float)(color & 0xFF);
							//float value = 0.299f*r + 0.587f*g + 0.114f*b; // matches ScriptIntrinsicHistogram default behaviour
							float value = Math.max(r, g);
							value = Math.max(value, b);
							int i_value = (int)value;
							i_value = Math.min(255, i_value); // just in case
							debug_histogram[i_value]++;
						}
					}
					for(int x=0;x<256;x++) {
						Log.d(TAG, "histogram[" + x + "] = " + histogram[x] + " debug_histogram: " + debug_histogram[x]);
						//histogram[x] = debug_histogram[x];
					}
				}*/
        int[] c_histogram = new int[256];
        c_histogram[0] = histogram[0];
        for (int x = 1; x < 256; x++) {
            c_histogram[x] = c_histogram[x - 1] + histogram[x];
        }
        /*if( MyDebug.LOG ) {
					for(int x=0;x<256;x++) {
						Log.d(TAG, "histogram[" + x + "] = " + histogram[x] + " cumulative: " + c_histogram[x]);
					}
				}*/
        histogramAllocation.copyFrom(c_histogram);
        ScriptC_histogram_adjust histogramAdjustScript = new ScriptC_histogram_adjust(rs);
        histogramAdjustScript.set_c_histogram(histogramAllocation);
        histogramAdjustScript.set_hdr_alpha(hdr_alpha);
        if (MyDebug.LOG)
            Log.d(TAG, "call histogramAdjustScript");
        histogramAdjustScript.forEach_histogram_adjust(allocation_in, allocation_out);
        if (MyDebug.LOG)
            Log.d(TAG, "time after histogramAdjustScript: " + (System.currentTimeMillis() - time_s));
    }
    // final boolean adjust_histogram_local = false;
    final boolean adjust_histogram_local = true;
    if (adjust_histogram_local) {
        // Contrast Limited Adaptive Histogram Equalisation
        // Note we don't fully equalise the histogram, rather the resultant image is the mid-point of the non-equalised and fully-equalised images
        // See https://en.wikipedia.org/wiki/Adaptive_histogram_equalization#Contrast_Limited_AHE
        // Also see "Adaptive Histogram Equalization and its Variations" ( http://www.cs.unc.edu/Research/MIDAG/pubs/papers/Adaptive%20Histogram%20Equalization%20and%20Its%20Variations.pdf ),
        // Pizer, Amburn, Austin, Cromartie, Geselowitz, Greer, ter Haar Romeny, Zimmerman, Zuiderveld (1987).
        // create histograms
        Allocation histogramAllocation = Allocation.createSized(rs, Element.I32(rs), 256);
        if (MyDebug.LOG)
            Log.d(TAG, "create histogramScript");
        ScriptC_histogram_compute histogramScript = new ScriptC_histogram_compute(rs);
        if (MyDebug.LOG)
            Log.d(TAG, "bind histogram allocation");
        histogramScript.bind_histogram(histogramAllocation);
        // final int n_tiles_c = 8;
        // final int n_tiles_c = 4;
        // final int n_tiles_c = 1;
        int[] c_histogram = new int[n_tiles * n_tiles * 256];
        for (int i = 0; i < n_tiles; i++) {
            double a0 = ((double) i) / (double) n_tiles;
            double a1 = ((double) i + 1.0) / (double) n_tiles;
            int start_x = (int) (a0 * width);
            int stop_x = (int) (a1 * width);
            if (stop_x == start_x)
                continue;
            for (int j = 0; j < n_tiles; j++) {
                double b0 = ((double) j) / (double) n_tiles;
                double b1 = ((double) j + 1.0) / (double) n_tiles;
                int start_y = (int) (b0 * height);
                int stop_y = (int) (b1 * height);
                if (stop_y == start_y)
                    continue;
                /*if( MyDebug.LOG )
							Log.d(TAG, i + " , " + j + " : " + start_x + " , " + start_y + " to " + stop_x + " , " + stop_y);*/
                Script.LaunchOptions launch_options = new Script.LaunchOptions();
                launch_options.setX(start_x, stop_x);
                launch_options.setY(start_y, stop_y);
                /*if( MyDebug.LOG )
							Log.d(TAG, "call histogramScript");*/
                histogramScript.invoke_init_histogram();
                histogramScript.forEach_histogram_compute(allocation_in, launch_options);
                int[] histogram = new int[256];
                histogramAllocation.copyTo(histogram);
                /*if( MyDebug.LOG ) {
							// compare/adjust
							allocations[0].copyTo(bm);
							int [] debug_histogram = new int[256];
							for(int k=0;k<256;k++) {
								debug_histogram[k] = 0;
							}
							int [] debug_buffer = new int[width];
							for(int y=start_y;y<stop_y;y++) {
								bm.getPixels(debug_buffer, 0, width, 0, y, width, 1);
								for(int x=start_x;x<stop_x;x++) {
									int color = debug_buffer[x];
									float r = (float)((color & 0xFF0000) >> 16);
									float g = (float)((color & 0xFF00) >> 8);
									float b = (float)(color & 0xFF);
									//float value = 0.299f*r + 0.587f*g + 0.114f*b; // matches ScriptIntrinsicHistogram default behaviour
									float value = Math.max(r, g);
									value = Math.max(value, b);
									int i_value = (int)value;
									i_value = Math.min(255, i_value); // just in case
									debug_histogram[i_value]++;
								}
							}
							for(int x=0;x<256;x++) {
								Log.d(TAG, "histogram[" + x + "] = " + histogram[x] + " debug_histogram: " + debug_histogram[x]);
								//histogram[x] = debug_histogram[x];
							}
						}*/
                // clip histogram, for Contrast Limited AHE algorithm
                int n_pixels = (stop_x - start_x) * (stop_y - start_y);
                int clip_limit = (5 * n_pixels) / 256;
                /*if( MyDebug.LOG )
							Log.d(TAG, "clip_limit: " + clip_limit);*/
                {
                    // find real clip limit
                    int bottom = 0, top = clip_limit;
                    while (top - bottom > 1) {
                        int middle = (top + bottom) / 2;
                        int sum = 0;
                        for (int x = 0; x < 256; x++) {
                            if (histogram[x] > middle) {
                                sum += (histogram[x] - clip_limit);
                            }
                        }
                        if (sum > (clip_limit - middle) * 256)
                            top = middle;
                        else
                            bottom = middle;
                    }
                    clip_limit = (top + bottom) / 2;
                /*if( MyDebug.LOG )
								Log.d(TAG, "updated clip_limit: " + clip_limit);*/
                }
                int n_clipped = 0;
                for (int x = 0; x < 256; x++) {
                    if (histogram[x] > clip_limit) {
                        n_clipped += (histogram[x] - clip_limit);
                        histogram[x] = clip_limit;
                    }
                }
                int n_clipped_per_bucket = n_clipped / 256;
                /*if( MyDebug.LOG ) {
							Log.d(TAG, "n_clipped: " + n_clipped);
							Log.d(TAG, "n_clipped_per_bucket: " + n_clipped_per_bucket);
						}*/
                for (int x = 0; x < 256; x++) {
                    histogram[x] += n_clipped_per_bucket;
                }
                int histogram_offset = 256 * (i * n_tiles + j);
                c_histogram[histogram_offset] = histogram[0];
                for (int x = 1; x < 256; x++) {
                    c_histogram[histogram_offset + x] = c_histogram[histogram_offset + x - 1] + histogram[x];
                }
            /*if( MyDebug.LOG ) {
							for(int x=0;x<256;x++) {
								Log.d(TAG, "histogram[" + x + "] = " + histogram[x] + " cumulative: " + c_histogram[histogram_offset+x]);
							}
						}*/
            }
        }
        if (MyDebug.LOG)
            Log.d(TAG, "time after creating histograms: " + (System.currentTimeMillis() - time_s));
        Allocation c_histogramAllocation = Allocation.createSized(rs, Element.I32(rs), n_tiles * n_tiles * 256);
        c_histogramAllocation.copyFrom(c_histogram);
        ScriptC_histogram_adjust histogramAdjustScript = new ScriptC_histogram_adjust(rs);
        histogramAdjustScript.set_c_histogram(c_histogramAllocation);
        histogramAdjustScript.set_hdr_alpha(hdr_alpha);
        histogramAdjustScript.set_n_tiles(n_tiles);
        histogramAdjustScript.set_width(width);
        histogramAdjustScript.set_height(height);
        if (MyDebug.LOG)
            Log.d(TAG, "call histogramAdjustScript");
        histogramAdjustScript.forEach_histogram_adjust(allocation_in, allocation_out);
        if (MyDebug.LOG)
            Log.d(TAG, "time after histogramAdjustScript: " + (System.currentTimeMillis() - time_s));
    }
}
Also used : Script(android.renderscript.Script) RenderScript(android.renderscript.RenderScript) Allocation(android.renderscript.Allocation) RequiresApi(android.support.annotation.RequiresApi)
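
The kernel behind ScriptC_histogram_adjust is not shown in this listing. Conceptually, for the simple non-tiled branch it maps each pixel value through the cumulative histogram and blends the result with the original according to hdr_alpha, which matches the comment that the output sits between the non-equalised and fully-equalised images. The following is a rough plain-Java illustration of that mapping for a single 8-bit value; the method name and the exact blend are assumptions rather than the project's kernel, which in the CLAHE branch also interpolates between neighbouring tile histograms.

// Rough illustration of histogram-equalisation blending for one 8-bit value.
// c_histogram is the cumulative histogram (c_histogram[255] == total pixel count);
// hdr_alpha in [0,1] blends between the original value (0) and full equalisation (1).
static int equaliseValue(int value, int[] c_histogram, float hdr_alpha) {
    float equalised = 255.0f * c_histogram[value] / (float) c_histogram[255];
    float out = (1.0f - hdr_alpha) * value + hdr_alpha * equalised;
    return Math.min(255, Math.round(out));
}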

Example 19 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

In the class HDRProcessor, the method processAvgMulti.

/**
 * Combines multiple images by averaging them.
 * @param bitmaps Input bitmaps. The resultant bitmap will be stored as the first bitmap on exit,
 *                the other input bitmaps will be recycled.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public void processAvgMulti(List<Bitmap> bitmaps, float hdr_alpha, int n_tiles) throws HDRProcessorException {
    if (MyDebug.LOG) {
        Log.d(TAG, "processAvgMulti");
        Log.d(TAG, "hdr_alpha: " + hdr_alpha);
    }
    int n_bitmaps = bitmaps.size();
    if (n_bitmaps != 8) {
        if (MyDebug.LOG)
            Log.e(TAG, "n_bitmaps should be 8, not " + n_bitmaps);
        throw new HDRProcessorException(HDRProcessorException.INVALID_N_IMAGES);
    }
    for (int i = 1; i < n_bitmaps; i++) {
        if (bitmaps.get(i).getWidth() != bitmaps.get(0).getWidth() || bitmaps.get(i).getHeight() != bitmaps.get(0).getHeight()) {
            if (MyDebug.LOG) {
                Log.e(TAG, "bitmaps not of same resolution");
                for (int j = 0; j < n_bitmaps; j++) {
                    Log.e(TAG, "bitmaps " + j + " : " + bitmaps.get(j).getWidth() + " x " + bitmaps.get(j).getHeight());
                }
            }
            throw new HDRProcessorException(HDRProcessorException.UNEQUAL_SIZES);
        }
    }
    long time_s = System.currentTimeMillis();
    int width = bitmaps.get(0).getWidth();
    int height = bitmaps.get(0).getHeight();
    initRenderscript();
    if (MyDebug.LOG)
        Log.d(TAG, "### time after creating renderscript: " + (System.currentTimeMillis() - time_s));
    // create allocations
    Allocation allocation0 = Allocation.createFromBitmap(rs, bitmaps.get(0));
    Allocation allocation1 = Allocation.createFromBitmap(rs, bitmaps.get(1));
    Allocation allocation2 = Allocation.createFromBitmap(rs, bitmaps.get(2));
    Allocation allocation3 = Allocation.createFromBitmap(rs, bitmaps.get(3));
    Allocation allocation4 = Allocation.createFromBitmap(rs, bitmaps.get(4));
    Allocation allocation5 = Allocation.createFromBitmap(rs, bitmaps.get(5));
    Allocation allocation6 = Allocation.createFromBitmap(rs, bitmaps.get(6));
    Allocation allocation7 = Allocation.createFromBitmap(rs, bitmaps.get(7));
    if (MyDebug.LOG)
        Log.d(TAG, "### time after creating allocations from bitmaps: " + (System.currentTimeMillis() - time_s));
    // perform auto-alignment
    /*for(int i=1;i<bitmaps.size();i++) {
		{
			List<Bitmap> bitmaps2 = new ArrayList<>();
			bitmaps2.add(bitmaps.get(0));
			bitmaps2.add(bitmap.get(i));
			Allocation [] allocations = new Allocation[2];
			allocations[0] = allocation_avg;
			allocations[1] = allocation_new;
			BrightnessDetails brightnessDetails = autoAlignment(offsets_x, offsets_y, allocations, width, height, bitmaps, 0, true, null, true, time_s);
			int median_brightness = brightnessDetails.median_brightness;
			if( MyDebug.LOG ) {
				Log.d(TAG, "### time after autoAlignment: " + (System.currentTimeMillis() - time_s));
				Log.d(TAG, "median_brightness: " + median_brightness);
			}
		}*/
    // write new avg image
    // create RenderScript
    ScriptC_process_avg processAvgScript = new ScriptC_process_avg(rs);
    // set allocations
    processAvgScript.set_bitmap1(allocation1);
    processAvgScript.set_bitmap2(allocation2);
    processAvgScript.set_bitmap3(allocation3);
    processAvgScript.set_bitmap4(allocation4);
    processAvgScript.set_bitmap5(allocation5);
    processAvgScript.set_bitmap6(allocation6);
    processAvgScript.set_bitmap7(allocation7);
    if (MyDebug.LOG)
        Log.d(TAG, "call processAvgScript");
    if (MyDebug.LOG)
        Log.d(TAG, "### time before processAvgScript: " + (System.currentTimeMillis() - time_s));
    processAvgScript.forEach_avg_multi(allocation0, allocation0);
    if (MyDebug.LOG)
        Log.d(TAG, "### time after processAvgScript: " + (System.currentTimeMillis() - time_s));
    {
        if (MyDebug.LOG)
            Log.d(TAG, "release bitmaps");
        for (int i = 1; i < bitmaps.size(); i++) {
            bitmaps.get(i).recycle();
        }
    }
    if (hdr_alpha != 0.0f) {
        adjustHistogram(allocation0, allocation0, width, height, hdr_alpha, n_tiles, time_s);
        if (MyDebug.LOG)
            Log.d(TAG, "### time after adjustHistogram: " + (System.currentTimeMillis() - time_s));
    }
    allocation0.copyTo(bitmaps.get(0));
    if (MyDebug.LOG)
        Log.d(TAG, "### time for processAvgMulti: " + (System.currentTimeMillis() - time_s));
}
Also used : Allocation(android.renderscript.Allocation) RequiresApi(android.support.annotation.RequiresApi)
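
A hypothetical caller for processAvgMulti above: it requires exactly eight bitmaps of identical resolution, leaves the averaged result in the first bitmap and recycles the other seven. The hdr_alpha and n_tiles values and the surrounding names are illustrative, not taken from the project.

// Hypothetical caller: average eight equally-sized frames into frames.get(0).
List<Bitmap> frames = new ArrayList<>(loadedFrames); // eight bitmaps, same width/height
try {
    hdrProcessor.processAvgMulti(frames, 0.5f, 4);
    Bitmap averaged = frames.get(0); // now holds the averaged (and optionally contrast-adjusted) image
    // ... save or display 'averaged' ...
} catch (HDRProcessorException e) {
    e.printStackTrace();
}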

Example 20 with Allocation

use of android.renderscript.Allocation in project OpenCamera by ageback.

In the class HDRProcessor, the method computeSharpness.

/**
 * Computes a value for how sharp the image is perceived to be. The higher the value, the
 * sharper the image.
 * @param allocation_in The input allocation.
 * @param width         The width of the allocation.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private float computeSharpness(Allocation allocation_in, int width, long time_s) {
    if (MyDebug.LOG)
        Log.d(TAG, "computeSharpness");
    Allocation sumsAllocation = Allocation.createSized(rs, Element.I32(rs), width);
    ScriptC_calculate_sharpness sharpnessScript = new ScriptC_calculate_sharpness(rs);
    if (MyDebug.LOG)
        Log.d(TAG, "bind sums allocation");
    sharpnessScript.bind_sums(sumsAllocation);
    sharpnessScript.set_bitmap(allocation_in);
    sharpnessScript.set_width(width);
    sharpnessScript.invoke_init_sums();
    if (MyDebug.LOG)
        Log.d(TAG, "call sharpnessScript");
    if (MyDebug.LOG)
        Log.d(TAG, "time before sharpnessScript: " + (System.currentTimeMillis() - time_s));
    sharpnessScript.forEach_calculate_sharpness(allocation_in);
    if (MyDebug.LOG)
        Log.d(TAG, "time after sharpnessScript: " + (System.currentTimeMillis() - time_s));
    int[] sums = new int[width];
    sumsAllocation.copyTo(sums);
    float total_sum = 0.0f;
    for (int i = 0; i < width; i++) {
        /*if( MyDebug.LOG )
				Log.d(TAG, "sums[" + i + "] = " + sums[i]);*/
        total_sum += (float) sums[i];
    }
    if (MyDebug.LOG)
        Log.d(TAG, "total_sum: " + total_sum);
    return total_sum;
}
Also used : Allocation(android.renderscript.Allocation) RequiresApi(android.support.annotation.RequiresApi)
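
computeSharpness returns a single score per image, so one natural use (hypothetical here, since the method is private to HDRProcessor) is to compare the scores of several candidate allocations and keep the sharpest one. 'allocations', 'width' and 'time_s' are assumed to be in scope.

// Hypothetical comparison of per-image sharpness scores inside HDRProcessor.
int best_index = 0;
float best_sharpness = computeSharpness(allocations[0], width, time_s);
for (int i = 1; i < allocations.length; i++) {
    float sharpness = computeSharpness(allocations[i], width, time_s);
    if (sharpness > best_sharpness) {
        best_sharpness = sharpness;
        best_index = i;
    }
}
if (MyDebug.LOG)
    Log.d(TAG, "sharpest image: " + best_index);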

Aggregations

Allocation (android.renderscript.Allocation) 71
Bitmap (android.graphics.Bitmap) 42
RenderScript (android.renderscript.RenderScript) 42
ScriptIntrinsicBlur (android.renderscript.ScriptIntrinsicBlur) 41
SuppressLint (android.annotation.SuppressLint) 14
RequiresApi (android.support.annotation.RequiresApi) 14
TargetApi (android.annotation.TargetApi) 10
Type (android.renderscript.Type) 6
SdkConstantType (android.annotation.SdkConstant.SdkConstantType) 5
Paint (android.graphics.Paint) 4
BitmapDrawable (android.graphics.drawable.BitmapDrawable) 4
BitmapFactory (android.graphics.BitmapFactory) 3
ByteArrayInputStream (java.io.ByteArrayInputStream) 3
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 3
Canvas (android.graphics.Canvas) 2
Point (android.graphics.Point) 2
Element (android.renderscript.Element) 2
Script (android.renderscript.Script) 2
RequiresApi (androidx.annotation.RequiresApi) 2
ArrayList (java.util.ArrayList) 2