Usage of net.sourceforge.opencamera.HDRProcessorException in project OpenCamera by ageback,
from class MainActivityTest, method testDROZero.
/**
 * Tests calling the DRO routine with a 0.0 factor and DROALGORITHM_NONE, and checks that the
 * resultant image is pixel-for-pixel identical to the input (i.e., the call is a no-op).
 *
 * @throws IOException          if the test input image cannot be read or the output cannot be saved
 * @throws InterruptedException if the sleep waiting for the camera is interrupted
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public void testDROZero() throws IOException, InterruptedException {
    Log.d(TAG, "testDROZero");
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        Log.d(TAG, "renderscript requires Android Lollipop or better");
        return;
    }
    setToDefault();

    Bitmap bitmap = getBitmapFromFile(hdr_images_path + "testHDR3/input1.jpg");
    // keep an untouched copy so we can verify processHDR left the input unchanged
    Bitmap bitmap_saved = bitmap.copy(bitmap.getConfig(), false);

    // wait for camera to open
    Thread.sleep(1000);

    List<Bitmap> inputs = new ArrayList<>();
    inputs.add(bitmap);
    try {
        // hdr_alpha of 0.0f together with DROALGORITHM_NONE should leave the single input unmodified
        mActivity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, 0.0f, 4, true, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_NONE);
    } catch (HDRProcessorException e) {
        // preserve the cause so a test failure shows the underlying exception
        throw new RuntimeException(e);
    }
    saveBitmap(inputs.get(0), "droZerotestHDR3_output.jpg");
    checkHistogram(bitmap);

    // check bitmaps are the same
    Log.d(TAG, "compare bitmap " + bitmap);
    Log.d(TAG, "with bitmap_saved " + bitmap_saved);
    // sameAs doesn't seem to work
    // assertTrue( bitmap.sameAs(bitmap_saved) );
    assertEquals(bitmap_saved.getWidth(), bitmap.getWidth());
    assertEquals(bitmap_saved.getHeight(), bitmap.getHeight());
    // compare row by row via getPixels(), which is much faster than per-pixel getPixel() calls
    int[] old_row = new int[bitmap.getWidth()];
    int[] new_row = new int[bitmap.getWidth()];
    for (int y = 0; y < bitmap.getHeight(); y++) {
        // Log.d(TAG, "check row " + y + " / " + bitmap.getHeight());
        bitmap_saved.getPixels(old_row, 0, bitmap.getWidth(), 0, y, bitmap.getWidth(), 1);
        bitmap.getPixels(new_row, 0, bitmap.getWidth(), 0, y, bitmap.getWidth(), 1);
        for (int x = 0; x < bitmap.getWidth(); x++) {
            // assertEquals reports the differing pixel values on failure, unlike assertTrue(a == b)
            assertEquals(old_row[x], new_row[x]);
        }
    }
    bitmap.recycle();
    bitmap_saved.recycle();
    Thread.sleep(500);
}
Usage of net.sourceforge.opencamera.HDRProcessorException in project OpenCamera by ageback,
from class MainActivityTest, method subTestAvg.
/**
 * The following testAvgX tests test the Avg noise reduction algorithm on a given set of input images.
 * By testing on a fixed sample, this makes it easier to finetune the algorithm for quality and performance.
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device everytime we run the tests.
 *
 * @param inputs        filenames of the images to average; cleared by this method
 * @param output_name   filename to save the averaged result under
 * @param iso           ISO of the captured images, passed through to the HDR processor
 * @param exposure_time exposure time of the captured images, passed to avgBrighten
 * @param zoom_factor   zoom factor the images were captured with
 * @param cb            optional callback, notified after each image has been merged; may be null
 * @return histogram details of the averaged output, or null if skipped (pre-Lollipop)
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private HistogramDetails subTestAvg(List<String> inputs, String output_name, int iso, long exposure_time, float zoom_factor, TestAvgCallback cb) throws IOException, InterruptedException {
    Log.d(TAG, "subTestAvg");
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        Log.d(TAG, "renderscript requires Android Lollipop or better");
        return null;
    }
    // wait for camera to open
    Thread.sleep(1000);
    Bitmap nr_bitmap;
    try {
        // initialise allocation from first two bitmaps
        int inSampleSize = mActivity.getApplicationInterface().getHDRProcessor().getAvgSampleSize(iso);
        Bitmap bitmap0 = getBitmapFromFile(inputs.get(0), inSampleSize);
        Bitmap bitmap1 = getBitmapFromFile(inputs.get(1), inSampleSize);
        int width = bitmap0.getWidth();
        int height = bitmap0.getHeight();
        float avg_factor = 1.0f;
        // per-stage timings, logged at the end for performance tuning
        List<Long> times = new ArrayList<>();
        long time_s = System.currentTimeMillis();
        HDRProcessor.AvgData avg_data = mActivity.getApplicationInterface().getHDRProcessor().processAvg(bitmap0, bitmap1, avg_factor, iso, zoom_factor);
        Allocation allocation = avg_data.allocation_out;
        times.add(System.currentTimeMillis() - time_s);
        // processAvg recycles both bitmaps
        if (cb != null) {
            cb.doneProcessAvg(1);
        }
        for (int i = 2; i < inputs.size(); i++) {
            Log.d(TAG, "processAvg for image: " + i);
            Bitmap new_bitmap = getBitmapFromFile(inputs.get(i), inSampleSize);
            avg_factor = (float) i;
            time_s = System.currentTimeMillis();
            mActivity.getApplicationInterface().getHDRProcessor().updateAvg(avg_data, width, height, new_bitmap, avg_factor, iso, zoom_factor);
            times.add(System.currentTimeMillis() - time_s);
            // updateAvg recycles new_bitmap
            if (cb != null) {
                cb.doneProcessAvg(i);
            }
        }
        time_s = System.currentTimeMillis();
        nr_bitmap = mActivity.getApplicationInterface().getHDRProcessor().avgBrighten(allocation, width, height, iso, exposure_time);
        avg_data.destroy();
        times.add(System.currentTimeMillis() - time_s);
        long total_time = 0;
        Log.d(TAG, "*** times are:");
        for (long time : times) {
            total_time += time;
            Log.d(TAG, " " + time);
        }
        Log.d(TAG, " total: " + total_time);
    } catch (HDRProcessorException e) {
        // preserve the cause so a test failure shows the underlying exception
        throw new RuntimeException(e);
    }
    saveBitmap(nr_bitmap, output_name);
    HistogramDetails hdrHistogramDetails = checkHistogram(nr_bitmap);
    nr_bitmap.recycle();
    System.gc();
    inputs.clear();
    Thread.sleep(500);
    return hdrHistogramDetails;
}
Usage of net.sourceforge.opencamera.HDRProcessorException in project OpenCamera by ageback,
from class MainActivityTest, method subTestHDR.
/**
 * The following testHDRX tests test the HDR algorithm on a given set of input images.
 * By testing on a fixed sample, this makes it easier to finetune the HDR algorithm for quality and performance.
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device everytime we run the tests.
 *
 * @param inputs                the exposure-bracketed input bitmaps; recycled and cleared by this method
 * @param output_name           filename (under DCIM) to save the HDR result to
 * @param test_dro              if true, additionally run the DRO routine on a copy of the middle input
 * @param tonemapping_algorithm tonemapping algorithm passed to the HDR processor
 * @return histogram details of the HDR output, or null if skipped (pre-Lollipop, or fewer than two inputs)
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private HistogramDetails subTestHDR(List<Bitmap> inputs, String output_name, boolean test_dro, HDRProcessor.TonemappingAlgorithm tonemapping_algorithm) throws IOException, InterruptedException {
    Log.d(TAG, "subTestHDR");
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        Log.d(TAG, "renderscript requires Android Lollipop or better");
        return null;
    }
    // wait for camera to open
    Thread.sleep(1000);
    Bitmap dro_bitmap_in = null;
    if (test_dro) {
        // save copy of input bitmap to also test DRO (since the HDR routine will free the inputs)
        int mid = (inputs.size() - 1) / 2;
        dro_bitmap_in = inputs.get(mid);
        dro_bitmap_in = dro_bitmap_in.copy(dro_bitmap_in.getConfig(), true);
    }
    HistogramDetails hdrHistogramDetails = null;
    if (inputs.size() > 1) {
        long time_s = System.currentTimeMillis();
        try {
            mActivity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, 0.5f, 4, tonemapping_algorithm);
        } catch (HDRProcessorException e) {
            // preserve the cause so a test failure shows the underlying exception
            throw new RuntimeException(e);
        }
        Log.d(TAG, "HDR time: " + (System.currentTimeMillis() - time_s));
        File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/" + output_name);
        // try-with-resources ensures the stream is closed even if compress() throws
        try (OutputStream outputStream = new FileOutputStream(file)) {
            inputs.get(0).compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
        }
        mActivity.getStorageUtils().broadcastFile(file, true, false, true);
        hdrHistogramDetails = checkHistogram(inputs.get(0));
    }
    inputs.get(0).recycle();
    inputs.clear();
    if (test_dro) {
        inputs.add(dro_bitmap_in);
        long time_s = System.currentTimeMillis();
        try {
            mActivity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, 0.5f, 4, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD);
        } catch (HDRProcessorException e) {
            throw new RuntimeException(e);
        }
        Log.d(TAG, "DRO time: " + (System.currentTimeMillis() - time_s));
        File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/dro" + output_name);
        try (OutputStream outputStream = new FileOutputStream(file)) {
            inputs.get(0).compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
        }
        mActivity.getStorageUtils().broadcastFile(file, true, false, true);
        checkHistogram(inputs.get(0));
        inputs.get(0).recycle();
        inputs.clear();
    }
    Thread.sleep(500);
    return hdrHistogramDetails;
}
Usage of net.sourceforge.opencamera.HDRProcessorException in project OpenCamera by ageback,
from class MainActivityTest, method subTestAvg.
/**
 * The following testAvgX tests test the Avg noise reduction algorithm on a given set of input images.
 * By testing on a fixed sample, this makes it easier to finetune the algorithm for quality and performance.
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device everytime we run the tests.
 *
 * @param inputs      filenames of the images to average; cleared by this method
 * @param output_name filename (under DCIM) to save the averaged result under
 * @param iso         ISO of the captured images, passed through to the HDR processor
 * @param cb          optional callback, notified after each image has been merged; may be null
 * @return histogram details of the averaged output, or null if skipped (pre-Lollipop)
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private HistogramDetails subTestAvg(List<String> inputs, String output_name, int iso, TestAvgCallback cb) throws IOException, InterruptedException {
    Log.d(TAG, "subTestAvg");
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        Log.d(TAG, "renderscript requires Android Lollipop or better");
        return null;
    }
    // wait for camera to open
    Thread.sleep(1000);
    Bitmap nr_bitmap;
    try {
        // initialise allocation from first two bitmaps
        Bitmap bitmap0 = getBitmapFromFile(inputs.get(0));
        Bitmap bitmap1 = getBitmapFromFile(inputs.get(1));
        int width = bitmap0.getWidth();
        int height = bitmap0.getHeight();
        float avg_factor = 1.0f;
        Allocation allocation = mActivity.getApplicationInterface().getHDRProcessor().processAvg(bitmap0, bitmap1, avg_factor, iso, true);
        // processAvg recycles both bitmaps
        if (cb != null) {
            cb.doneProcessAvg(1);
        }
        for (int i = 2; i < inputs.size(); i++) {
            Log.d(TAG, "processAvg for image: " + i);
            Bitmap new_bitmap = getBitmapFromFile(inputs.get(i));
            avg_factor = (float) i;
            mActivity.getApplicationInterface().getHDRProcessor().updateAvg(allocation, width, height, new_bitmap, avg_factor, iso, true);
            // updateAvg recycles new_bitmap
            if (cb != null) {
                cb.doneProcessAvg(i);
            }
        }
        nr_bitmap = mActivity.getApplicationInterface().getHDRProcessor().avgBrighten(allocation, width, height, iso);
    } catch (HDRProcessorException e) {
        // preserve the cause so a test failure shows the underlying exception
        throw new RuntimeException(e);
    }
    File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/" + output_name);
    // try-with-resources ensures the stream is closed even if compress() throws
    try (OutputStream outputStream = new FileOutputStream(file)) {
        nr_bitmap.compress(Bitmap.CompressFormat.JPEG, 90, outputStream);
    }
    mActivity.getStorageUtils().broadcastFile(file, true, false, true);
    HistogramDetails hdrHistogramDetails = checkHistogram(nr_bitmap);
    nr_bitmap.recycle();
    System.gc();
    inputs.clear();
    Thread.sleep(500);
    return hdrHistogramDetails;
}
Usage of net.sourceforge.opencamera.HDRProcessorException in project OpenCamera by ageback,
from class MainActivityTest, method subTestHDR.
/**
 * The following testHDRX tests test the HDR algorithm on a given set of input images.
 * By testing on a fixed sample, this makes it easier to finetune the HDR algorithm for quality and performance.
 * To use these tests, the testdata/ subfolder should be manually copied to the test device in the DCIM/testOpenCamera/
 * folder (so you have DCIM/testOpenCamera/testdata/). We don't use assets/ as we'd end up with huge APK sizes which takes
 * time to transfer to the device everytime we run the tests.
 *
 * @param inputs                the exposure-bracketed input bitmaps; recycled and cleared by this method
 * @param output_name           filename to save the HDR result under
 * @param test_dro              if true, additionally run the DRO routine on a copy of the middle input
 * @param iso The ISO of the middle image (for testing Open Camera's "smart" contrast enhancement). If set to -1, then use "always" contrast enhancement.
 * @param exposure_time The exposure time of the middle image (for testing Open Camera's "smart" contrast enhancement)
 * @param tonemapping_algorithm tonemapping algorithm passed to the HDR processor
 * @return histogram details of the HDR output, or null if skipped (pre-Lollipop, or fewer than two inputs)
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private HistogramDetails subTestHDR(List<Bitmap> inputs, String output_name, boolean test_dro, int iso, long exposure_time, HDRProcessor.TonemappingAlgorithm tonemapping_algorithm) throws IOException, InterruptedException {
    Log.d(TAG, "subTestHDR");
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        Log.d(TAG, "renderscript requires Android Lollipop or better");
        return null;
    }
    // wait for camera to open
    Thread.sleep(1000);
    Bitmap dro_bitmap_in = null;
    if (test_dro) {
        // save copy of input bitmap to also test DRO (since the HDR routine will free the inputs)
        int mid = (inputs.size() - 1) / 2;
        dro_bitmap_in = inputs.get(mid);
        dro_bitmap_in = dro_bitmap_in.copy(dro_bitmap_in.getConfig(), true);
    }
    HistogramDetails hdrHistogramDetails = null;
    if (inputs.size() > 1) {
        // iso == -1 means "always" contrast enhancement; otherwise let ImageSaver decide ("smart")
        String preference_hdr_contrast_enhancement = (iso == -1) ? "preference_hdr_contrast_enhancement_always" : "preference_hdr_contrast_enhancement_smart";
        float hdr_alpha = ImageSaver.getHDRAlpha(preference_hdr_contrast_enhancement, exposure_time, inputs.size());
        long time_s = System.currentTimeMillis();
        try {
            mActivity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, hdr_alpha, 4, true, tonemapping_algorithm, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_GAINGAMMA);
        } catch (HDRProcessorException e) {
            // preserve the cause so a test failure shows the underlying exception
            throw new RuntimeException(e);
        }
        Log.d(TAG, "HDR time: " + (System.currentTimeMillis() - time_s));
        saveBitmap(inputs.get(0), output_name);
        hdrHistogramDetails = checkHistogram(inputs.get(0));
    }
    inputs.get(0).recycle();
    inputs.clear();
    if (test_dro) {
        inputs.add(dro_bitmap_in);
        long time_s = System.currentTimeMillis();
        try {
            mActivity.getApplicationInterface().getHDRProcessor().processHDR(inputs, true, null, true, null, 0.5f, 4, true, HDRProcessor.TonemappingAlgorithm.TONEMAPALGORITHM_REINHARD, HDRProcessor.DROTonemappingAlgorithm.DROALGORITHM_GAINGAMMA);
        } catch (HDRProcessorException e) {
            throw new RuntimeException(e);
        }
        Log.d(TAG, "DRO time: " + (System.currentTimeMillis() - time_s));
        saveBitmap(inputs.get(0), "dro" + output_name);
        checkHistogram(inputs.get(0));
        inputs.get(0).recycle();
        inputs.clear();
    }
    Thread.sleep(500);
    return hdrHistogramDetails;
}
Aggregations