Use of gdsc.smlm.filters.AverageFilter in project GDSC-SMLM by aherbert.
The class FilterTest, method floatRollingBlockAverageNxNInternalIsFasterThanAreaAverageNxNInternal.
@Test
public void floatRollingBlockAverageNxNInternalIsFasterThanAreaAverageNxNInternal() {
    org.junit.Assume.assumeTrue(TestSettings.RUN_SPEED_TESTS);
    rand = new gdsc.core.utils.Random(-300519);
    AverageFilter filter1 = new AverageFilter();
    AreaAverageFilter filter2 = new AreaAverageFilter();
    int iter = 10;
    ArrayList<float[]> dataSet = new ArrayList<float[]>(iter);
    for (int i = iter; i-- > 0; ) {
        dataSet.add(floatCreateData(primes[0], primes[0]));
    }
    ArrayList<Long> fastTimes = new ArrayList<Long>();
    for (int boxSize : boxSizes)
        for (int width : primes)
            for (int height : primes) {
                ArrayList<float[]> dataSet2 = new ArrayList<float[]>(iter);
                for (float[] data : dataSet)
                    dataSet2.add(floatClone(data));
                // Initialise
                for (float[] data : dataSet2)
                    filter1.rollingBlockAverageNxNInternal(data.clone(), width, height, boxSize);
                long time = System.nanoTime();
                for (float[] data : dataSet2)
                    filter1.rollingBlockAverageNxNInternal(data, width, height, boxSize);
                time = System.nanoTime() - time;
                fastTimes.add(time);
            }
    long slowTotal = 0, fastTotal = 0;
    int index = 0;
    for (int boxSize : boxSizes) {
        long boxSlowTotal = 0, boxFastTotal = 0;
        for (int width : primes)
            for (int height : primes) {
                ArrayList<float[]> dataSet2 = new ArrayList<float[]>(iter);
                for (float[] data : dataSet)
                    dataSet2.add(floatClone(data));
                // Initialise
                for (float[] data : dataSet2)
                    filter2.areaAverageUsingAveragesInternal(data.clone(), width, height, boxSize - 0.05);
                long time = System.nanoTime();
                for (float[] data : dataSet2)
                    filter2.areaAverageUsingAveragesInternal(data, width, height, boxSize - 0.05);
                time = System.nanoTime() - time;
                long fastTime = fastTimes.get(index++);
                slowTotal += time;
                fastTotal += fastTime;
                boxSlowTotal += time;
                boxFastTotal += fastTime;
                if (debug)
                    System.out.printf("float areaAverageInternal [%dx%d] @ %d : %d => rollingBlockAverageNxNInternal %d = %.2fx\n", width, height, boxSize, time, fastTime, speedUpFactor(time, fastTime));
                //Assert.assertTrue(String.format("Not faster: [%dx%d] @ %d : %d > %d", width, height, boxSize,
                //    blockTime, time), blockTime < time);
            }
        //if (debug)
        System.out.printf("float areaAverageInternal %d : %d => rollingBlockAverageNxNInternal %d = %.2fx\n", boxSize, boxSlowTotal, boxFastTotal, speedUpFactor(boxSlowTotal, boxFastTotal));
        // Assert.assertTrue(String.format("Not faster: Block %d : %d > %d", boxSize, boxFastTotal, boxSlowTotal),
        //    boxFastTotal < boxSlowTotal);
    }
    System.out.printf("float areaAverageInternal %d => rollingBlockAverageNxNInternal %d = %.2fx\n", slowTotal, fastTotal, speedUpFactor(slowTotal, fastTotal));
    if (TestSettings.ASSERT_SPEED_TESTS)
        Assert.assertTrue(String.format("Not faster: %d > %d", fastTotal, slowTotal), fastTotal < slowTotal);
}
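The speedUpFactor helper used in the printouts above is not part of this snippet. A minimal sketch of what it presumably computes, assuming it reports the slow-to-fast timing ratio (values above 1 mean the rolling-block filter won), is:

    // Hypothetical reconstruction of the test helper; the project's actual
    // implementation may differ in detail.
    private static double speedUpFactor(long slowTotal, long fastTotal) {
        // Ratio of the reference (slow) time to the candidate (fast) time
        return (double) slowTotal / fastTotal;
    }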
Use of gdsc.smlm.filters.AverageFilter in project GDSC-SMLM by aherbert.
The class GaussianFit, method run.
/*
* (non-Javadoc)
*
* @see ij.plugin.filter.PlugInFilter#run(ij.process.ImageProcessor)
*/
public void run(ImageProcessor ip) {
    Rectangle bounds = ip.getRoi();
    // Crop to the ROI
    float[] data = ImageConverter.getData(ip);
    int width = bounds.width;
    int height = bounds.height;
    if (getSmooth() > 0) {
        // No need for a copy since we are using a snapshot buffer
        AverageFilter filter = new AverageFilter();
        filter.stripedBlockAverage(data, width, height, (float) getSmooth());
    }
    maxIndices = getMaxima(data, width, height);
    if (topN > 0 && maxIndices.length > topN) {
        maxIndices = Sort.sort(maxIndices, data);
        maxIndices = Arrays.copyOf(maxIndices, topN);
    }
    // Show an overlay of the indices
    if (maxIndices.length > 0) {
        int nMaxima = maxIndices.length;
        int[] xpoints = new int[nMaxima];
        int[] ypoints = new int[nMaxima];
        int n = 0;
        for (int index : maxIndices) {
            xpoints[n] = bounds.x + index % width;
            ypoints[n] = bounds.y + index / width;
            n++;
        }
        setOverlay(nMaxima, xpoints, ypoints);
    } else
        imp.setOverlay(null);
    for (int index = data.length; index-- > 0; ) {
        ip.setf(bounds.x + index % width, bounds.y + index / width, data[index]);
    }
}
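The smoothing step above can also be exercised outside of an ImageJ PlugInFilter. Below is a minimal, self-contained sketch using only the stripedBlockAverage(float[], int, int, float) call shown in run(); the class name, pixel values and box size of 1 are illustrative, not taken from the project:

    import gdsc.smlm.filters.AverageFilter;

    public class SmoothingExample {
        public static void main(String[] args) {
            // 4x4 test image stored row-major, as the filter expects
            float[] data = new float[] {
                1, 2, 3, 4,
                5, 6, 7, 8,
                9, 8, 7, 6,
                5, 4, 3, 2
            };
            int width = 4, height = 4;

            AverageFilter filter = new AverageFilter();
            // In-place block average, same call as GaussianFit.run uses
            filter.stripedBlockAverage(data, width, height, 1f);

            // Print the smoothed image row by row
            for (int y = 0; y < height; y++) {
                StringBuilder sb = new StringBuilder();
                for (int x = 0; x < width; x++)
                    sb.append(String.format("%7.2f", data[y * width + x]));
                System.out.println(sb);
            }
        }
    }

Note that the filter modifies the array in place, which is why run() can work directly on the snapshot buffer while runFinal() below takes a copy first.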
Use of gdsc.smlm.filters.AverageFilter in project GDSC-SMLM by aherbert.
The class GaussianFit, method runFinal.
/**
* Perform fitting using the chosen maxima. Update the overlay if successful.
*
* @param ip
* The input image
*/
private void runFinal(ImageProcessor ip) {
    ip.reset();
    Rectangle bounds = ip.getRoi();
    // Crop to the ROI
    float[] data = ImageConverter.getData(ip);
    int width = bounds.width;
    int height = bounds.height;
    // Sort the maxima
    float[] smoothData = data;
    if (getSmooth() > 0) {
        // Smoothing destructively modifies the data so create a copy
        smoothData = Arrays.copyOf(data, width * height);
        AverageFilter filter = new AverageFilter();
        //filter.blockAverage(smoothData, width, height, smooth);
        if (smooth <= border)
            filter.stripedBlockAverageInternal(smoothData, width, height, (float) smooth);
        else
            filter.stripedBlockAverage(smoothData, width, height, (float) smooth);
    }
    Sort.sort(maxIndices, smoothData);
    // Show the candidate peaks
    if (maxIndices.length > 0) {
        String message = String.format("Identified %d peaks", maxIndices.length);
        if (isLogProgress()) {
            IJ.log(message);
            for (int index : maxIndices) {
                IJ.log(String.format(" %.2f @ [%d,%d]", data[index], bounds.x + index % width, bounds.y + index / width));
            }
        }
        // Check whether to run if the number of peaks is large
        if (maxIndices.length > 10) {
            GenericDialog gd = new GenericDialog("Warning");
            gd.addMessage(message + "\nDo you want to fit?");
            gd.showDialog();
            if (gd.wasCanceled())
                return;
        }
    } else {
        IJ.log("No maxima identified");
        return;
    }
    results = new IJTablePeakResults(showDeviations, imp.getTitle() + " [" + imp.getCurrentSlice() + "]");
    results.begin();
    // Perform the Gaussian fit
    long ellapsed = 0;
    if (!singleFit) {
        if (isLogProgress())
            IJ.log("Combined fit");
        // Estimate height from smoothed data
        double[] estimatedHeights = new double[maxIndices.length];
        for (int i = 0; i < estimatedHeights.length; i++)
            estimatedHeights[i] = smoothData[maxIndices[i]];
        FitConfiguration config = new FitConfiguration();
        setupPeakFiltering(config);
        long time = System.nanoTime();
        double[] params = fitMultiple(data, width, height, maxIndices, estimatedHeights);
        ellapsed = System.nanoTime() - time;
        if (params != null) {
            // Copy all the valid parameters into a new array
            double[] validParams = new double[params.length];
            int c = 0;
            int validPeaks = 0;
            validParams[c++] = params[0];
            double[] initialParams = convertParameters(fitResult.getInitialParameters());
            double[] paramsDev = convertParameters(fitResult.getParameterStdDev());
            Rectangle regionBounds = new Rectangle();
            int[] xpoints = new int[maxIndices.length];
            int[] ypoints = new int[maxIndices.length];
            int nMaxima = 0;
            for (int i = 1, n = 0; i < params.length; i += 6, n++) {
                int y = maxIndices[n] / width;
                int x = maxIndices[n] % width;
                // Check the peak is a good fit
                if (filterResults && config.validatePeak(n, initialParams, params) != FitStatus.OK)
                    continue;
                if (showFit) {
                    // Copy the valid parameters
                    validPeaks++;
                    for (int ii = i, j = 0; j < 6; ii++, j++)
                        validParams[c++] = params[ii];
                }
                double[] peakParams = extractParams(params, i);
                double[] peakParamsDev = extractParams(paramsDev, i);
                addResult(bounds, regionBounds, data, peakParams, peakParamsDev, nMaxima, x, y, data[maxIndices[n]]);
                // Add fit result to the overlay - Coords are updated with the region offsets in addResult
                double xf = peakParams[3];
                double yf = peakParams[4];
                xpoints[nMaxima] = (int) (xf + 0.5);
                ypoints[nMaxima] = (int) (yf + 0.5);
                nMaxima++;
            }
            setOverlay(nMaxima, xpoints, ypoints);
            // Draw the fit
            if (showFit && validPeaks != 0) {
                double[] pixels = new double[data.length];
                EllipticalGaussian2DFunction f = new EllipticalGaussian2DFunction(validPeaks, width, height);
                invertParameters(validParams);
                f.initialise(validParams);
                for (int x = 0; x < pixels.length; x++)
                    pixels[x] = f.eval(x);
                FloatProcessor fp = new FloatProcessor(width, height, pixels);
                // Insert into a full size image
                FloatProcessor fp2 = new FloatProcessor(ip.getWidth(), ip.getHeight());
                fp2.insert(fp, bounds.x, bounds.y);
                Utils.display(TITLE, fp2);
            }
        } else {
            if (isLogProgress()) {
                IJ.log("Failed to fit " + Utils.pleural(maxIndices.length, "peak") + getReason(fitResult));
            }
            imp.setOverlay(null);
        }
    } else {
        if (isLogProgress())
            IJ.log("Individual fit");
        int nMaxima = 0;
        int[] xpoints = new int[maxIndices.length];
        int[] ypoints = new int[maxIndices.length];
        // Extract each peak and fit individually
        ImageExtractor ie = new ImageExtractor(data, width, height);
        float[] region = null;
        Gaussian2DFitter gf = createGaussianFitter(filterResults);
        for (int n = 0; n < maxIndices.length; n++) {
            int y = maxIndices[n] / width;
            int x = maxIndices[n] % width;
            long time = System.nanoTime();
            Rectangle regionBounds = ie.getBoxRegionBounds(x, y, singleRegionSize);
            region = ie.crop(regionBounds, region);
            int newIndex = (y - regionBounds.y) * regionBounds.width + x - regionBounds.x;
            if (isLogProgress()) {
                IJ.log("Fitting peak " + (n + 1));
            }
            double[] peakParams = fitSingle(gf, region, regionBounds.width, regionBounds.height, newIndex, smoothData[maxIndices[n]]);
            ellapsed += System.nanoTime() - time;
            // Output fit result
            if (peakParams != null) {
                double[] peakParamsDev = null;
                if (showDeviations) {
                    peakParamsDev = convertParameters(fitResult.getParameterStdDev());
                }
                addResult(bounds, regionBounds, data, peakParams, peakParamsDev, n, x, y, data[maxIndices[n]]);
                // Add fit result to the overlay - Coords are updated with the region offsets in addResult
                double xf = peakParams[3];
                double yf = peakParams[4];
                xpoints[nMaxima] = (int) (xf + 0.5);
                ypoints[nMaxima] = (int) (yf + 0.5);
                nMaxima++;
            } else {
                if (isLogProgress()) {
                    IJ.log("Failed to fit peak " + (n + 1) + getReason(fitResult));
                }
            }
        }
        // Update the overlay
        if (nMaxima > 0)
            setOverlay(nMaxima, xpoints, ypoints);
        else
            imp.setOverlay(null);
    }
    results.end();
    if (isLogProgress())
        IJ.log("Time = " + (ellapsed / 1000000.0) + "ms");
}
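The extractParams helper called above is not shown in this snippet. Given that the combined fit packs a shared background in params[0] followed by six values per peak, and that peakParams[3] and peakParams[4] are later read as the x and y positions, one plausible reconstruction (an assumption, not the project's actual implementation) is:

    // Assumed layout of the returned array: [background, signal, angle, x, y, sx, sy].
    private static double[] extractParams(double[] params, int i) {
        // paramsDev may be null when no deviations are available
        if (params == null)
            return null;
        double[] p = new double[7];
        p[0] = params[0]; // shared background
        System.arraycopy(params, i, p, 1, 6); // the six per-peak parameters
        return p;
    }

Under that layout, p[3] and p[4] are the peak centre coordinates used to build the overlay points.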
Use of gdsc.smlm.filters.AverageFilter in project GDSC-SMLM by aherbert.
The class FilterTest, method floatRollingBlockSumNxNInternalIsFasterThanRollingBlockAverageNxNInternal.
@Test
public void floatRollingBlockSumNxNInternalIsFasterThanRollingBlockAverageNxNInternal() {
    org.junit.Assume.assumeTrue(TestSettings.RUN_SPEED_TESTS);
    rand = new gdsc.core.utils.Random(-300519);
    SumFilter filter = new SumFilter();
    AverageFilter filter2 = new AverageFilter();
    int iter = 50;
    ArrayList<float[]> dataSet = new ArrayList<float[]>(iter);
    for (int i = iter; i-- > 0; ) {
        dataSet.add(floatCreateData(primes[0], primes[0]));
    }
    ArrayList<Long> fastTimes = new ArrayList<Long>();
    // Initialise
    filter.rollingBlockSumNxNInternal(floatClone(dataSet.get(0)), primes[0], primes[0], boxSizes[0]);
    filter2.rollingBlockAverageNxNInternal(floatClone(dataSet.get(0)), primes[0], primes[0], boxSizes[0]);
    for (int boxSize : boxSizes)
        for (int width : primes)
            for (int height : primes) {
                ArrayList<float[]> dataSet2 = new ArrayList<float[]>(iter);
                for (float[] data : dataSet)
                    dataSet2.add(floatClone(data));
                long time = System.nanoTime();
                for (float[] data : dataSet2)
                    filter.rollingBlockSumNxNInternal(data, width, height, boxSize);
                time = System.nanoTime() - time;
                fastTimes.add(time);
            }
    long slowTotal = 0, fastTotal = 0;
    int index = 0;
    for (int boxSize : boxSizes) {
        long boxSlowTotal = 0, boxFastTotal = 0;
        for (int width : primes)
            for (int height : primes) {
                ArrayList<float[]> dataSet2 = new ArrayList<float[]>(iter);
                for (float[] data : dataSet)
                    dataSet2.add(floatClone(data));
                long time = System.nanoTime();
                for (float[] data : dataSet2)
                    filter2.rollingBlockAverageNxNInternal(data, width, height, boxSize);
                time = System.nanoTime() - time;
                long fastTime = fastTimes.get(index++);
                slowTotal += time;
                fastTotal += fastTime;
                boxSlowTotal += time;
                boxFastTotal += fastTime;
                if (debug)
                    System.out.printf("float rollingBlockAverageNxNInternal [%dx%d] @ %d : %d => rollingBlockSumNxNInternal %d = %.2fx\n", width, height, boxSize, time, fastTime, speedUpFactor(time, fastTime));
                //Assert.assertTrue(String.format("Not faster: [%dx%d] @ %d : %d > %d", width, height, boxSize,
                //    blockTime, time), blockTime < time);
            }
        //if (debug)
        System.out.printf("float rollingBlockAverageNxNInternal %d : %d => rollingBlockSumNxNInternal %d = %.2fx\n", boxSize, boxSlowTotal, boxFastTotal, speedUpFactor(boxSlowTotal, boxFastTotal));
        // Assert.assertTrue(String.format("Not faster: Block %d : %d > %d", boxSize, boxFastTotal, boxSlowTotal),
        //    boxFastTotal < boxSlowTotal);
    }
    System.out.printf("float rollingBlockAverageNxNInternal %d => rollingBlockSumNxNInternal %d = %.2fx\n", slowTotal, fastTotal, speedUpFactor(slowTotal, fastTotal));
    if (TestSettings.ASSERT_SPEED_TESTS)
        Assert.assertTrue(String.format("Not faster: %d > %d", fastTotal, slowTotal), fastTotal < slowTotal);
}
Use of gdsc.smlm.filters.AverageFilter in project GDSC-SMLM by aherbert.
The class FilterTest, method floatRollingBlockAverageNxNInternalIsFasterThanGaussianNxNInternal.
@Test
public void floatRollingBlockAverageNxNInternalIsFasterThanGaussianNxNInternal() {
    org.junit.Assume.assumeTrue(TestSettings.RUN_SPEED_TESTS);
    rand = new gdsc.core.utils.Random(-300519);
    AverageFilter filter1 = new AverageFilter();
    GaussianFilter filter2 = new GaussianFilter();
    int iter = 10;
    ArrayList<float[]> dataSet = new ArrayList<float[]>(iter);
    for (int i = iter; i-- > 0; ) {
        dataSet.add(floatCreateData(primes[0], primes[0]));
    }
    ArrayList<Long> fastTimes = new ArrayList<Long>();
    // Initialise
    filter1.rollingBlockAverageNxNInternal(floatClone(dataSet.get(0)), primes[0], primes[0], boxSizes[0]);
    filter2.convolveInternal(floatClone(dataSet.get(0)), primes[0], primes[0], boxSizes[0] / 3.0);
    for (int boxSize : boxSizes)
        for (int width : primes)
            for (int height : primes) {
                ArrayList<float[]> dataSet2 = new ArrayList<float[]>(iter);
                for (float[] data : dataSet)
                    dataSet2.add(floatClone(data));
                long time = System.nanoTime();
                for (float[] data : dataSet2)
                    filter1.rollingBlockAverageNxNInternal(data, width, height, boxSize);
                time = System.nanoTime() - time;
                fastTimes.add(time);
            }
    long slowTotal = 0, fastTotal = 0;
    int index = 0;
    for (int boxSize : boxSizes) {
        long boxSlowTotal = 0, boxFastTotal = 0;
        for (int width : primes)
            for (int height : primes) {
                ArrayList<float[]> dataSet2 = new ArrayList<float[]>(iter);
                for (float[] data : dataSet)
                    dataSet2.add(floatClone(data));
                long time = System.nanoTime();
                for (float[] data : dataSet2)
                    filter2.convolveInternal(data, width, height, boxSize / 3.0);
                time = System.nanoTime() - time;
                long fastTime = fastTimes.get(index++);
                slowTotal += time;
                fastTotal += fastTime;
                boxSlowTotal += time;
                boxFastTotal += fastTime;
                if (debug)
                    System.out.printf("float convolveInternal [%dx%d] @ %d : %d => rollingBlockAverageNxNInternal %d = %.2fx\n", width, height, boxSize, time, fastTime, speedUpFactor(time, fastTime));
                //Assert.assertTrue(String.format("Not faster: [%dx%d] @ %d : %d > %d", width, height, boxSize,
                //    blockTime, time), blockTime < time);
            }
        //if (debug)
        System.out.printf("float convolveInternal %d : %d => rollingBlockAverageNxNInternal %d = %.2fx\n", boxSize, boxSlowTotal, boxFastTotal, speedUpFactor(boxSlowTotal, boxFastTotal));
        // Assert.assertTrue(String.format("Not faster: Block %d : %d > %d", boxSize, boxFastTotal, boxSlowTotal),
        //    boxFastTotal < boxSlowTotal);
    }
    System.out.printf("float convolveInternal %d => rollingBlockAverageNxNInternal %d = %.2fx\n", slowTotal, fastTotal, speedUpFactor(slowTotal, fastTotal));
    if (TestSettings.ASSERT_SPEED_TESTS)
        Assert.assertTrue(String.format("Not faster: %d > %d", fastTotal, slowTotal), fastTotal < slowTotal);
}
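The floatClone and floatCreateData helpers shared by these speed tests are also not part of the snippets. A minimal sketch, assuming floatCreateData simply fills a width*height array with random values (the real FilterTest versions use the seeded gdsc.core.utils.Random field, whose exact API is not shown here, so a java.util.Random stands in for it):

    // Assumed test helpers; the project's actual implementations may differ.
    private final java.util.Random rng = new java.util.Random(-300519);

    private static float[] floatClone(float[] data) {
        // Independent copy so each timing pass filters fresh, unfiltered data
        return data.clone();
    }

    private float[] floatCreateData(int width, int height) {
        float[] data = new float[width * height];
        for (int i = 0; i < data.length; i++)
            data[i] = rng.nextFloat();
        return data;
    }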